From 920a49cfa055733d575282973e23558c33087a4a Mon Sep 17 00:00:00 2001 From: RHE Date: Fri, 24 Nov 2017 13:54:26 +0100 Subject: remove keystone-moon Change-Id: I80d7c9b669f19d5f6607e162de8e0e55c2f80fdd Signed-off-by: RHE --- keystone-moon/keystone/__init__.py | 0 .../keystone/assignment/V8_backends/__init__.py | 0 .../keystone/assignment/V8_backends/sql.py | 452 -- .../assignment/V8_role_backends/__init__.py | 0 .../keystone/assignment/V8_role_backends/sql.py | 80 - keystone-moon/keystone/assignment/__init__.py | 16 - .../keystone/assignment/backends/__init__.py | 0 keystone-moon/keystone/assignment/backends/ldap.py | 545 -- keystone-moon/keystone/assignment/backends/sql.py | 319 - keystone-moon/keystone/assignment/controllers.py | 972 --- keystone-moon/keystone/assignment/core.py | 1790 ----- .../keystone/assignment/role_backends/__init__.py | 0 .../keystone/assignment/role_backends/ldap.py | 125 - .../keystone/assignment/role_backends/sql.py | 202 - keystone-moon/keystone/assignment/routers.py | 282 - keystone-moon/keystone/assignment/schema.py | 32 - keystone-moon/keystone/auth/__init__.py | 16 - keystone-moon/keystone/auth/controllers.py | 675 -- keystone-moon/keystone/auth/core.py | 94 - keystone-moon/keystone/auth/plugins/__init__.py | 15 - keystone-moon/keystone/auth/plugins/core.py | 211 - keystone-moon/keystone/auth/plugins/external.py | 101 - keystone-moon/keystone/auth/plugins/mapped.py | 258 - keystone-moon/keystone/auth/plugins/oauth1.py | 64 - keystone-moon/keystone/auth/plugins/password.py | 42 - keystone-moon/keystone/auth/plugins/saml2.py | 35 - keystone-moon/keystone/auth/plugins/token.py | 97 - keystone-moon/keystone/auth/plugins/totp.py | 99 - keystone-moon/keystone/auth/routers.py | 57 - keystone-moon/keystone/backends.py | 66 - keystone-moon/keystone/catalog/__init__.py | 16 - .../keystone/catalog/backends/__init__.py | 0 keystone-moon/keystone/catalog/backends/kvs.py | 154 - keystone-moon/keystone/catalog/backends/sql.py | 572 -- 
.../keystone/catalog/backends/templated.py | 298 - keystone-moon/keystone/catalog/controllers.py | 615 -- keystone-moon/keystone/catalog/core.py | 894 --- keystone-moon/keystone/catalog/routers.py | 182 - keystone-moon/keystone/catalog/schema.py | 119 - keystone-moon/keystone/clean.py | 87 - keystone-moon/keystone/cli.py | 596 -- keystone-moon/keystone/cmd/__init__.py | 0 keystone-moon/keystone/cmd/all.py | 39 - keystone-moon/keystone/cmd/cli.py | 992 --- keystone-moon/keystone/cmd/manage.py | 47 - keystone-moon/keystone/common/__init__.py | 0 keystone-moon/keystone/common/authorization.py | 111 - keystone-moon/keystone/common/base64utils.py | 401 -- keystone-moon/keystone/common/cache/__init__.py | 15 - .../keystone/common/cache/_context_cache.py | 129 - .../keystone/common/cache/_memcache_pool.py | 244 - .../keystone/common/cache/backends/__init__.py | 0 .../common/cache/backends/memcache_pool.py | 28 - .../keystone/common/cache/backends/mongo.py | 25 - .../keystone/common/cache/backends/noop.py | 56 - keystone-moon/keystone/common/cache/core.py | 124 - keystone-moon/keystone/common/clean.py | 87 - keystone-moon/keystone/common/config.py | 1259 ---- keystone-moon/keystone/common/controller.py | 835 --- keystone-moon/keystone/common/dependency.py | 230 - keystone-moon/keystone/common/driver_hints.py | 115 - .../keystone/common/environment/__init__.py | 102 - .../keystone/common/environment/eventlet_server.py | 212 - keystone-moon/keystone/common/extension.py | 44 - keystone-moon/keystone/common/json_home.py | 86 - keystone-moon/keystone/common/kvs/__init__.py | 32 - .../keystone/common/kvs/backends/__init__.py | 0 .../keystone/common/kvs/backends/inmemdb.py | 68 - .../keystone/common/kvs/backends/memcached.py | 195 - keystone-moon/keystone/common/kvs/core.py | 450 -- keystone-moon/keystone/common/kvs/legacy.py | 61 - keystone-moon/keystone/common/ldap/__init__.py | 15 - keystone-moon/keystone/common/ldap/core.py | 1955 ------ 
keystone-moon/keystone/common/manager.py | 220 - keystone-moon/keystone/common/models.py | 196 - keystone-moon/keystone/common/openssl.py | 337 - keystone-moon/keystone/common/pemutils.py | 509 -- keystone-moon/keystone/common/router.py | 82 - keystone-moon/keystone/common/sql/__init__.py | 15 - keystone-moon/keystone/common/sql/core.py | 434 -- .../keystone/common/sql/migrate_repo/README | 4 - .../keystone/common/sql/migrate_repo/__init__.py | 0 .../keystone/common/sql/migrate_repo/manage.py | 5 - .../keystone/common/sql/migrate_repo/migrate.cfg | 25 - .../common/sql/migrate_repo/versions/067_kilo.py | 317 - .../sql/migrate_repo/versions/068_placeholder.py | 18 - .../sql/migrate_repo/versions/069_placeholder.py | 18 - .../sql/migrate_repo/versions/070_placeholder.py | 18 - .../sql/migrate_repo/versions/071_placeholder.py | 18 - .../sql/migrate_repo/versions/072_placeholder.py | 18 - .../versions/073_insert_assignment_inherited_pk.py | 113 - .../versions/074_add_is_domain_project.py | 27 - .../versions/075_confirm_config_registration.py | 29 - .../sql/migrate_repo/versions/076_placeholder.py | 18 - .../sql/migrate_repo/versions/077_placeholder.py | 18 - .../sql/migrate_repo/versions/078_placeholder.py | 18 - .../sql/migrate_repo/versions/079_placeholder.py | 18 - .../sql/migrate_repo/versions/080_placeholder.py | 18 - .../versions/081_add_endpoint_policy_table.py | 54 - .../versions/082_add_federation_tables.py | 97 - .../migrate_repo/versions/083_add_oauth1_tables.py | 75 - .../migrate_repo/versions/084_add_revoke_tables.py | 55 - .../versions/085_add_endpoint_filtering_table.py | 70 - .../086_add_duplicate_constraint_trusts.py | 26 - .../sql/migrate_repo/versions/087_implied_roles.py | 43 - .../versions/088_domain_specific_roles.py | 60 - .../versions/089_add_root_of_all_domains.py | 76 - .../090_add_local_user_and_password_tables.py | 42 - ...grate_data_to_local_user_and_password_tables.py | 66 - .../092_make_implied_roles_fks_cascaded.py | 46 - 
.../versions/093_migrate_domains_to_projects.py | 125 - .../versions/094_add_federated_user_table.py | 43 - ...5_add_integer_pkey_to_revocation_event_table.py | 62 - .../versions/096_drop_role_name_constraint.py | 50 - .../common/sql/migrate_repo/versions/__init__.py | 0 .../keystone/common/sql/migration_helpers.py | 245 - keystone-moon/keystone/common/tokenless_auth.py | 192 - keystone-moon/keystone/common/utils.py | 598 -- .../keystone/common/validation/__init__.py | 96 - .../keystone/common/validation/parameter_types.py | 70 - .../keystone/common/validation/validators.py | 58 - keystone-moon/keystone/common/wsgi.py | 834 --- keystone-moon/keystone/config.py | 92 - keystone-moon/keystone/contrib/__init__.py | 0 .../keystone/contrib/admin_crud/__init__.py | 15 - keystone-moon/keystone/contrib/admin_crud/core.py | 32 - keystone-moon/keystone/contrib/ec2/__init__.py | 18 - keystone-moon/keystone/contrib/ec2/controllers.py | 435 -- keystone-moon/keystone/contrib/ec2/core.py | 34 - keystone-moon/keystone/contrib/ec2/routers.py | 91 - .../keystone/contrib/endpoint_filter/__init__.py | 0 .../contrib/endpoint_filter/backends/__init__.py | 0 .../endpoint_filter/backends/catalog_sql.py | 77 - .../contrib/endpoint_filter/backends/sql.py | 30 - .../contrib/endpoint_filter/controllers.py | 300 - .../keystone/contrib/endpoint_filter/core.py | 296 - .../endpoint_filter/migrate_repo/__init__.py | 0 .../endpoint_filter/migrate_repo/migrate.cfg | 25 - .../versions/001_add_endpoint_filtering_table.py | 19 - .../versions/002_add_endpoint_groups.py | 19 - .../migrate_repo/versions/__init__.py | 0 .../keystone/contrib/endpoint_filter/routers.py | 33 - .../keystone/contrib/endpoint_filter/schema.py | 35 - .../keystone/contrib/endpoint_policy/__init__.py | 0 .../contrib/endpoint_policy/backends/__init__.py | 0 .../contrib/endpoint_policy/backends/sql.py | 28 - .../contrib/endpoint_policy/controllers.py | 166 - .../keystone/contrib/endpoint_policy/core.py | 430 -- 
.../endpoint_policy/migrate_repo/__init__.py | 0 .../endpoint_policy/migrate_repo/migrate.cfg | 25 - .../versions/001_add_endpoint_policy_table.py | 19 - .../migrate_repo/versions/__init__.py | 0 .../keystone/contrib/endpoint_policy/routers.py | 28 - keystone-moon/keystone/contrib/example/__init__.py | 0 .../keystone/contrib/example/configuration.rst | 31 - .../keystone/contrib/example/controllers.py | 26 - keystone-moon/keystone/contrib/example/core.py | 97 - .../contrib/example/migrate_repo/__init__.py | 0 .../contrib/example/migrate_repo/migrate.cfg | 25 - .../migrate_repo/versions/001_example_table.py | 32 - .../example/migrate_repo/versions/__init__.py | 0 keystone-moon/keystone/contrib/example/routers.py | 38 - .../keystone/contrib/federation/__init__.py | 0 .../contrib/federation/backends/__init__.py | 0 .../keystone/contrib/federation/backends/sql.py | 29 - .../keystone/contrib/federation/constants.py | 15 - .../keystone/contrib/federation/controllers.py | 520 -- keystone-moon/keystone/contrib/federation/core.py | 355 - keystone-moon/keystone/contrib/federation/idp.py | 609 -- .../contrib/federation/migrate_repo/__init__.py | 0 .../contrib/federation/migrate_repo/migrate.cfg | 25 - .../versions/001_add_identity_provider_table.py | 17 - .../versions/002_add_mapping_tables.py | 17 - .../versions/003_mapping_id_nullable_false.py | 20 - .../versions/004_add_remote_id_column.py | 17 - .../versions/005_add_service_provider_table.py | 17 - .../006_fixup_service_provider_attributes.py | 17 - .../versions/007_add_remote_id_table.py | 17 - .../versions/008_add_relay_state_to_sp.py | 17 - .../federation/migrate_repo/versions/__init__.py | 0 .../keystone/contrib/federation/routers.py | 31 - .../keystone/contrib/federation/schema.py | 79 - keystone-moon/keystone/contrib/federation/utils.py | 776 --- keystone-moon/keystone/contrib/moon/__init__.py | 8 - keystone-moon/keystone/contrib/moon/algorithms.py | 78 - .../keystone/contrib/moon/backends/__init__.py | 97 - 
.../keystone/contrib/moon/backends/flat.py | 116 - .../keystone/contrib/moon/backends/memory.py | 59 - .../keystone/contrib/moon/backends/sql.py | 1105 ---- keystone-moon/keystone/contrib/moon/controllers.py | 917 --- keystone-moon/keystone/contrib/moon/core.py | 2990 --------- keystone-moon/keystone/contrib/moon/exception.py | 422 -- .../keystone/contrib/moon/migrate_repo/__init__.py | 0 .../keystone/contrib/moon/migrate_repo/migrate.cfg | 25 - .../contrib/moon/migrate_repo/versions/001_moon.py | 211 - .../contrib/moon/migrate_repo/versions/002_moon.py | 34 - .../contrib/moon/migrate_repo/versions/003_moon.py | 32 - .../contrib/moon/migrate_repo/versions/__init__.py | 0 keystone-moon/keystone/contrib/moon/routers.py | 507 -- keystone-moon/keystone/contrib/moon/service.py | 57 - keystone-moon/keystone/contrib/moon/wsgi.py | 8 - keystone-moon/keystone/contrib/oauth1/__init__.py | 0 .../keystone/contrib/oauth1/backends/__init__.py | 0 .../keystone/contrib/oauth1/backends/sql.py | 30 - .../keystone/contrib/oauth1/controllers.py | 411 -- keystone-moon/keystone/contrib/oauth1/core.py | 367 -- .../contrib/oauth1/migrate_repo/__init__.py | 0 .../contrib/oauth1/migrate_repo/migrate.cfg | 25 - .../migrate_repo/versions/001_add_oauth_tables.py | 19 - .../versions/002_fix_oauth_tables_fk.py | 19 - .../versions/003_consumer_description_nullalbe.py | 19 - .../versions/004_request_token_roles_nullable.py | 19 - .../migrate_repo/versions/005_consumer_id_index.py | 20 - .../oauth1/migrate_repo/versions/__init__.py | 0 keystone-moon/keystone/contrib/oauth1/routers.py | 33 - keystone-moon/keystone/contrib/oauth1/validator.py | 179 - keystone-moon/keystone/contrib/revoke/__init__.py | 0 .../keystone/contrib/revoke/backends/__init__.py | 0 .../keystone/contrib/revoke/backends/kvs.py | 74 - .../keystone/contrib/revoke/backends/sql.py | 28 - .../keystone/contrib/revoke/controllers.py | 44 - keystone-moon/keystone/contrib/revoke/core.py | 262 - .../contrib/revoke/migrate_repo/__init__.py 
| 0 .../contrib/revoke/migrate_repo/migrate.cfg | 25 - .../migrate_repo/versions/001_revoke_table.py | 17 - .../002_add_audit_id_and_chain_to_revoke_table.py | 17 - .../revoke/migrate_repo/versions/__init__.py | 0 keystone-moon/keystone/contrib/revoke/model.py | 371 -- keystone-moon/keystone/contrib/revoke/routers.py | 31 - keystone-moon/keystone/contrib/s3/__init__.py | 15 - keystone-moon/keystone/contrib/s3/core.py | 125 - .../keystone/contrib/simple_cert/__init__.py | 13 - .../keystone/contrib/simple_cert/controllers.py | 42 - keystone-moon/keystone/contrib/simple_cert/core.py | 32 - .../keystone/contrib/simple_cert/routers.py | 33 - .../keystone/contrib/user_crud/__init__.py | 15 - keystone-moon/keystone/contrib/user_crud/core.py | 32 - keystone-moon/keystone/controllers.py | 218 - keystone-moon/keystone/credential/__init__.py | 16 - .../keystone/credential/backends/__init__.py | 0 keystone-moon/keystone/credential/backends/sql.py | 100 - keystone-moon/keystone/credential/controllers.py | 108 - keystone-moon/keystone/credential/core.py | 149 - keystone-moon/keystone/credential/routers.py | 28 - keystone-moon/keystone/credential/schema.py | 62 - keystone-moon/keystone/endpoint_policy/__init__.py | 13 - .../keystone/endpoint_policy/backends/__init__.py | 0 .../keystone/endpoint_policy/backends/sql.py | 140 - .../keystone/endpoint_policy/controllers.py | 166 - keystone-moon/keystone/endpoint_policy/core.py | 439 -- keystone-moon/keystone/endpoint_policy/routers.py | 85 - keystone-moon/keystone/exception.py | 544 -- .../keystone/federation/V8_backends/__init__.py | 0 .../keystone/federation/V8_backends/sql.py | 389 -- keystone-moon/keystone/federation/__init__.py | 15 - .../keystone/federation/backends/__init__.py | 0 keystone-moon/keystone/federation/backends/sql.py | 393 -- keystone-moon/keystone/federation/constants.py | 15 - keystone-moon/keystone/federation/controllers.py | 519 -- keystone-moon/keystone/federation/core.py | 611 -- 
keystone-moon/keystone/federation/idp.py | 615 -- keystone-moon/keystone/federation/routers.py | 252 - keystone-moon/keystone/federation/schema.py | 115 - keystone-moon/keystone/federation/utils.py | 872 --- keystone-moon/keystone/hacking/__init__.py | 0 keystone-moon/keystone/hacking/checks.py | 446 -- keystone-moon/keystone/i18n.py | 37 - keystone-moon/keystone/identity/__init__.py | 17 - .../keystone/identity/backends/__init__.py | 0 keystone-moon/keystone/identity/backends/ldap.py | 425 -- keystone-moon/keystone/identity/backends/sql.py | 402 -- keystone-moon/keystone/identity/controllers.py | 344 - keystone-moon/keystone/identity/core.py | 1613 ----- keystone-moon/keystone/identity/generator.py | 55 - .../keystone/identity/id_generators/__init__.py | 0 .../keystone/identity/id_generators/sha256.py | 28 - .../keystone/identity/mapping_backends/__init__.py | 0 .../keystone/identity/mapping_backends/mapping.py | 18 - .../keystone/identity/mapping_backends/sql.py | 98 - keystone-moon/keystone/identity/routers.py | 84 - keystone-moon/keystone/identity/schema.py | 67 - .../keystone/identity/shadow_backends/__init__.py | 0 .../keystone/identity/shadow_backends/sql.py | 73 - .../locale/de/LC_MESSAGES/keystone-log-critical.po | 25 - .../locale/de/LC_MESSAGES/keystone-log-info.po | 212 - .../keystone/locale/de/LC_MESSAGES/keystone.po | 1657 ----- .../locale/el/LC_MESSAGES/keystone-log-critical.po | 26 - .../en_AU/LC_MESSAGES/keystone-log-critical.po | 25 - .../locale/en_AU/LC_MESSAGES/keystone-log-error.po | 65 - .../keystone/locale/en_AU/LC_MESSAGES/keystone.po | 348 - .../locale/en_GB/LC_MESSAGES/keystone-log-info.po | 214 - .../locale/es/LC_MESSAGES/keystone-log-critical.po | 25 - .../locale/es/LC_MESSAGES/keystone-log-error.po | 177 - .../keystone/locale/es/LC_MESSAGES/keystone.po | 1653 ----- .../locale/fr/LC_MESSAGES/keystone-log-critical.po | 25 - .../locale/fr/LC_MESSAGES/keystone-log-error.po | 94 - .../locale/fr/LC_MESSAGES/keystone-log-info.po | 97 - 
.../locale/fr/LC_MESSAGES/keystone-log-warning.po | 102 - .../keystone/locale/fr/LC_MESSAGES/keystone.po | 1649 ----- .../locale/hu/LC_MESSAGES/keystone-log-critical.po | 25 - .../locale/it/LC_MESSAGES/keystone-log-critical.po | 25 - .../locale/it/LC_MESSAGES/keystone-log-error.po | 173 - .../locale/it/LC_MESSAGES/keystone-log-info.po | 211 - .../keystone/locale/it/LC_MESSAGES/keystone.po | 1631 ----- .../locale/ja/LC_MESSAGES/keystone-log-critical.po | 25 - .../locale/ja/LC_MESSAGES/keystone-log-error.po | 177 - .../keystone/locale/ja/LC_MESSAGES/keystone.po | 1614 ----- .../keystone/locale/keystone-log-critical.pot | 24 - .../keystone/locale/keystone-log-error.pot | 177 - .../keystone/locale/keystone-log-info.pot | 238 - .../keystone/locale/keystone-log-warning.pot | 315 - keystone-moon/keystone/locale/keystone.pot | 1711 ----- .../ko_KR/LC_MESSAGES/keystone-log-critical.po | 25 - .../locale/ko_KR/LC_MESSAGES/keystone-log-error.po | 165 - .../locale/ko_KR/LC_MESSAGES/keystone-log-info.po | 210 - .../ko_KR/LC_MESSAGES/keystone-log-warning.po | 325 - .../keystone/locale/ko_KR/LC_MESSAGES/keystone.po | 1530 ----- .../pl_PL/LC_MESSAGES/keystone-log-critical.po | 26 - .../pt_BR/LC_MESSAGES/keystone-log-critical.po | 25 - .../locale/pt_BR/LC_MESSAGES/keystone-log-error.po | 57 - .../keystone/locale/pt_BR/LC_MESSAGES/keystone.po | 1620 ----- .../locale/ru/LC_MESSAGES/keystone-log-critical.po | 27 - .../keystone/locale/ru/LC_MESSAGES/keystone.po | 1603 ----- .../tr_TR/LC_MESSAGES/keystone-log-critical.po | 25 - .../locale/tr_TR/LC_MESSAGES/keystone-log-error.po | 148 - .../locale/tr_TR/LC_MESSAGES/keystone-log-info.po | 131 - .../tr_TR/LC_MESSAGES/keystone-log-warning.po | 238 - .../keystone/locale/tr_TR/LC_MESSAGES/keystone.po | 1158 ---- .../locale/vi_VN/LC_MESSAGES/keystone-log-info.po | 211 - .../zh_CN/LC_MESSAGES/keystone-log-critical.po | 25 - .../locale/zh_CN/LC_MESSAGES/keystone-log-error.po | 140 - .../locale/zh_CN/LC_MESSAGES/keystone-log-info.po | 83 - 
.../keystone/locale/zh_CN/LC_MESSAGES/keystone.po | 1454 ----- .../zh_TW/LC_MESSAGES/keystone-log-critical.po | 25 - .../keystone/locale/zh_TW/LC_MESSAGES/keystone.po | 1455 ----- keystone-moon/keystone/middleware/__init__.py | 16 - keystone-moon/keystone/middleware/auth.py | 222 - keystone-moon/keystone/middleware/core.py | 138 - keystone-moon/keystone/middleware/ec2_token.py | 44 - keystone-moon/keystone/models/__init__.py | 0 keystone-moon/keystone/models/revoke_model.py | 373 -- keystone-moon/keystone/models/token_model.py | 339 - keystone-moon/keystone/notifications.py | 741 --- keystone-moon/keystone/oauth1/__init__.py | 15 - keystone-moon/keystone/oauth1/backends/__init__.py | 0 keystone-moon/keystone/oauth1/backends/sql.py | 258 - keystone-moon/keystone/oauth1/controllers.py | 409 -- keystone-moon/keystone/oauth1/core.py | 367 -- keystone-moon/keystone/oauth1/routers.py | 154 - keystone-moon/keystone/oauth1/schema.py | 34 - keystone-moon/keystone/oauth1/validator.py | 177 - keystone-moon/keystone/openstack/__init__.py | 0 keystone-moon/keystone/openstack/common/README | 13 - .../keystone/openstack/common/__init__.py | 0 keystone-moon/keystone/openstack/common/_i18n.py | 45 - .../keystone/openstack/common/eventlet_backdoor.py | 151 - .../keystone/openstack/common/fileutils.py | 149 - .../keystone/openstack/common/loopingcall.py | 147 - keystone-moon/keystone/openstack/common/service.py | 495 -- keystone-moon/keystone/openstack/common/systemd.py | 105 - .../keystone/openstack/common/threadgroup.py | 149 - .../keystone/openstack/common/versionutils.py | 262 - keystone-moon/keystone/policy/__init__.py | 16 - keystone-moon/keystone/policy/backends/__init__.py | 0 keystone-moon/keystone/policy/backends/rules.py | 92 - keystone-moon/keystone/policy/backends/sql.py | 71 - keystone-moon/keystone/policy/controllers.py | 56 - keystone-moon/keystone/policy/core.py | 141 - keystone-moon/keystone/policy/routers.py | 24 - keystone-moon/keystone/policy/schema.py | 36 - 
.../keystone/resource/V8_backends/__init__.py | 0 keystone-moon/keystone/resource/V8_backends/sql.py | 260 - keystone-moon/keystone/resource/__init__.py | 14 - .../keystone/resource/backends/__init__.py | 0 keystone-moon/keystone/resource/backends/ldap.py | 217 - keystone-moon/keystone/resource/backends/sql.py | 267 - .../keystone/resource/config_backends/__init__.py | 0 .../keystone/resource/config_backends/sql.py | 152 - keystone-moon/keystone/resource/controllers.py | 334 - keystone-moon/keystone/resource/core.py | 2161 ------ keystone-moon/keystone/resource/routers.py | 125 - keystone-moon/keystone/resource/schema.py | 74 - keystone-moon/keystone/revoke/__init__.py | 13 - keystone-moon/keystone/revoke/backends/__init__.py | 0 keystone-moon/keystone/revoke/backends/sql.py | 100 - keystone-moon/keystone/revoke/controllers.py | 44 - keystone-moon/keystone/revoke/core.py | 261 - keystone-moon/keystone/revoke/model.py | 13 - keystone-moon/keystone/revoke/routers.py | 29 - keystone-moon/keystone/routers.py | 80 - keystone-moon/keystone/server/__init__.py | 0 keystone-moon/keystone/server/backends.py | 74 - keystone-moon/keystone/server/common.py | 53 - keystone-moon/keystone/server/eventlet.py | 156 - keystone-moon/keystone/server/wsgi.py | 62 - keystone-moon/keystone/service.py | 61 - keystone-moon/keystone/tests/__init__.py | 0 keystone-moon/keystone/tests/common/__init__.py | 0 keystone-moon/keystone/tests/common/auth.py | 109 - .../keystone/tests/functional/__init__.py | 0 keystone-moon/keystone/tests/functional/core.py | 85 - .../keystone/tests/functional/shared/__init__.py | 0 .../tests/functional/shared/test_running.py | 58 - keystone-moon/keystone/tests/hacking/__init__.py | 0 keystone-moon/keystone/tests/hacking/checks.py | 445 -- keystone-moon/keystone/tests/moon/__init__.py | 4 - .../keystone/tests/moon/backends/__init__.py | 1 - .../tests/moon/backends/test_sql_backend.py | 43 - keystone-moon/keystone/tests/moon/func/__init__.py | 4 - 
.../tests/moon/func/test_func_api_authz.py | 129 - .../func/test_func_api_intra_extension_admin.py | 1011 --- .../keystone/tests/moon/func/test_func_api_log.py | 148 - .../tests/moon/func/test_func_api_tenant.py | 154 - .../tests/moon/func/test_func_moon_auth.py | 48 - .../keystone/tests/moon/scenario/test_nova_a.sh | 33 - .../keystone/tests/moon/scenario/test_nova_b.sh | 39 - .../keystone/tests/moon/scenario/test_nova_c.sh | 37 - keystone-moon/keystone/tests/moon/unit/__init__.py | 67 - .../moon/unit/test_unit_core_configuration.py | 72 - .../unit/test_unit_core_intra_extension_admin.py | 2107 ------ .../unit/test_unit_core_intra_extension_authz.py | 2322 ------- .../keystone/tests/moon/unit/test_unit_core_log.py | 145 - .../tests/moon/unit/test_unit_core_tenant.py | 191 - keystone-moon/keystone/tests/unit/__init__.py | 42 - .../keystone/tests/unit/assignment/__init__.py | 0 .../unit/assignment/role_backends/__init__.py | 0 .../unit/assignment/role_backends/test_sql.py | 112 - .../tests/unit/assignment/test_backends.py | 3755 ----------- .../keystone/tests/unit/assignment/test_core.py | 123 - keystone-moon/keystone/tests/unit/auth/__init__.py | 0 .../keystone/tests/unit/auth/test_controllers.py | 98 - .../keystone/tests/unit/backend/__init__.py | 0 .../keystone/tests/unit/backend/core_ldap.py | 146 - .../keystone/tests/unit/backend/core_sql.py | 53 - .../tests/unit/backend/domain_config/__init__.py | 0 .../tests/unit/backend/domain_config/core.py | 601 -- .../tests/unit/backend/domain_config/test_sql.py | 41 - .../tests/unit/backend/legacy_drivers/__init__.py | 0 .../legacy_drivers/assignment/V8/__init__.py | 0 .../backend/legacy_drivers/assignment/V8/sql.py | 39 - .../backend/legacy_drivers/assignment/__init__.py | 0 .../legacy_drivers/federation/V8/__init__.py | 0 .../backend/legacy_drivers/federation/V8/api_v3.py | 108 - .../backend/legacy_drivers/federation/__init__.py | 0 .../backend/legacy_drivers/resource/V8/__init__.py | 0 
.../unit/backend/legacy_drivers/resource/V8/sql.py | 71 - .../backend/legacy_drivers/resource/__init__.py | 0 .../backend/legacy_drivers/role/V8/__init__.py | 0 .../unit/backend/legacy_drivers/role/V8/sql.py | 30 - .../unit/backend/legacy_drivers/role/__init__.py | 0 .../keystone/tests/unit/backend/role/__init__.py | 0 .../keystone/tests/unit/backend/role/core.py | 130 - .../keystone/tests/unit/backend/role/test_ldap.py | 161 - .../keystone/tests/unit/backend/role/test_sql.py | 40 - .../keystone/tests/unit/catalog/__init__.py | 0 .../keystone/tests/unit/catalog/test_backends.py | 588 -- .../keystone/tests/unit/catalog/test_core.py | 100 - .../keystone/tests/unit/common/__init__.py | 0 .../tests/unit/common/test_authorization.py | 161 - .../keystone/tests/unit/common/test_base64utils.py | 208 - .../tests/unit/common/test_connection_pool.py | 135 - .../keystone/tests/unit/common/test_injection.py | 238 - .../keystone/tests/unit/common/test_json_home.py | 91 - .../keystone/tests/unit/common/test_ldap.py | 584 -- .../keystone/tests/unit/common/test_manager.py | 40 - .../tests/unit/common/test_notifications.py | 1248 ---- .../keystone/tests/unit/common/test_pemutils.py | 337 - .../keystone/tests/unit/common/test_sql_core.py | 52 - .../keystone/tests/unit/common/test_utils.py | 210 - .../tests/unit/config_files/backend_db2.conf | 4 - .../tests/unit/config_files/backend_ldap.conf | 5 - .../tests/unit/config_files/backend_ldap_pool.conf | 41 - .../tests/unit/config_files/backend_ldap_sql.conf | 14 - .../tests/unit/config_files/backend_liveldap.conf | 10 - .../unit/config_files/backend_multi_ldap_sql.conf | 9 - .../tests/unit/config_files/backend_mysql.conf | 4 - .../unit/config_files/backend_pool_liveldap.conf | 32 - .../unit/config_files/backend_postgresql.conf | 4 - .../tests/unit/config_files/backend_sql.conf | 8 - .../unit/config_files/backend_tls_liveldap.conf | 14 - .../tests/unit/config_files/deprecated.conf | 8 - .../unit/config_files/deprecated_override.conf | 15 
- .../keystone.domain1.conf | 5 - .../keystone.Default.conf | 14 - .../keystone.domain1.conf | 12 - .../keystone.domain2.conf | 13 - .../keystone.domain2.conf | 5 - .../keystone.Default.conf | 14 - .../keystone.domain1.conf | 5 - .../tests/unit/config_files/test_auth_plugin.conf | 4 - .../keystone/tests/unit/contrib/__init__.py | 0 .../tests/unit/contrib/federation/__init__.py | 0 .../tests/unit/contrib/federation/test_utils.py | 725 --- keystone-moon/keystone/tests/unit/core.py | 907 --- .../keystone/tests/unit/default_catalog.templates | 14 - .../keystone/tests/unit/default_fixtures.py | 154 - .../keystone/tests/unit/external/README.rst | 9 - .../keystone/tests/unit/external/__init__.py | 0 .../keystone/tests/unit/external/test_timeutils.py | 33 - keystone-moon/keystone/tests/unit/fakeldap.py | 664 -- .../keystone/tests/unit/federation_fixtures.py | 28 - keystone-moon/keystone/tests/unit/filtering.py | 124 - .../keystone/tests/unit/identity/__init__.py | 0 .../keystone/tests/unit/identity/test_backends.py | 1297 ---- .../tests/unit/identity/test_controllers.py | 65 - .../keystone/tests/unit/identity/test_core.py | 176 - .../keystone/tests/unit/identity_mapping.py | 22 - .../keystone/tests/unit/ksfixtures/__init__.py | 17 - .../keystone/tests/unit/ksfixtures/appserver.py | 79 - .../keystone/tests/unit/ksfixtures/auth_plugins.py | 34 - .../keystone/tests/unit/ksfixtures/cache.py | 43 - .../keystone/tests/unit/ksfixtures/database.py | 158 - .../keystone/tests/unit/ksfixtures/hacking.py | 417 -- .../tests/unit/ksfixtures/key_repository.py | 30 - .../keystone/tests/unit/ksfixtures/ldapdb.py | 35 - .../keystone/tests/unit/ksfixtures/policy.py | 33 - .../tests/unit/ksfixtures/temporaryfile.py | 29 - .../keystone/tests/unit/mapping_fixtures.py | 1486 ----- .../keystone/tests/unit/policy/__init__.py | 0 .../keystone/tests/unit/policy/test_backends.py | 86 - .../keystone/tests/unit/resource/__init__.py | 0 .../tests/unit/resource/backends/__init__.py | 0 
.../tests/unit/resource/backends/test_sql.py | 24 - .../unit/resource/config_backends/__init__.py | 0 .../unit/resource/config_backends/test_sql.py | 53 - .../keystone/tests/unit/resource/test_backends.py | 1669 ----- .../tests/unit/resource/test_controllers.py | 57 - .../keystone/tests/unit/resource/test_core.py | 692 -- keystone-moon/keystone/tests/unit/rest.py | 261 - .../tests/unit/saml2/idp_saml2_metadata.xml | 25 - .../tests/unit/saml2/signed_saml2_assertion.xml | 69 - .../keystone/tests/unit/schema/__init__.py | 0 keystone-moon/keystone/tests/unit/schema/v2.py | 161 - .../test_associate_project_endpoint_extension.py | 1391 ---- keystone-moon/keystone/tests/unit/test_auth.py | 1446 ----- .../keystone/tests/unit/test_auth_plugin.py | 190 - keystone-moon/keystone/tests/unit/test_backend.py | 6851 -------------------- .../tests/unit/test_backend_endpoint_policy.py | 249 - .../tests/unit/test_backend_endpoint_policy_sql.py | 38 - .../tests/unit/test_backend_federation_sql.py | 51 - .../tests/unit/test_backend_id_mapping_sql.py | 198 - .../keystone/tests/unit/test_backend_kvs.py | 113 - .../keystone/tests/unit/test_backend_ldap.py | 3287 ---------- .../keystone/tests/unit/test_backend_ldap_pool.py | 243 - .../keystone/tests/unit/test_backend_rules.py | 63 - .../keystone/tests/unit/test_backend_sql.py | 1025 --- .../keystone/tests/unit/test_backend_templated.py | 261 - keystone-moon/keystone/tests/unit/test_cache.py | 324 - .../tests/unit/test_cache_backend_mongo.py | 728 --- keystone-moon/keystone/tests/unit/test_catalog.py | 355 - .../keystone/tests/unit/test_cert_setup.py | 243 - keystone-moon/keystone/tests/unit/test_cli.py | 478 -- keystone-moon/keystone/tests/unit/test_config.py | 82 - .../keystone/tests/unit/test_contrib_ec2.py | 208 - .../keystone/tests/unit/test_contrib_s3_core.py | 103 - .../tests/unit/test_contrib_simple_cert.py | 57 - .../keystone/tests/unit/test_credential.py | 265 - .../keystone/tests/unit/test_driver_hints.py | 60 - 
.../tests/unit/test_ec2_token_middleware.py | 34 - .../keystone/tests/unit/test_entry_points.py | 48 - .../keystone/tests/unit/test_exception.py | 273 - .../keystone/tests/unit/test_hacking_checks.py | 143 - keystone-moon/keystone/tests/unit/test_ipv6.py | 51 - keystone-moon/keystone/tests/unit/test_kvs.py | 586 -- .../keystone/tests/unit/test_ldap_livetest.py | 217 - .../keystone/tests/unit/test_ldap_pool_livetest.py | 202 - .../keystone/tests/unit/test_ldap_tls_livetest.py | 119 - .../keystone/tests/unit/test_middleware.py | 764 --- .../tests/unit/test_no_admin_token_auth.py | 60 - keystone-moon/keystone/tests/unit/test_policy.py | 222 - keystone-moon/keystone/tests/unit/test_revoke.py | 622 -- .../keystone/tests/unit/test_singular_plural.py | 48 - .../keystone/tests/unit/test_sql_livetest.py | 49 - .../tests/unit/test_sql_migrate_extensions.py | 112 - .../keystone/tests/unit/test_sql_upgrade.py | 1195 ---- keystone-moon/keystone/tests/unit/test_ssl.py | 186 - .../keystone/tests/unit/test_token_bind.py | 198 - .../keystone/tests/unit/test_token_provider.py | 845 --- .../keystone/tests/unit/test_url_middleware.py | 54 - keystone-moon/keystone/tests/unit/test_v2.py | 1590 ----- .../keystone/tests/unit/test_v2_controller.py | 186 - .../keystone/tests/unit/test_v2_keystoneclient.py | 1376 ---- .../tests/unit/test_v2_keystoneclient_sql.py | 344 - keystone-moon/keystone/tests/unit/test_v3.py | 1640 ----- .../keystone/tests/unit/test_v3_assignment.py | 2871 -------- keystone-moon/keystone/tests/unit/test_v3_auth.py | 4955 -------------- .../keystone/tests/unit/test_v3_catalog.py | 924 --- .../keystone/tests/unit/test_v3_controller.py | 53 - .../keystone/tests/unit/test_v3_credential.py | 478 -- .../keystone/tests/unit/test_v3_domain_config.py | 459 -- .../keystone/tests/unit/test_v3_endpoint_policy.py | 246 - .../keystone/tests/unit/test_v3_federation.py | 3722 ----------- .../keystone/tests/unit/test_v3_filters.py | 435 -- .../keystone/tests/unit/test_v3_identity.py | 
795 --- .../keystone/tests/unit/test_v3_oauth1.py | 907 --- .../keystone/tests/unit/test_v3_os_revoke.py | 136 - .../keystone/tests/unit/test_v3_policy.py | 63 - .../keystone/tests/unit/test_v3_protection.py | 1777 ----- .../keystone/tests/unit/test_v3_resource.py | 1434 ---- keystone-moon/keystone/tests/unit/test_v3_trust.py | 403 -- .../keystone/tests/unit/test_validation.py | 2115 ------ keystone-moon/keystone/tests/unit/test_versions.py | 1065 --- keystone-moon/keystone/tests/unit/test_wsgi.py | 586 -- .../keystone/tests/unit/tests/__init__.py | 0 .../keystone/tests/unit/tests/test_core.py | 53 - .../keystone/tests/unit/tests/test_utils.py | 37 - .../keystone/tests/unit/token/__init__.py | 0 .../keystone/tests/unit/token/test_backends.py | 551 -- .../tests/unit/token/test_fernet_provider.py | 611 -- .../keystone/tests/unit/token/test_pki_provider.py | 26 - .../tests/unit/token/test_pkiz_provider.py | 26 - .../keystone/tests/unit/token/test_provider.py | 30 - .../tests/unit/token/test_token_data_helper.py | 56 - .../keystone/tests/unit/token/test_token_model.py | 263 - .../tests/unit/token/test_uuid_provider.py | 26 - .../keystone/tests/unit/trust/__init__.py | 0 .../keystone/tests/unit/trust/test_backends.py | 172 - keystone-moon/keystone/tests/unit/utils.py | 85 - keystone-moon/keystone/token/__init__.py | 17 - keystone-moon/keystone/token/_simple_cert.py | 91 - keystone-moon/keystone/token/controllers.py | 529 -- .../keystone/token/persistence/__init__.py | 16 - .../token/persistence/backends/__init__.py | 0 .../keystone/token/persistence/backends/kvs.py | 367 -- .../token/persistence/backends/memcache.py | 39 - .../token/persistence/backends/memcache_pool.py | 34 - .../keystone/token/persistence/backends/sql.py | 286 - keystone-moon/keystone/token/persistence/core.py | 357 - keystone-moon/keystone/token/provider.py | 637 -- keystone-moon/keystone/token/providers/__init__.py | 0 keystone-moon/keystone/token/providers/common.py | 808 --- 
.../keystone/token/providers/fernet/__init__.py | 13 - .../keystone/token/providers/fernet/core.py | 211 - .../token/providers/fernet/token_formatters.py | 677 -- .../keystone/token/providers/fernet/utils.py | 270 - keystone-moon/keystone/token/providers/pki.py | 66 - keystone-moon/keystone/token/providers/pkiz.py | 64 - keystone-moon/keystone/token/providers/uuid.py | 41 - keystone-moon/keystone/token/routers.py | 59 - keystone-moon/keystone/token/utils.py | 27 - keystone-moon/keystone/trust/__init__.py | 16 - keystone-moon/keystone/trust/backends/__init__.py | 0 keystone-moon/keystone/trust/backends/sql.py | 183 - keystone-moon/keystone/trust/controllers.py | 270 - keystone-moon/keystone/trust/core.py | 251 - keystone-moon/keystone/trust/routers.py | 67 - keystone-moon/keystone/trust/schema.py | 49 - keystone-moon/keystone/v2_crud/__init__.py | 0 keystone-moon/keystone/v2_crud/admin_crud.py | 240 - keystone-moon/keystone/v2_crud/user_crud.py | 134 - keystone-moon/keystone/version/__init__.py | 0 keystone-moon/keystone/version/controllers.py | 215 - keystone-moon/keystone/version/routers.py | 80 - keystone-moon/keystone/version/service.py | 161 - 654 files changed, 168392 deletions(-) delete mode 100644 keystone-moon/keystone/__init__.py delete mode 100644 keystone-moon/keystone/assignment/V8_backends/__init__.py delete mode 100644 keystone-moon/keystone/assignment/V8_backends/sql.py delete mode 100644 keystone-moon/keystone/assignment/V8_role_backends/__init__.py delete mode 100644 keystone-moon/keystone/assignment/V8_role_backends/sql.py delete mode 100644 keystone-moon/keystone/assignment/__init__.py delete mode 100644 keystone-moon/keystone/assignment/backends/__init__.py delete mode 100644 keystone-moon/keystone/assignment/backends/ldap.py delete mode 100644 keystone-moon/keystone/assignment/backends/sql.py delete mode 100644 keystone-moon/keystone/assignment/controllers.py delete mode 100644 keystone-moon/keystone/assignment/core.py delete mode 100644 
keystone-moon/keystone/assignment/role_backends/__init__.py delete mode 100644 keystone-moon/keystone/assignment/role_backends/ldap.py delete mode 100644 keystone-moon/keystone/assignment/role_backends/sql.py delete mode 100644 keystone-moon/keystone/assignment/routers.py delete mode 100644 keystone-moon/keystone/assignment/schema.py delete mode 100644 keystone-moon/keystone/auth/__init__.py delete mode 100644 keystone-moon/keystone/auth/controllers.py delete mode 100644 keystone-moon/keystone/auth/core.py delete mode 100644 keystone-moon/keystone/auth/plugins/__init__.py delete mode 100644 keystone-moon/keystone/auth/plugins/core.py delete mode 100644 keystone-moon/keystone/auth/plugins/external.py delete mode 100644 keystone-moon/keystone/auth/plugins/mapped.py delete mode 100644 keystone-moon/keystone/auth/plugins/oauth1.py delete mode 100644 keystone-moon/keystone/auth/plugins/password.py delete mode 100644 keystone-moon/keystone/auth/plugins/saml2.py delete mode 100644 keystone-moon/keystone/auth/plugins/token.py delete mode 100644 keystone-moon/keystone/auth/plugins/totp.py delete mode 100644 keystone-moon/keystone/auth/routers.py delete mode 100644 keystone-moon/keystone/backends.py delete mode 100644 keystone-moon/keystone/catalog/__init__.py delete mode 100644 keystone-moon/keystone/catalog/backends/__init__.py delete mode 100644 keystone-moon/keystone/catalog/backends/kvs.py delete mode 100644 keystone-moon/keystone/catalog/backends/sql.py delete mode 100644 keystone-moon/keystone/catalog/backends/templated.py delete mode 100644 keystone-moon/keystone/catalog/controllers.py delete mode 100644 keystone-moon/keystone/catalog/core.py delete mode 100644 keystone-moon/keystone/catalog/routers.py delete mode 100644 keystone-moon/keystone/catalog/schema.py delete mode 100644 keystone-moon/keystone/clean.py delete mode 100644 keystone-moon/keystone/cli.py delete mode 100644 keystone-moon/keystone/cmd/__init__.py delete mode 100644 
keystone-moon/keystone/cmd/all.py delete mode 100644 keystone-moon/keystone/cmd/cli.py delete mode 100644 keystone-moon/keystone/cmd/manage.py delete mode 100644 keystone-moon/keystone/common/__init__.py delete mode 100644 keystone-moon/keystone/common/authorization.py delete mode 100644 keystone-moon/keystone/common/base64utils.py delete mode 100644 keystone-moon/keystone/common/cache/__init__.py delete mode 100644 keystone-moon/keystone/common/cache/_context_cache.py delete mode 100644 keystone-moon/keystone/common/cache/_memcache_pool.py delete mode 100644 keystone-moon/keystone/common/cache/backends/__init__.py delete mode 100644 keystone-moon/keystone/common/cache/backends/memcache_pool.py delete mode 100644 keystone-moon/keystone/common/cache/backends/mongo.py delete mode 100644 keystone-moon/keystone/common/cache/backends/noop.py delete mode 100644 keystone-moon/keystone/common/cache/core.py delete mode 100644 keystone-moon/keystone/common/clean.py delete mode 100644 keystone-moon/keystone/common/config.py delete mode 100644 keystone-moon/keystone/common/controller.py delete mode 100644 keystone-moon/keystone/common/dependency.py delete mode 100644 keystone-moon/keystone/common/driver_hints.py delete mode 100644 keystone-moon/keystone/common/environment/__init__.py delete mode 100644 keystone-moon/keystone/common/environment/eventlet_server.py delete mode 100644 keystone-moon/keystone/common/extension.py delete mode 100644 keystone-moon/keystone/common/json_home.py delete mode 100644 keystone-moon/keystone/common/kvs/__init__.py delete mode 100644 keystone-moon/keystone/common/kvs/backends/__init__.py delete mode 100644 keystone-moon/keystone/common/kvs/backends/inmemdb.py delete mode 100644 keystone-moon/keystone/common/kvs/backends/memcached.py delete mode 100644 keystone-moon/keystone/common/kvs/core.py delete mode 100644 keystone-moon/keystone/common/kvs/legacy.py delete mode 100644 keystone-moon/keystone/common/ldap/__init__.py delete mode 100644 
keystone-moon/keystone/common/ldap/core.py delete mode 100644 keystone-moon/keystone/common/manager.py delete mode 100644 keystone-moon/keystone/common/models.py delete mode 100644 keystone-moon/keystone/common/openssl.py delete mode 100755 keystone-moon/keystone/common/pemutils.py delete mode 100644 keystone-moon/keystone/common/router.py delete mode 100644 keystone-moon/keystone/common/sql/__init__.py delete mode 100644 keystone-moon/keystone/common/sql/core.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/README delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/__init__.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/manage.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/migrate.cfg delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/067_kilo.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/068_placeholder.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/069_placeholder.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/070_placeholder.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/071_placeholder.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/072_placeholder.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/073_insert_assignment_inherited_pk.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/074_add_is_domain_project.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/075_confirm_config_registration.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/076_placeholder.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/077_placeholder.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/078_placeholder.py delete mode 100644 
keystone-moon/keystone/common/sql/migrate_repo/versions/079_placeholder.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/080_placeholder.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/081_add_endpoint_policy_table.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/082_add_federation_tables.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/083_add_oauth1_tables.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/084_add_revoke_tables.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/085_add_endpoint_filtering_table.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/086_add_duplicate_constraint_trusts.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/087_implied_roles.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/088_domain_specific_roles.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/089_add_root_of_all_domains.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/090_add_local_user_and_password_tables.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/091_migrate_data_to_local_user_and_password_tables.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/092_make_implied_roles_fks_cascaded.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/093_migrate_domains_to_projects.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/094_add_federated_user_table.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/095_add_integer_pkey_to_revocation_event_table.py delete mode 100644 keystone-moon/keystone/common/sql/migrate_repo/versions/096_drop_role_name_constraint.py delete mode 100644 
keystone-moon/keystone/common/sql/migrate_repo/versions/__init__.py delete mode 100644 keystone-moon/keystone/common/sql/migration_helpers.py delete mode 100644 keystone-moon/keystone/common/tokenless_auth.py delete mode 100644 keystone-moon/keystone/common/utils.py delete mode 100644 keystone-moon/keystone/common/validation/__init__.py delete mode 100644 keystone-moon/keystone/common/validation/parameter_types.py delete mode 100644 keystone-moon/keystone/common/validation/validators.py delete mode 100644 keystone-moon/keystone/common/wsgi.py delete mode 100644 keystone-moon/keystone/config.py delete mode 100644 keystone-moon/keystone/contrib/__init__.py delete mode 100644 keystone-moon/keystone/contrib/admin_crud/__init__.py delete mode 100644 keystone-moon/keystone/contrib/admin_crud/core.py delete mode 100644 keystone-moon/keystone/contrib/ec2/__init__.py delete mode 100644 keystone-moon/keystone/contrib/ec2/controllers.py delete mode 100644 keystone-moon/keystone/contrib/ec2/core.py delete mode 100644 keystone-moon/keystone/contrib/ec2/routers.py delete mode 100644 keystone-moon/keystone/contrib/endpoint_filter/__init__.py delete mode 100644 keystone-moon/keystone/contrib/endpoint_filter/backends/__init__.py delete mode 100644 keystone-moon/keystone/contrib/endpoint_filter/backends/catalog_sql.py delete mode 100644 keystone-moon/keystone/contrib/endpoint_filter/backends/sql.py delete mode 100644 keystone-moon/keystone/contrib/endpoint_filter/controllers.py delete mode 100644 keystone-moon/keystone/contrib/endpoint_filter/core.py delete mode 100644 keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/__init__.py delete mode 100644 keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/migrate.cfg delete mode 100644 keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/versions/001_add_endpoint_filtering_table.py delete mode 100644 keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/versions/002_add_endpoint_groups.py delete mode 100644 
keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/versions/__init__.py delete mode 100644 keystone-moon/keystone/contrib/endpoint_filter/routers.py delete mode 100644 keystone-moon/keystone/contrib/endpoint_filter/schema.py delete mode 100644 keystone-moon/keystone/contrib/endpoint_policy/__init__.py delete mode 100644 keystone-moon/keystone/contrib/endpoint_policy/backends/__init__.py delete mode 100644 keystone-moon/keystone/contrib/endpoint_policy/backends/sql.py delete mode 100644 keystone-moon/keystone/contrib/endpoint_policy/controllers.py delete mode 100644 keystone-moon/keystone/contrib/endpoint_policy/core.py delete mode 100644 keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/__init__.py delete mode 100644 keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/migrate.cfg delete mode 100644 keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/versions/001_add_endpoint_policy_table.py delete mode 100644 keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/versions/__init__.py delete mode 100644 keystone-moon/keystone/contrib/endpoint_policy/routers.py delete mode 100644 keystone-moon/keystone/contrib/example/__init__.py delete mode 100644 keystone-moon/keystone/contrib/example/configuration.rst delete mode 100644 keystone-moon/keystone/contrib/example/controllers.py delete mode 100644 keystone-moon/keystone/contrib/example/core.py delete mode 100644 keystone-moon/keystone/contrib/example/migrate_repo/__init__.py delete mode 100644 keystone-moon/keystone/contrib/example/migrate_repo/migrate.cfg delete mode 100644 keystone-moon/keystone/contrib/example/migrate_repo/versions/001_example_table.py delete mode 100644 keystone-moon/keystone/contrib/example/migrate_repo/versions/__init__.py delete mode 100644 keystone-moon/keystone/contrib/example/routers.py delete mode 100644 keystone-moon/keystone/contrib/federation/__init__.py delete mode 100644 keystone-moon/keystone/contrib/federation/backends/__init__.py delete mode 
100644 keystone-moon/keystone/contrib/federation/backends/sql.py delete mode 100644 keystone-moon/keystone/contrib/federation/constants.py delete mode 100644 keystone-moon/keystone/contrib/federation/controllers.py delete mode 100644 keystone-moon/keystone/contrib/federation/core.py delete mode 100644 keystone-moon/keystone/contrib/federation/idp.py delete mode 100644 keystone-moon/keystone/contrib/federation/migrate_repo/__init__.py delete mode 100644 keystone-moon/keystone/contrib/federation/migrate_repo/migrate.cfg delete mode 100644 keystone-moon/keystone/contrib/federation/migrate_repo/versions/001_add_identity_provider_table.py delete mode 100644 keystone-moon/keystone/contrib/federation/migrate_repo/versions/002_add_mapping_tables.py delete mode 100644 keystone-moon/keystone/contrib/federation/migrate_repo/versions/003_mapping_id_nullable_false.py delete mode 100644 keystone-moon/keystone/contrib/federation/migrate_repo/versions/004_add_remote_id_column.py delete mode 100644 keystone-moon/keystone/contrib/federation/migrate_repo/versions/005_add_service_provider_table.py delete mode 100644 keystone-moon/keystone/contrib/federation/migrate_repo/versions/006_fixup_service_provider_attributes.py delete mode 100644 keystone-moon/keystone/contrib/federation/migrate_repo/versions/007_add_remote_id_table.py delete mode 100644 keystone-moon/keystone/contrib/federation/migrate_repo/versions/008_add_relay_state_to_sp.py delete mode 100644 keystone-moon/keystone/contrib/federation/migrate_repo/versions/__init__.py delete mode 100644 keystone-moon/keystone/contrib/federation/routers.py delete mode 100644 keystone-moon/keystone/contrib/federation/schema.py delete mode 100644 keystone-moon/keystone/contrib/federation/utils.py delete mode 100644 keystone-moon/keystone/contrib/moon/__init__.py delete mode 100644 keystone-moon/keystone/contrib/moon/algorithms.py delete mode 100644 keystone-moon/keystone/contrib/moon/backends/__init__.py delete mode 100644 
keystone-moon/keystone/contrib/moon/backends/flat.py delete mode 100644 keystone-moon/keystone/contrib/moon/backends/memory.py delete mode 100644 keystone-moon/keystone/contrib/moon/backends/sql.py delete mode 100644 keystone-moon/keystone/contrib/moon/controllers.py delete mode 100644 keystone-moon/keystone/contrib/moon/core.py delete mode 100644 keystone-moon/keystone/contrib/moon/exception.py delete mode 100644 keystone-moon/keystone/contrib/moon/migrate_repo/__init__.py delete mode 100644 keystone-moon/keystone/contrib/moon/migrate_repo/migrate.cfg delete mode 100644 keystone-moon/keystone/contrib/moon/migrate_repo/versions/001_moon.py delete mode 100644 keystone-moon/keystone/contrib/moon/migrate_repo/versions/002_moon.py delete mode 100644 keystone-moon/keystone/contrib/moon/migrate_repo/versions/003_moon.py delete mode 100644 keystone-moon/keystone/contrib/moon/migrate_repo/versions/__init__.py delete mode 100644 keystone-moon/keystone/contrib/moon/routers.py delete mode 100644 keystone-moon/keystone/contrib/moon/service.py delete mode 100644 keystone-moon/keystone/contrib/moon/wsgi.py delete mode 100644 keystone-moon/keystone/contrib/oauth1/__init__.py delete mode 100644 keystone-moon/keystone/contrib/oauth1/backends/__init__.py delete mode 100644 keystone-moon/keystone/contrib/oauth1/backends/sql.py delete mode 100644 keystone-moon/keystone/contrib/oauth1/controllers.py delete mode 100644 keystone-moon/keystone/contrib/oauth1/core.py delete mode 100644 keystone-moon/keystone/contrib/oauth1/migrate_repo/__init__.py delete mode 100644 keystone-moon/keystone/contrib/oauth1/migrate_repo/migrate.cfg delete mode 100644 keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/001_add_oauth_tables.py delete mode 100644 keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/002_fix_oauth_tables_fk.py delete mode 100644 keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/003_consumer_description_nullalbe.py delete mode 100644 
keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/004_request_token_roles_nullable.py delete mode 100644 keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/005_consumer_id_index.py delete mode 100644 keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/__init__.py delete mode 100644 keystone-moon/keystone/contrib/oauth1/routers.py delete mode 100644 keystone-moon/keystone/contrib/oauth1/validator.py delete mode 100644 keystone-moon/keystone/contrib/revoke/__init__.py delete mode 100644 keystone-moon/keystone/contrib/revoke/backends/__init__.py delete mode 100644 keystone-moon/keystone/contrib/revoke/backends/kvs.py delete mode 100644 keystone-moon/keystone/contrib/revoke/backends/sql.py delete mode 100644 keystone-moon/keystone/contrib/revoke/controllers.py delete mode 100644 keystone-moon/keystone/contrib/revoke/core.py delete mode 100644 keystone-moon/keystone/contrib/revoke/migrate_repo/__init__.py delete mode 100644 keystone-moon/keystone/contrib/revoke/migrate_repo/migrate.cfg delete mode 100644 keystone-moon/keystone/contrib/revoke/migrate_repo/versions/001_revoke_table.py delete mode 100644 keystone-moon/keystone/contrib/revoke/migrate_repo/versions/002_add_audit_id_and_chain_to_revoke_table.py delete mode 100644 keystone-moon/keystone/contrib/revoke/migrate_repo/versions/__init__.py delete mode 100644 keystone-moon/keystone/contrib/revoke/model.py delete mode 100644 keystone-moon/keystone/contrib/revoke/routers.py delete mode 100644 keystone-moon/keystone/contrib/s3/__init__.py delete mode 100644 keystone-moon/keystone/contrib/s3/core.py delete mode 100644 keystone-moon/keystone/contrib/simple_cert/__init__.py delete mode 100644 keystone-moon/keystone/contrib/simple_cert/controllers.py delete mode 100644 keystone-moon/keystone/contrib/simple_cert/core.py delete mode 100644 keystone-moon/keystone/contrib/simple_cert/routers.py delete mode 100644 keystone-moon/keystone/contrib/user_crud/__init__.py delete mode 100644 
keystone-moon/keystone/contrib/user_crud/core.py delete mode 100644 keystone-moon/keystone/controllers.py delete mode 100644 keystone-moon/keystone/credential/__init__.py delete mode 100644 keystone-moon/keystone/credential/backends/__init__.py delete mode 100644 keystone-moon/keystone/credential/backends/sql.py delete mode 100644 keystone-moon/keystone/credential/controllers.py delete mode 100644 keystone-moon/keystone/credential/core.py delete mode 100644 keystone-moon/keystone/credential/routers.py delete mode 100644 keystone-moon/keystone/credential/schema.py delete mode 100644 keystone-moon/keystone/endpoint_policy/__init__.py delete mode 100644 keystone-moon/keystone/endpoint_policy/backends/__init__.py delete mode 100644 keystone-moon/keystone/endpoint_policy/backends/sql.py delete mode 100644 keystone-moon/keystone/endpoint_policy/controllers.py delete mode 100644 keystone-moon/keystone/endpoint_policy/core.py delete mode 100644 keystone-moon/keystone/endpoint_policy/routers.py delete mode 100644 keystone-moon/keystone/exception.py delete mode 100644 keystone-moon/keystone/federation/V8_backends/__init__.py delete mode 100644 keystone-moon/keystone/federation/V8_backends/sql.py delete mode 100644 keystone-moon/keystone/federation/__init__.py delete mode 100644 keystone-moon/keystone/federation/backends/__init__.py delete mode 100644 keystone-moon/keystone/federation/backends/sql.py delete mode 100644 keystone-moon/keystone/federation/constants.py delete mode 100644 keystone-moon/keystone/federation/controllers.py delete mode 100644 keystone-moon/keystone/federation/core.py delete mode 100644 keystone-moon/keystone/federation/idp.py delete mode 100644 keystone-moon/keystone/federation/routers.py delete mode 100644 keystone-moon/keystone/federation/schema.py delete mode 100644 keystone-moon/keystone/federation/utils.py delete mode 100644 keystone-moon/keystone/hacking/__init__.py delete mode 100644 keystone-moon/keystone/hacking/checks.py delete mode 100644 
keystone-moon/keystone/i18n.py delete mode 100644 keystone-moon/keystone/identity/__init__.py delete mode 100644 keystone-moon/keystone/identity/backends/__init__.py delete mode 100644 keystone-moon/keystone/identity/backends/ldap.py delete mode 100644 keystone-moon/keystone/identity/backends/sql.py delete mode 100644 keystone-moon/keystone/identity/controllers.py delete mode 100644 keystone-moon/keystone/identity/core.py delete mode 100644 keystone-moon/keystone/identity/generator.py delete mode 100644 keystone-moon/keystone/identity/id_generators/__init__.py delete mode 100644 keystone-moon/keystone/identity/id_generators/sha256.py delete mode 100644 keystone-moon/keystone/identity/mapping_backends/__init__.py delete mode 100644 keystone-moon/keystone/identity/mapping_backends/mapping.py delete mode 100644 keystone-moon/keystone/identity/mapping_backends/sql.py delete mode 100644 keystone-moon/keystone/identity/routers.py delete mode 100644 keystone-moon/keystone/identity/schema.py delete mode 100644 keystone-moon/keystone/identity/shadow_backends/__init__.py delete mode 100644 keystone-moon/keystone/identity/shadow_backends/sql.py delete mode 100644 keystone-moon/keystone/locale/de/LC_MESSAGES/keystone-log-critical.po delete mode 100644 keystone-moon/keystone/locale/de/LC_MESSAGES/keystone-log-info.po delete mode 100644 keystone-moon/keystone/locale/de/LC_MESSAGES/keystone.po delete mode 100644 keystone-moon/keystone/locale/el/LC_MESSAGES/keystone-log-critical.po delete mode 100644 keystone-moon/keystone/locale/en_AU/LC_MESSAGES/keystone-log-critical.po delete mode 100644 keystone-moon/keystone/locale/en_AU/LC_MESSAGES/keystone-log-error.po delete mode 100644 keystone-moon/keystone/locale/en_AU/LC_MESSAGES/keystone.po delete mode 100644 keystone-moon/keystone/locale/en_GB/LC_MESSAGES/keystone-log-info.po delete mode 100644 keystone-moon/keystone/locale/es/LC_MESSAGES/keystone-log-critical.po delete mode 100644 
keystone-moon/keystone/locale/es/LC_MESSAGES/keystone-log-error.po delete mode 100644 keystone-moon/keystone/locale/es/LC_MESSAGES/keystone.po delete mode 100644 keystone-moon/keystone/locale/fr/LC_MESSAGES/keystone-log-critical.po delete mode 100644 keystone-moon/keystone/locale/fr/LC_MESSAGES/keystone-log-error.po delete mode 100644 keystone-moon/keystone/locale/fr/LC_MESSAGES/keystone-log-info.po delete mode 100644 keystone-moon/keystone/locale/fr/LC_MESSAGES/keystone-log-warning.po delete mode 100644 keystone-moon/keystone/locale/fr/LC_MESSAGES/keystone.po delete mode 100644 keystone-moon/keystone/locale/hu/LC_MESSAGES/keystone-log-critical.po delete mode 100644 keystone-moon/keystone/locale/it/LC_MESSAGES/keystone-log-critical.po delete mode 100644 keystone-moon/keystone/locale/it/LC_MESSAGES/keystone-log-error.po delete mode 100644 keystone-moon/keystone/locale/it/LC_MESSAGES/keystone-log-info.po delete mode 100644 keystone-moon/keystone/locale/it/LC_MESSAGES/keystone.po delete mode 100644 keystone-moon/keystone/locale/ja/LC_MESSAGES/keystone-log-critical.po delete mode 100644 keystone-moon/keystone/locale/ja/LC_MESSAGES/keystone-log-error.po delete mode 100644 keystone-moon/keystone/locale/ja/LC_MESSAGES/keystone.po delete mode 100644 keystone-moon/keystone/locale/keystone-log-critical.pot delete mode 100644 keystone-moon/keystone/locale/keystone-log-error.pot delete mode 100644 keystone-moon/keystone/locale/keystone-log-info.pot delete mode 100644 keystone-moon/keystone/locale/keystone-log-warning.pot delete mode 100644 keystone-moon/keystone/locale/keystone.pot delete mode 100644 keystone-moon/keystone/locale/ko_KR/LC_MESSAGES/keystone-log-critical.po delete mode 100644 keystone-moon/keystone/locale/ko_KR/LC_MESSAGES/keystone-log-error.po delete mode 100644 keystone-moon/keystone/locale/ko_KR/LC_MESSAGES/keystone-log-info.po delete mode 100644 keystone-moon/keystone/locale/ko_KR/LC_MESSAGES/keystone-log-warning.po delete mode 100644 
keystone-moon/keystone/locale/ko_KR/LC_MESSAGES/keystone.po delete mode 100644 keystone-moon/keystone/locale/pl_PL/LC_MESSAGES/keystone-log-critical.po delete mode 100644 keystone-moon/keystone/locale/pt_BR/LC_MESSAGES/keystone-log-critical.po delete mode 100644 keystone-moon/keystone/locale/pt_BR/LC_MESSAGES/keystone-log-error.po delete mode 100644 keystone-moon/keystone/locale/pt_BR/LC_MESSAGES/keystone.po delete mode 100644 keystone-moon/keystone/locale/ru/LC_MESSAGES/keystone-log-critical.po delete mode 100644 keystone-moon/keystone/locale/ru/LC_MESSAGES/keystone.po delete mode 100644 keystone-moon/keystone/locale/tr_TR/LC_MESSAGES/keystone-log-critical.po delete mode 100644 keystone-moon/keystone/locale/tr_TR/LC_MESSAGES/keystone-log-error.po delete mode 100644 keystone-moon/keystone/locale/tr_TR/LC_MESSAGES/keystone-log-info.po delete mode 100644 keystone-moon/keystone/locale/tr_TR/LC_MESSAGES/keystone-log-warning.po delete mode 100644 keystone-moon/keystone/locale/tr_TR/LC_MESSAGES/keystone.po delete mode 100644 keystone-moon/keystone/locale/vi_VN/LC_MESSAGES/keystone-log-info.po delete mode 100644 keystone-moon/keystone/locale/zh_CN/LC_MESSAGES/keystone-log-critical.po delete mode 100644 keystone-moon/keystone/locale/zh_CN/LC_MESSAGES/keystone-log-error.po delete mode 100644 keystone-moon/keystone/locale/zh_CN/LC_MESSAGES/keystone-log-info.po delete mode 100644 keystone-moon/keystone/locale/zh_CN/LC_MESSAGES/keystone.po delete mode 100644 keystone-moon/keystone/locale/zh_TW/LC_MESSAGES/keystone-log-critical.po delete mode 100644 keystone-moon/keystone/locale/zh_TW/LC_MESSAGES/keystone.po delete mode 100644 keystone-moon/keystone/middleware/__init__.py delete mode 100644 keystone-moon/keystone/middleware/auth.py delete mode 100644 keystone-moon/keystone/middleware/core.py delete mode 100644 keystone-moon/keystone/middleware/ec2_token.py delete mode 100644 keystone-moon/keystone/models/__init__.py delete mode 100644 
keystone-moon/keystone/models/revoke_model.py delete mode 100644 keystone-moon/keystone/models/token_model.py delete mode 100644 keystone-moon/keystone/notifications.py delete mode 100644 keystone-moon/keystone/oauth1/__init__.py delete mode 100644 keystone-moon/keystone/oauth1/backends/__init__.py delete mode 100644 keystone-moon/keystone/oauth1/backends/sql.py delete mode 100644 keystone-moon/keystone/oauth1/controllers.py delete mode 100644 keystone-moon/keystone/oauth1/core.py delete mode 100644 keystone-moon/keystone/oauth1/routers.py delete mode 100644 keystone-moon/keystone/oauth1/schema.py delete mode 100644 keystone-moon/keystone/oauth1/validator.py delete mode 100644 keystone-moon/keystone/openstack/__init__.py delete mode 100644 keystone-moon/keystone/openstack/common/README delete mode 100644 keystone-moon/keystone/openstack/common/__init__.py delete mode 100644 keystone-moon/keystone/openstack/common/_i18n.py delete mode 100644 keystone-moon/keystone/openstack/common/eventlet_backdoor.py delete mode 100644 keystone-moon/keystone/openstack/common/fileutils.py delete mode 100644 keystone-moon/keystone/openstack/common/loopingcall.py delete mode 100644 keystone-moon/keystone/openstack/common/service.py delete mode 100644 keystone-moon/keystone/openstack/common/systemd.py delete mode 100644 keystone-moon/keystone/openstack/common/threadgroup.py delete mode 100644 keystone-moon/keystone/openstack/common/versionutils.py delete mode 100644 keystone-moon/keystone/policy/__init__.py delete mode 100644 keystone-moon/keystone/policy/backends/__init__.py delete mode 100644 keystone-moon/keystone/policy/backends/rules.py delete mode 100644 keystone-moon/keystone/policy/backends/sql.py delete mode 100644 keystone-moon/keystone/policy/controllers.py delete mode 100644 keystone-moon/keystone/policy/core.py delete mode 100644 keystone-moon/keystone/policy/routers.py delete mode 100644 keystone-moon/keystone/policy/schema.py delete mode 100644 
keystone-moon/keystone/resource/V8_backends/__init__.py delete mode 100644 keystone-moon/keystone/resource/V8_backends/sql.py delete mode 100644 keystone-moon/keystone/resource/__init__.py delete mode 100644 keystone-moon/keystone/resource/backends/__init__.py delete mode 100644 keystone-moon/keystone/resource/backends/ldap.py delete mode 100644 keystone-moon/keystone/resource/backends/sql.py delete mode 100644 keystone-moon/keystone/resource/config_backends/__init__.py delete mode 100644 keystone-moon/keystone/resource/config_backends/sql.py delete mode 100644 keystone-moon/keystone/resource/controllers.py delete mode 100644 keystone-moon/keystone/resource/core.py delete mode 100644 keystone-moon/keystone/resource/routers.py delete mode 100644 keystone-moon/keystone/resource/schema.py delete mode 100644 keystone-moon/keystone/revoke/__init__.py delete mode 100644 keystone-moon/keystone/revoke/backends/__init__.py delete mode 100644 keystone-moon/keystone/revoke/backends/sql.py delete mode 100644 keystone-moon/keystone/revoke/controllers.py delete mode 100644 keystone-moon/keystone/revoke/core.py delete mode 100644 keystone-moon/keystone/revoke/model.py delete mode 100644 keystone-moon/keystone/revoke/routers.py delete mode 100644 keystone-moon/keystone/routers.py delete mode 100644 keystone-moon/keystone/server/__init__.py delete mode 100644 keystone-moon/keystone/server/backends.py delete mode 100644 keystone-moon/keystone/server/common.py delete mode 100644 keystone-moon/keystone/server/eventlet.py delete mode 100644 keystone-moon/keystone/server/wsgi.py delete mode 100644 keystone-moon/keystone/service.py delete mode 100644 keystone-moon/keystone/tests/__init__.py delete mode 100644 keystone-moon/keystone/tests/common/__init__.py delete mode 100644 keystone-moon/keystone/tests/common/auth.py delete mode 100644 keystone-moon/keystone/tests/functional/__init__.py delete mode 100644 keystone-moon/keystone/tests/functional/core.py delete mode 100644 
keystone-moon/keystone/tests/functional/shared/__init__.py delete mode 100644 keystone-moon/keystone/tests/functional/shared/test_running.py delete mode 100644 keystone-moon/keystone/tests/hacking/__init__.py delete mode 100644 keystone-moon/keystone/tests/hacking/checks.py delete mode 100644 keystone-moon/keystone/tests/moon/__init__.py delete mode 100644 keystone-moon/keystone/tests/moon/backends/__init__.py delete mode 100644 keystone-moon/keystone/tests/moon/backends/test_sql_backend.py delete mode 100644 keystone-moon/keystone/tests/moon/func/__init__.py delete mode 100644 keystone-moon/keystone/tests/moon/func/test_func_api_authz.py delete mode 100644 keystone-moon/keystone/tests/moon/func/test_func_api_intra_extension_admin.py delete mode 100644 keystone-moon/keystone/tests/moon/func/test_func_api_log.py delete mode 100644 keystone-moon/keystone/tests/moon/func/test_func_api_tenant.py delete mode 100644 keystone-moon/keystone/tests/moon/func/test_func_moon_auth.py delete mode 100644 keystone-moon/keystone/tests/moon/scenario/test_nova_a.sh delete mode 100644 keystone-moon/keystone/tests/moon/scenario/test_nova_b.sh delete mode 100644 keystone-moon/keystone/tests/moon/scenario/test_nova_c.sh delete mode 100644 keystone-moon/keystone/tests/moon/unit/__init__.py delete mode 100644 keystone-moon/keystone/tests/moon/unit/test_unit_core_configuration.py delete mode 100644 keystone-moon/keystone/tests/moon/unit/test_unit_core_intra_extension_admin.py delete mode 100644 keystone-moon/keystone/tests/moon/unit/test_unit_core_intra_extension_authz.py delete mode 100644 keystone-moon/keystone/tests/moon/unit/test_unit_core_log.py delete mode 100644 keystone-moon/keystone/tests/moon/unit/test_unit_core_tenant.py delete mode 100644 keystone-moon/keystone/tests/unit/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/assignment/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/assignment/role_backends/__init__.py delete mode 100644 
keystone-moon/keystone/tests/unit/assignment/role_backends/test_sql.py delete mode 100644 keystone-moon/keystone/tests/unit/assignment/test_backends.py delete mode 100644 keystone-moon/keystone/tests/unit/assignment/test_core.py delete mode 100644 keystone-moon/keystone/tests/unit/auth/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/auth/test_controllers.py delete mode 100644 keystone-moon/keystone/tests/unit/backend/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/backend/core_ldap.py delete mode 100644 keystone-moon/keystone/tests/unit/backend/core_sql.py delete mode 100644 keystone-moon/keystone/tests/unit/backend/domain_config/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/backend/domain_config/core.py delete mode 100644 keystone-moon/keystone/tests/unit/backend/domain_config/test_sql.py delete mode 100644 keystone-moon/keystone/tests/unit/backend/legacy_drivers/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/V8/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/V8/sql.py delete mode 100644 keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/V8/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/V8/api_v3.py delete mode 100644 keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/V8/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/V8/sql.py delete mode 100644 keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/V8/__init__.py delete mode 100644 
keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/V8/sql.py delete mode 100644 keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/backend/role/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/backend/role/core.py delete mode 100644 keystone-moon/keystone/tests/unit/backend/role/test_ldap.py delete mode 100644 keystone-moon/keystone/tests/unit/backend/role/test_sql.py delete mode 100644 keystone-moon/keystone/tests/unit/catalog/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/catalog/test_backends.py delete mode 100644 keystone-moon/keystone/tests/unit/catalog/test_core.py delete mode 100644 keystone-moon/keystone/tests/unit/common/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/common/test_authorization.py delete mode 100644 keystone-moon/keystone/tests/unit/common/test_base64utils.py delete mode 100644 keystone-moon/keystone/tests/unit/common/test_connection_pool.py delete mode 100644 keystone-moon/keystone/tests/unit/common/test_injection.py delete mode 100644 keystone-moon/keystone/tests/unit/common/test_json_home.py delete mode 100644 keystone-moon/keystone/tests/unit/common/test_ldap.py delete mode 100644 keystone-moon/keystone/tests/unit/common/test_manager.py delete mode 100644 keystone-moon/keystone/tests/unit/common/test_notifications.py delete mode 100644 keystone-moon/keystone/tests/unit/common/test_pemutils.py delete mode 100644 keystone-moon/keystone/tests/unit/common/test_sql_core.py delete mode 100644 keystone-moon/keystone/tests/unit/common/test_utils.py delete mode 100644 keystone-moon/keystone/tests/unit/config_files/backend_db2.conf delete mode 100644 keystone-moon/keystone/tests/unit/config_files/backend_ldap.conf delete mode 100644 keystone-moon/keystone/tests/unit/config_files/backend_ldap_pool.conf delete mode 100644 keystone-moon/keystone/tests/unit/config_files/backend_ldap_sql.conf delete mode 100644 
keystone-moon/keystone/tests/unit/config_files/backend_liveldap.conf delete mode 100644 keystone-moon/keystone/tests/unit/config_files/backend_multi_ldap_sql.conf delete mode 100644 keystone-moon/keystone/tests/unit/config_files/backend_mysql.conf delete mode 100644 keystone-moon/keystone/tests/unit/config_files/backend_pool_liveldap.conf delete mode 100644 keystone-moon/keystone/tests/unit/config_files/backend_postgresql.conf delete mode 100644 keystone-moon/keystone/tests/unit/config_files/backend_sql.conf delete mode 100644 keystone-moon/keystone/tests/unit/config_files/backend_tls_liveldap.conf delete mode 100644 keystone-moon/keystone/tests/unit/config_files/deprecated.conf delete mode 100644 keystone-moon/keystone/tests/unit/config_files/deprecated_override.conf delete mode 100644 keystone-moon/keystone/tests/unit/config_files/domain_configs_default_ldap_one_sql/keystone.domain1.conf delete mode 100644 keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.Default.conf delete mode 100644 keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain1.conf delete mode 100644 keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain2.conf delete mode 100644 keystone-moon/keystone/tests/unit/config_files/domain_configs_one_extra_sql/keystone.domain2.conf delete mode 100644 keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.Default.conf delete mode 100644 keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.domain1.conf delete mode 100644 keystone-moon/keystone/tests/unit/config_files/test_auth_plugin.conf delete mode 100644 keystone-moon/keystone/tests/unit/contrib/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/contrib/federation/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/contrib/federation/test_utils.py delete mode 100644 keystone-moon/keystone/tests/unit/core.py 
delete mode 100644 keystone-moon/keystone/tests/unit/default_catalog.templates delete mode 100644 keystone-moon/keystone/tests/unit/default_fixtures.py delete mode 100644 keystone-moon/keystone/tests/unit/external/README.rst delete mode 100644 keystone-moon/keystone/tests/unit/external/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/external/test_timeutils.py delete mode 100644 keystone-moon/keystone/tests/unit/fakeldap.py delete mode 100644 keystone-moon/keystone/tests/unit/federation_fixtures.py delete mode 100644 keystone-moon/keystone/tests/unit/filtering.py delete mode 100644 keystone-moon/keystone/tests/unit/identity/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/identity/test_backends.py delete mode 100644 keystone-moon/keystone/tests/unit/identity/test_controllers.py delete mode 100644 keystone-moon/keystone/tests/unit/identity/test_core.py delete mode 100644 keystone-moon/keystone/tests/unit/identity_mapping.py delete mode 100644 keystone-moon/keystone/tests/unit/ksfixtures/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/ksfixtures/appserver.py delete mode 100644 keystone-moon/keystone/tests/unit/ksfixtures/auth_plugins.py delete mode 100644 keystone-moon/keystone/tests/unit/ksfixtures/cache.py delete mode 100644 keystone-moon/keystone/tests/unit/ksfixtures/database.py delete mode 100644 keystone-moon/keystone/tests/unit/ksfixtures/hacking.py delete mode 100644 keystone-moon/keystone/tests/unit/ksfixtures/key_repository.py delete mode 100644 keystone-moon/keystone/tests/unit/ksfixtures/ldapdb.py delete mode 100644 keystone-moon/keystone/tests/unit/ksfixtures/policy.py delete mode 100644 keystone-moon/keystone/tests/unit/ksfixtures/temporaryfile.py delete mode 100644 keystone-moon/keystone/tests/unit/mapping_fixtures.py delete mode 100644 keystone-moon/keystone/tests/unit/policy/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/policy/test_backends.py delete mode 100644 
keystone-moon/keystone/tests/unit/resource/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/resource/backends/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/resource/backends/test_sql.py delete mode 100644 keystone-moon/keystone/tests/unit/resource/config_backends/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/resource/config_backends/test_sql.py delete mode 100644 keystone-moon/keystone/tests/unit/resource/test_backends.py delete mode 100644 keystone-moon/keystone/tests/unit/resource/test_controllers.py delete mode 100644 keystone-moon/keystone/tests/unit/resource/test_core.py delete mode 100644 keystone-moon/keystone/tests/unit/rest.py delete mode 100644 keystone-moon/keystone/tests/unit/saml2/idp_saml2_metadata.xml delete mode 100644 keystone-moon/keystone/tests/unit/saml2/signed_saml2_assertion.xml delete mode 100644 keystone-moon/keystone/tests/unit/schema/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/schema/v2.py delete mode 100644 keystone-moon/keystone/tests/unit/test_associate_project_endpoint_extension.py delete mode 100644 keystone-moon/keystone/tests/unit/test_auth.py delete mode 100644 keystone-moon/keystone/tests/unit/test_auth_plugin.py delete mode 100644 keystone-moon/keystone/tests/unit/test_backend.py delete mode 100644 keystone-moon/keystone/tests/unit/test_backend_endpoint_policy.py delete mode 100644 keystone-moon/keystone/tests/unit/test_backend_endpoint_policy_sql.py delete mode 100644 keystone-moon/keystone/tests/unit/test_backend_federation_sql.py delete mode 100644 keystone-moon/keystone/tests/unit/test_backend_id_mapping_sql.py delete mode 100644 keystone-moon/keystone/tests/unit/test_backend_kvs.py delete mode 100644 keystone-moon/keystone/tests/unit/test_backend_ldap.py delete mode 100644 keystone-moon/keystone/tests/unit/test_backend_ldap_pool.py delete mode 100644 keystone-moon/keystone/tests/unit/test_backend_rules.py delete mode 100644 
keystone-moon/keystone/tests/unit/test_backend_sql.py delete mode 100644 keystone-moon/keystone/tests/unit/test_backend_templated.py delete mode 100644 keystone-moon/keystone/tests/unit/test_cache.py delete mode 100644 keystone-moon/keystone/tests/unit/test_cache_backend_mongo.py delete mode 100644 keystone-moon/keystone/tests/unit/test_catalog.py delete mode 100644 keystone-moon/keystone/tests/unit/test_cert_setup.py delete mode 100644 keystone-moon/keystone/tests/unit/test_cli.py delete mode 100644 keystone-moon/keystone/tests/unit/test_config.py delete mode 100644 keystone-moon/keystone/tests/unit/test_contrib_ec2.py delete mode 100644 keystone-moon/keystone/tests/unit/test_contrib_s3_core.py delete mode 100644 keystone-moon/keystone/tests/unit/test_contrib_simple_cert.py delete mode 100644 keystone-moon/keystone/tests/unit/test_credential.py delete mode 100644 keystone-moon/keystone/tests/unit/test_driver_hints.py delete mode 100644 keystone-moon/keystone/tests/unit/test_ec2_token_middleware.py delete mode 100644 keystone-moon/keystone/tests/unit/test_entry_points.py delete mode 100644 keystone-moon/keystone/tests/unit/test_exception.py delete mode 100644 keystone-moon/keystone/tests/unit/test_hacking_checks.py delete mode 100644 keystone-moon/keystone/tests/unit/test_ipv6.py delete mode 100644 keystone-moon/keystone/tests/unit/test_kvs.py delete mode 100644 keystone-moon/keystone/tests/unit/test_ldap_livetest.py delete mode 100644 keystone-moon/keystone/tests/unit/test_ldap_pool_livetest.py delete mode 100644 keystone-moon/keystone/tests/unit/test_ldap_tls_livetest.py delete mode 100644 keystone-moon/keystone/tests/unit/test_middleware.py delete mode 100644 keystone-moon/keystone/tests/unit/test_no_admin_token_auth.py delete mode 100644 keystone-moon/keystone/tests/unit/test_policy.py delete mode 100644 keystone-moon/keystone/tests/unit/test_revoke.py delete mode 100644 keystone-moon/keystone/tests/unit/test_singular_plural.py delete mode 100644 
keystone-moon/keystone/tests/unit/test_sql_livetest.py delete mode 100644 keystone-moon/keystone/tests/unit/test_sql_migrate_extensions.py delete mode 100644 keystone-moon/keystone/tests/unit/test_sql_upgrade.py delete mode 100644 keystone-moon/keystone/tests/unit/test_ssl.py delete mode 100644 keystone-moon/keystone/tests/unit/test_token_bind.py delete mode 100644 keystone-moon/keystone/tests/unit/test_token_provider.py delete mode 100644 keystone-moon/keystone/tests/unit/test_url_middleware.py delete mode 100644 keystone-moon/keystone/tests/unit/test_v2.py delete mode 100644 keystone-moon/keystone/tests/unit/test_v2_controller.py delete mode 100644 keystone-moon/keystone/tests/unit/test_v2_keystoneclient.py delete mode 100644 keystone-moon/keystone/tests/unit/test_v2_keystoneclient_sql.py delete mode 100644 keystone-moon/keystone/tests/unit/test_v3.py delete mode 100644 keystone-moon/keystone/tests/unit/test_v3_assignment.py delete mode 100644 keystone-moon/keystone/tests/unit/test_v3_auth.py delete mode 100644 keystone-moon/keystone/tests/unit/test_v3_catalog.py delete mode 100644 keystone-moon/keystone/tests/unit/test_v3_controller.py delete mode 100644 keystone-moon/keystone/tests/unit/test_v3_credential.py delete mode 100644 keystone-moon/keystone/tests/unit/test_v3_domain_config.py delete mode 100644 keystone-moon/keystone/tests/unit/test_v3_endpoint_policy.py delete mode 100644 keystone-moon/keystone/tests/unit/test_v3_federation.py delete mode 100644 keystone-moon/keystone/tests/unit/test_v3_filters.py delete mode 100644 keystone-moon/keystone/tests/unit/test_v3_identity.py delete mode 100644 keystone-moon/keystone/tests/unit/test_v3_oauth1.py delete mode 100644 keystone-moon/keystone/tests/unit/test_v3_os_revoke.py delete mode 100644 keystone-moon/keystone/tests/unit/test_v3_policy.py delete mode 100644 keystone-moon/keystone/tests/unit/test_v3_protection.py delete mode 100644 keystone-moon/keystone/tests/unit/test_v3_resource.py delete mode 100644 
keystone-moon/keystone/tests/unit/test_v3_trust.py delete mode 100644 keystone-moon/keystone/tests/unit/test_validation.py delete mode 100644 keystone-moon/keystone/tests/unit/test_versions.py delete mode 100644 keystone-moon/keystone/tests/unit/test_wsgi.py delete mode 100644 keystone-moon/keystone/tests/unit/tests/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/tests/test_core.py delete mode 100644 keystone-moon/keystone/tests/unit/tests/test_utils.py delete mode 100644 keystone-moon/keystone/tests/unit/token/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/token/test_backends.py delete mode 100644 keystone-moon/keystone/tests/unit/token/test_fernet_provider.py delete mode 100644 keystone-moon/keystone/tests/unit/token/test_pki_provider.py delete mode 100644 keystone-moon/keystone/tests/unit/token/test_pkiz_provider.py delete mode 100644 keystone-moon/keystone/tests/unit/token/test_provider.py delete mode 100644 keystone-moon/keystone/tests/unit/token/test_token_data_helper.py delete mode 100644 keystone-moon/keystone/tests/unit/token/test_token_model.py delete mode 100644 keystone-moon/keystone/tests/unit/token/test_uuid_provider.py delete mode 100644 keystone-moon/keystone/tests/unit/trust/__init__.py delete mode 100644 keystone-moon/keystone/tests/unit/trust/test_backends.py delete mode 100644 keystone-moon/keystone/tests/unit/utils.py delete mode 100644 keystone-moon/keystone/token/__init__.py delete mode 100644 keystone-moon/keystone/token/_simple_cert.py delete mode 100644 keystone-moon/keystone/token/controllers.py delete mode 100644 keystone-moon/keystone/token/persistence/__init__.py delete mode 100644 keystone-moon/keystone/token/persistence/backends/__init__.py delete mode 100644 keystone-moon/keystone/token/persistence/backends/kvs.py delete mode 100644 keystone-moon/keystone/token/persistence/backends/memcache.py delete mode 100644 keystone-moon/keystone/token/persistence/backends/memcache_pool.py delete mode 100644 
keystone-moon/keystone/token/persistence/backends/sql.py delete mode 100644 keystone-moon/keystone/token/persistence/core.py delete mode 100644 keystone-moon/keystone/token/provider.py delete mode 100644 keystone-moon/keystone/token/providers/__init__.py delete mode 100644 keystone-moon/keystone/token/providers/common.py delete mode 100644 keystone-moon/keystone/token/providers/fernet/__init__.py delete mode 100644 keystone-moon/keystone/token/providers/fernet/core.py delete mode 100644 keystone-moon/keystone/token/providers/fernet/token_formatters.py delete mode 100644 keystone-moon/keystone/token/providers/fernet/utils.py delete mode 100644 keystone-moon/keystone/token/providers/pki.py delete mode 100644 keystone-moon/keystone/token/providers/pkiz.py delete mode 100644 keystone-moon/keystone/token/providers/uuid.py delete mode 100644 keystone-moon/keystone/token/routers.py delete mode 100644 keystone-moon/keystone/token/utils.py delete mode 100644 keystone-moon/keystone/trust/__init__.py delete mode 100644 keystone-moon/keystone/trust/backends/__init__.py delete mode 100644 keystone-moon/keystone/trust/backends/sql.py delete mode 100644 keystone-moon/keystone/trust/controllers.py delete mode 100644 keystone-moon/keystone/trust/core.py delete mode 100644 keystone-moon/keystone/trust/routers.py delete mode 100644 keystone-moon/keystone/trust/schema.py delete mode 100644 keystone-moon/keystone/v2_crud/__init__.py delete mode 100644 keystone-moon/keystone/v2_crud/admin_crud.py delete mode 100644 keystone-moon/keystone/v2_crud/user_crud.py delete mode 100644 keystone-moon/keystone/version/__init__.py delete mode 100644 keystone-moon/keystone/version/controllers.py delete mode 100644 keystone-moon/keystone/version/routers.py delete mode 100644 keystone-moon/keystone/version/service.py (limited to 'keystone-moon/keystone') diff --git a/keystone-moon/keystone/__init__.py b/keystone-moon/keystone/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git 
a/keystone-moon/keystone/assignment/V8_backends/__init__.py b/keystone-moon/keystone/assignment/V8_backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/assignment/V8_backends/sql.py b/keystone-moon/keystone/assignment/V8_backends/sql.py deleted file mode 100644 index 88c10a6a..00000000 --- a/keystone-moon/keystone/assignment/V8_backends/sql.py +++ /dev/null @@ -1,452 +0,0 @@ -# Copyright 2012-13 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_config import cfg -import sqlalchemy -from sqlalchemy.sql.expression import false - -from keystone import assignment as keystone_assignment -from keystone.common import sql -from keystone import exception -from keystone.i18n import _ - - -CONF = cfg.CONF - - -class AssignmentType(object): - USER_PROJECT = 'UserProject' - GROUP_PROJECT = 'GroupProject' - USER_DOMAIN = 'UserDomain' - GROUP_DOMAIN = 'GroupDomain' - - @classmethod - def calculate_type(cls, user_id, group_id, project_id, domain_id): - if user_id: - if project_id: - return cls.USER_PROJECT - if domain_id: - return cls.USER_DOMAIN - if group_id: - if project_id: - return cls.GROUP_PROJECT - if domain_id: - return cls.GROUP_DOMAIN - # Invalid parameters combination - raise exception.AssignmentTypeCalculationError(**locals()) - - -class Assignment(keystone_assignment.AssignmentDriverV8): - - def default_role_driver(self): - return 'sql' - - def default_resource_driver(self): - return 'sql' - - def list_user_ids_for_project(self, tenant_id): - with sql.session_for_read() as session: - query = session.query(RoleAssignment.actor_id) - query = query.filter_by(type=AssignmentType.USER_PROJECT) - query = query.filter_by(target_id=tenant_id) - query = query.distinct('actor_id') - assignments = query.all() - return [assignment.actor_id for assignment in assignments] - - def create_grant(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): - - assignment_type = AssignmentType.calculate_type( - user_id, group_id, project_id, domain_id) - try: - with sql.session_for_write() as session: - session.add(RoleAssignment( - type=assignment_type, - actor_id=user_id or group_id, - target_id=project_id or domain_id, - role_id=role_id, - inherited=inherited_to_projects)) - except sql.DBDuplicateEntry: # nosec : The v3 grant APIs are silent if - # the assignment already exists - pass - - def list_grant_role_ids(self, user_id=None, group_id=None, - 
domain_id=None, project_id=None, - inherited_to_projects=False): - with sql.session_for_read() as session: - q = session.query(RoleAssignment.role_id) - q = q.filter(RoleAssignment.actor_id == (user_id or group_id)) - q = q.filter(RoleAssignment.target_id == (project_id or domain_id)) - q = q.filter(RoleAssignment.inherited == inherited_to_projects) - return [x.role_id for x in q.all()] - - def _build_grant_filter(self, session, role_id, user_id, group_id, - domain_id, project_id, inherited_to_projects): - q = session.query(RoleAssignment) - q = q.filter_by(actor_id=user_id or group_id) - q = q.filter_by(target_id=project_id or domain_id) - q = q.filter_by(role_id=role_id) - q = q.filter_by(inherited=inherited_to_projects) - return q - - def check_grant_role_id(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): - with sql.session_for_read() as session: - try: - q = self._build_grant_filter( - session, role_id, user_id, group_id, domain_id, project_id, - inherited_to_projects) - q.one() - except sql.NotFound: - actor_id = user_id or group_id - target_id = domain_id or project_id - raise exception.RoleAssignmentNotFound(role_id=role_id, - actor_id=actor_id, - target_id=target_id) - - def delete_grant(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): - with sql.session_for_write() as session: - q = self._build_grant_filter( - session, role_id, user_id, group_id, domain_id, project_id, - inherited_to_projects) - if not q.delete(False): - actor_id = user_id or group_id - target_id = domain_id or project_id - raise exception.RoleAssignmentNotFound(role_id=role_id, - actor_id=actor_id, - target_id=target_id) - - def _list_project_ids_for_actor(self, actors, hints, inherited, - group_only=False): - # TODO(henry-nash): Now that we have a single assignment table, we - # should be able to honor the hints list that is provided. 
- - assignment_type = [AssignmentType.GROUP_PROJECT] - if not group_only: - assignment_type.append(AssignmentType.USER_PROJECT) - - sql_constraints = sqlalchemy.and_( - RoleAssignment.type.in_(assignment_type), - RoleAssignment.inherited == inherited, - RoleAssignment.actor_id.in_(actors)) - - with sql.session_for_read() as session: - query = session.query(RoleAssignment.target_id).filter( - sql_constraints).distinct() - - return [x.target_id for x in query.all()] - - def list_project_ids_for_user(self, user_id, group_ids, hints, - inherited=False): - actor_list = [user_id] - if group_ids: - actor_list = actor_list + group_ids - - return self._list_project_ids_for_actor(actor_list, hints, inherited) - - def list_domain_ids_for_user(self, user_id, group_ids, hints, - inherited=False): - with sql.session_for_read() as session: - query = session.query(RoleAssignment.target_id) - filters = [] - - if user_id: - sql_constraints = sqlalchemy.and_( - RoleAssignment.actor_id == user_id, - RoleAssignment.inherited == inherited, - RoleAssignment.type == AssignmentType.USER_DOMAIN) - filters.append(sql_constraints) - - if group_ids: - sql_constraints = sqlalchemy.and_( - RoleAssignment.actor_id.in_(group_ids), - RoleAssignment.inherited == inherited, - RoleAssignment.type == AssignmentType.GROUP_DOMAIN) - filters.append(sql_constraints) - - if not filters: - return [] - - query = query.filter(sqlalchemy.or_(*filters)).distinct() - - return [assignment.target_id for assignment in query.all()] - - def list_role_ids_for_groups_on_domain(self, group_ids, domain_id): - if not group_ids: - # If there's no groups then there will be no domain roles. 
- return [] - - sql_constraints = sqlalchemy.and_( - RoleAssignment.type == AssignmentType.GROUP_DOMAIN, - RoleAssignment.target_id == domain_id, - RoleAssignment.inherited == false(), - RoleAssignment.actor_id.in_(group_ids)) - - with sql.session_for_read() as session: - query = session.query(RoleAssignment.role_id).filter( - sql_constraints).distinct() - return [role.role_id for role in query.all()] - - def list_role_ids_for_groups_on_project( - self, group_ids, project_id, project_domain_id, project_parents): - - if not group_ids: - # If there's no groups then there will be no project roles. - return [] - - # NOTE(rodrigods): First, we always include projects with - # non-inherited assignments - sql_constraints = sqlalchemy.and_( - RoleAssignment.type == AssignmentType.GROUP_PROJECT, - RoleAssignment.inherited == false(), - RoleAssignment.target_id == project_id) - - if CONF.os_inherit.enabled: - # Inherited roles from domains - sql_constraints = sqlalchemy.or_( - sql_constraints, - sqlalchemy.and_( - RoleAssignment.type == AssignmentType.GROUP_DOMAIN, - RoleAssignment.inherited, - RoleAssignment.target_id == project_domain_id)) - - # Inherited roles from projects - if project_parents: - sql_constraints = sqlalchemy.or_( - sql_constraints, - sqlalchemy.and_( - RoleAssignment.type == AssignmentType.GROUP_PROJECT, - RoleAssignment.inherited, - RoleAssignment.target_id.in_(project_parents))) - - sql_constraints = sqlalchemy.and_( - sql_constraints, RoleAssignment.actor_id.in_(group_ids)) - - with sql.session_for_read() as session: - # NOTE(morganfainberg): Only select the columns we actually care - # about here, in this case role_id. 
- query = session.query(RoleAssignment.role_id).filter( - sql_constraints).distinct() - - return [result.role_id for result in query.all()] - - def list_project_ids_for_groups(self, group_ids, hints, - inherited=False): - return self._list_project_ids_for_actor( - group_ids, hints, inherited, group_only=True) - - def list_domain_ids_for_groups(self, group_ids, inherited=False): - if not group_ids: - # If there's no groups then there will be no domains. - return [] - - group_sql_conditions = sqlalchemy.and_( - RoleAssignment.type == AssignmentType.GROUP_DOMAIN, - RoleAssignment.inherited == inherited, - RoleAssignment.actor_id.in_(group_ids)) - - with sql.session_for_read() as session: - query = session.query(RoleAssignment.target_id).filter( - group_sql_conditions).distinct() - return [x.target_id for x in query.all()] - - def add_role_to_user_and_project(self, user_id, tenant_id, role_id): - try: - with sql.session_for_write() as session: - session.add(RoleAssignment( - type=AssignmentType.USER_PROJECT, - actor_id=user_id, target_id=tenant_id, - role_id=role_id, inherited=False)) - except sql.DBDuplicateEntry: - msg = ('User %s already has role %s in tenant %s' - % (user_id, role_id, tenant_id)) - raise exception.Conflict(type='role grant', details=msg) - - def remove_role_from_user_and_project(self, user_id, tenant_id, role_id): - with sql.session_for_write() as session: - q = session.query(RoleAssignment) - q = q.filter_by(actor_id=user_id) - q = q.filter_by(target_id=tenant_id) - q = q.filter_by(role_id=role_id) - if q.delete() == 0: - raise exception.RoleNotFound(message=_( - 'Cannot remove role that has not been granted, %s') % - role_id) - - def _get_user_assignment_types(self): - return [AssignmentType.USER_PROJECT, AssignmentType.USER_DOMAIN] - - def _get_group_assignment_types(self): - return [AssignmentType.GROUP_PROJECT, AssignmentType.GROUP_DOMAIN] - - def _get_project_assignment_types(self): - return [AssignmentType.USER_PROJECT, 
AssignmentType.GROUP_PROJECT] - - def _get_domain_assignment_types(self): - return [AssignmentType.USER_DOMAIN, AssignmentType.GROUP_DOMAIN] - - def _get_assignment_types(self, user, group, project, domain): - """Returns a list of role assignment types based on provided entities - - If one of user or group (the "actor") as well as one of project or - domain (the "target") are provided, the list will contain the role - assignment type for that specific pair of actor and target. - - If only an actor or target is provided, the list will contain the - role assignment types that satisfy the specified entity. - - For example, if user and project are provided, the return will be: - - [AssignmentType.USER_PROJECT] - - However, if only user was provided, the return would be: - - [AssignmentType.USER_PROJECT, AssignmentType.USER_DOMAIN] - - It is not expected that user and group (or project and domain) are - specified - but if they are, the most fine-grained value will be - chosen (i.e. user over group, project over domain). 
- - """ - actor_types = [] - if user: - actor_types = self._get_user_assignment_types() - elif group: - actor_types = self._get_group_assignment_types() - - target_types = [] - if project: - target_types = self._get_project_assignment_types() - elif domain: - target_types = self._get_domain_assignment_types() - - if actor_types and target_types: - return list(set(actor_types).intersection(target_types)) - - return actor_types or target_types - - def list_role_assignments(self, role_id=None, - user_id=None, group_ids=None, - domain_id=None, project_ids=None, - inherited_to_projects=None): - - def denormalize_role(ref): - assignment = {} - if ref.type == AssignmentType.USER_PROJECT: - assignment['user_id'] = ref.actor_id - assignment['project_id'] = ref.target_id - elif ref.type == AssignmentType.USER_DOMAIN: - assignment['user_id'] = ref.actor_id - assignment['domain_id'] = ref.target_id - elif ref.type == AssignmentType.GROUP_PROJECT: - assignment['group_id'] = ref.actor_id - assignment['project_id'] = ref.target_id - elif ref.type == AssignmentType.GROUP_DOMAIN: - assignment['group_id'] = ref.actor_id - assignment['domain_id'] = ref.target_id - else: - raise exception.Error(message=_( - 'Unexpected assignment type encountered, %s') % - ref.type) - assignment['role_id'] = ref.role_id - if ref.inherited: - assignment['inherited_to_projects'] = 'projects' - return assignment - - with sql.session_for_read() as session: - assignment_types = self._get_assignment_types( - user_id, group_ids, project_ids, domain_id) - - targets = None - if project_ids: - targets = project_ids - elif domain_id: - targets = [domain_id] - - actors = None - if group_ids: - actors = group_ids - elif user_id: - actors = [user_id] - - query = session.query(RoleAssignment) - - if role_id: - query = query.filter_by(role_id=role_id) - if actors: - query = query.filter(RoleAssignment.actor_id.in_(actors)) - if targets: - query = query.filter(RoleAssignment.target_id.in_(targets)) - if 
assignment_types: - query = query.filter(RoleAssignment.type.in_(assignment_types)) - if inherited_to_projects is not None: - query = query.filter_by(inherited=inherited_to_projects) - - return [denormalize_role(ref) for ref in query.all()] - - def delete_project_assignments(self, project_id): - with sql.session_for_write() as session: - q = session.query(RoleAssignment) - q = q.filter_by(target_id=project_id) - q.delete(False) - - def delete_role_assignments(self, role_id): - with sql.session_for_write() as session: - q = session.query(RoleAssignment) - q = q.filter_by(role_id=role_id) - q.delete(False) - - def delete_user_assignments(self, user_id): - with sql.session_for_write() as session: - q = session.query(RoleAssignment) - q = q.filter_by(actor_id=user_id) - q.delete(False) - - def delete_group_assignments(self, group_id): - with sql.session_for_write() as session: - q = session.query(RoleAssignment) - q = q.filter_by(actor_id=group_id) - q.delete(False) - - -class RoleAssignment(sql.ModelBase, sql.DictBase): - __tablename__ = 'assignment' - attributes = ['type', 'actor_id', 'target_id', 'role_id', 'inherited'] - # NOTE(henry-nash); Postgres requires a name to be defined for an Enum - type = sql.Column( - sql.Enum(AssignmentType.USER_PROJECT, AssignmentType.GROUP_PROJECT, - AssignmentType.USER_DOMAIN, AssignmentType.GROUP_DOMAIN, - name='type'), - nullable=False) - actor_id = sql.Column(sql.String(64), nullable=False) - target_id = sql.Column(sql.String(64), nullable=False) - role_id = sql.Column(sql.String(64), nullable=False) - inherited = sql.Column(sql.Boolean, default=False, nullable=False) - __table_args__ = ( - sql.PrimaryKeyConstraint('type', 'actor_id', 'target_id', 'role_id', - 'inherited'), - sql.Index('ix_actor_id', 'actor_id'), - ) - - def to_dict(self): - """Override parent method with a simpler implementation. - - RoleAssignment doesn't have non-indexed 'extra' attributes, so the - parent implementation is not applicable. 
- """ - return dict(self.items()) diff --git a/keystone-moon/keystone/assignment/V8_role_backends/__init__.py b/keystone-moon/keystone/assignment/V8_role_backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/assignment/V8_role_backends/sql.py b/keystone-moon/keystone/assignment/V8_role_backends/sql.py deleted file mode 100644 index 2e2e119a..00000000 --- a/keystone-moon/keystone/assignment/V8_role_backends/sql.py +++ /dev/null @@ -1,80 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone import assignment -from keystone.common import sql -from keystone import exception - - -class Role(assignment.RoleDriverV8): - - @sql.handle_conflicts(conflict_type='role') - def create_role(self, role_id, role): - with sql.session_for_write() as session: - ref = RoleTable.from_dict(role) - session.add(ref) - return ref.to_dict() - - @sql.truncated - def list_roles(self, hints): - with sql.session_for_read() as session: - query = session.query(RoleTable) - refs = sql.filter_limit_query(RoleTable, query, hints) - return [ref.to_dict() for ref in refs] - - def list_roles_from_ids(self, ids): - if not ids: - return [] - else: - with sql.session_for_read() as session: - query = session.query(RoleTable) - query = query.filter(RoleTable.id.in_(ids)) - role_refs = query.all() - return [role_ref.to_dict() for role_ref in role_refs] - - def _get_role(self, session, role_id): - ref = session.query(RoleTable).get(role_id) - if ref is None: - raise exception.RoleNotFound(role_id=role_id) - return ref - - def get_role(self, role_id): - with sql.session_for_read() as session: - return self._get_role(session, role_id).to_dict() - - @sql.handle_conflicts(conflict_type='role') - def update_role(self, role_id, role): - with sql.session_for_write() as session: - ref = self._get_role(session, role_id) - old_dict = ref.to_dict() - for k in role: - old_dict[k] = role[k] - new_role = RoleTable.from_dict(old_dict) - for attr in RoleTable.attributes: - if attr != 'id': - setattr(ref, attr, getattr(new_role, attr)) - ref.extra = new_role.extra - return ref.to_dict() - - def delete_role(self, role_id): - with sql.session_for_write() as session: - ref = self._get_role(session, role_id) - session.delete(ref) - - -class RoleTable(sql.ModelBase, sql.DictBase): - __tablename__ = 'role' - attributes = ['id', 'name'] - id = sql.Column(sql.String(64), primary_key=True) - name = sql.Column(sql.String(255), unique=True, nullable=False) - extra = sql.Column(sql.JsonBlob()) - 
__table_args__ = (sql.UniqueConstraint('name'),) diff --git a/keystone-moon/keystone/assignment/__init__.py b/keystone-moon/keystone/assignment/__init__.py deleted file mode 100644 index 4aa04ee6..00000000 --- a/keystone-moon/keystone/assignment/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.assignment import controllers # noqa -from keystone.assignment.core import * # noqa diff --git a/keystone-moon/keystone/assignment/backends/__init__.py b/keystone-moon/keystone/assignment/backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/assignment/backends/ldap.py b/keystone-moon/keystone/assignment/backends/ldap.py deleted file mode 100644 index b52dc46e..00000000 --- a/keystone-moon/keystone/assignment/backends/ldap.py +++ /dev/null @@ -1,545 +0,0 @@ -# Copyright 2012-2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. -from __future__ import absolute_import - -import ldap.filter -from oslo_config import cfg -from oslo_log import log -from oslo_log import versionutils - -from keystone import assignment -from keystone.assignment.role_backends import ldap as ldap_role -from keystone.common import ldap as common_ldap -from keystone.common import models -from keystone import exception -from keystone.i18n import _ -from keystone.identity.backends import ldap as ldap_identity - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -class Assignment(assignment.AssignmentDriverV8): - @versionutils.deprecated( - versionutils.deprecated.KILO, - remove_in=+2, - what='ldap assignment') - def __init__(self): - super(Assignment, self).__init__() - self.LDAP_URL = CONF.ldap.url - self.LDAP_USER = CONF.ldap.user - self.LDAP_PASSWORD = CONF.ldap.password - self.suffix = CONF.ldap.suffix - - # This is the only deep dependency from assignment back to identity. - # This is safe to do since if you are using LDAP for assignment, it is - # required that you are using it for identity as well. - self.user = ldap_identity.UserApi(CONF) - self.group = ldap_identity.GroupApi(CONF) - - self.project = ProjectApi(CONF) - self.role = RoleApi(CONF, self.user) - - def default_role_driver(self): - return 'ldap' - - def default_resource_driver(self): - return 'ldap' - - def list_role_ids_for_groups_on_project( - self, groups, project_id, project_domain_id, project_parents): - group_dns = [self.group._id_to_dn(group_id) for group_id in groups] - role_list = [self.role._dn_to_id(role_assignment.role_dn) - for role_assignment in self.role.get_role_assignments - (self.project._id_to_dn(project_id)) - if role_assignment.user_dn.upper() in group_dns] - # NOTE(morganfainberg): Does not support OS-INHERIT as domain - # metadata/roles are not supported by LDAP backend. Skip OS-INHERIT - # logic. 
- return role_list - - def _get_metadata(self, user_id=None, tenant_id=None, - domain_id=None, group_id=None): - - def _get_roles_for_just_user_and_project(user_id, tenant_id): - user_dn = self.user._id_to_dn(user_id) - return [self.role._dn_to_id(a.role_dn) - for a in self.role.get_role_assignments - (self.project._id_to_dn(tenant_id)) - if common_ldap.is_dn_equal(a.user_dn, user_dn)] - - def _get_roles_for_group_and_project(group_id, project_id): - group_dn = self.group._id_to_dn(group_id) - return [self.role._dn_to_id(a.role_dn) - for a in self.role.get_role_assignments - (self.project._id_to_dn(project_id)) - if common_ldap.is_dn_equal(a.user_dn, group_dn)] - - if domain_id is not None: - msg = _('Domain metadata not supported by LDAP') - raise exception.NotImplemented(message=msg) - if group_id is None and user_id is None: - return {} - - if tenant_id is None: - return {} - if user_id is None: - metadata_ref = _get_roles_for_group_and_project(group_id, - tenant_id) - else: - metadata_ref = _get_roles_for_just_user_and_project(user_id, - tenant_id) - if not metadata_ref: - return {} - return {'roles': [self._role_to_dict(r, False) for r in metadata_ref]} - - def list_project_ids_for_user(self, user_id, group_ids, hints, - inherited=False): - # TODO(henry-nash): The ldap driver does not support inherited - # assignments, so the inherited parameter is unused. - # See bug #1404273. 
- user_dn = self.user._id_to_dn(user_id) - associations = (self.role.list_project_roles_for_user - (user_dn, self.project.tree_dn)) - - for group_id in group_ids: - group_dn = self.group._id_to_dn(group_id) - for group_role in self.role.list_project_roles_for_group( - group_dn, self.project.tree_dn): - associations.append(group_role) - - return list(set( - [self.project._dn_to_id(x.project_dn) for x in associations])) - - def list_role_ids_for_groups_on_domain(self, group_ids, domain_id): - raise exception.NotImplemented() - - def list_project_ids_for_groups(self, group_ids, hints, - inherited=False): - raise exception.NotImplemented() - - def list_domain_ids_for_user(self, user_id, group_ids, hints): - raise exception.NotImplemented() - - def list_domain_ids_for_groups(self, group_ids, inherited=False): - raise exception.NotImplemented() - - def list_user_ids_for_project(self, tenant_id): - tenant_dn = self.project._id_to_dn(tenant_id) - rolegrants = self.role.get_role_assignments(tenant_dn) - return [self.user._dn_to_id(user_dn) for user_dn in - self.project.get_user_dns(tenant_id, rolegrants)] - - def _subrole_id_to_dn(self, role_id, tenant_id): - if tenant_id is None: - return self.role._id_to_dn(role_id) - else: - return '%s=%s,%s' % (self.role.id_attr, - ldap.dn.escape_dn_chars(role_id), - self.project._id_to_dn(tenant_id)) - - def add_role_to_user_and_project(self, user_id, tenant_id, role_id): - user_dn = self.user._id_to_dn(user_id) - role_dn = self._subrole_id_to_dn(role_id, tenant_id) - self.role.add_user(role_id, role_dn, user_dn, user_id, tenant_id) - tenant_dn = self.project._id_to_dn(tenant_id) - return UserRoleAssociation(role_dn=role_dn, - user_dn=user_dn, - tenant_dn=tenant_dn) - - def _add_role_to_group_and_project(self, group_id, tenant_id, role_id): - group_dn = self.group._id_to_dn(group_id) - role_dn = self._subrole_id_to_dn(role_id, tenant_id) - self.role.add_user(role_id, role_dn, group_dn, group_id, tenant_id) - tenant_dn = 
self.project._id_to_dn(tenant_id) - return GroupRoleAssociation(group_dn=group_dn, - role_dn=role_dn, - tenant_dn=tenant_dn) - - def remove_role_from_user_and_project(self, user_id, tenant_id, role_id): - role_dn = self._subrole_id_to_dn(role_id, tenant_id) - return self.role.delete_user(role_dn, - self.user._id_to_dn(user_id), role_id) - - def _remove_role_from_group_and_project(self, group_id, tenant_id, - role_id): - role_dn = self._subrole_id_to_dn(role_id, tenant_id) - return self.role.delete_user(role_dn, - self.group._id_to_dn(group_id), role_id) - -# Bulk actions on User From identity - def delete_user_assignments(self, user_id): - user_dn = self.user._id_to_dn(user_id) - for ref in self.role.list_global_roles_for_user(user_dn): - self.role.delete_user(ref.role_dn, ref.user_dn, - self.role._dn_to_id(ref.role_dn)) - for ref in self.role.list_project_roles_for_user(user_dn, - self.project.tree_dn): - self.role.delete_user(ref.role_dn, ref.user_dn, - self.role._dn_to_id(ref.role_dn)) - - def delete_group_assignments(self, group_id): - """Called when the group was deleted. - - Any role assignments for the group should be cleaned up. 
- - """ - group_dn = self.group._id_to_dn(group_id) - group_role_assignments = self.role.list_project_roles_for_group( - group_dn, self.project.tree_dn) - for ref in group_role_assignments: - self.role.delete_user(ref.role_dn, ref.group_dn, - self.role._dn_to_id(ref.role_dn)) - - def create_grant(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): - - try: - metadata_ref = self._get_metadata(user_id, project_id, - domain_id, group_id) - except exception.MetadataNotFound: - metadata_ref = {} - - if user_id is None: - metadata_ref['roles'] = self._add_role_to_group_and_project( - group_id, project_id, role_id) - else: - metadata_ref['roles'] = self.add_role_to_user_and_project( - user_id, project_id, role_id) - - def check_grant_role_id(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): - - try: - metadata_ref = self._get_metadata(user_id, project_id, - domain_id, group_id) - except exception.MetadataNotFound: - metadata_ref = {} - role_ids = set(self._roles_from_role_dicts( - metadata_ref.get('roles', []), inherited_to_projects)) - if role_id not in role_ids: - actor_id = user_id or group_id - target_id = domain_id or project_id - raise exception.RoleAssignmentNotFound(role_id=role_id, - actor_id=actor_id, - target_id=target_id) - - def delete_grant(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): - - try: - metadata_ref = self._get_metadata(user_id, project_id, - domain_id, group_id) - except exception.MetadataNotFound: - metadata_ref = {} - - try: - if user_id is None: - metadata_ref['roles'] = ( - self._remove_role_from_group_and_project( - group_id, project_id, role_id)) - else: - metadata_ref['roles'] = self.remove_role_from_user_and_project( - user_id, project_id, role_id) - except (exception.RoleNotFound, KeyError): - actor_id = user_id or group_id - target_id = domain_id or 
project_id - raise exception.RoleAssignmentNotFound(role_id=role_id, - actor_id=actor_id, - target_id=target_id) - - def list_grant_role_ids(self, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): - - try: - metadata_ref = self._get_metadata(user_id, project_id, - domain_id, group_id) - except exception.MetadataNotFound: - metadata_ref = {} - - return self._roles_from_role_dicts(metadata_ref.get('roles', []), - inherited_to_projects) - - def list_role_assignments(self, role_id=None, - user_id=None, group_ids=None, - domain_id=None, project_ids=None, - inherited_to_projects=None): - role_assignments = [] - - # Since the LDAP backend does not support assignments to domains, if - # the request is to filter by domain, then the answer is guaranteed - # to be an empty list. - if not domain_id: - for a in self.role.list_role_assignments(self.project.tree_dn): - if isinstance(a, UserRoleAssociation): - assignment = { - 'role_id': self.role._dn_to_id(a.role_dn), - 'user_id': self.user._dn_to_id(a.user_dn), - 'project_id': self.project._dn_to_id(a.project_dn)} - else: - assignment = { - 'role_id': self.role._dn_to_id(a.role_dn), - 'group_id': self.group._dn_to_id(a.group_dn), - 'project_id': self.project._dn_to_id(a.project_dn)} - - if role_id and assignment['role_id'] != role_id: - continue - if user_id and assignment.get('user_id') != user_id: - continue - if group_ids and assignment.get('group_id') not in group_ids: - continue - if project_ids and assignment['project_id'] not in project_ids: - continue - - role_assignments.append(assignment) - - return role_assignments - - def delete_project_assignments(self, project_id): - tenant_dn = self.project._id_to_dn(project_id) - self.role.roles_delete_subtree_by_project(tenant_dn) - - def delete_role_assignments(self, role_id): - self.role.roles_delete_subtree_by_role(role_id, self.project.tree_dn) - - -# TODO(termie): turn this into a data object and move logic to driver -class 
ProjectApi(common_ldap.ProjectLdapStructureMixin, - common_ldap.EnabledEmuMixIn, common_ldap.BaseLdap): - - model = models.Project - - def __init__(self, conf): - super(ProjectApi, self).__init__(conf) - self.member_attribute = (conf.ldap.project_member_attribute - or self.DEFAULT_MEMBER_ATTRIBUTE) - - def get_user_projects(self, user_dn, associations): - """Returns the list of tenants to which a user has access.""" - project_ids = set() - for assoc in associations: - project_ids.add(self._dn_to_id(assoc.project_dn)) - projects = [] - for project_id in project_ids: - # slower to get them one at a time, but a huge list could blow out - # the connection. This is the safer way - projects.append(self.get(project_id)) - return projects - - def get_user_dns(self, tenant_id, rolegrants, role_dn=None): - tenant = self._ldap_get(tenant_id) - res = set() - if not role_dn: - # Get users who have default tenant mapping - for user_dn in tenant[1].get(self.member_attribute, []): - if self._is_dumb_member(user_dn): - continue - res.add(user_dn) - - # Get users who are explicitly mapped via a tenant - for rolegrant in rolegrants: - if role_dn is None or rolegrant.role_dn == role_dn: - res.add(rolegrant.user_dn) - return list(res) - - -class UserRoleAssociation(object): - """Role Grant model.""" - - def __init__(self, user_dn=None, role_dn=None, tenant_dn=None, - *args, **kw): - self.user_dn = user_dn - self.role_dn = role_dn - self.project_dn = tenant_dn - - -class GroupRoleAssociation(object): - """Role Grant model.""" - - def __init__(self, group_dn=None, role_dn=None, tenant_dn=None, - *args, **kw): - self.group_dn = group_dn - self.role_dn = role_dn - self.project_dn = tenant_dn - - -# TODO(termie): turn this into a data object and move logic to driver -# NOTE(heny-nash): The RoleLdapStructureMixin class enables the sharing of the -# LDAP structure between here and the role backend LDAP, no methods are shared. 
-class RoleApi(ldap_role.RoleLdapStructureMixin, common_ldap.BaseLdap): - - def __init__(self, conf, user_api): - super(RoleApi, self).__init__(conf) - self.member_attribute = (conf.ldap.role_member_attribute - or self.DEFAULT_MEMBER_ATTRIBUTE) - self._user_api = user_api - - def add_user(self, role_id, role_dn, user_dn, user_id, tenant_id=None): - try: - super(RoleApi, self).add_member(user_dn, role_dn) - except exception.Conflict: - msg = (_('User %(user_id)s already has role %(role_id)s in ' - 'tenant %(tenant_id)s') % - dict(user_id=user_id, role_id=role_id, tenant_id=tenant_id)) - raise exception.Conflict(type='role grant', details=msg) - except self.NotFound: - if tenant_id is None or self.get(role_id) is None: - raise Exception(_("Role %s not found") % (role_id,)) - - attrs = [('objectClass', [self.object_class]), - (self.member_attribute, [user_dn]), - (self.id_attr, [role_id])] - - if self.use_dumb_member: - attrs[1][1].append(self.dumb_member) - with self.get_connection() as conn: - conn.add_s(role_dn, attrs) - - def delete_user(self, role_dn, user_dn, role_id): - try: - super(RoleApi, self).remove_member(user_dn, role_dn) - except (self.NotFound, ldap.NO_SUCH_ATTRIBUTE): - raise exception.RoleNotFound(message=_( - 'Cannot remove role that has not been granted, %s') % - role_id) - - def get_role_assignments(self, tenant_dn): - try: - roles = self._ldap_get_list(tenant_dn, ldap.SCOPE_ONELEVEL, - attrlist=[self.member_attribute]) - except ldap.NO_SUCH_OBJECT: - roles = [] - res = [] - for role_dn, attrs in roles: - try: - user_dns = attrs[self.member_attribute] - except KeyError: - continue - for user_dn in user_dns: - if self._is_dumb_member(user_dn): - continue - res.append(UserRoleAssociation( - user_dn=user_dn, - role_dn=role_dn, - tenant_dn=tenant_dn)) - - return res - - def list_global_roles_for_user(self, user_dn): - user_dn_esc = ldap.filter.escape_filter_chars(user_dn) - roles = self.get_all('(%s=%s)' % (self.member_attribute, user_dn_esc)) - 
return [UserRoleAssociation( - role_dn=role.dn, - user_dn=user_dn) for role in roles] - - def list_project_roles_for_user(self, user_dn, project_subtree): - try: - roles = self._ldap_get_list(project_subtree, ldap.SCOPE_SUBTREE, - query_params={ - self.member_attribute: user_dn}, - attrlist=common_ldap.DN_ONLY) - except ldap.NO_SUCH_OBJECT: - roles = [] - res = [] - for role_dn, _role_attrs in roles: - # ldap.dn.dn2str returns an array, where the first - # element is the first segment. - # For a role assignment, this contains the role ID, - # The remainder is the DN of the tenant. - # role_dn is already utf8 encoded since it came from LDAP. - tenant = ldap.dn.str2dn(role_dn) - tenant.pop(0) - tenant_dn = ldap.dn.dn2str(tenant) - res.append(UserRoleAssociation( - user_dn=user_dn, - role_dn=role_dn, - tenant_dn=tenant_dn)) - return res - - def list_project_roles_for_group(self, group_dn, project_subtree): - group_dn_esc = ldap.filter.escape_filter_chars(group_dn) - query = '(&(objectClass=%s)(%s=%s))' % (self.object_class, - self.member_attribute, - group_dn_esc) - with self.get_connection() as conn: - try: - roles = conn.search_s(project_subtree, - ldap.SCOPE_SUBTREE, - query, - attrlist=common_ldap.DN_ONLY) - except ldap.NO_SUCH_OBJECT: - # Return no roles rather than raise an exception if the project - # subtree entry doesn't exist because an empty subtree is not - # an error. - return [] - - res = [] - for role_dn, _role_attrs in roles: - # ldap.dn.str2dn returns a list, where the first - # element is the first RDN. - # For a role assignment, this contains the role ID, - # the remainder is the DN of the project. - # role_dn is already utf8 encoded since it came from LDAP. 
- project = ldap.dn.str2dn(role_dn) - project.pop(0) - project_dn = ldap.dn.dn2str(project) - res.append(GroupRoleAssociation( - group_dn=group_dn, - role_dn=role_dn, - tenant_dn=project_dn)) - return res - - def roles_delete_subtree_by_project(self, tenant_dn): - self._delete_tree_nodes(tenant_dn, ldap.SCOPE_ONELEVEL) - - def roles_delete_subtree_by_role(self, role_id, tree_dn): - self._delete_tree_nodes(tree_dn, ldap.SCOPE_SUBTREE, query_params={ - self.id_attr: role_id}) - - def list_role_assignments(self, project_tree_dn): - """List the role assignments linked to project_tree_dn attribute.""" - try: - roles = self._ldap_get_list(project_tree_dn, ldap.SCOPE_SUBTREE, - attrlist=[self.member_attribute]) - except ldap.NO_SUCH_OBJECT: - roles = [] - res = [] - for role_dn, role in roles: - # role_dn is already utf8 encoded since it came from LDAP. - tenant = ldap.dn.str2dn(role_dn) - tenant.pop(0) - # It obtains the tenant DN to construct the UserRoleAssociation - # object. - tenant_dn = ldap.dn.dn2str(tenant) - for occupant_dn in role[self.member_attribute]: - if self._is_dumb_member(occupant_dn): - continue - if self._user_api.is_user(occupant_dn): - association = UserRoleAssociation( - user_dn=occupant_dn, - role_dn=role_dn, - tenant_dn=tenant_dn) - else: - # occupant_dn is a group. - association = GroupRoleAssociation( - group_dn=occupant_dn, - role_dn=role_dn, - tenant_dn=tenant_dn) - res.append(association) - return res diff --git a/keystone-moon/keystone/assignment/backends/sql.py b/keystone-moon/keystone/assignment/backends/sql.py deleted file mode 100644 index e089726a..00000000 --- a/keystone-moon/keystone/assignment/backends/sql.py +++ /dev/null @@ -1,319 +0,0 @@ -# Copyright 2012-13 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone import assignment as keystone_assignment -from keystone.common import sql -from keystone import exception -from keystone.i18n import _ - - -class AssignmentType(object): - USER_PROJECT = 'UserProject' - GROUP_PROJECT = 'GroupProject' - USER_DOMAIN = 'UserDomain' - GROUP_DOMAIN = 'GroupDomain' - - @classmethod - def calculate_type(cls, user_id, group_id, project_id, domain_id): - if user_id: - if project_id: - return cls.USER_PROJECT - if domain_id: - return cls.USER_DOMAIN - if group_id: - if project_id: - return cls.GROUP_PROJECT - if domain_id: - return cls.GROUP_DOMAIN - # Invalid parameters combination - raise exception.AssignmentTypeCalculationError(**locals()) - - -class Assignment(keystone_assignment.AssignmentDriverV9): - - def default_role_driver(self): - return 'sql' - - def default_resource_driver(self): - return 'sql' - - def create_grant(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): - - assignment_type = AssignmentType.calculate_type( - user_id, group_id, project_id, domain_id) - try: - with sql.session_for_write() as session: - session.add(RoleAssignment( - type=assignment_type, - actor_id=user_id or group_id, - target_id=project_id or domain_id, - role_id=role_id, - inherited=inherited_to_projects)) - except sql.DBDuplicateEntry: # nosec : The v3 grant APIs are silent if - # the assignment already exists - pass - - def list_grant_role_ids(self, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): - with 
sql.session_for_read() as session: - q = session.query(RoleAssignment.role_id) - q = q.filter(RoleAssignment.actor_id == (user_id or group_id)) - q = q.filter(RoleAssignment.target_id == (project_id or domain_id)) - q = q.filter(RoleAssignment.inherited == inherited_to_projects) - return [x.role_id for x in q.all()] - - def _build_grant_filter(self, session, role_id, user_id, group_id, - domain_id, project_id, inherited_to_projects): - q = session.query(RoleAssignment) - q = q.filter_by(actor_id=user_id or group_id) - q = q.filter_by(target_id=project_id or domain_id) - q = q.filter_by(role_id=role_id) - q = q.filter_by(inherited=inherited_to_projects) - return q - - def check_grant_role_id(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): - with sql.session_for_read() as session: - try: - q = self._build_grant_filter( - session, role_id, user_id, group_id, domain_id, project_id, - inherited_to_projects) - q.one() - except sql.NotFound: - actor_id = user_id or group_id - target_id = domain_id or project_id - raise exception.RoleAssignmentNotFound(role_id=role_id, - actor_id=actor_id, - target_id=target_id) - - def delete_grant(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): - with sql.session_for_write() as session: - q = self._build_grant_filter( - session, role_id, user_id, group_id, domain_id, project_id, - inherited_to_projects) - if not q.delete(False): - actor_id = user_id or group_id - target_id = domain_id or project_id - raise exception.RoleAssignmentNotFound(role_id=role_id, - actor_id=actor_id, - target_id=target_id) - - def add_role_to_user_and_project(self, user_id, tenant_id, role_id): - try: - with sql.session_for_write() as session: - session.add(RoleAssignment( - type=AssignmentType.USER_PROJECT, - actor_id=user_id, target_id=tenant_id, - role_id=role_id, inherited=False)) - except sql.DBDuplicateEntry: - msg = ('User %s 
already has role %s in tenant %s' - % (user_id, role_id, tenant_id)) - raise exception.Conflict(type='role grant', details=msg) - - def remove_role_from_user_and_project(self, user_id, tenant_id, role_id): - with sql.session_for_write() as session: - q = session.query(RoleAssignment) - q = q.filter_by(actor_id=user_id) - q = q.filter_by(target_id=tenant_id) - q = q.filter_by(role_id=role_id) - if q.delete() == 0: - raise exception.RoleNotFound(message=_( - 'Cannot remove role that has not been granted, %s') % - role_id) - - def _get_user_assignment_types(self): - return [AssignmentType.USER_PROJECT, AssignmentType.USER_DOMAIN] - - def _get_group_assignment_types(self): - return [AssignmentType.GROUP_PROJECT, AssignmentType.GROUP_DOMAIN] - - def _get_project_assignment_types(self): - return [AssignmentType.USER_PROJECT, AssignmentType.GROUP_PROJECT] - - def _get_domain_assignment_types(self): - return [AssignmentType.USER_DOMAIN, AssignmentType.GROUP_DOMAIN] - - def _get_assignment_types(self, user, group, project, domain): - """Returns a list of role assignment types based on provided entities - - If one of user or group (the "actor") as well as one of project or - domain (the "target") are provided, the list will contain the role - assignment type for that specific pair of actor and target. - - If only an actor or target is provided, the list will contain the - role assignment types that satisfy the specified entity. - - For example, if user and project are provided, the return will be: - - [AssignmentType.USER_PROJECT] - - However, if only user was provided, the return would be: - - [AssignmentType.USER_PROJECT, AssignmentType.USER_DOMAIN] - - It is not expected that user and group (or project and domain) are - specified - but if they are, the most fine-grained value will be - chosen (i.e. user over group, project over domain). 
- - """ - actor_types = [] - if user: - actor_types = self._get_user_assignment_types() - elif group: - actor_types = self._get_group_assignment_types() - - target_types = [] - if project: - target_types = self._get_project_assignment_types() - elif domain: - target_types = self._get_domain_assignment_types() - - if actor_types and target_types: - return list(set(actor_types).intersection(target_types)) - - return actor_types or target_types - - def list_role_assignments(self, role_id=None, - user_id=None, group_ids=None, - domain_id=None, project_ids=None, - inherited_to_projects=None): - - def denormalize_role(ref): - assignment = {} - if ref.type == AssignmentType.USER_PROJECT: - assignment['user_id'] = ref.actor_id - assignment['project_id'] = ref.target_id - elif ref.type == AssignmentType.USER_DOMAIN: - assignment['user_id'] = ref.actor_id - assignment['domain_id'] = ref.target_id - elif ref.type == AssignmentType.GROUP_PROJECT: - assignment['group_id'] = ref.actor_id - assignment['project_id'] = ref.target_id - elif ref.type == AssignmentType.GROUP_DOMAIN: - assignment['group_id'] = ref.actor_id - assignment['domain_id'] = ref.target_id - else: - raise exception.Error(message=_( - 'Unexpected assignment type encountered, %s') % - ref.type) - assignment['role_id'] = ref.role_id - if ref.inherited: - assignment['inherited_to_projects'] = 'projects' - return assignment - - with sql.session_for_read() as session: - assignment_types = self._get_assignment_types( - user_id, group_ids, project_ids, domain_id) - - targets = None - if project_ids: - targets = project_ids - elif domain_id: - targets = [domain_id] - - actors = None - if group_ids: - actors = group_ids - elif user_id: - actors = [user_id] - - query = session.query(RoleAssignment) - - if role_id: - query = query.filter_by(role_id=role_id) - if actors: - query = query.filter(RoleAssignment.actor_id.in_(actors)) - if targets: - query = query.filter(RoleAssignment.target_id.in_(targets)) - if 
assignment_types: - query = query.filter(RoleAssignment.type.in_(assignment_types)) - if inherited_to_projects is not None: - query = query.filter_by(inherited=inherited_to_projects) - - return [denormalize_role(ref) for ref in query.all()] - - def delete_project_assignments(self, project_id): - with sql.session_for_write() as session: - q = session.query(RoleAssignment) - q = q.filter_by(target_id=project_id).filter( - RoleAssignment.type.in_((AssignmentType.USER_PROJECT, - AssignmentType.GROUP_PROJECT)) - ) - q.delete(False) - - def delete_role_assignments(self, role_id): - with sql.session_for_write() as session: - q = session.query(RoleAssignment) - q = q.filter_by(role_id=role_id) - q.delete(False) - - def delete_domain_assignments(self, domain_id): - with sql.session_for_write() as session: - q = session.query(RoleAssignment) - q = q.filter(RoleAssignment.target_id == domain_id).filter( - (RoleAssignment.type == AssignmentType.USER_DOMAIN) | - (RoleAssignment.type == AssignmentType.GROUP_DOMAIN)) - q.delete(False) - - def delete_user_assignments(self, user_id): - with sql.session_for_write() as session: - q = session.query(RoleAssignment) - q = q.filter_by(actor_id=user_id).filter( - RoleAssignment.type.in_((AssignmentType.USER_PROJECT, - AssignmentType.USER_DOMAIN)) - ) - q.delete(False) - - def delete_group_assignments(self, group_id): - with sql.session_for_write() as session: - q = session.query(RoleAssignment) - q = q.filter_by(actor_id=group_id).filter( - RoleAssignment.type.in_((AssignmentType.GROUP_PROJECT, - AssignmentType.GROUP_DOMAIN)) - ) - q.delete(False) - - -class RoleAssignment(sql.ModelBase, sql.DictBase): - __tablename__ = 'assignment' - attributes = ['type', 'actor_id', 'target_id', 'role_id', 'inherited'] - # NOTE(henry-nash): Postgres requires a name to be defined for an Enum - type = sql.Column( - sql.Enum(AssignmentType.USER_PROJECT, AssignmentType.GROUP_PROJECT, - AssignmentType.USER_DOMAIN, AssignmentType.GROUP_DOMAIN, - name='type'), 
- nullable=False) - actor_id = sql.Column(sql.String(64), nullable=False) - target_id = sql.Column(sql.String(64), nullable=False) - role_id = sql.Column(sql.String(64), nullable=False) - inherited = sql.Column(sql.Boolean, default=False, nullable=False) - __table_args__ = ( - sql.PrimaryKeyConstraint('type', 'actor_id', 'target_id', 'role_id', - 'inherited'), - sql.Index('ix_actor_id', 'actor_id'), - ) - - def to_dict(self): - """Override parent method with a simpler implementation. - - RoleAssignment doesn't have non-indexed 'extra' attributes, so the - parent implementation is not applicable. - """ - return dict(self.items()) diff --git a/keystone-moon/keystone/assignment/controllers.py b/keystone-moon/keystone/assignment/controllers.py deleted file mode 100644 index 1b163013..00000000 --- a/keystone-moon/keystone/assignment/controllers.py +++ /dev/null @@ -1,972 +0,0 @@ -# Copyright 2013 Metacloud, Inc. -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Workflow Logic the Assignment service.""" - -import functools -import uuid - -from oslo_config import cfg -from oslo_log import log -from six.moves import urllib - -from keystone.assignment import schema -from keystone.common import controller -from keystone.common import dependency -from keystone.common import utils -from keystone.common import validation -from keystone.common import wsgi -from keystone import exception -from keystone.i18n import _ -from keystone import notifications - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -@dependency.requires('assignment_api', 'identity_api', 'token_provider_api') -class TenantAssignment(controller.V2Controller): - """The V2 Project APIs that are processing assignments.""" - - @controller.v2_auth_deprecated - def get_projects_for_token(self, context, **kw): - """Get valid tenants for token based on token used to authenticate. - - Pulls the token from the context, validates it and gets the valid - tenants for the user in the token. - - Doesn't care about token scopedness. - - """ - token_ref = utils.get_token_ref(context) - - tenant_refs = ( - self.assignment_api.list_projects_for_user(token_ref.user_id)) - tenant_refs = [self.v3_to_v2_project(ref) for ref in tenant_refs - if ref['domain_id'] == CONF.identity.default_domain_id] - params = { - 'limit': context['query_string'].get('limit'), - 'marker': context['query_string'].get('marker'), - } - return self.format_project_list(tenant_refs, **params) - - @controller.v2_deprecated - def get_project_users(self, context, tenant_id, **kw): - self.assert_admin(context) - user_refs = [] - user_ids = self.assignment_api.list_user_ids_for_project(tenant_id) - for user_id in user_ids: - try: - user_ref = self.identity_api.get_user(user_id) - except exception.UserNotFound: - # Log that user is missing and continue on. 
- message = ("User %(user_id)s in project %(project_id)s " - "doesn't exist.") - LOG.debug(message, - {'user_id': user_id, 'project_id': tenant_id}) - else: - user_refs.append(self.v3_to_v2_user(user_ref)) - return {'users': user_refs} - - -@dependency.requires('assignment_api', 'role_api') -class Role(controller.V2Controller): - """The Role management APIs.""" - - @controller.v2_deprecated - def get_role(self, context, role_id): - self.assert_admin(context) - return {'role': self.role_api.get_role(role_id)} - - @controller.v2_deprecated - def create_role(self, context, role): - role = self._normalize_dict(role) - self.assert_admin(context) - - if 'name' not in role or not role['name']: - msg = _('Name field is required and cannot be empty') - raise exception.ValidationError(message=msg) - - if role['name'] == CONF.member_role_name: - # Use the configured member role ID when creating the configured - # member role name. This avoids the potential of creating a - # "member" role with an unexpected ID. - role_id = CONF.member_role_id - else: - role_id = uuid.uuid4().hex - - role['id'] = role_id - initiator = notifications._get_request_audit_info(context) - role_ref = self.role_api.create_role(role_id, role, initiator) - return {'role': role_ref} - - @controller.v2_deprecated - def delete_role(self, context, role_id): - self.assert_admin(context) - initiator = notifications._get_request_audit_info(context) - self.role_api.delete_role(role_id, initiator) - - @controller.v2_deprecated - def get_roles(self, context): - self.assert_admin(context) - return {'roles': self.role_api.list_roles()} - - -@dependency.requires('assignment_api', 'resource_api', 'role_api') -class RoleAssignmentV2(controller.V2Controller): - """The V2 Role APIs that are processing assignments.""" - - # COMPAT(essex-3) - @controller.v2_deprecated - def get_user_roles(self, context, user_id, tenant_id=None): - """Get the roles for a user and tenant pair. 
- - Since we're trying to ignore the idea of user-only roles we're - not implementing them in hopes that the idea will die off. - - """ - self.assert_admin(context) - # NOTE(davechen): Router without project id is defined, - # but we don't plan on implementing this. - if tenant_id is None: - raise exception.NotImplemented( - message=_('User roles not supported: tenant_id required')) - roles = self.assignment_api.get_roles_for_user_and_project( - user_id, tenant_id) - return {'roles': [self.role_api.get_role(x) - for x in roles]} - - @controller.v2_deprecated - def add_role_to_user(self, context, user_id, role_id, tenant_id=None): - """Add a role to a user and tenant pair. - - Since we're trying to ignore the idea of user-only roles we're - not implementing them in hopes that the idea will die off. - - """ - self.assert_admin(context) - if tenant_id is None: - raise exception.NotImplemented( - message=_('User roles not supported: tenant_id required')) - - self.assignment_api.add_role_to_user_and_project( - user_id, tenant_id, role_id) - - role_ref = self.role_api.get_role(role_id) - return {'role': role_ref} - - @controller.v2_deprecated - def remove_role_from_user(self, context, user_id, role_id, tenant_id=None): - """Remove a role from a user and tenant pair. - - Since we're trying to ignore the idea of user-only roles we're - not implementing them in hopes that the idea will die off. - - """ - self.assert_admin(context) - if tenant_id is None: - raise exception.NotImplemented( - message=_('User roles not supported: tenant_id required')) - - # This still has the weird legacy semantics that adding a role to - # a user also adds them to a tenant, so we must follow up on that - self.assignment_api.remove_role_from_user_and_project( - user_id, tenant_id, role_id) - - # COMPAT(diablo): CRUD extension - @controller.v2_deprecated - def get_role_refs(self, context, user_id): - """Ultimate hack to get around having to make role_refs first-class. 
- - This will basically iterate over the various roles the user has in - all tenants the user is a member of and create fake role_refs where - the id encodes the user-tenant-role information so we can look - up the appropriate data when we need to delete them. - - """ - self.assert_admin(context) - tenants = self.assignment_api.list_projects_for_user(user_id) - o = [] - for tenant in tenants: - # As a v2 call, we should limit the response to those projects in - # the default domain. - if tenant['domain_id'] != CONF.identity.default_domain_id: - continue - role_ids = self.assignment_api.get_roles_for_user_and_project( - user_id, tenant['id']) - for role_id in role_ids: - ref = {'roleId': role_id, - 'tenantId': tenant['id'], - 'userId': user_id} - ref['id'] = urllib.parse.urlencode(ref) - o.append(ref) - return {'roles': o} - - # COMPAT(diablo): CRUD extension - @controller.v2_deprecated - def create_role_ref(self, context, user_id, role): - """This is actually used for adding a user to a tenant. - - In the legacy data model adding a user to a tenant required setting - a role. - - """ - self.assert_admin(context) - # TODO(termie): for now we're ignoring the actual role - tenant_id = role.get('tenantId') - role_id = role.get('roleId') - self.assignment_api.add_role_to_user_and_project( - user_id, tenant_id, role_id) - - role_ref = self.role_api.get_role(role_id) - return {'role': role_ref} - - # COMPAT(diablo): CRUD extension - @controller.v2_deprecated - def delete_role_ref(self, context, user_id, role_ref_id): - """This is actually used for deleting a user from a tenant. - - In the legacy data model removing a user from a tenant required - deleting a role. - - To emulate this, we encode the tenant and role in the role_ref_id, - and if this happens to be the last role for the user-tenant pair, - we remove the user from the tenant. 
- - """ - self.assert_admin(context) - # TODO(termie): for now we're ignoring the actual role - role_ref_ref = urllib.parse.parse_qs(role_ref_id) - tenant_id = role_ref_ref.get('tenantId')[0] - role_id = role_ref_ref.get('roleId')[0] - self.assignment_api.remove_role_from_user_and_project( - user_id, tenant_id, role_id) - - -@dependency.requires('assignment_api', 'resource_api') -class ProjectAssignmentV3(controller.V3Controller): - """The V3 Project APIs that are processing assignments.""" - - collection_name = 'projects' - member_name = 'project' - - def __init__(self): - super(ProjectAssignmentV3, self).__init__() - self.get_member_from_driver = self.resource_api.get_project - - @controller.filterprotected('domain_id', 'enabled', 'name') - def list_user_projects(self, context, filters, user_id): - hints = ProjectAssignmentV3.build_driver_hints(context, filters) - refs = self.assignment_api.list_projects_for_user(user_id, - hints=hints) - return ProjectAssignmentV3.wrap_collection(context, refs, hints=hints) - - -@dependency.requires('role_api') -class RoleV3(controller.V3Controller): - """The V3 Role CRUD APIs. - - To ease complexity (and hence risk) in writing the policy rules for the - role APIs, we create separate policy actions for roles that are domain - specific, as opposed to those that are global. In order to achieve this - each of the role API methods has a wrapper method that checks to see if the - role is global or domain specific. - - NOTE (henry-nash): If this separate global vs scoped policy action pattern - becomes repeated for other entities, we should consider encapsulating this - into a specialized router class. 
- - """ - - collection_name = 'roles' - member_name = 'role' - - def __init__(self): - super(RoleV3, self).__init__() - self.get_member_from_driver = self.role_api.get_role - - def _is_domain_role(self, role): - return role.get('domain_id') is not None - - def _is_domain_role_target(self, role_id): - try: - role = self.role_api.get_role(role_id) - except exception.RoleNotFound: - # We hide this error since we have not yet carried out a policy - # check - and it maybe that the caller isn't authorized to make - # this call. If so, we want that error to be raised instead. - return False - return self._is_domain_role(role) - - def create_role_wrapper(self, context, role): - if self._is_domain_role(role): - return self.create_domain_role(context, role=role) - else: - return self.create_role(context, role=role) - - @controller.protected() - @validation.validated(schema.role_create, 'role') - def create_role(self, context, role): - return self._create_role(context, role) - - @controller.protected() - @validation.validated(schema.role_create, 'role') - def create_domain_role(self, context, role): - return self._create_role(context, role) - - def list_roles_wrapper(self, context): - # If there is no domain_id filter defined, then we only want to return - # global roles, so we set the domain_id filter to None. 
- params = context['query_string'] - if 'domain_id' not in params: - context['query_string']['domain_id'] = None - - if context['query_string']['domain_id'] is not None: - return self.list_domain_roles(context) - else: - return self.list_roles(context) - - @controller.filterprotected('name', 'domain_id') - def list_roles(self, context, filters): - return self._list_roles(context, filters) - - @controller.filterprotected('name', 'domain_id') - def list_domain_roles(self, context, filters): - return self._list_roles(context, filters) - - def get_role_wrapper(self, context, role_id): - if self._is_domain_role_target(role_id): - return self.get_domain_role(context, role_id=role_id) - else: - return self.get_role(context, role_id=role_id) - - @controller.protected() - def get_role(self, context, role_id): - return self._get_role(context, role_id) - - @controller.protected() - def get_domain_role(self, context, role_id): - return self._get_role(context, role_id) - - def update_role_wrapper(self, context, role_id, role): - # Since we don't allow you change whether a role is global or domain - # specific, we can ignore the new update attributes and just look at - # the existing role. 
- if self._is_domain_role_target(role_id): - return self.update_domain_role( - context, role_id=role_id, role=role) - else: - return self.update_role(context, role_id=role_id, role=role) - - @controller.protected() - @validation.validated(schema.role_update, 'role') - def update_role(self, context, role_id, role): - return self._update_role(context, role_id, role) - - @controller.protected() - @validation.validated(schema.role_update, 'role') - def update_domain_role(self, context, role_id, role): - return self._update_role(context, role_id, role) - - def delete_role_wrapper(self, context, role_id): - if self._is_domain_role_target(role_id): - return self.delete_domain_role(context, role_id=role_id) - else: - return self.delete_role(context, role_id=role_id) - - @controller.protected() - def delete_role(self, context, role_id): - return self._delete_role(context, role_id) - - @controller.protected() - def delete_domain_role(self, context, role_id): - return self._delete_role(context, role_id) - - def _create_role(self, context, role): - if role['name'] == CONF.member_role_name: - # Use the configured member role ID when creating the configured - # member role name. This avoids the potential of creating a - # "member" role with an unexpected ID. 
- role['id'] = CONF.member_role_id - else: - role = self._assign_unique_id(role) - - ref = self._normalize_dict(role) - - initiator = notifications._get_request_audit_info(context) - ref = self.role_api.create_role(ref['id'], ref, initiator) - return RoleV3.wrap_member(context, ref) - - def _list_roles(self, context, filters): - hints = RoleV3.build_driver_hints(context, filters) - refs = self.role_api.list_roles( - hints=hints) - return RoleV3.wrap_collection(context, refs, hints=hints) - - def _get_role(self, context, role_id): - ref = self.role_api.get_role(role_id) - return RoleV3.wrap_member(context, ref) - - def _update_role(self, context, role_id, role): - self._require_matching_id(role_id, role) - initiator = notifications._get_request_audit_info(context) - ref = self.role_api.update_role(role_id, role, initiator) - return RoleV3.wrap_member(context, ref) - - def _delete_role(self, context, role_id): - initiator = notifications._get_request_audit_info(context) - self.role_api.delete_role(role_id, initiator) - - -@dependency.requires('role_api') -class ImpliedRolesV3(controller.V3Controller): - """The V3 ImpliedRoles CRD APIs. 
There is no Update.""" - - def _prior_role_stanza(self, endpoint, prior_role_id, prior_role_name): - return { - "id": prior_role_id, - "links": { - "self": endpoint + "/v3/roles/" + prior_role_id - }, - "name": prior_role_name - } - - def _implied_role_stanza(self, endpoint, implied_role): - implied_id = implied_role['id'] - implied_response = { - "id": implied_id, - "links": { - "self": endpoint + "/v3/roles/" + implied_id - }, - "name": implied_role['name'] - } - return implied_response - - def _populate_prior_role_response(self, endpoint, prior_id): - prior_role = self.role_api.get_role(prior_id) - response = { - "role_inference": { - "prior_role": self._prior_role_stanza( - endpoint, prior_id, prior_role['name']) - } - } - return response - - def _populate_implied_roles_response(self, endpoint, - prior_id, implied_ids): - response = self._populate_prior_role_response(endpoint, prior_id) - response["role_inference"]['implies'] = [] - for implied_id in implied_ids: - implied_role = self.role_api.get_role(implied_id) - implied_response = self._implied_role_stanza( - endpoint, implied_role) - response["role_inference"]['implies'].append(implied_response) - return response - - def _populate_implied_role_response(self, endpoint, prior_id, implied_id): - response = self._populate_prior_role_response(endpoint, prior_id) - implied_role = self.role_api.get_role(implied_id) - stanza = self._implied_role_stanza(endpoint, implied_role) - response["role_inference"]['implies'] = stanza - return response - - @controller.protected() - def get_implied_role(self, context, prior_role_id, implied_role_id): - ref = self.role_api.get_implied_role(prior_role_id, implied_role_id) - - prior_id = ref['prior_role_id'] - implied_id = ref['implied_role_id'] - endpoint = super(controller.V3Controller, ImpliedRolesV3).base_url( - context, 'public') - response = self._populate_implied_role_response( - endpoint, prior_id, implied_id) - return response - - @controller.protected() - def 
check_implied_role(self, context, prior_role_id, implied_role_id): - self.role_api.get_implied_role(prior_role_id, implied_role_id) - - @controller.protected() - def create_implied_role(self, context, prior_role_id, implied_role_id): - self.role_api.create_implied_role(prior_role_id, implied_role_id) - return wsgi.render_response( - self.get_implied_role(context, prior_role_id, implied_role_id), - status=(201, 'Created')) - - @controller.protected() - def delete_implied_role(self, context, prior_role_id, implied_role_id): - self.role_api.delete_implied_role(prior_role_id, implied_role_id) - - @controller.protected() - def list_implied_roles(self, context, prior_role_id): - ref = self.role_api.list_implied_roles(prior_role_id) - implied_ids = [r['implied_role_id'] for r in ref] - endpoint = super(controller.V3Controller, ImpliedRolesV3).base_url( - context, 'public') - - results = self._populate_implied_roles_response( - endpoint, prior_role_id, implied_ids) - - return results - - @controller.protected() - def list_role_inference_rules(self, context): - refs = self.role_api.list_role_inference_rules() - role_dict = {role_ref['id']: role_ref - for role_ref in self.role_api.list_roles()} - - rules = dict() - endpoint = super(controller.V3Controller, ImpliedRolesV3).base_url( - context, 'public') - - for ref in refs: - implied_role_id = ref['implied_role_id'] - prior_role_id = ref['prior_role_id'] - implied = rules.get(prior_role_id, []) - implied.append(self._implied_role_stanza( - endpoint, role_dict[implied_role_id])) - rules[prior_role_id] = implied - - inferences = [] - for prior_id, implied in rules.items(): - prior_response = self._prior_role_stanza( - endpoint, prior_id, role_dict[prior_id]['name']) - inferences.append({'prior_role': prior_response, - 'implies': implied}) - results = {'role_inferences': inferences} - return results - - -@dependency.requires('assignment_api', 'identity_api', 'resource_api', - 'role_api') -class 
GrantAssignmentV3(controller.V3Controller): - """The V3 Grant Assignment APIs.""" - - collection_name = 'roles' - member_name = 'role' - - def __init__(self): - super(GrantAssignmentV3, self).__init__() - self.get_member_from_driver = self.role_api.get_role - - def _require_domain_xor_project(self, domain_id, project_id): - if domain_id and project_id: - msg = _('Specify a domain or project, not both') - raise exception.ValidationError(msg) - if not domain_id and not project_id: - msg = _('Specify one of domain or project') - raise exception.ValidationError(msg) - - def _require_user_xor_group(self, user_id, group_id): - if user_id and group_id: - msg = _('Specify a user or group, not both') - raise exception.ValidationError(msg) - if not user_id and not group_id: - msg = _('Specify one of user or group') - raise exception.ValidationError(msg) - - def _check_if_inherited(self, context): - return (CONF.os_inherit.enabled and - context['path'].startswith('/OS-INHERIT') and - context['path'].endswith('/inherited_to_projects')) - - def _check_grant_protection(self, context, protection, role_id=None, - user_id=None, group_id=None, - domain_id=None, project_id=None, - allow_no_user=False): - """Check protection for role grant APIs. - - The policy rule might want to inspect attributes of any of the entities - involved in the grant. So we get these and pass them to the - check_protection() handler in the controller. 
- - """ - ref = {} - if role_id: - ref['role'] = self.role_api.get_role(role_id) - if user_id: - try: - ref['user'] = self.identity_api.get_user(user_id) - except exception.UserNotFound: - if not allow_no_user: - raise - else: - ref['group'] = self.identity_api.get_group(group_id) - - if domain_id: - ref['domain'] = self.resource_api.get_domain(domain_id) - else: - ref['project'] = self.resource_api.get_project(project_id) - - self.check_protection(context, protection, ref) - - @controller.protected(callback=_check_grant_protection) - def create_grant(self, context, role_id, user_id=None, - group_id=None, domain_id=None, project_id=None): - """Grants a role to a user or group on either a domain or project.""" - self._require_domain_xor_project(domain_id, project_id) - self._require_user_xor_group(user_id, group_id) - - self.assignment_api.create_grant( - role_id, user_id, group_id, domain_id, project_id, - self._check_if_inherited(context), context) - - @controller.protected(callback=_check_grant_protection) - def list_grants(self, context, user_id=None, - group_id=None, domain_id=None, project_id=None): - """Lists roles granted to user/group on either a domain or project.""" - self._require_domain_xor_project(domain_id, project_id) - self._require_user_xor_group(user_id, group_id) - - refs = self.assignment_api.list_grants( - user_id, group_id, domain_id, project_id, - self._check_if_inherited(context)) - return GrantAssignmentV3.wrap_collection(context, refs) - - @controller.protected(callback=_check_grant_protection) - def check_grant(self, context, role_id, user_id=None, - group_id=None, domain_id=None, project_id=None): - """Checks if a role has been granted on either a domain or project.""" - self._require_domain_xor_project(domain_id, project_id) - self._require_user_xor_group(user_id, group_id) - - self.assignment_api.get_grant( - role_id, user_id, group_id, domain_id, project_id, - self._check_if_inherited(context)) - - # NOTE(lbragstad): This will allow 
users to clean up role assignments - # from the backend in the event the user was removed prior to the role - # assignment being removed. - @controller.protected(callback=functools.partial( - _check_grant_protection, allow_no_user=True)) - def revoke_grant(self, context, role_id, user_id=None, - group_id=None, domain_id=None, project_id=None): - """Revokes a role from user/group on either a domain or project.""" - self._require_domain_xor_project(domain_id, project_id) - self._require_user_xor_group(user_id, group_id) - - self.assignment_api.delete_grant( - role_id, user_id, group_id, domain_id, project_id, - self._check_if_inherited(context), context) - - -@dependency.requires('assignment_api', 'identity_api', 'resource_api') -class RoleAssignmentV3(controller.V3Controller): - """The V3 Role Assignment APIs, really just list_role_assignment().""" - - # TODO(henry-nash): The current implementation does not provide a full - # first class entity for role-assignment. There is no role_assignment_id - # and only the list_role_assignment call is supported. Further, since it - # is not a first class entity, the links for the individual entities - # reference the individual role grant APIs. - - collection_name = 'role_assignments' - member_name = 'role_assignment' - - @classmethod - def wrap_member(cls, context, ref): - # NOTE(henry-nash): Since we are not yet a true collection, we override - # the wrapper as have already included the links in the entities - pass - - def _format_entity(self, context, entity): - """Format an assignment entity for API response. - - The driver layer returns entities as dicts containing the ids of the - actor (e.g. user or group), target (e.g. domain or project) and role. - If it is an inherited role, then this is also indicated. 
Examples: - - For a non-inherited expanded assignment from group membership: - {'user_id': user_id, - 'project_id': project_id, - 'role_id': role_id, - 'indirect': {'group_id': group_id}} - - or, for a project inherited role: - - {'user_id': user_id, - 'project_id': project_id, - 'role_id': role_id, - 'indirect': {'project_id': parent_id}} - - or, for a role that was implied by a prior role: - - {'user_id': user_id, - 'project_id': project_id, - 'role_id': role_id, - 'indirect': {'role_id': prior role_id}} - - It is possible to deduce if a role assignment came from group - membership if it has both 'user_id' in the main body of the dict and - 'group_id' in the 'indirect' subdict, as well as it is possible to - deduce if it has come from inheritance if it contains both a - 'project_id' in the main body of the dict and 'parent_id' in the - 'indirect' subdict. - - This function maps this into the format to be returned via the API, - e.g. for the second example above: - - { - 'user': { - {'id': user_id} - }, - 'scope': { - 'project': { - {'id': project_id} - }, - 'OS-INHERIT:inherited_to': 'projects' - }, - 'role': { - {'id': role_id} - }, - 'links': { - 'assignment': '/OS-INHERIT/projects/parent_id/users/user_id/' - 'roles/role_id/inherited_to_projects' - } - } - - """ - formatted_entity = {'links': {}} - inherited_assignment = entity.get('inherited_to_projects') - - if 'project_id' in entity: - if 'project_name' in entity: - formatted_entity['scope'] = {'project': { - 'id': entity['project_id'], - 'name': entity['project_name'], - 'domain': {'id': entity['project_domain_id'], - 'name': entity['project_domain_name']}}} - else: - formatted_entity['scope'] = { - 'project': {'id': entity['project_id']}} - - if 'domain_id' in entity.get('indirect', {}): - inherited_assignment = True - formatted_link = ('/domains/%s' % - entity['indirect']['domain_id']) - elif 'project_id' in entity.get('indirect', {}): - inherited_assignment = True - formatted_link = ('/projects/%s' % - 
entity['indirect']['project_id']) - else: - formatted_link = '/projects/%s' % entity['project_id'] - elif 'domain_id' in entity: - if 'domain_name' in entity: - formatted_entity['scope'] = { - 'domain': {'id': entity['domain_id'], - 'name': entity['domain_name']}} - else: - formatted_entity['scope'] = { - 'domain': {'id': entity['domain_id']}} - formatted_link = '/domains/%s' % entity['domain_id'] - - if 'user_id' in entity: - if 'user_name' in entity: - formatted_entity['user'] = { - 'id': entity['user_id'], - 'name': entity['user_name'], - 'domain': {'id': entity['user_domain_id'], - 'name': entity['user_domain_name']}} - else: - formatted_entity['user'] = {'id': entity['user_id']} - if 'group_id' in entity.get('indirect', {}): - membership_url = ( - self.base_url(context, '/groups/%s/users/%s' % ( - entity['indirect']['group_id'], entity['user_id']))) - formatted_entity['links']['membership'] = membership_url - formatted_link += '/groups/%s' % entity['indirect']['group_id'] - else: - formatted_link += '/users/%s' % entity['user_id'] - elif 'group_id' in entity: - if 'group_name' in entity: - formatted_entity['group'] = { - 'id': entity['group_id'], - 'name': entity['group_name'], - 'domain': {'id': entity['group_domain_id'], - 'name': entity['group_domain_name']}} - else: - formatted_entity['group'] = {'id': entity['group_id']} - formatted_link += '/groups/%s' % entity['group_id'] - - if 'role_name' in entity: - formatted_entity['role'] = {'id': entity['role_id'], - 'name': entity['role_name']} - else: - formatted_entity['role'] = {'id': entity['role_id']} - prior_role_link = '' - if 'role_id' in entity.get('indirect', {}): - formatted_link += '/roles/%s' % entity['indirect']['role_id'] - prior_role_link = ( - '/prior_role/%(prior)s/implies/%(implied)s' % { - 'prior': entity['role_id'], - 'implied': entity['indirect']['role_id'] - }) - else: - formatted_link += '/roles/%s' % entity['role_id'] - - if inherited_assignment: - 
formatted_entity['scope']['OS-INHERIT:inherited_to'] = ( - 'projects') - formatted_link = ('/OS-INHERIT%s/inherited_to_projects' % - formatted_link) - - formatted_entity['links']['assignment'] = self.base_url(context, - formatted_link) - if prior_role_link: - formatted_entity['links']['prior_role'] = ( - self.base_url(context, prior_role_link)) - - return formatted_entity - - def _assert_effective_filters(self, inherited, group, domain): - """Assert that useless filter combinations are avoided. - - In effective mode, the following filter combinations are useless, since - they would always return an empty list of role assignments: - - group id, since no group assignment is returned in effective mode; - - domain id and inherited, since no domain inherited assignment is - returned in effective mode. - - """ - if group: - msg = _('Combining effective and group filter will always ' - 'result in an empty list.') - raise exception.ValidationError(msg) - - if inherited and domain: - msg = _('Combining effective, domain and inherited filters will ' - 'always result in an empty list.') - raise exception.ValidationError(msg) - - def _assert_domain_nand_project(self, domain_id, project_id): - if domain_id and project_id: - msg = _('Specify a domain or project, not both') - raise exception.ValidationError(msg) - - def _assert_user_nand_group(self, user_id, group_id): - if user_id and group_id: - msg = _('Specify a user or group, not both') - raise exception.ValidationError(msg) - - def _list_role_assignments(self, context, filters, include_subtree=False): - """List role assignments to user and groups on domains and projects. - - Return a list of all existing role assignments in the system, filtered - by assignments attributes, if provided. 
- - If effective option is used and OS-INHERIT extension is enabled, the - following functions will be applied: - 1) For any group role assignment on a target, replace it by a set of - role assignments containing one for each user of that group on that - target; - 2) For any inherited role assignment for an actor on a target, replace - it by a set of role assignments for that actor on every project under - that target. - - It means that, if effective mode is used, no group or domain inherited - assignments will be present in the resultant list. Thus, combining - effective with them is invalid. - - As a role assignment contains only one actor and one target, providing - both user and group ids or domain and project ids is invalid as well. - - """ - params = context['query_string'] - effective = 'effective' in params and ( - self.query_filter_is_true(params['effective'])) - include_names = ('include_names' in params and - self.query_filter_is_true(params['include_names'])) - - if 'scope.OS-INHERIT:inherited_to' in params: - inherited = ( - params['scope.OS-INHERIT:inherited_to'] == 'projects') - else: - # None means querying both inherited and direct assignments - inherited = None - - self._assert_domain_nand_project(params.get('scope.domain.id'), - params.get('scope.project.id')) - self._assert_user_nand_group(params.get('user.id'), - params.get('group.id')) - - if effective: - self._assert_effective_filters(inherited=inherited, - group=params.get('group.id'), - domain=params.get( - 'scope.domain.id')) - - refs = self.assignment_api.list_role_assignments( - role_id=params.get('role.id'), - user_id=params.get('user.id'), - group_id=params.get('group.id'), - domain_id=params.get('scope.domain.id'), - project_id=params.get('scope.project.id'), - include_subtree=include_subtree, - inherited=inherited, effective=effective, - include_names=include_names) - - formatted_refs = [self._format_entity(context, ref) for ref in refs] - - return self.wrap_collection(context, 
formatted_refs) - - @controller.filterprotected('group.id', 'role.id', - 'scope.domain.id', 'scope.project.id', - 'scope.OS-INHERIT:inherited_to', 'user.id') - def list_role_assignments(self, context, filters): - return self._list_role_assignments(context, filters) - - def _check_list_tree_protection(self, context, protection_info): - """Check protection for list assignment for tree API. - - The policy rule might want to inspect the domain of any project filter - so if one is defined, then load the project ref and pass it to the - check protection method. - - """ - ref = {} - for filter, value in protection_info['filter_attr'].items(): - if filter == 'scope.project.id' and value: - ref['project'] = self.resource_api.get_project(value) - - self.check_protection(context, protection_info, ref) - - @controller.filterprotected('group.id', 'role.id', - 'scope.domain.id', 'scope.project.id', - 'scope.OS-INHERIT:inherited_to', 'user.id', - callback=_check_list_tree_protection) - def list_role_assignments_for_tree(self, context, filters): - if not context['query_string'].get('scope.project.id'): - msg = _('scope.project.id must be specified if include_subtree ' - 'is also specified') - raise exception.ValidationError(message=msg) - return self._list_role_assignments(context, filters, - include_subtree=True) - - def list_role_assignments_wrapper(self, context): - """Main entry point from router for list role assignments. - - Since we want different policy file rules to be applicable based on - whether there the include_subtree query parameter is part of the API - call, this method checks for this and then calls the appropriate - protected entry point. 
- - """ - params = context['query_string'] - if 'include_subtree' in params and ( - self.query_filter_is_true(params['include_subtree'])): - return self.list_role_assignments_for_tree(context) - else: - return self.list_role_assignments(context) diff --git a/keystone-moon/keystone/assignment/core.py b/keystone-moon/keystone/assignment/core.py deleted file mode 100644 index 05368fbf..00000000 --- a/keystone-moon/keystone/assignment/core.py +++ /dev/null @@ -1,1790 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Main entry point into the Assignment service.""" - -import abc -import copy - -from oslo_cache import core as oslo_cache -from oslo_config import cfg -from oslo_log import log -from oslo_log import versionutils -import six - -from keystone.common import cache -from keystone.common import dependency -from keystone.common import driver_hints -from keystone.common import manager -from keystone import exception -from keystone.i18n import _ -from keystone.i18n import _LI, _LE, _LW -from keystone import notifications - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - -# This is a general cache region for assignment administration (CRUD -# operations). -MEMOIZE = cache.get_memoization_decorator(group='role') - -# This builds a discrete cache region dedicated to role assignments computed -# for a given user + project/domain pair. 
Any write operation to add or remove -# any role assignment should invalidate this entire cache region. -COMPUTED_ASSIGNMENTS_REGION = oslo_cache.create_region() -MEMOIZE_COMPUTED_ASSIGNMENTS = cache.get_memoization_decorator( - group='role', - region=COMPUTED_ASSIGNMENTS_REGION) - - -@notifications.listener -@dependency.provider('assignment_api') -@dependency.requires('credential_api', 'identity_api', 'resource_api', - 'revoke_api', 'role_api') -class Manager(manager.Manager): - """Default pivot point for the Assignment backend. - - See :class:`keystone.common.manager.Manager` for more details on how this - dynamically calls the backend. - - """ - - driver_namespace = 'keystone.assignment' - - _PROJECT = 'project' - _ROLE_REMOVED_FROM_USER = 'role_removed_from_user' - _INVALIDATION_USER_PROJECT_TOKENS = 'invalidate_user_project_tokens' - - def __init__(self): - assignment_driver = CONF.assignment.driver - # If there is no explicit assignment driver specified, we let the - # identity driver tell us what to use. This is for backward - # compatibility reasons from the time when identity, resource and - # assignment were all part of identity. - if assignment_driver is None: - msg = _('Use of the identity driver config to automatically ' - 'configure the same assignment driver has been ' - 'deprecated, in the "O" release, the assignment driver ' - 'will need to be expicitly configured if different ' - 'than the default (SQL).') - versionutils.report_deprecated_feature(LOG, msg) - try: - identity_driver = dependency.get_provider( - 'identity_api').driver - assignment_driver = identity_driver.default_assignment_driver() - except ValueError: - msg = _('Attempted automatic driver selection for assignment ' - 'based upon [identity]\driver option failed since ' - 'driver %s is not found. 
Set [assignment]/driver to ' - 'a valid driver in keystone config.') - LOG.critical(msg) - raise exception.KeystoneConfigurationError(msg) - super(Manager, self).__init__(assignment_driver) - - # Make sure it is a driver version we support, and if it is a legacy - # driver, then wrap it. - if isinstance(self.driver, AssignmentDriverV8): - self.driver = V9AssignmentWrapperForV8Driver(self.driver) - elif not isinstance(self.driver, AssignmentDriverV9): - raise exception.UnsupportedDriverVersion(driver=assignment_driver) - - self.event_callbacks = { - notifications.ACTIONS.deleted: { - 'domain': [self._delete_domain_assignments], - }, - } - - def _delete_domain_assignments(self, service, resource_type, operations, - payload): - domain_id = payload['resource_info'] - self.driver.delete_domain_assignments(domain_id) - - def _get_group_ids_for_user_id(self, user_id): - # TODO(morganfainberg): Implement a way to get only group_ids - # instead of the more expensive to_dict() call for each record. - return [x['id'] for - x in self.identity_api.list_groups_for_user(user_id)] - - def list_user_ids_for_project(self, tenant_id): - self.resource_api.get_project(tenant_id) - assignment_list = self.list_role_assignments( - project_id=tenant_id, effective=True) - # Use set() to process the list to remove any duplicates - return list(set([x['user_id'] for x in assignment_list])) - - def _list_parent_ids_of_project(self, project_id): - if CONF.os_inherit.enabled: - return [x['id'] for x in ( - self.resource_api.list_project_parents(project_id))] - else: - return [] - - @MEMOIZE_COMPUTED_ASSIGNMENTS - def get_roles_for_user_and_project(self, user_id, tenant_id): - """Get the roles associated with a user within given project. - - This includes roles directly assigned to the user on the - project, as well as those by virtue of group membership or - inheritance. - - :returns: a list of role ids. - :raises keystone.exception.ProjectNotFound: If the project doesn't - exist. 
- - """ - self.resource_api.get_project(tenant_id) - assignment_list = self.list_role_assignments( - user_id=user_id, project_id=tenant_id, effective=True) - # Use set() to process the list to remove any duplicates - return list(set([x['role_id'] for x in assignment_list])) - - @MEMOIZE_COMPUTED_ASSIGNMENTS - def get_roles_for_user_and_domain(self, user_id, domain_id): - """Get the roles associated with a user within given domain. - - :returns: a list of role ids. - :raises keystone.exception.DomainNotFound: If the domain doesn't exist. - - """ - self.resource_api.get_domain(domain_id) - assignment_list = self.list_role_assignments( - user_id=user_id, domain_id=domain_id, effective=True) - # Use set() to process the list to remove any duplicates - return list(set([x['role_id'] for x in assignment_list])) - - def get_roles_for_groups(self, group_ids, project_id=None, domain_id=None): - """Get a list of roles for this group on domain and/or project.""" - if project_id is not None: - self.resource_api.get_project(project_id) - assignment_list = self.list_role_assignments( - source_from_group_ids=group_ids, project_id=project_id, - effective=True) - elif domain_id is not None: - assignment_list = self.list_role_assignments( - source_from_group_ids=group_ids, domain_id=domain_id, - effective=True) - else: - raise AttributeError(_("Must specify either domain or project")) - - role_ids = list(set([x['role_id'] for x in assignment_list])) - return self.role_api.list_roles_from_ids(role_ids) - - def add_user_to_project(self, tenant_id, user_id): - """Add user to a tenant by creating a default role relationship. - - :raises keystone.exception.ProjectNotFound: If the project doesn't - exist. - :raises keystone.exception.UserNotFound: If the user doesn't exist. 
- - """ - self.resource_api.get_project(tenant_id) - try: - self.role_api.get_role(CONF.member_role_id) - self.driver.add_role_to_user_and_project( - user_id, - tenant_id, - CONF.member_role_id) - except exception.RoleNotFound: - LOG.info(_LI("Creating the default role %s " - "because it does not exist."), - CONF.member_role_id) - role = {'id': CONF.member_role_id, - 'name': CONF.member_role_name} - try: - self.role_api.create_role(CONF.member_role_id, role) - except exception.Conflict: - LOG.info(_LI("Creating the default role %s failed because it " - "was already created"), - CONF.member_role_id) - # now that default role exists, the add should succeed - self.driver.add_role_to_user_and_project( - user_id, - tenant_id, - CONF.member_role_id) - COMPUTED_ASSIGNMENTS_REGION.invalidate() - - @notifications.role_assignment('created') - def _add_role_to_user_and_project_adapter(self, role_id, user_id=None, - group_id=None, domain_id=None, - project_id=None, - inherited_to_projects=False, - context=None): - - # The parameters for this method must match the parameters for - # create_grant so that the notifications.role_assignment decorator - # will work. - - self.resource_api.get_project(project_id) - self.role_api.get_role(role_id) - self.driver.add_role_to_user_and_project(user_id, project_id, role_id) - - def add_role_to_user_and_project(self, user_id, tenant_id, role_id): - self._add_role_to_user_and_project_adapter( - role_id, user_id=user_id, project_id=tenant_id) - COMPUTED_ASSIGNMENTS_REGION.invalidate() - - def remove_user_from_project(self, tenant_id, user_id): - """Remove user from a tenant - - :raises keystone.exception.ProjectNotFound: If the project doesn't - exist. - :raises keystone.exception.UserNotFound: If the user doesn't exist. 
- - """ - roles = self.get_roles_for_user_and_project(user_id, tenant_id) - if not roles: - raise exception.NotFound(tenant_id) - for role_id in roles: - try: - self.driver.remove_role_from_user_and_project(user_id, - tenant_id, - role_id) - self.revoke_api.revoke_by_grant(role_id, user_id=user_id, - project_id=tenant_id) - - except exception.RoleNotFound: - LOG.debug("Removing role %s failed because it does not exist.", - role_id) - COMPUTED_ASSIGNMENTS_REGION.invalidate() - - # TODO(henry-nash): We might want to consider list limiting this at some - # point in the future. - def list_projects_for_user(self, user_id, hints=None): - assignment_list = self.list_role_assignments( - user_id=user_id, effective=True) - # Use set() to process the list to remove any duplicates - project_ids = list(set([x['project_id'] for x in assignment_list - if x.get('project_id')])) - return self.resource_api.list_projects_from_ids(list(project_ids)) - - # TODO(henry-nash): We might want to consider list limiting this at some - # point in the future. 
- def list_domains_for_user(self, user_id, hints=None): - assignment_list = self.list_role_assignments( - user_id=user_id, effective=True) - # Use set() to process the list to remove any duplicates - domain_ids = list(set([x['domain_id'] for x in assignment_list - if x.get('domain_id')])) - return self.resource_api.list_domains_from_ids(domain_ids) - - def list_domains_for_groups(self, group_ids): - assignment_list = self.list_role_assignments( - source_from_group_ids=group_ids, effective=True) - domain_ids = list(set([x['domain_id'] for x in assignment_list - if x.get('domain_id')])) - return self.resource_api.list_domains_from_ids(domain_ids) - - def list_projects_for_groups(self, group_ids): - assignment_list = self.list_role_assignments( - source_from_group_ids=group_ids, effective=True) - project_ids = list(set([x['project_id'] for x in assignment_list - if x.get('project_id')])) - return self.resource_api.list_projects_from_ids(project_ids) - - @notifications.role_assignment('deleted') - def _remove_role_from_user_and_project_adapter(self, role_id, user_id=None, - group_id=None, - domain_id=None, - project_id=None, - inherited_to_projects=False, - context=None): - - # The parameters for this method must match the parameters for - # delete_grant so that the notifications.role_assignment decorator - # will work. 
- - self.driver.remove_role_from_user_and_project(user_id, project_id, - role_id) - if project_id: - self._emit_invalidate_grant_token_persistence(user_id, project_id) - else: - self.identity_api.emit_invalidate_user_token_persistence(user_id) - self.revoke_api.revoke_by_grant(role_id, user_id=user_id, - project_id=project_id) - - def remove_role_from_user_and_project(self, user_id, tenant_id, role_id): - self._remove_role_from_user_and_project_adapter( - role_id, user_id=user_id, project_id=tenant_id) - COMPUTED_ASSIGNMENTS_REGION.invalidate() - - def _emit_invalidate_user_token_persistence(self, user_id): - self.identity_api.emit_invalidate_user_token_persistence(user_id) - - # NOTE(lbragstad): The previous notification decorator behavior didn't - # send the notification unless the operation was successful. We - # maintain that behavior here by calling to the notification module - # after the call to emit invalid user tokens. - notifications.Audit.internal( - notifications.INVALIDATE_USER_TOKEN_PERSISTENCE, user_id - ) - - def _emit_invalidate_grant_token_persistence(self, user_id, project_id): - self.identity_api.emit_invalidate_grant_token_persistence( - {'user_id': user_id, 'project_id': project_id} - ) - - @notifications.role_assignment('created') - def create_grant(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False, context=None): - self.role_api.get_role(role_id) - if domain_id: - self.resource_api.get_domain(domain_id) - if project_id: - self.resource_api.get_project(project_id) - self.driver.create_grant(role_id, user_id, group_id, domain_id, - project_id, inherited_to_projects) - COMPUTED_ASSIGNMENTS_REGION.invalidate() - - def get_grant(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): - role_ref = self.role_api.get_role(role_id) - if domain_id: - self.resource_api.get_domain(domain_id) - if project_id: - 
self.resource_api.get_project(project_id) - self.check_grant_role_id( - role_id, user_id, group_id, domain_id, project_id, - inherited_to_projects) - return role_ref - - def list_grants(self, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): - if domain_id: - self.resource_api.get_domain(domain_id) - if project_id: - self.resource_api.get_project(project_id) - grant_ids = self.list_grant_role_ids( - user_id, group_id, domain_id, project_id, inherited_to_projects) - return self.role_api.list_roles_from_ids(grant_ids) - - @notifications.role_assignment('deleted') - def _emit_revoke_user_grant(self, role_id, user_id, domain_id, project_id, - inherited_to_projects, context): - self._emit_invalidate_grant_token_persistence(user_id, project_id) - - def delete_grant(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False, context=None): - if group_id is None: - self.revoke_api.revoke_by_grant(user_id=user_id, - role_id=role_id, - domain_id=domain_id, - project_id=project_id) - self._emit_revoke_user_grant( - role_id, user_id, domain_id, project_id, - inherited_to_projects, context) - else: - try: - # Group may contain a lot of users so revocation will be - # by role & domain/project - if domain_id is None: - self.revoke_api.revoke_by_project_role_assignment( - project_id, role_id - ) - else: - self.revoke_api.revoke_by_domain_role_assignment( - domain_id, role_id - ) - if CONF.token.revoke_by_id: - # NOTE(morganfainberg): The user ids are the important part - # for invalidating tokens below, so extract them here. 
- for user in self.identity_api.list_users_in_group( - group_id): - self._emit_revoke_user_grant( - role_id, user['id'], domain_id, project_id, - inherited_to_projects, context) - except exception.GroupNotFound: - LOG.debug('Group %s not found, no tokens to invalidate.', - group_id) - - # TODO(henry-nash): While having the call to get_role here mimics the - # previous behavior (when it was buried inside the driver delete call), - # this seems an odd place to have this check, given what we have - # already done so far in this method. See Bug #1406776. - self.role_api.get_role(role_id) - - if domain_id: - self.resource_api.get_domain(domain_id) - if project_id: - self.resource_api.get_project(project_id) - self.driver.delete_grant(role_id, user_id, group_id, domain_id, - project_id, inherited_to_projects) - COMPUTED_ASSIGNMENTS_REGION.invalidate() - - # The methods _expand_indirect_assignment, _list_direct_role_assignments - # and _list_effective_role_assignments below are only used on - # list_role_assignments, but they are not in its scope as nested functions - # since it would significantly increase McCabe complexity, that should be - # kept as it is in order to detect unnecessarily complex code, which is not - # this case. - - def _expand_indirect_assignment(self, ref, user_id=None, project_id=None, - subtree_ids=None, expand_groups=True): - """Returns a list of expanded role assignments. - - This methods is called for each discovered assignment that either needs - a group assignment expanded into individual user assignments, or needs - an inherited assignment to be applied to its children. - - In all cases, if either user_id and/or project_id is specified, then we - filter the result on those values. - - If project_id is specified and subtree_ids is None, then this - indicates that we are only interested in that one project. If - subtree_ids is not None, then this is an indicator that any - inherited assignments need to be expanded down the tree. 
The - actual subtree_ids don't need to be used as a filter here, since we - already ensured only those assignments that could affect them - were passed to this method. - - If expand_groups is True then we expand groups out to a list of - assignments, one for each member of that group. - - """ - def create_group_assignment(base_ref, user_id): - """Creates a group assignment from the provided ref.""" - ref = copy.deepcopy(base_ref) - - ref['user_id'] = user_id - - indirect = ref.setdefault('indirect', {}) - indirect['group_id'] = ref.pop('group_id') - - return ref - - def expand_group_assignment(ref, user_id): - """Expands group role assignment. - - For any group role assignment on a target, it is replaced by a list - of role assignments containing one for each user of that group on - that target. - - An example of accepted ref is:: - - { - 'group_id': group_id, - 'project_id': project_id, - 'role_id': role_id - } - - Once expanded, it should be returned as a list of entities like the - one below, one for each each user_id in the provided group_id. - - :: - - { - 'user_id': user_id, - 'project_id': project_id, - 'role_id': role_id, - 'indirect' : { - 'group_id': group_id - } - } - - Returned list will be formatted by the Controller, which will - deduce a role assignment came from group membership if it has both - 'user_id' in the main body of the dict and 'group_id' in indirect - subdict. - - """ - if user_id: - return [create_group_assignment(ref, user_id=user_id)] - - return [create_group_assignment(ref, user_id=m['id']) - for m in self.identity_api.list_users_in_group( - ref['group_id'])] - - def expand_inherited_assignment(ref, user_id, project_id, subtree_ids, - expand_groups): - """Expands inherited role assignments. - - If expand_groups is True and this is a group role assignment on a - target, replace it by a list of role assignments containing one for - each user of that group, on every project under that target. 
If - expand_groups is False, then return a group assignment on an - inherited target. - - If this is a user role assignment on a specific target (i.e. - project_id is specified, but subtree_ids is None) then simply - format this as a single assignment (since we are effectively - filtering on project_id). If however, project_id is None or - subtree_ids is not None, then replace this one assignment with a - list of role assignments for that user on every project under - that target. - - An example of accepted ref is:: - - { - 'group_id': group_id, - 'project_id': parent_id, - 'role_id': role_id, - 'inherited_to_projects': 'projects' - } - - Once expanded, it should be returned as a list of entities like the - one below, one for each each user_id in the provided group_id and - for each subproject_id in the project_id subtree. - - :: - - { - 'user_id': user_id, - 'project_id': subproject_id, - 'role_id': role_id, - 'indirect' : { - 'group_id': group_id, - 'project_id': parent_id - } - } - - Returned list will be formatted by the Controller, which will - deduce a role assignment came from group membership if it has both - 'user_id' in the main body of the dict and 'group_id' in the - 'indirect' subdict, as well as it is possible to deduce if it has - come from inheritance if it contains both a 'project_id' in the - main body of the dict and 'parent_id' in the 'indirect' subdict. - - """ - def create_inherited_assignment(base_ref, project_id): - """Creates a project assignment from the provided ref. - - base_ref can either be a project or domain inherited - assignment ref. 
- - """ - ref = copy.deepcopy(base_ref) - - indirect = ref.setdefault('indirect', {}) - if ref.get('project_id'): - indirect['project_id'] = ref.pop('project_id') - else: - indirect['domain_id'] = ref.pop('domain_id') - - ref['project_id'] = project_id - ref.pop('inherited_to_projects') - - return ref - - # Define expanded project list to which to apply this assignment - if project_id: - # Since ref is an inherited assignment and we are filtering by - # project(s), we are only going to apply the assignment to the - # relevant project(s) - project_ids = [project_id] - if subtree_ids: - project_ids += subtree_ids - # If this is a domain inherited assignment, then we know - # that all the project_ids will get this assignment. If - # it's a project inherited assignment, and the assignment - # point is an ancestor of project_id, then we know that - # again all the project_ids will get the assignment. If, - # however, the assignment point is within the subtree, - # then only a partial tree will get the assignment. 
- if ref.get('project_id'): - if ref['project_id'] in project_ids: - project_ids = ( - [x['id'] for x in - self.resource_api.list_projects_in_subtree( - ref['project_id'])]) - elif ref.get('domain_id'): - # A domain inherited assignment, so apply it to all projects - # in this domain - project_ids = ( - [x['id'] for x in - self.resource_api.list_projects_in_domain( - ref['domain_id'])]) - else: - # It must be a project assignment, so apply it to its subtree - project_ids = ( - [x['id'] for x in - self.resource_api.list_projects_in_subtree( - ref['project_id'])]) - - new_refs = [] - if 'group_id' in ref: - if expand_groups: - # Expand role assignment to all group members on any - # inherited target of any of the projects - for ref in expand_group_assignment(ref, user_id): - new_refs += [create_inherited_assignment(ref, proj_id) - for proj_id in project_ids] - else: - # Just place the group assignment on any inherited target - # of any of the projects - new_refs += [create_inherited_assignment(ref, proj_id) - for proj_id in project_ids] - else: - # Expand role assignment for all projects - new_refs += [create_inherited_assignment(ref, proj_id) - for proj_id in project_ids] - - return new_refs - - if ref.get('inherited_to_projects') == 'projects': - return expand_inherited_assignment( - ref, user_id, project_id, subtree_ids, expand_groups) - elif 'group_id' in ref and expand_groups: - return expand_group_assignment(ref, user_id) - return [ref] - - def add_implied_roles(self, role_refs): - """Expand out implied roles. - - The role_refs passed in have had all inheritance and group assignments - expanded out. We now need to look at the role_id in each ref and see - if it is a prior role for some implied roles. If it is, then we need to - duplicate that ref, one for each implied role. We store the prior role - in the indirect dict that is part of such a duplicated ref, so that a - caller can determine where the assignment came from. 
- - """ - def _make_implied_ref_copy(prior_ref, implied_role_id): - # Create a ref for an implied role from the ref of a prior role, - # setting the new role_id to be the implied role and the indirect - # role_id to be the prior role - implied_ref = copy.deepcopy(prior_ref) - implied_ref['role_id'] = implied_role_id - indirect = implied_ref.setdefault('indirect', {}) - indirect['role_id'] = prior_ref['role_id'] - return implied_ref - - if not CONF.token.infer_roles: - return role_refs - try: - implied_roles_cache = {} - role_refs_to_check = list(role_refs) - ref_results = list(role_refs) - checked_role_refs = list() - while(role_refs_to_check): - next_ref = role_refs_to_check.pop() - checked_role_refs.append(next_ref) - next_role_id = next_ref['role_id'] - if next_role_id in implied_roles_cache: - implied_roles = implied_roles_cache[next_role_id] - else: - implied_roles = ( - self.role_api.list_implied_roles(next_role_id)) - implied_roles_cache[next_role_id] = implied_roles - for implied_role in implied_roles: - implied_ref = ( - _make_implied_ref_copy( - next_ref, implied_role['implied_role_id'])) - if implied_ref in checked_role_refs: - msg = _LE('Circular reference found ' - 'role inference rules - %(prior_role_id)s.') - LOG.error(msg, {'prior_role_id': next_ref['role_id']}) - else: - ref_results.append(implied_ref) - role_refs_to_check.append(implied_ref) - except exception.NotImplemented: - LOG.error('Role driver does not support implied roles.') - - return ref_results - - def _filter_by_role_id(self, role_id, ref_results): - # if we arrive here, we need to filer by role_id. - filter_results = [] - for ref in ref_results: - if ref['role_id'] == role_id: - filter_results.append(ref) - return filter_results - - def _strip_domain_roles(self, role_refs): - """Post process assignment list for domain roles. 
- - Domain roles are only designed to do the job of inferring other roles - and since that has been done before this method is called, we need to - remove any assignments that include a domain role. - - """ - def _role_is_global(role_id): - ref = self.role_api.get_role(role_id) - return (ref['domain_id'] is None) - - filter_results = [] - for ref in role_refs: - if _role_is_global(ref['role_id']): - filter_results.append(ref) - return filter_results - - def _list_effective_role_assignments(self, role_id, user_id, group_id, - domain_id, project_id, subtree_ids, - inherited, source_from_group_ids, - strip_domain_roles): - """List role assignments in effective mode. - - When using effective mode, besides the direct assignments, the indirect - ones that come from grouping or inheritance are retrieved and will then - be expanded. - - The resulting list of assignments will be filtered by the provided - parameters. If subtree_ids is not None, then we also want to include - all subtree_ids in the filter as well. Since we are in effective mode, - group can never act as a filter (since group assignments are expanded - into user roles) and domain can only be filter if we want non-inherited - assignments, since domains can't inherit assignments. - - The goal of this method is to only ask the driver for those - assignments as could effect the result based on the parameter filters - specified, hence avoiding retrieving a huge list. - - """ - def list_role_assignments_for_actor( - role_id, inherited, user_id=None, group_ids=None, - project_id=None, subtree_ids=None, domain_id=None): - """List role assignments for actor on target. - - List direct and indirect assignments for an actor, optionally - for a given target (i.e. projects or domain). - - :param role_id: List for a specific role, can be None meaning all - roles - :param inherited: Indicates whether inherited assignments or only - direct assignments are required. If None, then - both are required. 
- :param user_id: If not None, list only assignments that affect this - user. - :param group_ids: A list of groups required. Only one of user_id - and group_ids can be specified - :param project_id: If specified, only include those assignments - that affect at least this project, with - additionally any projects specified in - subtree_ids - :param subtree_ids: The list of projects in the subtree. If - specified, also include those assignments that - affect these projects. These projects are - guaranteed to be in the same domain as the - project specified in project_id. subtree_ids - can only be specified if project_id has also - been specified. - :param domain_id: If specified, only include those assignments - that affect this domain - by definition this will - not include any inherited assignments - - :returns: List of assignments matching the criteria. Any inherited - or group assignments that could affect the resulting - response are included. - - """ - project_ids_of_interest = None - if project_id: - if subtree_ids: - project_ids_of_interest = subtree_ids + [project_id] - else: - project_ids_of_interest = [project_id] - - # List direct project role assignments - non_inherited_refs = [] - if inherited is False or inherited is None: - # Get non inherited assignments - non_inherited_refs = self.driver.list_role_assignments( - role_id=role_id, domain_id=domain_id, - project_ids=project_ids_of_interest, user_id=user_id, - group_ids=group_ids, inherited_to_projects=False) - - inherited_refs = [] - if inherited is True or inherited is None: - # Get inherited assignments - if project_id: - # The project and any subtree are guaranteed to be owned by - # the same domain, so since we are filtering by these - # specific projects, then we can only get inherited - # assignments from their common domain or from any of - # their parents projects. 
- - # List inherited assignments from the project's domain - proj_domain_id = self.resource_api.get_project( - project_id)['domain_id'] - inherited_refs += self.driver.list_role_assignments( - role_id=role_id, domain_id=proj_domain_id, - user_id=user_id, group_ids=group_ids, - inherited_to_projects=True) - - # For inherited assignments from projects, since we know - # they are from the same tree the only places these can - # come from are from parents of the main project or - # inherited assignments on the project or subtree itself. - source_ids = [project['id'] for project in - self.resource_api.list_project_parents( - project_id)] - if subtree_ids: - source_ids += project_ids_of_interest - if source_ids: - inherited_refs += self.driver.list_role_assignments( - role_id=role_id, project_ids=source_ids, - user_id=user_id, group_ids=group_ids, - inherited_to_projects=True) - else: - # List inherited assignments without filtering by target - inherited_refs = self.driver.list_role_assignments( - role_id=role_id, user_id=user_id, group_ids=group_ids, - inherited_to_projects=True) - - return non_inherited_refs + inherited_refs - - # If filtering by group or inherited domain assignment the list is - # guaranteed to be empty - if group_id or (domain_id and inherited): - return [] - - if user_id and source_from_group_ids: - # You can't do both - and since source_from_group_ids is only used - # internally, this must be a coding error by the caller. - msg = _('Cannot list assignments sourced from groups and filtered ' - 'by user ID.') - raise exception.UnexpectedError(msg) - - # If filtering by domain, then only non-inherited assignments are - # relevant, since domains don't inherit assignments - inherited = False if domain_id else inherited - - # List user or explicit group assignments. - # Due to the need to expand implied roles, this call will skip - # filtering by role_id and instead return the whole set of roles. 
- # Matching on the specified role is performed at the end. - direct_refs = list_role_assignments_for_actor( - role_id=None, user_id=user_id, group_ids=source_from_group_ids, - project_id=project_id, subtree_ids=subtree_ids, - domain_id=domain_id, inherited=inherited) - - # And those from the user's groups, so long as we are not restricting - # to a set of source groups (in which case we already got those - # assignments in the direct listing above). - group_refs = [] - if not source_from_group_ids and user_id: - group_ids = self._get_group_ids_for_user_id(user_id) - if group_ids: - group_refs = list_role_assignments_for_actor( - role_id=None, project_id=project_id, - subtree_ids=subtree_ids, group_ids=group_ids, - domain_id=domain_id, inherited=inherited) - - # Expand grouping and inheritance on retrieved role assignments - refs = [] - expand_groups = (source_from_group_ids is None) - for ref in (direct_refs + group_refs): - refs += self._expand_indirect_assignment( - ref, user_id, project_id, subtree_ids, expand_groups) - - refs = self.add_implied_roles(refs) - if strip_domain_roles: - refs = self._strip_domain_roles(refs) - if role_id: - refs = self._filter_by_role_id(role_id, refs) - - return refs - - def _list_direct_role_assignments(self, role_id, user_id, group_id, - domain_id, project_id, subtree_ids, - inherited): - """List role assignments without applying expansion. - - Returns a list of direct role assignments, where their attributes match - the provided filters. If subtree_ids is not None, then we also want to - include all subtree_ids in the filter as well. 
- - """ - group_ids = [group_id] if group_id else None - project_ids_of_interest = None - if project_id: - if subtree_ids: - project_ids_of_interest = subtree_ids + [project_id] - else: - project_ids_of_interest = [project_id] - - return self.driver.list_role_assignments( - role_id=role_id, user_id=user_id, group_ids=group_ids, - domain_id=domain_id, project_ids=project_ids_of_interest, - inherited_to_projects=inherited) - - def list_role_assignments(self, role_id=None, user_id=None, group_id=None, - domain_id=None, project_id=None, - include_subtree=False, inherited=None, - effective=None, include_names=False, - source_from_group_ids=None, - strip_domain_roles=True): - """List role assignments, honoring effective mode and provided filters. - - Returns a list of role assignments, where their attributes match the - provided filters (role_id, user_id, group_id, domain_id, project_id and - inherited). If include_subtree is True, then assignments on all - descendants of the project specified by project_id are also included. - The inherited filter defaults to None, meaning to get both - non-inherited and inherited role assignments. - - If effective mode is specified, this means that rather than simply - return the assignments that match the filters, any group or - inheritance assignments will be expanded. Group assignments will - become assignments for all the users in that group, and inherited - assignments will be shown on the projects below the assignment point. - Think of effective mode as being the list of assignments that actually - affect a user, for example the roles that would be placed in a token. - - If include_names is set to true the entities' names are returned - in addition to their id's. - - source_from_group_ids is a list of group IDs and, if specified, then - only those assignments that are derived from membership of these groups - are considered, and any such assignments will not be expanded into - their user membership assignments. 
This is different to a group filter - of the resulting list, instead being a restriction on which assignments - should be considered before expansion of inheritance. This option is - only used internally (i.e. it is not exposed at the API level) and is - only supported in effective mode (since in regular mode there is no - difference between this and a group filter, other than it is a list of - groups). - - In effective mode, any domain specific roles are usually stripped from - the returned assignments (since such roles are not placed in tokens). - This stripping can be disabled by specifying strip_domain_roles=False, - which is useful for internal calls like trusts which need to examine - the full set of roles. - - If OS-INHERIT extension is disabled or the used driver does not support - inherited roles retrieval, inherited role assignments will be ignored. - - """ - if not CONF.os_inherit.enabled: - if inherited: - return [] - inherited = False - - subtree_ids = None - if project_id and include_subtree: - subtree_ids = ( - [x['id'] for x in - self.resource_api.list_projects_in_subtree(project_id)]) - - if effective: - role_assignments = self._list_effective_role_assignments( - role_id, user_id, group_id, domain_id, project_id, - subtree_ids, inherited, source_from_group_ids, - strip_domain_roles) - else: - role_assignments = self._list_direct_role_assignments( - role_id, user_id, group_id, domain_id, project_id, - subtree_ids, inherited) - - if include_names: - return self._get_names_from_role_assignments(role_assignments) - return role_assignments - - def _get_names_from_role_assignments(self, role_assignments): - role_assign_list = [] - - for role_asgmt in role_assignments: - new_assign = {} - for id_type, id_ in role_asgmt.items(): - if id_type == 'domain_id': - _domain = self.resource_api.get_domain(id_) - new_assign['domain_id'] = _domain['id'] - new_assign['domain_name'] = _domain['name'] - elif id_type == 'user_id': - _user = 
self.identity_api.get_user(id_) - new_assign['user_id'] = _user['id'] - new_assign['user_name'] = _user['name'] - new_assign['user_domain_id'] = _user['domain_id'] - new_assign['user_domain_name'] = ( - self.resource_api.get_domain(_user['domain_id']) - ['name']) - elif id_type == 'group_id': - _group = self.identity_api.get_group(id_) - new_assign['group_id'] = _group['id'] - new_assign['group_name'] = _group['name'] - new_assign['group_domain_id'] = _group['domain_id'] - new_assign['group_domain_name'] = ( - self.resource_api.get_domain(_group['domain_id']) - ['name']) - elif id_type == 'project_id': - _project = self.resource_api.get_project(id_) - new_assign['project_id'] = _project['id'] - new_assign['project_name'] = _project['name'] - new_assign['project_domain_id'] = _project['domain_id'] - new_assign['project_domain_name'] = ( - self.resource_api.get_domain(_project['domain_id']) - ['name']) - elif id_type == 'role_id': - _role = self.role_api.get_role(id_) - new_assign['role_id'] = _role['id'] - new_assign['role_name'] = _role['name'] - role_assign_list.append(new_assign) - return role_assign_list - - def delete_tokens_for_role_assignments(self, role_id): - assignments = self.list_role_assignments(role_id=role_id) - - # Iterate over the assignments for this role and build the list of - # user or user+project IDs for the tokens we need to delete - user_ids = set() - user_and_project_ids = list() - for assignment in assignments: - # If we have a project assignment, then record both the user and - # project IDs so we can target the right token to delete. If it is - # a domain assignment, we might as well kill all the tokens for - # the user, since in the vast majority of cases all the tokens - # for a user will be within one domain anyway, so not worth - # trying to delete tokens for each project in the domain. 
- if 'user_id' in assignment: - if 'project_id' in assignment: - user_and_project_ids.append( - (assignment['user_id'], assignment['project_id'])) - elif 'domain_id' in assignment: - self._emit_invalidate_user_token_persistence( - assignment['user_id']) - elif 'group_id' in assignment: - # Add in any users for this group, being tolerant of any - # cross-driver database integrity errors. - try: - users = self.identity_api.list_users_in_group( - assignment['group_id']) - except exception.GroupNotFound: - # Ignore it, but log a debug message - if 'project_id' in assignment: - target = _('Project (%s)') % assignment['project_id'] - elif 'domain_id' in assignment: - target = _('Domain (%s)') % assignment['domain_id'] - else: - target = _('Unknown Target') - msg = ('Group (%(group)s), referenced in assignment ' - 'for %(target)s, not found - ignoring.') - LOG.debug(msg, {'group': assignment['group_id'], - 'target': target}) - continue - - if 'project_id' in assignment: - for user in users: - user_and_project_ids.append( - (user['id'], assignment['project_id'])) - elif 'domain_id' in assignment: - for user in users: - self._emit_invalidate_user_token_persistence( - user['id']) - - # Now process the built up lists. Before issuing calls to delete any - # tokens, let's try and minimize the number of calls by pruning out - # any user+project deletions where a general token deletion for that - # same user is also planned. 
- user_and_project_ids_to_action = [] - for user_and_project_id in user_and_project_ids: - if user_and_project_id[0] not in user_ids: - user_and_project_ids_to_action.append(user_and_project_id) - - for user_id, project_id in user_and_project_ids_to_action: - payload = {'user_id': user_id, 'project_id': project_id} - notifications.Audit.internal( - notifications.INVALIDATE_USER_PROJECT_TOKEN_PERSISTENCE, - payload - ) - - -# The AssignmentDriverBase class is the set of driver methods from earlier -# drivers that we still support, that have not been removed or modified. This -# class is then used to created the augmented V8 and V9 version abstract driver -# classes, without having to duplicate a lot of abstract method signatures. -# If you remove a method from V9, then move the abstract methods from this Base -# class to the V8 class. Do not modify any of the method signatures in the Base -# class - changes should only be made in the V8 and subsequent classes. -@six.add_metaclass(abc.ABCMeta) -class AssignmentDriverBase(object): - - def _get_list_limit(self): - return CONF.assignment.list_limit or CONF.list_limit - - @abc.abstractmethod - def add_role_to_user_and_project(self, user_id, tenant_id, role_id): - """Add a role to a user within given tenant. - - :raises keystone.exception.Conflict: If a duplicate role assignment - exists. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def remove_role_from_user_and_project(self, user_id, tenant_id, role_id): - """Remove a role from a user within given tenant. - - :raises keystone.exception.RoleNotFound: If the role doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - # assignment/grant crud - - @abc.abstractmethod - def create_grant(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): - """Creates a new assignment/grant. 
- - If the assignment is to a domain, then optionally it may be - specified as inherited to owned projects (this requires - the OS-INHERIT extension to be enabled). - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_grant_role_ids(self, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): - """Lists role ids for assignments/grants.""" - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def check_grant_role_id(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): - """Checks an assignment/grant role id. - - :raises keystone.exception.RoleAssignmentNotFound: If the role - assignment doesn't exist. - :returns: None or raises an exception if grant not found - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_grant(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): - """Deletes assignments/grants. - - :raises keystone.exception.RoleAssignmentNotFound: If the role - assignment doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_role_assignments(self, role_id=None, - user_id=None, group_ids=None, - domain_id=None, project_ids=None, - inherited_to_projects=None): - """Returns a list of role assignments for actors on targets. - - Available parameters represent values in which the returned role - assignments attributes need to be filtered on. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_project_assignments(self, project_id): - """Deletes all assignments for a project. - - :raises keystone.exception.ProjectNotFound: If the project doesn't - exist. 
- - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_role_assignments(self, role_id): - """Deletes all assignments for a role.""" - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_user_assignments(self, user_id): - """Deletes all assignments for a user. - - :raises keystone.exception.RoleNotFound: If the role doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_group_assignments(self, group_id): - """Deletes all assignments for a group. - - :raises keystone.exception.RoleNotFound: If the role doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - -class AssignmentDriverV8(AssignmentDriverBase): - """Removed or redefined methods from V8. - - Move the abstract methods of any methods removed or modified in later - versions of the driver from AssignmentDriverBase to here. We maintain this - so that legacy drivers, which will be a subclass of AssignmentDriverV8, can - still reference them. - - """ - - @abc.abstractmethod - def list_user_ids_for_project(self, tenant_id): - """Lists all user IDs with a role assignment in the specified project. - - :returns: a list of user_ids or an empty set. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_project_ids_for_user(self, user_id, group_ids, hints, - inherited=False): - """List all project ids associated with a given user. - - :param user_id: the user in question - :param group_ids: the groups this user is a member of. This list is - built in the Manager, so that the driver itself - does not have to call across to identity. - :param hints: filter hints which the driver should - implement if at all possible. - :param inherited: whether assignments marked as inherited should - be included. - - :returns: a list of project ids or an empty list. 
- - This method should not try and expand any inherited assignments, - just report the projects that have the role for this user. The manager - method is responsible for expanding out inherited assignments. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_domain_ids_for_user(self, user_id, group_ids, hints, - inherited=False): - """List all domain ids associated with a given user. - - :param user_id: the user in question - :param group_ids: the groups this user is a member of. This list is - built in the Manager, so that the driver itself - does not have to call across to identity. - :param hints: filter hints which the driver should - implement if at all possible. - :param inherited: whether to return domain_ids that have inherited - assignments or not. - - :returns: a list of domain ids or an empty list. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_project_ids_for_groups(self, group_ids, hints, - inherited=False): - """List project ids accessible to specified groups. - - :param group_ids: List of group ids. - :param hints: filter hints which the driver should - implement if at all possible. - :param inherited: whether assignments marked as inherited should - be included. - :returns: List of project ids accessible to specified groups. - - This method should not try and expand any inherited assignments, - just report the projects that have the role for this group. The manager - method is responsible for expanding out inherited assignments. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_domain_ids_for_groups(self, group_ids, inherited=False): - """List domain ids accessible to specified groups. - - :param group_ids: List of group ids. - :param inherited: whether to return domain_ids that have inherited - assignments or not. - :returns: List of domain ids accessible to specified groups. 
- - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_role_ids_for_groups_on_project( - self, group_ids, project_id, project_domain_id, project_parents): - """List the group role ids for a specific project. - - Supports the ``OS-INHERIT`` role inheritance from the project's domain - if supported by the assignment driver. - - :param group_ids: list of group ids - :type group_ids: list - :param project_id: project identifier - :type project_id: str - :param project_domain_id: project's domain identifier - :type project_domain_id: str - :param project_parents: list of parent ids of this project - :type project_parents: list - :returns: list of role ids for the project - :rtype: list - """ - raise exception.NotImplemented() - - @abc.abstractmethod - def list_role_ids_for_groups_on_domain(self, group_ids, domain_id): - """List the group role ids for a specific domain. - - :param group_ids: list of group ids - :type group_ids: list - :param domain_id: domain identifier - :type domain_id: str - :returns: list of role ids for the project - :rtype: list - """ - raise exception.NotImplemented() - - -class AssignmentDriverV9(AssignmentDriverBase): - """New or redefined methods from V8. - - Add any new V9 abstract methods (or those with modified signatures) to - this class. - - """ - - @abc.abstractmethod - def delete_domain_assignments(self, domain_id): - """Deletes all assignments for a domain.""" - raise exception.NotImplemented() - - -class V9AssignmentWrapperForV8Driver(AssignmentDriverV9): - """Wrapper class to supported a V8 legacy driver. - - In order to support legacy drivers without having to make the manager code - driver-version aware, we wrap legacy drivers so that they look like the - latest version. 
For the various changes made in a new driver, here are the - actions needed in this wrapper: - - Method removed from new driver - remove the call-through method from this - class, since the manager will no longer be - calling it. - Method signature (or meaning) changed - wrap the old method in a new - signature here, and munge the input - and output parameters accordingly. - New method added to new driver - add a method to implement the new - functionality here if possible. If that is - not possible, then return NotImplemented, - since we do not guarantee to support new - functionality with legacy drivers. - - """ - - @versionutils.deprecated( - as_of=versionutils.deprecated.MITAKA, - what='keystone.assignment.AssignmentDriverV8', - in_favor_of='keystone.assignment.AssignmentDriverV9', - remove_in=+2) - def __init__(self, wrapped_driver): - self.driver = wrapped_driver - - def delete_domain_assignments(self, domain_id): - """Deletes all assignments for a domain.""" - msg = _LW('delete_domain_assignments method not found in custom ' - 'assignment driver. Domain assignments for domain (%s) to ' - 'users from other domains will not be removed. 
This was ' - 'added in V9 of the assignment driver.') - LOG.warning(msg, domain_id) - - def default_role_driver(self): - return self.driver.default_role_driver() - - def default_resource_driver(self): - return self.driver.default_resource_driver() - - def add_role_to_user_and_project(self, user_id, tenant_id, role_id): - self.driver.add_role_to_user_and_project(user_id, tenant_id, role_id) - - def remove_role_from_user_and_project(self, user_id, tenant_id, role_id): - self.driver.remove_role_from_user_and_project( - user_id, tenant_id, role_id) - - def create_grant(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): - self.driver.create_grant( - role_id, user_id=user_id, group_id=group_id, - domain_id=domain_id, project_id=project_id, - inherited_to_projects=inherited_to_projects) - - def list_grant_role_ids(self, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): - return self.driver.list_grant_role_ids( - user_id=user_id, group_id=group_id, - domain_id=domain_id, project_id=project_id, - inherited_to_projects=inherited_to_projects) - - def check_grant_role_id(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): - self.driver.check_grant_role_id( - role_id, user_id=user_id, group_id=group_id, - domain_id=domain_id, project_id=project_id, - inherited_to_projects=inherited_to_projects) - - def delete_grant(self, role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False): - self.driver.delete_grant( - role_id, user_id=user_id, group_id=group_id, - domain_id=domain_id, project_id=project_id, - inherited_to_projects=inherited_to_projects) - - def list_role_assignments(self, role_id=None, - user_id=None, group_ids=None, - domain_id=None, project_ids=None, - inherited_to_projects=None): - return self.driver.list_role_assignments( - role_id=role_id, - user_id=user_id, 
group_ids=group_ids, - domain_id=domain_id, project_ids=project_ids, - inherited_to_projects=inherited_to_projects) - - def delete_project_assignments(self, project_id): - self.driver.delete_project_assignments(project_id) - - def delete_role_assignments(self, role_id): - self.driver.delete_role_assignments(role_id) - - def delete_user_assignments(self, user_id): - self.driver.delete_user_assignments(user_id) - - def delete_group_assignments(self, group_id): - self.driver.delete_group_assignments(group_id) - - -Driver = manager.create_legacy_driver(AssignmentDriverV8) - - -@dependency.provider('role_api') -@dependency.requires('assignment_api') -class RoleManager(manager.Manager): - """Default pivot point for the Role backend.""" - - driver_namespace = 'keystone.role' - - _ROLE = 'role' - - def __init__(self): - # If there is a specific driver specified for role, then use it. - # Otherwise retrieve the driver type from the assignment driver. - role_driver = CONF.role.driver - - if role_driver is None: - assignment_manager = dependency.get_provider('assignment_api') - role_driver = assignment_manager.default_role_driver() - - super(RoleManager, self).__init__(role_driver) - - # Make sure it is a driver version we support, and if it is a legacy - # driver, then wrap it. 
- if isinstance(self.driver, RoleDriverV8): - self.driver = V9RoleWrapperForV8Driver(self.driver) - elif not isinstance(self.driver, RoleDriverV9): - raise exception.UnsupportedDriverVersion(driver=role_driver) - - @MEMOIZE - def get_role(self, role_id): - return self.driver.get_role(role_id) - - def create_role(self, role_id, role, initiator=None): - ret = self.driver.create_role(role_id, role) - notifications.Audit.created(self._ROLE, role_id, initiator) - if MEMOIZE.should_cache(ret): - self.get_role.set(ret, self, role_id) - return ret - - @manager.response_truncated - def list_roles(self, hints=None): - return self.driver.list_roles(hints or driver_hints.Hints()) - - def update_role(self, role_id, role, initiator=None): - original_role = self.driver.get_role(role_id) - if ('domain_id' in role and - role['domain_id'] != original_role['domain_id']): - raise exception.ValidationError( - message=_('Update of `domain_id` is not allowed.')) - - ret = self.driver.update_role(role_id, role) - notifications.Audit.updated(self._ROLE, role_id, initiator) - self.get_role.invalidate(self, role_id) - return ret - - def delete_role(self, role_id, initiator=None): - self.assignment_api.delete_tokens_for_role_assignments(role_id) - self.assignment_api.delete_role_assignments(role_id) - self.driver.delete_role(role_id) - notifications.Audit.deleted(self._ROLE, role_id, initiator) - self.get_role.invalidate(self, role_id) - COMPUTED_ASSIGNMENTS_REGION.invalidate() - - # TODO(ayoung): Add notification - def create_implied_role(self, prior_role_id, implied_role_id): - implied_role = self.driver.get_role(implied_role_id) - self.driver.get_role(prior_role_id) - if implied_role['name'] in CONF.assignment.prohibited_implied_role: - raise exception.InvalidImpliedRole(role_id=implied_role_id) - response = self.driver.create_implied_role( - prior_role_id, implied_role_id) - COMPUTED_ASSIGNMENTS_REGION.invalidate() - return response - - def delete_implied_role(self, prior_role_id, 
implied_role_id): - self.driver.delete_implied_role(prior_role_id, implied_role_id) - COMPUTED_ASSIGNMENTS_REGION.invalidate() - - -# The RoleDriverBase class is the set of driver methods from earlier -# drivers that we still support, that have not been removed or modified. This -# class is then used to created the augmented V8 and V9 version abstract driver -# classes, without having to duplicate a lot of abstract method signatures. -# If you remove a method from V9, then move the abstract methods from this Base -# class to the V8 class. Do not modify any of the method signatures in the Base -# class - changes should only be made in the V8 and subsequent classes. -@six.add_metaclass(abc.ABCMeta) -class RoleDriverBase(object): - - def _get_list_limit(self): - return CONF.role.list_limit or CONF.list_limit - - @abc.abstractmethod - def create_role(self, role_id, role): - """Creates a new role. - - :raises keystone.exception.Conflict: If a duplicate role exists. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_roles(self, hints): - """List roles in the system. - - :param hints: filter hints which the driver should - implement if at all possible. - - :returns: a list of role_refs or an empty list. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_roles_from_ids(self, role_ids): - """List roles for the provided list of ids. - - :param role_ids: list of ids - - :returns: a list of role_refs. - - This method is used internally by the assignment manager to bulk read - a set of roles given their ids. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_role(self, role_id): - """Get a role by ID. - - :returns: role_ref - :raises keystone.exception.RoleNotFound: If the role doesn't exist. 
- - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def update_role(self, role_id, role): - """Updates an existing role. - - :raises keystone.exception.RoleNotFound: If the role doesn't exist. - :raises keystone.exception.Conflict: If a duplicate role exists. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_role(self, role_id): - """Deletes an existing role. - - :raises keystone.exception.RoleNotFound: If the role doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - -class RoleDriverV8(RoleDriverBase): - """Removed or redefined methods from V8. - - Move the abstract methods of any methods removed or modified in later - versions of the driver from RoleDriverBase to here. We maintain this - so that legacy drivers, which will be a subclass of RoleDriverV8, can - still reference them. - - """ - - pass - - -class RoleDriverV9(RoleDriverBase): - """New or redefined methods from V8. - - Add any new V9 abstract methods (or those with modified signatures) to - this class. - - """ - - @abc.abstractmethod - def get_implied_role(self, prior_role_id, implied_role_id): - """Fetches a role inference rule - - :raises keystone.exception.ImpliedRoleNotFound: If the implied role - doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def create_implied_role(self, prior_role_id, implied_role_id): - """Creates a role inference rule - - :raises: keystone.exception.RoleNotFound: If the role doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_implied_role(self, prior_role_id, implied_role_id): - """Deletes a role inference rule - - :raises keystone.exception.ImpliedRoleNotFound: If the implied role - doesn't exist. 
- - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_role_inference_rules(self): - """Lists all the rules used to imply one role from another""" - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_implied_roles(self, prior_role_id): - """Lists roles implied from the prior role ID""" - raise exception.NotImplemented() # pragma: no cover - - -class V9RoleWrapperForV8Driver(RoleDriverV9): - """Wrapper class to supported a V8 legacy driver. - - In order to support legacy drivers without having to make the manager code - driver-version aware, we wrap legacy drivers so that they look like the - latest version. For the various changes made in a new driver, here are the - actions needed in this wrapper: - - Method removed from new driver - remove the call-through method from this - class, since the manager will no longer be - calling it. - Method signature (or meaning) changed - wrap the old method in a new - signature here, and munge the input - and output parameters accordingly. - New method added to new driver - add a method to implement the new - functionality here if possible. If that is - not possible, then return NotImplemented, - since we do not guarantee to support new - functionality with legacy drivers. - - This V8 wrapper contains the following support for newer manager code: - - - The current manager code expects a role entity to have a domain_id - attribute, with a non-None value indicating a domain specific role. V8 - drivers will only understand global roles, hence if a non-None domain_id - is passed to this wrapper, it will raise a NotImplemented exception. - If a None-valued domain_id is passed in, it will be trimmed off before - the underlying driver is called (and a None-valued domain_id attribute - is added in for any entities returned to the manager. 
- - """ - - @versionutils.deprecated( - as_of=versionutils.deprecated.MITAKA, - what='keystone.assignment.RoleDriverV8', - in_favor_of='keystone.assignment.RoleDriverV9', - remove_in=+2) - def __init__(self, wrapped_driver): - self.driver = wrapped_driver - - def _append_null_domain_id(self, role_or_list): - def _append_null_domain_id_to_dict(role): - if 'domain_id' not in role: - role['domain_id'] = None - return role - - if isinstance(role_or_list, list): - return [_append_null_domain_id_to_dict(x) for x in role_or_list] - else: - return _append_null_domain_id_to_dict(role_or_list) - - def _trim_and_assert_null_domain_id(self, role): - if 'domain_id' in role: - if role['domain_id'] is not None: - raise exception.NotImplemented( - _('Domain specific roles are not supported in the V8 ' - 'role driver')) - else: - new_role = role.copy() - new_role.pop('domain_id') - return new_role - else: - return role - - def create_role(self, role_id, role): - new_role = self._trim_and_assert_null_domain_id(role) - return self._append_null_domain_id( - self.driver.create_role(role_id, new_role)) - - def list_roles(self, hints): - return self._append_null_domain_id(self.driver.list_roles(hints)) - - def list_roles_from_ids(self, role_ids): - return self._append_null_domain_id( - self.driver.list_roles_from_ids(role_ids)) - - def get_role(self, role_id): - return self._append_null_domain_id(self.driver.get_role(role_id)) - - def update_role(self, role_id, role): - update_role = self._trim_and_assert_null_domain_id(role) - return self._append_null_domain_id( - self.driver.update_role(role_id, update_role)) - - def delete_role(self, role_id): - self.driver.delete_role(role_id) - - def get_implied_role(self, prior_role_id, implied_role_id): - raise exception.NotImplemented() # pragma: no cover - - def create_implied_role(self, prior_role_id, implied_role_id): - raise exception.NotImplemented() # pragma: no cover - - def delete_implied_role(self, prior_role_id, implied_role_id): - 
raise exception.NotImplemented() # pragma: no cover - - def list_implied_roles(self, prior_role_id): - raise exception.NotImplemented() # pragma: no cover - - def list_role_inference_rules(self): - raise exception.NotImplemented() # pragma: no cover - -RoleDriver = manager.create_legacy_driver(RoleDriverV8) diff --git a/keystone-moon/keystone/assignment/role_backends/__init__.py b/keystone-moon/keystone/assignment/role_backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/assignment/role_backends/ldap.py b/keystone-moon/keystone/assignment/role_backends/ldap.py deleted file mode 100644 index 6e5e038e..00000000 --- a/keystone-moon/keystone/assignment/role_backends/ldap.py +++ /dev/null @@ -1,125 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from __future__ import absolute_import - -from oslo_config import cfg -from oslo_log import log - -from keystone import assignment -from keystone.common import ldap as common_ldap -from keystone.common import models -from keystone import exception -from keystone.i18n import _ -from keystone.identity.backends import ldap as ldap_identity - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -class Role(assignment.RoleDriverV8): - - def __init__(self): - super(Role, self).__init__() - self.LDAP_URL = CONF.ldap.url - self.LDAP_USER = CONF.ldap.user - self.LDAP_PASSWORD = CONF.ldap.password - self.suffix = CONF.ldap.suffix - - # This is the only deep dependency from resource back - # to identity. The assumption is that if you are using - # LDAP for resource, you are using it for identity as well. - self.user = ldap_identity.UserApi(CONF) - self.role = RoleApi(CONF, self.user) - - def get_role(self, role_id): - return self.role.get(role_id) - - def list_roles(self, hints): - return self.role.get_all() - - def list_roles_from_ids(self, ids): - return [self.get_role(id) for id in ids] - - def create_role(self, role_id, role): - self.role.check_allow_create() - try: - self.get_role(role_id) - except exception.NotFound: - pass - else: - msg = _('Duplicate ID, %s.') % role_id - raise exception.Conflict(type='role', details=msg) - - try: - self.role.get_by_name(role['name']) - except exception.NotFound: - pass - else: - msg = _('Duplicate name, %s.') % role['name'] - raise exception.Conflict(type='role', details=msg) - - return self.role.create(role) - - def delete_role(self, role_id): - self.role.check_allow_delete() - return self.role.delete(role_id) - - def update_role(self, role_id, role): - self.role.check_allow_update() - self.get_role(role_id) - return self.role.update(role_id, role) - - -# NOTE(heny-nash): A mixin class to enable the sharing of the LDAP structure -# between here and the assignment LDAP. 
-class RoleLdapStructureMixin(object): - DEFAULT_OU = 'ou=Roles' - DEFAULT_STRUCTURAL_CLASSES = [] - DEFAULT_OBJECTCLASS = 'organizationalRole' - DEFAULT_MEMBER_ATTRIBUTE = 'roleOccupant' - NotFound = exception.RoleNotFound - options_name = 'role' - attribute_options_names = {'name': 'name'} - immutable_attrs = ['id'] - model = models.Role - - -# TODO(termie): turn this into a data object and move logic to driver -class RoleApi(RoleLdapStructureMixin, common_ldap.BaseLdap): - - def __init__(self, conf, user_api): - super(RoleApi, self).__init__(conf) - self._user_api = user_api - - def get(self, role_id, role_filter=None): - model = super(RoleApi, self).get(role_id, role_filter) - return model - - def create(self, values): - return super(RoleApi, self).create(values) - - def update(self, role_id, role): - new_name = role.get('name') - if new_name is not None: - try: - old_role = self.get_by_name(new_name) - if old_role['id'] != role_id: - raise exception.Conflict( - _('Cannot duplicate name %s') % old_role) - except exception.NotFound: - pass - return super(RoleApi, self).update(role_id, role) - - def delete(self, role_id): - super(RoleApi, self).delete(role_id) diff --git a/keystone-moon/keystone/assignment/role_backends/sql.py b/keystone-moon/keystone/assignment/role_backends/sql.py deleted file mode 100644 index 1045f23a..00000000 --- a/keystone-moon/keystone/assignment/role_backends/sql.py +++ /dev/null @@ -1,202 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-from oslo_db import exception as db_exception - -from keystone import assignment -from keystone.common import driver_hints -from keystone.common import sql -from keystone import exception - -# NOTE(henry-nash): From the manager and above perspective, the domain_id -# attribute of a role is nullable. However, to ensure uniqueness in -# multi-process configurations, it is better to still use a sql uniqueness -# constraint. Since the support for a nullable component of a uniqueness -# constraint across different sql databases is mixed, we instead store a -# special value to represent null, as defined in NULL_DOMAIN_ID below. -NULL_DOMAIN_ID = '<>' - - -class Role(assignment.RoleDriverV9): - - @sql.handle_conflicts(conflict_type='role') - def create_role(self, role_id, role): - with sql.session_for_write() as session: - ref = RoleTable.from_dict(role) - session.add(ref) - return ref.to_dict() - - @driver_hints.truncated - def list_roles(self, hints): - # If there is a filter on domain_id and the value is None, then to - # ensure that the sql filtering works correctly, we need to patch - # the value to be NULL_DOMAIN_ID. This is safe to do here since we - # know we are able to satisfy any filter of this type in the call to - # filter_limit_query() below, which will remove the filter from the - # hints (hence ensuring our substitution is not exposed to the caller). 
- for f in hints.filters: - if (f['name'] == 'domain_id' and f['value'] is None): - f['value'] = NULL_DOMAIN_ID - - with sql.session_for_read() as session: - query = session.query(RoleTable) - refs = sql.filter_limit_query(RoleTable, query, hints) - return [ref.to_dict() for ref in refs] - - def list_roles_from_ids(self, ids): - if not ids: - return [] - else: - with sql.session_for_read() as session: - query = session.query(RoleTable) - query = query.filter(RoleTable.id.in_(ids)) - role_refs = query.all() - return [role_ref.to_dict() for role_ref in role_refs] - - def _get_role(self, session, role_id): - ref = session.query(RoleTable).get(role_id) - if ref is None: - raise exception.RoleNotFound(role_id=role_id) - return ref - - def get_role(self, role_id): - with sql.session_for_read() as session: - return self._get_role(session, role_id).to_dict() - - @sql.handle_conflicts(conflict_type='role') - def update_role(self, role_id, role): - with sql.session_for_write() as session: - ref = self._get_role(session, role_id) - old_dict = ref.to_dict() - for k in role: - old_dict[k] = role[k] - new_role = RoleTable.from_dict(old_dict) - for attr in RoleTable.attributes: - if attr != 'id': - setattr(ref, attr, getattr(new_role, attr)) - ref.extra = new_role.extra - return ref.to_dict() - - def delete_role(self, role_id): - with sql.session_for_write() as session: - ref = self._get_role(session, role_id) - session.delete(ref) - - def _get_implied_role(self, session, prior_role_id, implied_role_id): - query = session.query( - ImpliedRoleTable).filter( - ImpliedRoleTable.prior_role_id == prior_role_id).filter( - ImpliedRoleTable.implied_role_id == implied_role_id) - try: - ref = query.one() - except sql.NotFound: - raise exception.ImpliedRoleNotFound( - prior_role_id=prior_role_id, - implied_role_id=implied_role_id) - return ref - - @sql.handle_conflicts(conflict_type='implied_role') - def create_implied_role(self, prior_role_id, implied_role_id): - with 
sql.session_for_write() as session: - inference = {'prior_role_id': prior_role_id, - 'implied_role_id': implied_role_id} - ref = ImpliedRoleTable.from_dict(inference) - try: - session.add(ref) - except db_exception.DBReferenceError: - # We don't know which role threw this. - # Query each to trigger the exception. - self._get_role(session, prior_role_id) - self._get_role(session, implied_role_id) - return ref.to_dict() - - def delete_implied_role(self, prior_role_id, implied_role_id): - with sql.session_for_write() as session: - ref = self._get_implied_role(session, prior_role_id, - implied_role_id) - session.delete(ref) - - def list_implied_roles(self, prior_role_id): - with sql.session_for_read() as session: - query = session.query( - ImpliedRoleTable).filter( - ImpliedRoleTable.prior_role_id == prior_role_id) - refs = query.all() - return [ref.to_dict() for ref in refs] - - def list_role_inference_rules(self): - with sql.session_for_read() as session: - query = session.query(ImpliedRoleTable) - refs = query.all() - return [ref.to_dict() for ref in refs] - - def get_implied_role(self, prior_role_id, implied_role_id): - with sql.session_for_read() as session: - ref = self._get_implied_role(session, prior_role_id, - implied_role_id) - return ref.to_dict() - - -class ImpliedRoleTable(sql.ModelBase, sql.DictBase): - __tablename__ = 'implied_role' - attributes = ['prior_role_id', 'implied_role_id'] - prior_role_id = sql.Column( - sql.String(64), - sql.ForeignKey('role.id', ondelete="CASCADE"), - primary_key=True) - implied_role_id = sql.Column( - sql.String(64), - sql.ForeignKey('role.id', ondelete="CASCADE"), - primary_key=True) - - @classmethod - def from_dict(cls, dictionary): - new_dictionary = dictionary.copy() - return cls(**new_dictionary) - - def to_dict(self): - """Return a dictionary with model's attributes. - - overrides the `to_dict` function from the base class - to avoid having an `extra` field. 
- """ - d = dict() - for attr in self.__class__.attributes: - d[attr] = getattr(self, attr) - return d - - -class RoleTable(sql.ModelBase, sql.DictBase): - - def to_dict(self, include_extra_dict=False): - d = super(RoleTable, self).to_dict( - include_extra_dict=include_extra_dict) - if d['domain_id'] == NULL_DOMAIN_ID: - d['domain_id'] = None - return d - - @classmethod - def from_dict(cls, role_dict): - if 'domain_id' in role_dict and role_dict['domain_id'] is None: - new_dict = role_dict.copy() - new_dict['domain_id'] = NULL_DOMAIN_ID - else: - new_dict = role_dict - return super(RoleTable, cls).from_dict(new_dict) - - __tablename__ = 'role' - attributes = ['id', 'name', 'domain_id'] - id = sql.Column(sql.String(64), primary_key=True) - name = sql.Column(sql.String(255), nullable=False) - domain_id = sql.Column(sql.String(64), nullable=False, - server_default=NULL_DOMAIN_ID) - extra = sql.Column(sql.JsonBlob()) - __table_args__ = (sql.UniqueConstraint('name', 'domain_id'),) diff --git a/keystone-moon/keystone/assignment/routers.py b/keystone-moon/keystone/assignment/routers.py deleted file mode 100644 index 9bef401e..00000000 --- a/keystone-moon/keystone/assignment/routers.py +++ /dev/null @@ -1,282 +0,0 @@ -# Copyright 2013 Metacloud, Inc. -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""WSGI Routers for the Assignment service.""" - -import functools - -from oslo_config import cfg - -from keystone.assignment import controllers -from keystone.common import json_home -from keystone.common import router -from keystone.common import wsgi - - -CONF = cfg.CONF - -build_os_inherit_relation = functools.partial( - json_home.build_v3_extension_resource_relation, - extension_name='OS-INHERIT', extension_version='1.0') - - -class Public(wsgi.ComposableRouter): - def add_routes(self, mapper): - tenant_controller = controllers.TenantAssignment() - mapper.connect('/tenants', - controller=tenant_controller, - action='get_projects_for_token', - conditions=dict(method=['GET'])) - - -class Admin(wsgi.ComposableRouter): - def add_routes(self, mapper): - # Role Operations - roles_controller = controllers.RoleAssignmentV2() - mapper.connect('/tenants/{tenant_id}/users/{user_id}/roles', - controller=roles_controller, - action='get_user_roles', - conditions=dict(method=['GET'])) - mapper.connect('/users/{user_id}/roles', - controller=roles_controller, - action='get_user_roles', - conditions=dict(method=['GET'])) - - -class Routers(wsgi.RoutersBase): - - def append_v3_routers(self, mapper, routers): - - project_controller = controllers.ProjectAssignmentV3() - self._add_resource( - mapper, project_controller, - path='/users/{user_id}/projects', - get_action='list_user_projects', - rel=json_home.build_v3_resource_relation('user_projects'), - path_vars={ - 'user_id': json_home.Parameters.USER_ID, - }) - - routers.append( - router.Router(controllers.RoleV3(), 'roles', 'role', - resource_descriptions=self.v3_resources, - method_template='%s_wrapper')) - - implied_roles_controller = controllers.ImpliedRolesV3() - self._add_resource( - mapper, implied_roles_controller, - path='/roles/{prior_role_id}/implies', - rel=json_home.build_v3_resource_relation('implied_roles'), - get_action='list_implied_roles', - status=json_home.Status.EXPERIMENTAL, - path_vars={ - 
'prior_role_id': json_home.Parameters.ROLE_ID, - } - ) - - self._add_resource( - mapper, implied_roles_controller, - path='/roles/{prior_role_id}/implies/{implied_role_id}', - put_action='create_implied_role', - delete_action='delete_implied_role', - head_action='check_implied_role', - get_action='get_implied_role', - rel=json_home.build_v3_resource_relation('implied_role'), - status=json_home.Status.EXPERIMENTAL, - path_vars={ - 'prior_role_id': json_home.Parameters.ROLE_ID, - 'implied_role_id': json_home.Parameters.ROLE_ID - } - ) - self._add_resource( - mapper, implied_roles_controller, - path='/role_inferences', - get_action='list_role_inference_rules', - rel=json_home.build_v3_resource_relation('role_inferences'), - status=json_home.Status.EXPERIMENTAL, - path_vars={} - ) - - grant_controller = controllers.GrantAssignmentV3() - self._add_resource( - mapper, grant_controller, - path='/projects/{project_id}/users/{user_id}/roles/{role_id}', - get_head_action='check_grant', - put_action='create_grant', - delete_action='revoke_grant', - rel=json_home.build_v3_resource_relation('project_user_role'), - path_vars={ - 'project_id': json_home.Parameters.PROJECT_ID, - 'role_id': json_home.Parameters.ROLE_ID, - 'user_id': json_home.Parameters.USER_ID, - }) - self._add_resource( - mapper, grant_controller, - path='/projects/{project_id}/groups/{group_id}/roles/{role_id}', - get_head_action='check_grant', - put_action='create_grant', - delete_action='revoke_grant', - rel=json_home.build_v3_resource_relation('project_group_role'), - path_vars={ - 'group_id': json_home.Parameters.GROUP_ID, - 'project_id': json_home.Parameters.PROJECT_ID, - 'role_id': json_home.Parameters.ROLE_ID, - }) - self._add_resource( - mapper, grant_controller, - path='/projects/{project_id}/users/{user_id}/roles', - get_action='list_grants', - rel=json_home.build_v3_resource_relation('project_user_roles'), - path_vars={ - 'project_id': json_home.Parameters.PROJECT_ID, - 'user_id': 
json_home.Parameters.USER_ID, - }) - self._add_resource( - mapper, grant_controller, - path='/projects/{project_id}/groups/{group_id}/roles', - get_action='list_grants', - rel=json_home.build_v3_resource_relation('project_group_roles'), - path_vars={ - 'group_id': json_home.Parameters.GROUP_ID, - 'project_id': json_home.Parameters.PROJECT_ID, - }) - self._add_resource( - mapper, grant_controller, - path='/domains/{domain_id}/users/{user_id}/roles/{role_id}', - get_head_action='check_grant', - put_action='create_grant', - delete_action='revoke_grant', - rel=json_home.build_v3_resource_relation('domain_user_role'), - path_vars={ - 'domain_id': json_home.Parameters.DOMAIN_ID, - 'role_id': json_home.Parameters.ROLE_ID, - 'user_id': json_home.Parameters.USER_ID, - }) - self._add_resource( - mapper, grant_controller, - path='/domains/{domain_id}/groups/{group_id}/roles/{role_id}', - get_head_action='check_grant', - put_action='create_grant', - delete_action='revoke_grant', - rel=json_home.build_v3_resource_relation('domain_group_role'), - path_vars={ - 'domain_id': json_home.Parameters.DOMAIN_ID, - 'group_id': json_home.Parameters.GROUP_ID, - 'role_id': json_home.Parameters.ROLE_ID, - }) - self._add_resource( - mapper, grant_controller, - path='/domains/{domain_id}/users/{user_id}/roles', - get_action='list_grants', - rel=json_home.build_v3_resource_relation('domain_user_roles'), - path_vars={ - 'domain_id': json_home.Parameters.DOMAIN_ID, - 'user_id': json_home.Parameters.USER_ID, - }) - self._add_resource( - mapper, grant_controller, - path='/domains/{domain_id}/groups/{group_id}/roles', - get_action='list_grants', - rel=json_home.build_v3_resource_relation('domain_group_roles'), - path_vars={ - 'domain_id': json_home.Parameters.DOMAIN_ID, - 'group_id': json_home.Parameters.GROUP_ID, - }) - - self._add_resource( - mapper, controllers.RoleAssignmentV3(), - path='/role_assignments', - get_action='list_role_assignments_wrapper', - 
rel=json_home.build_v3_resource_relation('role_assignments')) - - if CONF.os_inherit.enabled: - self._add_resource( - mapper, grant_controller, - path='/OS-INHERIT/domains/{domain_id}/users/{user_id}/roles/' - '{role_id}/inherited_to_projects', - get_head_action='check_grant', - put_action='create_grant', - delete_action='revoke_grant', - rel=build_os_inherit_relation( - resource_name='domain_user_role_inherited_to_projects'), - path_vars={ - 'domain_id': json_home.Parameters.DOMAIN_ID, - 'role_id': json_home.Parameters.ROLE_ID, - 'user_id': json_home.Parameters.USER_ID, - }) - self._add_resource( - mapper, grant_controller, - path='/OS-INHERIT/domains/{domain_id}/groups/{group_id}/roles/' - '{role_id}/inherited_to_projects', - get_head_action='check_grant', - put_action='create_grant', - delete_action='revoke_grant', - rel=build_os_inherit_relation( - resource_name='domain_group_role_inherited_to_projects'), - path_vars={ - 'domain_id': json_home.Parameters.DOMAIN_ID, - 'group_id': json_home.Parameters.GROUP_ID, - 'role_id': json_home.Parameters.ROLE_ID, - }) - self._add_resource( - mapper, grant_controller, - path='/OS-INHERIT/domains/{domain_id}/groups/{group_id}/roles/' - 'inherited_to_projects', - get_action='list_grants', - rel=build_os_inherit_relation( - resource_name='domain_group_roles_inherited_to_projects'), - path_vars={ - 'domain_id': json_home.Parameters.DOMAIN_ID, - 'group_id': json_home.Parameters.GROUP_ID, - }) - self._add_resource( - mapper, grant_controller, - path='/OS-INHERIT/domains/{domain_id}/users/{user_id}/roles/' - 'inherited_to_projects', - get_action='list_grants', - rel=build_os_inherit_relation( - resource_name='domain_user_roles_inherited_to_projects'), - path_vars={ - 'domain_id': json_home.Parameters.DOMAIN_ID, - 'user_id': json_home.Parameters.USER_ID, - }) - self._add_resource( - mapper, grant_controller, - path='/OS-INHERIT/projects/{project_id}/users/{user_id}/roles/' - '{role_id}/inherited_to_projects', - 
get_head_action='check_grant', - put_action='create_grant', - delete_action='revoke_grant', - rel=build_os_inherit_relation( - resource_name='project_user_role_inherited_to_projects'), - path_vars={ - 'project_id': json_home.Parameters.PROJECT_ID, - 'user_id': json_home.Parameters.USER_ID, - 'role_id': json_home.Parameters.ROLE_ID, - }) - self._add_resource( - mapper, grant_controller, - path='/OS-INHERIT/projects/{project_id}/groups/{group_id}/' - 'roles/{role_id}/inherited_to_projects', - get_head_action='check_grant', - put_action='create_grant', - delete_action='revoke_grant', - rel=build_os_inherit_relation( - resource_name='project_group_role_inherited_to_projects'), - path_vars={ - 'project_id': json_home.Parameters.PROJECT_ID, - 'group_id': json_home.Parameters.GROUP_ID, - 'role_id': json_home.Parameters.ROLE_ID, - }) diff --git a/keystone-moon/keystone/assignment/schema.py b/keystone-moon/keystone/assignment/schema.py deleted file mode 100644 index f4d1b08a..00000000 --- a/keystone-moon/keystone/assignment/schema.py +++ /dev/null @@ -1,32 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone.common.validation import parameter_types - - -_role_properties = { - 'name': parameter_types.name -} - -role_create = { - 'type': 'object', - 'properties': _role_properties, - 'required': ['name'], - 'additionalProperties': True -} - -role_update = { - 'type': 'object', - 'properties': _role_properties, - 'minProperties': 1, - 'additionalProperties': True -} diff --git a/keystone-moon/keystone/auth/__init__.py b/keystone-moon/keystone/auth/__init__.py deleted file mode 100644 index bcbf69fd..00000000 --- a/keystone-moon/keystone/auth/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.auth import controllers # noqa -from keystone.auth.core import * # noqa diff --git a/keystone-moon/keystone/auth/controllers.py b/keystone-moon/keystone/auth/controllers.py deleted file mode 100644 index 3e6af80f..00000000 --- a/keystone-moon/keystone/auth/controllers.py +++ /dev/null @@ -1,675 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import sys - -from keystoneclient.common import cms -from oslo_config import cfg -from oslo_log import log -from oslo_log import versionutils -from oslo_serialization import jsonutils -from oslo_utils import importutils -import six -import stevedore - -from keystone.common import config -from keystone.common import controller -from keystone.common import dependency -from keystone.common import utils -from keystone.common import wsgi -from keystone import exception -from keystone.federation import constants -from keystone.i18n import _, _LI, _LW -from keystone.resource import controllers as resource_controllers - - -LOG = log.getLogger(__name__) - -CONF = cfg.CONF - -# registry of authentication methods -AUTH_METHODS = {} -AUTH_PLUGINS_LOADED = False - - -def load_auth_method(method): - plugin_name = CONF.auth.get(method) or 'default' - namespace = 'keystone.auth.%s' % method - try: - driver_manager = stevedore.DriverManager(namespace, plugin_name, - invoke_on_load=True) - return driver_manager.driver - except RuntimeError: - LOG.debug('Failed to load the %s driver (%s) using stevedore, will ' - 'attempt to load using import_object instead.', - method, plugin_name) - - driver = importutils.import_object(plugin_name) - - msg = (_( - 'Direct import of auth plugin %(name)r is deprecated as of Liberty in ' - 'favor of its entrypoint from %(namespace)r and may be removed in ' - 'N.') % - {'name': plugin_name, 'namespace': namespace}) - versionutils.report_deprecated_feature(LOG, msg) - - return driver - - -def load_auth_methods(): - global AUTH_PLUGINS_LOADED 
- - if AUTH_PLUGINS_LOADED: - # Only try and load methods a single time. - return - # config.setup_authentication should be idempotent, call it to ensure we - # have setup all the appropriate configuration options we may need. - config.setup_authentication() - for plugin in set(CONF.auth.methods): - AUTH_METHODS[plugin] = load_auth_method(plugin) - AUTH_PLUGINS_LOADED = True - - -def get_auth_method(method_name): - global AUTH_METHODS - if method_name not in AUTH_METHODS: - raise exception.AuthMethodNotSupported() - return AUTH_METHODS[method_name] - - -class AuthContext(dict): - """Retrofitting auth_context to reconcile identity attributes. - - The identity attributes must not have conflicting values among the - auth plug-ins. The only exception is `expires_at`, which is set to its - earliest value. - - """ - - # identity attributes need to be reconciled among the auth plugins - IDENTITY_ATTRIBUTES = frozenset(['user_id', 'project_id', - 'access_token_id', 'domain_id', - 'expires_at']) - - def __setitem__(self, key, val): - if key in self.IDENTITY_ATTRIBUTES and key in self: - existing_val = self[key] - if key == 'expires_at': - # special treatment for 'expires_at', we are going to take - # the earliest expiration instead. - if existing_val != val: - LOG.info(_LI('"expires_at" has conflicting values ' - '%(existing)s and %(new)s. 
Will use the ' - 'earliest value.'), - {'existing': existing_val, 'new': val}) - if existing_val is None or val is None: - val = existing_val or val - else: - val = min(existing_val, val) - elif existing_val != val: - msg = _('Unable to reconcile identity attribute %(attribute)s ' - 'as it has conflicting values %(new)s and %(old)s') % ( - {'attribute': key, - 'new': val, - 'old': existing_val}) - raise exception.Unauthorized(msg) - return super(AuthContext, self).__setitem__(key, val) - - -@dependency.requires('resource_api', 'trust_api') -class AuthInfo(object): - """Encapsulation of "auth" request.""" - - @staticmethod - def create(context, auth=None, scope_only=False): - auth_info = AuthInfo(context, auth=auth) - auth_info._validate_and_normalize_auth_data(scope_only) - return auth_info - - def __init__(self, context, auth=None): - self.context = context - self.auth = auth - self._scope_data = (None, None, None, None) - # self._scope_data is (domain_id, project_id, trust_ref, unscoped) - # project scope: (None, project_id, None, None) - # domain scope: (domain_id, None, None, None) - # trust scope: (None, None, trust_ref, None) - # unscoped: (None, None, None, 'unscoped') - - def _assert_project_is_enabled(self, project_ref): - # ensure the project is enabled - try: - self.resource_api.assert_project_enabled( - project_id=project_ref['id'], - project=project_ref) - except AssertionError as e: - LOG.warning(six.text_type(e)) - six.reraise(exception.Unauthorized, exception.Unauthorized(e), - sys.exc_info()[2]) - - def _assert_domain_is_enabled(self, domain_ref): - try: - self.resource_api.assert_domain_enabled( - domain_id=domain_ref['id'], - domain=domain_ref) - except AssertionError as e: - LOG.warning(six.text_type(e)) - six.reraise(exception.Unauthorized, exception.Unauthorized(e), - sys.exc_info()[2]) - - def _lookup_domain(self, domain_info): - domain_id = domain_info.get('id') - domain_name = domain_info.get('name') - domain_ref = None - if not domain_id 
and not domain_name: - raise exception.ValidationError(attribute='id or name', - target='domain') - try: - if domain_name: - if (CONF.resource.domain_name_url_safe == 'strict' and - utils.is_not_url_safe(domain_name)): - msg = _('Domain name cannot contain reserved characters.') - raise exception.Unauthorized(message=msg) - domain_ref = self.resource_api.get_domain_by_name( - domain_name) - else: - domain_ref = self.resource_api.get_domain(domain_id) - except exception.DomainNotFound as e: - LOG.exception(six.text_type(e)) - raise exception.Unauthorized(e) - self._assert_domain_is_enabled(domain_ref) - return domain_ref - - def _lookup_project(self, project_info): - project_id = project_info.get('id') - project_name = project_info.get('name') - project_ref = None - if not project_id and not project_name: - raise exception.ValidationError(attribute='id or name', - target='project') - try: - if project_name: - if (CONF.resource.project_name_url_safe == 'strict' and - utils.is_not_url_safe(project_name)): - msg = _('Project name cannot contain reserved characters.') - raise exception.Unauthorized(message=msg) - if 'domain' not in project_info: - raise exception.ValidationError(attribute='domain', - target='project') - domain_ref = self._lookup_domain(project_info['domain']) - project_ref = self.resource_api.get_project_by_name( - project_name, domain_ref['id']) - else: - project_ref = self.resource_api.get_project(project_id) - # NOTE(morganfainberg): The _lookup_domain method will raise - # exception.Unauthorized if the domain isn't found or is - # disabled. 
- self._lookup_domain({'id': project_ref['domain_id']}) - except exception.ProjectNotFound as e: - raise exception.Unauthorized(e) - self._assert_project_is_enabled(project_ref) - return project_ref - - def _lookup_trust(self, trust_info): - trust_id = trust_info.get('id') - if not trust_id: - raise exception.ValidationError(attribute='trust_id', - target='trust') - trust = self.trust_api.get_trust(trust_id) - return trust - - def _validate_and_normalize_scope_data(self): - """Validate and normalize scope data.""" - if 'scope' not in self.auth: - return - if sum(['project' in self.auth['scope'], - 'domain' in self.auth['scope'], - 'unscoped' in self.auth['scope'], - 'OS-TRUST:trust' in self.auth['scope']]) != 1: - raise exception.ValidationError( - attribute='project, domain, OS-TRUST:trust or unscoped', - target='scope') - if 'unscoped' in self.auth['scope']: - self._scope_data = (None, None, None, 'unscoped') - return - if 'project' in self.auth['scope']: - project_ref = self._lookup_project(self.auth['scope']['project']) - self._scope_data = (None, project_ref['id'], None, None) - elif 'domain' in self.auth['scope']: - domain_ref = self._lookup_domain(self.auth['scope']['domain']) - self._scope_data = (domain_ref['id'], None, None, None) - elif 'OS-TRUST:trust' in self.auth['scope']: - if not CONF.trust.enabled: - raise exception.Forbidden('Trusts are disabled.') - trust_ref = self._lookup_trust( - self.auth['scope']['OS-TRUST:trust']) - # TODO(ayoung): when trusts support domains, fill in domain data - if trust_ref.get('project_id') is not None: - project_ref = self._lookup_project( - {'id': trust_ref['project_id']}) - self._scope_data = (None, project_ref['id'], trust_ref, None) - else: - self._scope_data = (None, None, trust_ref, None) - - def _validate_auth_methods(self): - if 'identity' not in self.auth: - raise exception.ValidationError(attribute='identity', - target='auth') - - # make sure auth methods are provided - if 'methods' not in 
self.auth['identity']: - raise exception.ValidationError(attribute='methods', - target='identity') - - # make sure all the method data/payload are provided - for method_name in self.get_method_names(): - if method_name not in self.auth['identity']: - raise exception.ValidationError(attribute=method_name, - target='identity') - - # make sure auth method is supported - for method_name in self.get_method_names(): - if method_name not in AUTH_METHODS: - raise exception.AuthMethodNotSupported() - - def _validate_and_normalize_auth_data(self, scope_only=False): - """Make sure "auth" is valid. - - :param scope_only: If it is True, auth methods will not be - validated but only the scope data. - :type scope_only: boolean - """ - # make sure "auth" exist - if not self.auth: - raise exception.ValidationError(attribute='auth', - target='request body') - - # NOTE(chioleong): Tokenless auth does not provide auth methods, - # we only care about using this method to validate the scope - # information. Therefore, validating the auth methods here is - # insignificant and we can skip it when scope_only is set to - # true. - if scope_only is False: - self._validate_auth_methods() - self._validate_and_normalize_scope_data() - - def get_method_names(self): - """Returns the identity method names. - - :returns: list of auth method names - - """ - # Sanitizes methods received in request's body - # Filters out duplicates, while keeping elements' order. - method_names = [] - for method in self.auth['identity']['methods']: - if method not in method_names: - method_names.append(method) - return method_names - - def get_method_data(self, method): - """Get the auth method payload. - - :returns: auth method payload - - """ - if method not in self.auth['identity']['methods']: - raise exception.ValidationError(attribute=method, - target='identity') - return self.auth['identity'][method] - - def get_scope(self): - """Get scope information. - - Verify and return the scoping information. 
- - :returns: (domain_id, project_id, trust_ref, unscoped). - If scope to a project, (None, project_id, None, None) - will be returned. - If scoped to a domain, (domain_id, None, None, None) - will be returned. - If scoped to a trust, (None, project_id, trust_ref, None), - Will be returned, where the project_id comes from the - trust definition. - If unscoped, (None, None, None, 'unscoped') will be - returned. - - """ - return self._scope_data - - def set_scope(self, domain_id=None, project_id=None, trust=None, - unscoped=None): - """Set scope information.""" - if domain_id and project_id: - msg = _('Scoping to both domain and project is not allowed') - raise ValueError(msg) - if domain_id and trust: - msg = _('Scoping to both domain and trust is not allowed') - raise ValueError(msg) - if project_id and trust: - msg = _('Scoping to both project and trust is not allowed') - raise ValueError(msg) - self._scope_data = (domain_id, project_id, trust, unscoped) - - -@dependency.requires('assignment_api', 'catalog_api', 'identity_api', - 'resource_api', 'token_provider_api', 'trust_api') -class Auth(controller.V3Controller): - - # Note(atiwari): From V3 auth controller code we are - # calling protection() wrappers, so we need to setup - # the member_name and collection_name attributes of - # auth controller code. - # In the absence of these attributes, default 'entity' - # string will be used to represent the target which is - # generic. Policy can be defined using 'entity' but it - # would not reflect the exact entity that is in context. - # We are defining collection_name = 'tokens' and - # member_name = 'token' to facilitate policy decisions. 
- collection_name = 'tokens' - member_name = 'token' - - def __init__(self, *args, **kw): - super(Auth, self).__init__(*args, **kw) - config.setup_authentication() - - def authenticate_for_token(self, context, auth=None): - """Authenticate user and issue a token.""" - include_catalog = 'nocatalog' not in context['query_string'] - - try: - auth_info = AuthInfo.create(context, auth=auth) - auth_context = AuthContext(extras={}, - method_names=[], - bind={}) - self.authenticate(context, auth_info, auth_context) - if auth_context.get('access_token_id'): - auth_info.set_scope(None, auth_context['project_id'], None) - self._check_and_set_default_scoping(auth_info, auth_context) - (domain_id, project_id, trust, unscoped) = auth_info.get_scope() - - method_names = auth_info.get_method_names() - method_names += auth_context.get('method_names', []) - # make sure the list is unique - method_names = list(set(method_names)) - expires_at = auth_context.get('expires_at') - # NOTE(morganfainberg): define this here so it is clear what the - # argument is during the issue_v3_token provider call. - metadata_ref = None - - token_audit_id = auth_context.get('audit_id') - - (token_id, token_data) = self.token_provider_api.issue_v3_token( - auth_context['user_id'], method_names, expires_at, project_id, - domain_id, auth_context, trust, metadata_ref, include_catalog, - parent_audit_id=token_audit_id) - - # NOTE(wanghong): We consume a trust use only when we are using - # trusts and have successfully issued a token. 
- if trust: - self.trust_api.consume_use(trust['id']) - - return render_token_data_response(token_id, token_data, - created=True) - except exception.TrustNotFound as e: - raise exception.Unauthorized(e) - - def _check_and_set_default_scoping(self, auth_info, auth_context): - (domain_id, project_id, trust, unscoped) = auth_info.get_scope() - if trust: - project_id = trust['project_id'] - if domain_id or project_id or trust: - # scope is specified - return - - # Skip scoping when unscoped federated token is being issued - if constants.IDENTITY_PROVIDER in auth_context: - return - - # Do not scope if request is for explicitly unscoped token - if unscoped is not None: - return - - # fill in default_project_id if it is available - try: - user_ref = self.identity_api.get_user(auth_context['user_id']) - except exception.UserNotFound as e: - LOG.exception(six.text_type(e)) - raise exception.Unauthorized(e) - - default_project_id = user_ref.get('default_project_id') - if not default_project_id: - # User has no default project. He shall get an unscoped token. - return - - # make sure user's default project is legit before scoping to it - try: - default_project_ref = self.resource_api.get_project( - default_project_id) - default_project_domain_ref = self.resource_api.get_domain( - default_project_ref['domain_id']) - if (default_project_ref.get('enabled', True) and - default_project_domain_ref.get('enabled', True)): - if self.assignment_api.get_roles_for_user_and_project( - user_ref['id'], default_project_id): - auth_info.set_scope(project_id=default_project_id) - else: - msg = _LW("User %(user_id)s doesn't have access to" - " default project %(project_id)s. The token" - " will be unscoped rather than scoped to the" - " project.") - LOG.warning(msg, - {'user_id': user_ref['id'], - 'project_id': default_project_id}) - else: - msg = _LW("User %(user_id)s's default project %(project_id)s" - " is disabled. 
The token will be unscoped rather" - " than scoped to the project.") - LOG.warning(msg, - {'user_id': user_ref['id'], - 'project_id': default_project_id}) - except (exception.ProjectNotFound, exception.DomainNotFound): - # default project or default project domain doesn't exist, - # will issue unscoped token instead - msg = _LW("User %(user_id)s's default project %(project_id)s not" - " found. The token will be unscoped rather than" - " scoped to the project.") - LOG.warning(msg, {'user_id': user_ref['id'], - 'project_id': default_project_id}) - - def authenticate(self, context, auth_info, auth_context): - """Authenticate user.""" - # The 'external' method allows any 'REMOTE_USER' based authentication - # In some cases the server can set REMOTE_USER as '' instead of - # dropping it, so this must be filtered out - if context['environment'].get('REMOTE_USER'): - try: - external = get_auth_method('external') - external.authenticate(context, auth_info, auth_context) - except exception.AuthMethodNotSupported: - # This will happen there is no 'external' plugin registered - # and the container is performing authentication. - # The 'kerberos' and 'saml' methods will be used this way. - # In those cases, it is correct to not register an - # 'external' plugin; if there is both an 'external' and a - # 'kerberos' plugin, it would run the check on identity twice. 
- LOG.debug("No 'external' plugin is registered.") - except exception.Unauthorized: - # If external fails then continue and attempt to determine - # user identity using remaining auth methods - LOG.debug("Authorization failed for 'external' auth method.") - - # need to aggregate the results in case two or more methods - # are specified - auth_response = {'methods': []} - for method_name in auth_info.get_method_names(): - method = get_auth_method(method_name) - resp = method.authenticate(context, - auth_info.get_method_data(method_name), - auth_context) - if resp: - auth_response['methods'].append(method_name) - auth_response[method_name] = resp - - if auth_response["methods"]: - # authentication continuation required - raise exception.AdditionalAuthRequired(auth_response) - - if 'user_id' not in auth_context: - msg = _('User not found') - raise exception.Unauthorized(msg) - - @controller.protected() - def check_token(self, context): - token_id = context.get('subject_token_id') - token_data = self.token_provider_api.validate_v3_token( - token_id) - # NOTE(morganfainberg): The code in - # ``keystone.common.wsgi.render_response`` will remove the content - # body. 
- return render_token_data_response(token_id, token_data) - - @controller.protected() - def revoke_token(self, context): - token_id = context.get('subject_token_id') - return self.token_provider_api.revoke_token(token_id) - - @controller.protected() - def validate_token(self, context): - token_id = context.get('subject_token_id') - include_catalog = 'nocatalog' not in context['query_string'] - token_data = self.token_provider_api.validate_v3_token( - token_id) - if not include_catalog and 'catalog' in token_data['token']: - del token_data['token']['catalog'] - return render_token_data_response(token_id, token_data) - - @controller.protected() - def revocation_list(self, context, auth=None): - if not CONF.token.revoke_by_id: - raise exception.Gone() - - audit_id_only = ('audit_id_only' in context['query_string']) - - tokens = self.token_provider_api.list_revoked_tokens() - - for t in tokens: - expires = t['expires'] - if not (expires and isinstance(expires, six.text_type)): - t['expires'] = utils.isotime(expires) - if audit_id_only: - t.pop('id', None) - data = {'revoked': tokens} - - if audit_id_only: - # No need to obfuscate if no token IDs. - return data - - json_data = jsonutils.dumps(data) - signed_text = cms.cms_sign_text(json_data, - CONF.signing.certfile, - CONF.signing.keyfile) - - return {'signed': signed_text} - - def _combine_lists_uniquely(self, a, b): - # it's most likely that only one of these will be filled so avoid - # the combination if possible. 
- if a and b: - return {x['id']: x for x in a + b}.values() - else: - return a or b - - @controller.protected() - def get_auth_projects(self, context): - auth_context = self.get_auth_context(context) - - user_id = auth_context.get('user_id') - user_refs = [] - if user_id: - try: - user_refs = self.assignment_api.list_projects_for_user(user_id) - except exception.UserNotFound: # nosec - # federated users have an id but they don't link to anything - pass - - group_ids = auth_context.get('group_ids') - grp_refs = [] - if group_ids: - grp_refs = self.assignment_api.list_projects_for_groups(group_ids) - - refs = self._combine_lists_uniquely(user_refs, grp_refs) - return resource_controllers.ProjectV3.wrap_collection(context, refs) - - @controller.protected() - def get_auth_domains(self, context): - auth_context = self.get_auth_context(context) - - user_id = auth_context.get('user_id') - user_refs = [] - if user_id: - try: - user_refs = self.assignment_api.list_domains_for_user(user_id) - except exception.UserNotFound: # nosec - # federated users have an id but they don't link to anything - pass - - group_ids = auth_context.get('group_ids') - grp_refs = [] - if group_ids: - grp_refs = self.assignment_api.list_domains_for_groups(group_ids) - - refs = self._combine_lists_uniquely(user_refs, grp_refs) - return resource_controllers.DomainV3.wrap_collection(context, refs) - - @controller.protected() - def get_auth_catalog(self, context): - auth_context = self.get_auth_context(context) - user_id = auth_context.get('user_id') - project_id = auth_context.get('project_id') - - if not project_id: - raise exception.Forbidden( - _('A project-scoped token is required to produce a service ' - 'catalog.')) - - # The V3Controller base methods mostly assume that you're returning - # either a collection or a single element from a collection, neither of - # which apply to the catalog. 
Because this is a special case, this - # re-implements a tiny bit of work done by the base controller (such as - # self-referential link building) to avoid overriding or refactoring - # several private methods. - return { - 'catalog': self.catalog_api.get_v3_catalog(user_id, project_id), - 'links': {'self': self.base_url(context, path='auth/catalog')} - } - - -# FIXME(gyee): not sure if it belongs here or keystone.common. Park it here -# for now. -def render_token_data_response(token_id, token_data, created=False): - """Render token data HTTP response. - - Stash token ID into the X-Subject-Token header. - - """ - headers = [('X-Subject-Token', token_id)] - - if created: - status = (201, 'Created') - else: - status = (200, 'OK') - - return wsgi.render_response(body=token_data, - status=status, headers=headers) diff --git a/keystone-moon/keystone/auth/core.py b/keystone-moon/keystone/auth/core.py deleted file mode 100644 index b865d82b..00000000 --- a/keystone-moon/keystone/auth/core.py +++ /dev/null @@ -1,94 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import abc - -import six - -from keystone import exception - - -@six.add_metaclass(abc.ABCMeta) -class AuthMethodHandler(object): - """Abstract base class for an authentication plugin.""" - - def __init__(self): - pass - - @abc.abstractmethod - def authenticate(self, context, auth_payload, auth_context): - """Authenticate user and return an authentication context. 
- - :param context: keystone's request context - :param auth_payload: the content of the authentication for a given - method - :param auth_context: user authentication context, a dictionary shared - by all plugins. It contains "method_names" and - "extras" by default. "method_names" is a list and - "extras" is a dictionary. - - If successful, plugin must set ``user_id`` in ``auth_context``. - ``method_name`` is used to convey any additional authentication methods - in case authentication is for re-scoping. For example, if the - authentication is for re-scoping, plugin must append the previous - method names into ``method_names``. Also, plugin may add any additional - information into ``extras``. Anything in ``extras`` will be conveyed in - the token's ``extras`` attribute. Here's an example of ``auth_context`` - on successful authentication:: - - { - "extras": {}, - "methods": [ - "password", - "token" - ], - "user_id": "abc123" - } - - Plugins are invoked in the order in which they are specified in the - ``methods`` attribute of the ``identity`` object. For example, - ``custom-plugin`` is invoked before ``password``, which is invoked - before ``token`` in the following authentication request:: - - { - "auth": { - "identity": { - "custom-plugin": { - "custom-data": "sdfdfsfsfsdfsf" - }, - "methods": [ - "custom-plugin", - "password", - "token" - ], - "password": { - "user": { - "id": "s23sfad1", - "password": "secrete" - } - }, - "token": { - "id": "sdfafasdfsfasfasdfds" - } - } - } - } - - :returns: None if authentication is successful. - Authentication payload in the form of a dictionary for the - next authentication step if this is a multi step - authentication. 
- :raises keystone.exception.Unauthorized: for authentication failure - """ - raise exception.Unauthorized() diff --git a/keystone-moon/keystone/auth/plugins/__init__.py b/keystone-moon/keystone/auth/plugins/__init__.py deleted file mode 100644 index 5da54703..00000000 --- a/keystone-moon/keystone/auth/plugins/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright 2015 CERN -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.auth.plugins.core import * # noqa diff --git a/keystone-moon/keystone/auth/plugins/core.py b/keystone-moon/keystone/auth/plugins/core.py deleted file mode 100644 index c513f815..00000000 --- a/keystone-moon/keystone/auth/plugins/core.py +++ /dev/null @@ -1,211 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import sys - -from oslo_config import cfg -from oslo_log import log -import six - -from keystone.common import dependency -from keystone import exception - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -def construct_method_map_from_config(): - """Determine authentication method types for deployment. - - :returns: a dictionary containing the methods and their indexes - - """ - method_map = dict() - method_index = 1 - for method in CONF.auth.methods: - method_map[method_index] = method - method_index = method_index * 2 - - return method_map - - -def convert_method_list_to_integer(methods): - """Convert the method type(s) to an integer. - - :param methods: a list of method names - :returns: an integer representing the methods - - """ - method_map = construct_method_map_from_config() - - method_ints = [] - for method in methods: - for k, v in method_map.items(): - if v == method: - method_ints.append(k) - return sum(method_ints) - - -def convert_integer_to_method_list(method_int): - """Convert an integer to a list of methods. - - :param method_int: an integer representing methods - :returns: a corresponding list of methods - - """ - # If the method_int is 0 then no methods were used so return an empty - # method list - if method_int == 0: - return [] - - method_map = construct_method_map_from_config() - method_ints = [] - for k, v in method_map.items(): - method_ints.append(k) - method_ints.sort(reverse=True) - - confirmed_methods = [] - for m_int in method_ints: - # (lbragstad): By dividing the method_int by each key in the - # method_map, we know if the division results in an integer of 1, that - # key was used in the construction of the total sum of the method_int. - # In that case, we should confirm the key value and store it so we can - # look it up later. Then we should take the remainder of what is - # confirmed and the method_int and continue the process. 
In the end, we - # should have a list of integers that correspond to indexes in our - # method_map and we can reinflate the methods that the original - # method_int represents. - if (method_int / m_int) == 1: - confirmed_methods.append(m_int) - method_int = method_int - m_int - - methods = [] - for method in confirmed_methods: - methods.append(method_map[method]) - - return methods - - -@dependency.requires('identity_api', 'resource_api') -class BaseUserInfo(object): - - @classmethod - def create(cls, auth_payload, method_name): - user_auth_info = cls() - user_auth_info._validate_and_normalize_auth_data(auth_payload) - user_auth_info.METHOD_NAME = method_name - return user_auth_info - - def __init__(self): - self.user_id = None - self.user_ref = None - self.METHOD_NAME = None - - def _assert_domain_is_enabled(self, domain_ref): - try: - self.resource_api.assert_domain_enabled( - domain_id=domain_ref['id'], - domain=domain_ref) - except AssertionError as e: - LOG.warning(six.text_type(e)) - six.reraise(exception.Unauthorized, exception.Unauthorized(e), - sys.exc_info()[2]) - - def _assert_user_is_enabled(self, user_ref): - try: - self.identity_api.assert_user_enabled( - user_id=user_ref['id'], - user=user_ref) - except AssertionError as e: - LOG.warning(six.text_type(e)) - six.reraise(exception.Unauthorized, exception.Unauthorized(e), - sys.exc_info()[2]) - - def _lookup_domain(self, domain_info): - domain_id = domain_info.get('id') - domain_name = domain_info.get('name') - domain_ref = None - if not domain_id and not domain_name: - raise exception.ValidationError(attribute='id or name', - target='domain') - try: - if domain_name: - domain_ref = self.resource_api.get_domain_by_name( - domain_name) - else: - domain_ref = self.resource_api.get_domain(domain_id) - except exception.DomainNotFound as e: - LOG.exception(six.text_type(e)) - raise exception.Unauthorized(e) - self._assert_domain_is_enabled(domain_ref) - return domain_ref - - def 
_validate_and_normalize_auth_data(self, auth_payload): - if 'user' not in auth_payload: - raise exception.ValidationError(attribute='user', - target=self.METHOD_NAME) - user_info = auth_payload['user'] - user_id = user_info.get('id') - user_name = user_info.get('name') - user_ref = None - if not user_id and not user_name: - raise exception.ValidationError(attribute='id or name', - target='user') - try: - if user_name: - if 'domain' not in user_info: - raise exception.ValidationError(attribute='domain', - target='user') - domain_ref = self._lookup_domain(user_info['domain']) - user_ref = self.identity_api.get_user_by_name( - user_name, domain_ref['id']) - else: - user_ref = self.identity_api.get_user(user_id) - domain_ref = self.resource_api.get_domain( - user_ref['domain_id']) - self._assert_domain_is_enabled(domain_ref) - except exception.UserNotFound as e: - LOG.exception(six.text_type(e)) - raise exception.Unauthorized(e) - self._assert_user_is_enabled(user_ref) - self.user_ref = user_ref - self.user_id = user_ref['id'] - self.domain_id = domain_ref['id'] - - -class UserAuthInfo(BaseUserInfo): - - def __init__(self): - super(UserAuthInfo, self).__init__() - self.password = None - - def _validate_and_normalize_auth_data(self, auth_payload): - super(UserAuthInfo, self)._validate_and_normalize_auth_data( - auth_payload) - user_info = auth_payload['user'] - self.password = user_info.get('password') - - -class TOTPUserInfo(BaseUserInfo): - - def __init__(self): - super(TOTPUserInfo, self).__init__() - self.passcode = None - - def _validate_and_normalize_auth_data(self, auth_payload): - super(TOTPUserInfo, self)._validate_and_normalize_auth_data( - auth_payload) - user_info = auth_payload['user'] - self.passcode = user_info.get('passcode') diff --git a/keystone-moon/keystone/auth/plugins/external.py b/keystone-moon/keystone/auth/plugins/external.py deleted file mode 100644 index b00b808a..00000000 --- a/keystone-moon/keystone/auth/plugins/external.py +++ /dev/null @@ 
-1,101 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Keystone External Authentication Plugins""" - -import abc - -from oslo_config import cfg -import six - -from keystone import auth -from keystone.common import dependency -from keystone import exception -from keystone.i18n import _ - - -CONF = cfg.CONF - - -@six.add_metaclass(abc.ABCMeta) -class Base(auth.AuthMethodHandler): - def authenticate(self, context, auth_info, auth_context): - """Use REMOTE_USER to look up the user in the identity backend. - - auth_context is an in-out variable that will be updated with the - user_id from the actual user from the REMOTE_USER env variable. - """ - try: - REMOTE_USER = context['environment']['REMOTE_USER'] - except KeyError: - msg = _('No authenticated user') - raise exception.Unauthorized(msg) - try: - user_ref = self._authenticate(REMOTE_USER, context) - auth_context['user_id'] = user_ref['id'] - if ('kerberos' in CONF.token.bind and - (context['environment'].get('AUTH_TYPE', '').lower() - == 'negotiate')): - auth_context['bind']['kerberos'] = user_ref['name'] - except Exception: - msg = _('Unable to lookup user %s') % (REMOTE_USER) - raise exception.Unauthorized(msg) - - @abc.abstractmethod - def _authenticate(self, remote_user, context): - """Look up the user in the identity backend. 
- - Return user_ref - """ - pass - - -@dependency.requires('identity_api') -class DefaultDomain(Base): - def _authenticate(self, remote_user, context): - """Use remote_user to look up the user in the identity backend.""" - domain_id = CONF.identity.default_domain_id - user_ref = self.identity_api.get_user_by_name(remote_user, domain_id) - return user_ref - - -@dependency.requires('identity_api', 'resource_api') -class Domain(Base): - def _authenticate(self, remote_user, context): - """Use remote_user to look up the user in the identity backend. - - The domain will be extracted from the REMOTE_DOMAIN environment - variable if present. If not, the default domain will be used. - """ - username = remote_user - try: - domain_name = context['environment']['REMOTE_DOMAIN'] - except KeyError: - domain_id = CONF.identity.default_domain_id - else: - domain_ref = self.resource_api.get_domain_by_name(domain_name) - domain_id = domain_ref['id'] - - user_ref = self.identity_api.get_user_by_name(username, domain_id) - return user_ref - - -class KerberosDomain(Domain): - """Allows `kerberos` as a method.""" - - def _authenticate(self, remote_user, context): - auth_type = context['environment'].get('AUTH_TYPE') - if auth_type != 'Negotiate': - raise exception.Unauthorized(_("auth_type is not Negotiate")) - return super(KerberosDomain, self)._authenticate(remote_user, context) diff --git a/keystone-moon/keystone/auth/plugins/mapped.py b/keystone-moon/keystone/auth/plugins/mapped.py deleted file mode 100644 index e9716201..00000000 --- a/keystone-moon/keystone/auth/plugins/mapped.py +++ /dev/null @@ -1,258 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import functools - -from pycadf import cadftaxonomy as taxonomy -from six.moves.urllib import parse - -from keystone import auth -from keystone.auth import plugins as auth_plugins -from keystone.common import dependency -from keystone import exception -from keystone.federation import constants as federation_constants -from keystone.federation import utils -from keystone.i18n import _ -from keystone.models import token_model -from keystone import notifications - - -METHOD_NAME = 'mapped' - - -@dependency.requires('federation_api', 'identity_api', - 'resource_api', 'token_provider_api') -class Mapped(auth.AuthMethodHandler): - - def _get_token_ref(self, auth_payload): - token_id = auth_payload['id'] - response = self.token_provider_api.validate_token(token_id) - return token_model.KeystoneToken(token_id=token_id, - token_data=response) - - def authenticate(self, context, auth_payload, auth_context): - """Authenticate mapped user and set an authentication context. - - :param context: keystone's request context - :param auth_payload: the content of the authentication for a - given method - :param auth_context: user authentication context, a dictionary - shared by all plugins. 
- - In addition to ``user_id`` in ``auth_context``, this plugin sets - ``group_ids``, ``OS-FEDERATION:identity_provider`` and - ``OS-FEDERATION:protocol`` - - """ - if 'id' in auth_payload: - token_ref = self._get_token_ref(auth_payload) - handle_scoped_token(context, auth_payload, auth_context, token_ref, - self.federation_api, - self.identity_api, - self.token_provider_api) - else: - handle_unscoped_token(context, auth_payload, auth_context, - self.resource_api, self.federation_api, - self.identity_api) - - -def handle_scoped_token(context, auth_payload, auth_context, token_ref, - federation_api, identity_api, token_provider_api): - utils.validate_expiration(token_ref) - token_audit_id = token_ref.audit_id - identity_provider = token_ref.federation_idp_id - protocol = token_ref.federation_protocol_id - user_id = token_ref.user_id - group_ids = token_ref.federation_group_ids - send_notification = functools.partial( - notifications.send_saml_audit_notification, 'authenticate', - context, user_id, group_ids, identity_provider, protocol, - token_audit_id) - - utils.assert_enabled_identity_provider(federation_api, identity_provider) - - try: - mapping = federation_api.get_mapping_from_idp_and_protocol( - identity_provider, protocol) - utils.validate_groups(group_ids, mapping['id'], identity_api) - - except Exception: - # NOTE(topol): Diaper defense to catch any exception, so we can - # send off failed authentication notification, raise the exception - # after sending the notification - send_notification(taxonomy.OUTCOME_FAILURE) - raise - else: - send_notification(taxonomy.OUTCOME_SUCCESS) - - auth_context['user_id'] = user_id - auth_context['group_ids'] = group_ids - auth_context[federation_constants.IDENTITY_PROVIDER] = identity_provider - auth_context[federation_constants.PROTOCOL] = protocol - - -def handle_unscoped_token(context, auth_payload, auth_context, - resource_api, federation_api, identity_api): - - def is_ephemeral_user(mapped_properties): - return 
mapped_properties['user']['type'] == utils.UserType.EPHEMERAL - - def build_ephemeral_user_context(auth_context, user, mapped_properties, - identity_provider, protocol): - auth_context['user_id'] = user['id'] - auth_context['group_ids'] = mapped_properties['group_ids'] - auth_context[federation_constants.IDENTITY_PROVIDER] = ( - identity_provider) - auth_context[federation_constants.PROTOCOL] = protocol - - def build_local_user_context(auth_context, mapped_properties): - user_info = auth_plugins.UserAuthInfo.create(mapped_properties, - METHOD_NAME) - auth_context['user_id'] = user_info.user_id - - assertion = extract_assertion_data(context) - identity_provider = auth_payload['identity_provider'] - protocol = auth_payload['protocol'] - - utils.assert_enabled_identity_provider(federation_api, identity_provider) - - group_ids = None - # NOTE(topol): The user is coming in from an IdP with a SAML assertion - # instead of from a token, so we set token_id to None - token_id = None - # NOTE(marek-denis): This variable is set to None and there is a - # possibility that it will be used in the CADF notification. This means - # operation will not be mapped to any user (even ephemeral). 
- user_id = None - - try: - try: - mapped_properties, mapping_id = apply_mapping_filter( - identity_provider, protocol, assertion, resource_api, - federation_api, identity_api) - except exception.ValidationError as e: - # if mapping is either invalid or yield no valid identity, - # it is considered a failed authentication - raise exception.Unauthorized(e) - - if is_ephemeral_user(mapped_properties): - unique_id, display_name = ( - get_user_unique_id_and_display_name(context, mapped_properties) - ) - user = identity_api.shadow_federated_user(identity_provider, - protocol, unique_id, - display_name) - user_id = user['id'] - group_ids = mapped_properties['group_ids'] - utils.validate_groups_cardinality(group_ids, mapping_id) - build_ephemeral_user_context(auth_context, user, - mapped_properties, - identity_provider, protocol) - else: - build_local_user_context(auth_context, mapped_properties) - - except Exception: - # NOTE(topol): Diaper defense to catch any exception, so we can - # send off failed authentication notification, raise the exception - # after sending the notification - outcome = taxonomy.OUTCOME_FAILURE - notifications.send_saml_audit_notification('authenticate', context, - user_id, group_ids, - identity_provider, - protocol, token_id, - outcome) - raise - else: - outcome = taxonomy.OUTCOME_SUCCESS - notifications.send_saml_audit_notification('authenticate', context, - user_id, group_ids, - identity_provider, - protocol, token_id, - outcome) - - -def extract_assertion_data(context): - assertion = dict(utils.get_assertion_params_from_env(context)) - return assertion - - -def apply_mapping_filter(identity_provider, protocol, assertion, - resource_api, federation_api, identity_api): - idp = federation_api.get_idp(identity_provider) - utils.validate_idp(idp, protocol, assertion) - - mapped_properties, mapping_id = federation_api.evaluate( - identity_provider, protocol, assertion) - - # NOTE(marek-denis): We update group_ids only here to avoid fetching - # 
groups identified by name/domain twice. - # NOTE(marek-denis): Groups are translated from name/domain to their - # corresponding ids in the auth plugin, as we need information what - # ``mapping_id`` was used as well as idenity_api and resource_api - # objects. - group_ids = mapped_properties['group_ids'] - utils.validate_groups_in_backend(group_ids, - mapping_id, - identity_api) - group_ids.extend( - utils.transform_to_group_ids( - mapped_properties['group_names'], mapping_id, - identity_api, resource_api)) - mapped_properties['group_ids'] = list(set(group_ids)) - return mapped_properties, mapping_id - - -def get_user_unique_id_and_display_name(context, mapped_properties): - """Setup federated username. - - Function covers all the cases for properly setting user id, a primary - identifier for identity objects. Initial version of the mapping engine - assumed user is identified by ``name`` and his ``id`` is built from the - name. We, however need to be able to accept local rules that identify user - by either id or name/domain. - - The following use-cases are covered: - - 1) If neither user_name nor user_id is set raise exception.Unauthorized - 2) If user_id is set and user_name not, set user_name equal to user_id - 3) If user_id is not set and user_name is, set user_id as url safe version - of user_name. - - :param context: authentication context - :param mapped_properties: Properties issued by a RuleProcessor. - :type: dictionary - - :raises keystone.exception.Unauthorized: If neither `user_name` nor - `user_id` is set. - :returns: tuple with user identification - :rtype: tuple - - """ - user = mapped_properties['user'] - - user_id = user.get('id') - user_name = user.get('name') or context['environment'].get('REMOTE_USER') - - if not any([user_id, user_name]): - msg = _("Could not map user while setting ephemeral user identity. 
" - "Either mapping rules must specify user id/name or " - "REMOTE_USER environment variable must be set.") - raise exception.Unauthorized(msg) - - elif not user_name: - user['name'] = user_id - - elif not user_id: - user_id = user_name - - user['id'] = parse.quote(user_id) - return (user['id'], user['name']) diff --git a/keystone-moon/keystone/auth/plugins/oauth1.py b/keystone-moon/keystone/auth/plugins/oauth1.py deleted file mode 100644 index bf60f91c..00000000 --- a/keystone-moon/keystone/auth/plugins/oauth1.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_utils import timeutils - -from keystone import auth -from keystone.common import controller -from keystone.common import dependency -from keystone import exception -from keystone.i18n import _ -from keystone.oauth1 import core as oauth -from keystone.oauth1 import validator - - -@dependency.requires('oauth_api') -class OAuth(auth.AuthMethodHandler): - def authenticate(self, context, auth_info, auth_context): - """Turn a signed request with an access key into a keystone token.""" - headers = context['headers'] - oauth_headers = oauth.get_oauth_headers(headers) - access_token_id = oauth_headers.get('oauth_token') - - if not access_token_id: - raise exception.ValidationError( - attribute='oauth_token', target='request') - - acc_token = self.oauth_api.get_access_token(access_token_id) - - expires_at = acc_token['expires_at'] - if expires_at: - now = timeutils.utcnow() - expires = timeutils.normalize_time( - timeutils.parse_isotime(expires_at)) - if now > expires: - raise exception.Unauthorized(_('Access token is expired')) - - url = controller.V3Controller.base_url(context, context['path']) - access_verifier = oauth.ResourceEndpoint( - request_validator=validator.OAuthValidator(), - token_generator=oauth.token_generator) - result, request = access_verifier.validate_protected_resource_request( - url, - http_method='POST', - body=context['query_string'], - headers=headers, - realms=None - ) - if not result: - msg = _('Could not validate the access token') - raise exception.Unauthorized(msg) - auth_context['user_id'] = acc_token['authorizing_user_id'] - auth_context['access_token_id'] = access_token_id - auth_context['project_id'] = acc_token['project_id'] diff --git a/keystone-moon/keystone/auth/plugins/password.py b/keystone-moon/keystone/auth/plugins/password.py deleted file mode 100644 index a16887b4..00000000 --- a/keystone-moon/keystone/auth/plugins/password.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under 
the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone import auth -from keystone.auth import plugins as auth_plugins -from keystone.common import dependency -from keystone import exception -from keystone.i18n import _ - - -METHOD_NAME = 'password' - - -@dependency.requires('identity_api') -class Password(auth.AuthMethodHandler): - - def authenticate(self, context, auth_payload, auth_context): - """Try to authenticate against the identity backend.""" - user_info = auth_plugins.UserAuthInfo.create(auth_payload, METHOD_NAME) - - try: - self.identity_api.authenticate( - context, - user_id=user_info.user_id, - password=user_info.password) - except AssertionError: - # authentication failed because of invalid username or password - msg = _('Invalid username or password') - raise exception.Unauthorized(msg) - - auth_context['user_id'] = user_info.user_id diff --git a/keystone-moon/keystone/auth/plugins/saml2.py b/keystone-moon/keystone/auth/plugins/saml2.py deleted file mode 100644 index 0e7ec6bc..00000000 --- a/keystone-moon/keystone/auth/plugins/saml2.py +++ /dev/null @@ -1,35 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_log import versionutils - -from keystone.auth.plugins import mapped - - -@versionutils.deprecated( - versionutils.deprecated.MITAKA, - what='keystone.auth.plugins.saml2.Saml2', - in_favor_of='keystone.auth.plugins.mapped.Mapped', - remove_in=+2) -class Saml2(mapped.Mapped): - """Provide an entry point to authenticate with SAML2. - - This plugin subclasses ``mapped.Mapped``, and may be specified in - keystone.conf:: - - [auth] - methods = external,password,token,saml2 - saml2 = keystone.auth.plugins.mapped.Mapped - - """ - - pass diff --git a/keystone-moon/keystone/auth/plugins/token.py b/keystone-moon/keystone/auth/plugins/token.py deleted file mode 100644 index 069f1140..00000000 --- a/keystone-moon/keystone/auth/plugins/token.py +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_config import cfg -from oslo_log import log -import six - -from keystone import auth -from keystone.auth.plugins import mapped -from keystone.common import dependency -from keystone.common import wsgi -from keystone import exception -from keystone.i18n import _ -from keystone.models import token_model - - -LOG = log.getLogger(__name__) - -CONF = cfg.CONF - - -@dependency.requires('federation_api', 'identity_api', 'token_provider_api') -class Token(auth.AuthMethodHandler): - - def _get_token_ref(self, auth_payload): - token_id = auth_payload['id'] - response = self.token_provider_api.validate_token(token_id) - return token_model.KeystoneToken(token_id=token_id, - token_data=response) - - def authenticate(self, context, auth_payload, user_context): - if 'id' not in auth_payload: - raise exception.ValidationError(attribute='id', - target='token') - token_ref = self._get_token_ref(auth_payload) - if token_ref.is_federated_user and self.federation_api: - mapped.handle_scoped_token( - context, auth_payload, user_context, token_ref, - self.federation_api, self.identity_api, - self.token_provider_api) - else: - token_authenticate(context, auth_payload, user_context, token_ref) - - -def token_authenticate(context, auth_payload, user_context, token_ref): - try: - - # Do not allow tokens used for delegation to - # create another token, or perform any changes of - # state in Keystone. To do so is to invite elevation of - # privilege attacks - - if token_ref.oauth_scoped or token_ref.trust_scoped: - raise exception.Forbidden() - - if not CONF.token.allow_rescope_scoped_token: - # Do not allow conversion from scoped tokens. - if token_ref.project_scoped or token_ref.domain_scoped: - raise exception.Forbidden(action=_("rescope a scoped token")) - - wsgi.validate_token_bind(context, token_ref) - - # New tokens maintain the audit_id of the original token in the - # chain (if possible) as the second element in the audit data - # structure. 
Look for the last element in the audit data structure - # which will be either the audit_id of the token (in the case of - # a token that has not been rescoped) or the audit_chain id (in - # the case of a token that has been rescoped). - try: - token_audit_id = token_ref.get('audit_ids', [])[-1] - except IndexError: - # NOTE(morganfainberg): In the case this is a token that was - # issued prior to audit id existing, the chain is not tracked. - token_audit_id = None - - user_context.setdefault('expires_at', token_ref.expires) - user_context['audit_id'] = token_audit_id - user_context.setdefault('user_id', token_ref.user_id) - # TODO(morganfainberg: determine if token 'extras' can be removed - # from the user_context - user_context['extras'].update(token_ref.get('extras', {})) - user_context['method_names'].extend(token_ref.methods) - - except AssertionError as e: - LOG.error(six.text_type(e)) - raise exception.Unauthorized(e) diff --git a/keystone-moon/keystone/auth/plugins/totp.py b/keystone-moon/keystone/auth/plugins/totp.py deleted file mode 100644 index d0b61b3b..00000000 --- a/keystone-moon/keystone/auth/plugins/totp.py +++ /dev/null @@ -1,99 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Time-based One-time Password Algorithm (TOTP) auth plugin - -TOTP is an algorithm that computes a one-time password from a shared secret -key and the current time. - -TOTP is an implementation of a hash-based message authentication code (HMAC). 
-It combines a secret key with the current timestamp using a cryptographic hash -function to generate a one-time password. The timestamp typically increases in -30-second intervals, so passwords generated close together in time from the -same secret key will be equal. -""" - -import base64 - -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.twofactor import totp as crypto_totp -from oslo_log import log -from oslo_utils import timeutils -import six - -from keystone import auth -from keystone.auth import plugins -from keystone.common import dependency -from keystone import exception -from keystone.i18n import _ - - -METHOD_NAME = 'totp' - -LOG = log.getLogger(__name__) - - -def _generate_totp_passcode(secret): - """Generate TOTP passcode. - - :param bytes secret: A base32 encoded secret for the TOTP authentication - :returns: totp passcode as bytes - """ - if isinstance(secret, six.text_type): - # NOTE(dstanek): since this may be coming from the JSON stored in the - # database it may be UTF-8 encoded - secret = secret.encode('utf-8') - - # NOTE(nonameentername): cryptography takes a non base32 encoded value for - # TOTP. 
Add the correct padding to be able to base32 decode - while len(secret) % 8 != 0: - secret = secret + b'=' - - decoded = base64.b32decode(secret) - totp = crypto_totp.TOTP( - decoded, 6, hashes.SHA1(), 30, backend=default_backend()) - return totp.generate(timeutils.utcnow_ts(microsecond=True)) - - -@dependency.requires('credential_api') -class TOTP(auth.AuthMethodHandler): - - def authenticate(self, context, auth_payload, auth_context): - """Try to authenticate using TOTP""" - user_info = plugins.TOTPUserInfo.create(auth_payload, METHOD_NAME) - auth_passcode = auth_payload.get('user').get('passcode') - - credentials = self.credential_api.list_credentials_for_user( - user_info.user_id, type='totp') - - valid_passcode = False - for credential in credentials: - try: - generated_passcode = _generate_totp_passcode( - credential['blob']) - if auth_passcode == generated_passcode: - valid_passcode = True - break - except (ValueError, KeyError): - LOG.debug('No TOTP match; credential id: %s, user_id: %s', - credential['id'], user_info.user_id) - except (TypeError): - LOG.debug('Base32 decode failed for TOTP credential %s', - credential['id']) - - if not valid_passcode: - # authentication failed because of invalid username or passcode - msg = _('Invalid username or TOTP passcode') - raise exception.Unauthorized(msg) - - auth_context['user_id'] = user_info.user_id diff --git a/keystone-moon/keystone/auth/routers.py b/keystone-moon/keystone/auth/routers.py deleted file mode 100644 index c7a525c3..00000000 --- a/keystone-moon/keystone/auth/routers.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.auth import controllers -from keystone.common import json_home -from keystone.common import wsgi - - -class Routers(wsgi.RoutersBase): - - def append_v3_routers(self, mapper, routers): - auth_controller = controllers.Auth() - - self._add_resource( - mapper, auth_controller, - path='/auth/tokens', - get_action='validate_token', - head_action='check_token', - post_action='authenticate_for_token', - delete_action='revoke_token', - rel=json_home.build_v3_resource_relation('auth_tokens')) - - self._add_resource( - mapper, auth_controller, - path='/auth/tokens/OS-PKI/revoked', - get_action='revocation_list', - rel=json_home.build_v3_extension_resource_relation( - 'OS-PKI', '1.0', 'revocations')) - - self._add_resource( - mapper, auth_controller, - path='/auth/catalog', - get_action='get_auth_catalog', - rel=json_home.build_v3_resource_relation('auth_catalog')) - - self._add_resource( - mapper, auth_controller, - path='/auth/projects', - get_action='get_auth_projects', - rel=json_home.build_v3_resource_relation('auth_projects')) - - self._add_resource( - mapper, auth_controller, - path='/auth/domains', - get_action='get_auth_domains', - rel=json_home.build_v3_resource_relation('auth_domains')) diff --git a/keystone-moon/keystone/backends.py b/keystone-moon/keystone/backends.py deleted file mode 100644 index 3a10675e..00000000 --- a/keystone-moon/keystone/backends.py +++ /dev/null @@ -1,66 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone import assignment -from keystone import auth -from keystone import catalog -from keystone.common import cache -from keystone.contrib import endpoint_filter -from keystone.contrib import endpoint_policy -from keystone.contrib import federation -from keystone.contrib import oauth1 -from keystone.contrib import revoke -from keystone import credential -from keystone import identity -from keystone import policy -from keystone import resource -from keystone import token -from keystone import trust -# from keystone.contrib import moon - - -def load_backends(): - - # Configure and build the cache - cache.configure_cache_region(cache.REGION) - - # Ensure that the identity driver is created before the assignment manager. - # The default assignment driver is determined by the identity driver, so - # the identity driver must be available to the assignment manager. 
- _IDENTITY_API = identity.Manager() - - DRIVERS = dict( - assignment_api=assignment.Manager(), - catalog_api=catalog.Manager(), - credential_api=credential.Manager(), - domain_config_api=resource.DomainConfigManager(), - endpoint_filter_api=endpoint_filter.Manager(), - endpoint_policy_api=endpoint_policy.Manager(), - federation_api=federation.Manager(), - id_generator_api=identity.generator.Manager(), - id_mapping_api=identity.MappingManager(), - identity_api=_IDENTITY_API, - oauth_api=oauth1.Manager(), - policy_api=policy.Manager(), - resource_api=resource.Manager(), - revoke_api=revoke.Manager(), - role_api=assignment.RoleManager(), - token_api=token.persistence.Manager(), - trust_api=trust.Manager(), - token_provider_api=token.provider.Manager(), - # admin_api=moon.AdminManager(), - # authz_api=moon.AuthzManager() - ) - - auth.controllers.load_auth_methods() - - return DRIVERS diff --git a/keystone-moon/keystone/catalog/__init__.py b/keystone-moon/keystone/catalog/__init__.py deleted file mode 100644 index 29f297d6..00000000 --- a/keystone-moon/keystone/catalog/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone.catalog import controllers # noqa -from keystone.catalog.core import * # noqa diff --git a/keystone-moon/keystone/catalog/backends/__init__.py b/keystone-moon/keystone/catalog/backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/catalog/backends/kvs.py b/keystone-moon/keystone/catalog/backends/kvs.py deleted file mode 100644 index fe975d9d..00000000 --- a/keystone-moon/keystone/catalog/backends/kvs.py +++ /dev/null @@ -1,154 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -from keystone import catalog -from keystone.common import driver_hints -from keystone.common import kvs - - -class Catalog(kvs.Base, catalog.CatalogDriverV8): - # Public interface - def get_catalog(self, user_id, tenant_id): - return self.db.get('catalog-%s-%s' % (tenant_id, user_id)) - - # region crud - - def _delete_child_regions(self, region_id, root_region_id): - """Delete all child regions. - - Recursively delete any region that has the supplied region - as its parent. 
- """ - children = [r for r in self.list_regions(driver_hints.Hints()) - if r['parent_region_id'] == region_id] - for child in children: - if child['id'] == root_region_id: - # Hit a circular region hierarchy - return - self._delete_child_regions(child['id'], root_region_id) - self._delete_region(child['id']) - - def _check_parent_region(self, region_ref): - """Raise a NotFound if the parent region does not exist. - - If the region_ref has a specified parent_region_id, check that - the parent exists, otherwise, raise a NotFound. - """ - parent_region_id = region_ref.get('parent_region_id') - if parent_region_id is not None: - # This will raise NotFound if the parent doesn't exist, - # which is the behavior we want. - self.get_region(parent_region_id) - - def create_region(self, region): - region_id = region['id'] - region.setdefault('parent_region_id') - self._check_parent_region(region) - self.db.set('region-%s' % region_id, region) - region_list = set(self.db.get('region_list', [])) - region_list.add(region_id) - self.db.set('region_list', list(region_list)) - return region - - def list_regions(self, hints): - return [self.get_region(x) for x in self.db.get('region_list', [])] - - def get_region(self, region_id): - return self.db.get('region-%s' % region_id) - - def update_region(self, region_id, region): - self._check_parent_region(region) - old_region = self.get_region(region_id) - old_region.update(region) - self._ensure_no_circle_in_hierarchical_regions(old_region) - self.db.set('region-%s' % region_id, old_region) - return old_region - - def _delete_region(self, region_id): - self.db.delete('region-%s' % region_id) - region_list = set(self.db.get('region_list', [])) - region_list.remove(region_id) - self.db.set('region_list', list(region_list)) - - def delete_region(self, region_id): - self._delete_child_regions(region_id, region_id) - self._delete_region(region_id) - - # service crud - - def create_service(self, service_id, service): - 
self.db.set('service-%s' % service_id, service) - service_list = set(self.db.get('service_list', [])) - service_list.add(service_id) - self.db.set('service_list', list(service_list)) - return service - - def list_services(self, hints): - return [self.get_service(x) for x in self.db.get('service_list', [])] - - def get_service(self, service_id): - return self.db.get('service-%s' % service_id) - - def update_service(self, service_id, service): - old_service = self.get_service(service_id) - old_service.update(service) - self.db.set('service-%s' % service_id, old_service) - return old_service - - def delete_service(self, service_id): - # delete referencing endpoints - for endpoint_id in self.db.get('endpoint_list', []): - if self.get_endpoint(endpoint_id)['service_id'] == service_id: - self.delete_endpoint(endpoint_id) - - self.db.delete('service-%s' % service_id) - service_list = set(self.db.get('service_list', [])) - service_list.remove(service_id) - self.db.set('service_list', list(service_list)) - - # endpoint crud - - def create_endpoint(self, endpoint_id, endpoint): - self.db.set('endpoint-%s' % endpoint_id, endpoint) - endpoint_list = set(self.db.get('endpoint_list', [])) - endpoint_list.add(endpoint_id) - self.db.set('endpoint_list', list(endpoint_list)) - return endpoint - - def list_endpoints(self, hints): - return [self.get_endpoint(x) for x in self.db.get('endpoint_list', [])] - - def get_endpoint(self, endpoint_id): - return self.db.get('endpoint-%s' % endpoint_id) - - def update_endpoint(self, endpoint_id, endpoint): - if endpoint.get('region_id') is not None: - self.get_region(endpoint['region_id']) - - old_endpoint = self.get_endpoint(endpoint_id) - old_endpoint.update(endpoint) - self.db.set('endpoint-%s' % endpoint_id, old_endpoint) - return old_endpoint - - def delete_endpoint(self, endpoint_id): - self.db.delete('endpoint-%s' % endpoint_id) - endpoint_list = set(self.db.get('endpoint_list', [])) - endpoint_list.remove(endpoint_id) - 
self.db.set('endpoint_list', list(endpoint_list)) - - # Private interface - def _create_catalog(self, user_id, tenant_id, data): - self.db.set('catalog-%s-%s' % (tenant_id, user_id), data) - return data diff --git a/keystone-moon/keystone/catalog/backends/sql.py b/keystone-moon/keystone/catalog/backends/sql.py deleted file mode 100644 index bd92f107..00000000 --- a/keystone-moon/keystone/catalog/backends/sql.py +++ /dev/null @@ -1,572 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# Copyright 2012 Canonical Ltd. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import itertools - -from oslo_config import cfg -import sqlalchemy -from sqlalchemy.sql import true - -from keystone import catalog -from keystone.catalog import core -from keystone.common import driver_hints -from keystone.common import sql -from keystone import exception -from keystone.i18n import _ - - -CONF = cfg.CONF - - -class Region(sql.ModelBase, sql.DictBase): - __tablename__ = 'region' - attributes = ['id', 'description', 'parent_region_id'] - id = sql.Column(sql.String(255), primary_key=True) - description = sql.Column(sql.String(255), nullable=False) - # NOTE(jaypipes): Right now, using an adjacency list model for - # storing the hierarchy of regions is fine, since - # the API does not support any kind of querying for - # more complex hierarchical queries such as "get me only - # the regions that are subchildren of this region", etc. 
- # If, in the future, such queries are needed, then it - # would be possible to add in columns to this model for - # "left" and "right" and provide support for a nested set - # model. - parent_region_id = sql.Column(sql.String(255), nullable=True) - extra = sql.Column(sql.JsonBlob()) - endpoints = sqlalchemy.orm.relationship("Endpoint", backref="region") - - -class Service(sql.ModelBase, sql.DictBase): - __tablename__ = 'service' - attributes = ['id', 'type', 'enabled'] - id = sql.Column(sql.String(64), primary_key=True) - type = sql.Column(sql.String(255)) - enabled = sql.Column(sql.Boolean, nullable=False, default=True, - server_default=sqlalchemy.sql.expression.true()) - extra = sql.Column(sql.JsonBlob()) - endpoints = sqlalchemy.orm.relationship("Endpoint", backref="service") - - -class Endpoint(sql.ModelBase, sql.DictBase): - __tablename__ = 'endpoint' - attributes = ['id', 'interface', 'region_id', 'service_id', 'url', - 'legacy_endpoint_id', 'enabled'] - id = sql.Column(sql.String(64), primary_key=True) - legacy_endpoint_id = sql.Column(sql.String(64)) - interface = sql.Column(sql.String(8), nullable=False) - region_id = sql.Column(sql.String(255), - sql.ForeignKey('region.id', - ondelete='RESTRICT'), - nullable=True, - default=None) - service_id = sql.Column(sql.String(64), - sql.ForeignKey('service.id'), - nullable=False) - url = sql.Column(sql.Text(), nullable=False) - enabled = sql.Column(sql.Boolean, nullable=False, default=True, - server_default=sqlalchemy.sql.expression.true()) - extra = sql.Column(sql.JsonBlob()) - - -class Catalog(catalog.CatalogDriverV8): - # Regions - def list_regions(self, hints): - with sql.session_for_read() as session: - regions = session.query(Region) - regions = sql.filter_limit_query(Region, regions, hints) - return [s.to_dict() for s in list(regions)] - - def _get_region(self, session, region_id): - ref = session.query(Region).get(region_id) - if not ref: - raise exception.RegionNotFound(region_id=region_id) - return ref 
- - def _delete_child_regions(self, session, region_id, root_region_id): - """Delete all child regions. - - Recursively delete any region that has the supplied region - as its parent. - """ - children = session.query(Region).filter_by(parent_region_id=region_id) - for child in children: - if child.id == root_region_id: - # Hit a circular region hierarchy - return - self._delete_child_regions(session, child.id, root_region_id) - session.delete(child) - - def _check_parent_region(self, session, region_ref): - """Raise a NotFound if the parent region does not exist. - - If the region_ref has a specified parent_region_id, check that - the parent exists, otherwise, raise a NotFound. - """ - parent_region_id = region_ref.get('parent_region_id') - if parent_region_id is not None: - # This will raise NotFound if the parent doesn't exist, - # which is the behavior we want. - self._get_region(session, parent_region_id) - - def _has_endpoints(self, session, region, root_region): - if region.endpoints is not None and len(region.endpoints) > 0: - return True - - q = session.query(Region) - q = q.filter_by(parent_region_id=region.id) - for child in q.all(): - if child.id == root_region.id: - # Hit a circular region hierarchy - return False - if self._has_endpoints(session, child, root_region): - return True - return False - - def get_region(self, region_id): - with sql.session_for_read() as session: - return self._get_region(session, region_id).to_dict() - - def delete_region(self, region_id): - with sql.session_for_write() as session: - ref = self._get_region(session, region_id) - if self._has_endpoints(session, ref, ref): - raise exception.RegionDeletionError(region_id=region_id) - self._delete_child_regions(session, region_id, region_id) - session.delete(ref) - - @sql.handle_conflicts(conflict_type='region') - def create_region(self, region_ref): - with sql.session_for_write() as session: - self._check_parent_region(session, region_ref) - region = Region.from_dict(region_ref) 
- session.add(region) - return region.to_dict() - - def update_region(self, region_id, region_ref): - with sql.session_for_write() as session: - self._check_parent_region(session, region_ref) - ref = self._get_region(session, region_id) - old_dict = ref.to_dict() - old_dict.update(region_ref) - self._ensure_no_circle_in_hierarchical_regions(old_dict) - new_region = Region.from_dict(old_dict) - for attr in Region.attributes: - if attr != 'id': - setattr(ref, attr, getattr(new_region, attr)) - return ref.to_dict() - - # Services - @driver_hints.truncated - def list_services(self, hints): - with sql.session_for_read() as session: - services = session.query(Service) - services = sql.filter_limit_query(Service, services, hints) - return [s.to_dict() for s in list(services)] - - def _get_service(self, session, service_id): - ref = session.query(Service).get(service_id) - if not ref: - raise exception.ServiceNotFound(service_id=service_id) - return ref - - def get_service(self, service_id): - with sql.session_for_read() as session: - return self._get_service(session, service_id).to_dict() - - def delete_service(self, service_id): - with sql.session_for_write() as session: - ref = self._get_service(session, service_id) - session.query(Endpoint).filter_by(service_id=service_id).delete() - session.delete(ref) - - def create_service(self, service_id, service_ref): - with sql.session_for_write() as session: - service = Service.from_dict(service_ref) - session.add(service) - return service.to_dict() - - def update_service(self, service_id, service_ref): - with sql.session_for_write() as session: - ref = self._get_service(session, service_id) - old_dict = ref.to_dict() - old_dict.update(service_ref) - new_service = Service.from_dict(old_dict) - for attr in Service.attributes: - if attr != 'id': - setattr(ref, attr, getattr(new_service, attr)) - ref.extra = new_service.extra - return ref.to_dict() - - # Endpoints - def create_endpoint(self, endpoint_id, endpoint_ref): - 
new_endpoint = Endpoint.from_dict(endpoint_ref) - with sql.session_for_write() as session: - session.add(new_endpoint) - return new_endpoint.to_dict() - - def delete_endpoint(self, endpoint_id): - with sql.session_for_write() as session: - ref = self._get_endpoint(session, endpoint_id) - session.delete(ref) - - def _get_endpoint(self, session, endpoint_id): - try: - return session.query(Endpoint).filter_by(id=endpoint_id).one() - except sql.NotFound: - raise exception.EndpointNotFound(endpoint_id=endpoint_id) - - def get_endpoint(self, endpoint_id): - with sql.session_for_read() as session: - return self._get_endpoint(session, endpoint_id).to_dict() - - @driver_hints.truncated - def list_endpoints(self, hints): - with sql.session_for_read() as session: - endpoints = session.query(Endpoint) - endpoints = sql.filter_limit_query(Endpoint, endpoints, hints) - return [e.to_dict() for e in list(endpoints)] - - def update_endpoint(self, endpoint_id, endpoint_ref): - with sql.session_for_write() as session: - ref = self._get_endpoint(session, endpoint_id) - old_dict = ref.to_dict() - old_dict.update(endpoint_ref) - new_endpoint = Endpoint.from_dict(old_dict) - for attr in Endpoint.attributes: - if attr != 'id': - setattr(ref, attr, getattr(new_endpoint, attr)) - ref.extra = new_endpoint.extra - return ref.to_dict() - - def get_catalog(self, user_id, tenant_id): - """Retrieve and format the V2 service catalog. - - :param user_id: The id of the user who has been authenticated for - creating service catalog. - :param tenant_id: The id of the project. 'tenant_id' will be None - in the case this being called to create a catalog to go in a - domain scoped token. In this case, any endpoint that requires - a tenant_id as part of their URL will be skipped (as would a whole - service if, as a consequence, it has no valid endpoints). - - :returns: A nested dict representing the service catalog or an - empty dict. 
- - """ - substitutions = dict( - itertools.chain(CONF.items(), CONF.eventlet_server.items())) - substitutions.update({'user_id': user_id}) - silent_keyerror_failures = [] - if tenant_id: - substitutions.update({ - 'tenant_id': tenant_id, - 'project_id': tenant_id - }) - else: - silent_keyerror_failures = ['tenant_id', 'project_id', ] - - with sql.session_for_read() as session: - endpoints = (session.query(Endpoint). - options(sql.joinedload(Endpoint.service)). - filter(Endpoint.enabled == true()).all()) - - catalog = {} - - for endpoint in endpoints: - if not endpoint.service['enabled']: - continue - try: - formatted_url = core.format_url( - endpoint['url'], substitutions, - silent_keyerror_failures=silent_keyerror_failures) - if formatted_url is not None: - url = formatted_url - else: - continue - except exception.MalformedEndpoint: - continue # this failure is already logged in format_url() - - region = endpoint['region_id'] - service_type = endpoint.service['type'] - default_service = { - 'id': endpoint['id'], - 'name': endpoint.service.extra.get('name', ''), - 'publicURL': '' - } - catalog.setdefault(region, {}) - catalog[region].setdefault(service_type, default_service) - interface_url = '%sURL' % endpoint['interface'] - catalog[region][service_type][interface_url] = url - - return catalog - - def get_v3_catalog(self, user_id, tenant_id): - """Retrieve and format the current V3 service catalog. - - :param user_id: The id of the user who has been authenticated for - creating service catalog. - :param tenant_id: The id of the project. 'tenant_id' will be None in - the case this being called to create a catalog to go in a domain - scoped token. In this case, any endpoint that requires a - tenant_id as part of their URL will be skipped. 
- - :returns: A list representing the service catalog or an empty list - - """ - d = dict( - itertools.chain(CONF.items(), CONF.eventlet_server.items())) - d.update({'user_id': user_id}) - silent_keyerror_failures = [] - if tenant_id: - d.update({ - 'tenant_id': tenant_id, - 'project_id': tenant_id, - }) - else: - silent_keyerror_failures = ['tenant_id', 'project_id', ] - - with sql.session_for_read() as session: - services = (session.query(Service).filter( - Service.enabled == true()).options( - sql.joinedload(Service.endpoints)).all()) - - def make_v3_endpoints(endpoints): - for endpoint in (ep.to_dict() - for ep in endpoints if ep.enabled): - del endpoint['service_id'] - del endpoint['legacy_endpoint_id'] - del endpoint['enabled'] - endpoint['region'] = endpoint['region_id'] - try: - formatted_url = core.format_url( - endpoint['url'], d, - silent_keyerror_failures=silent_keyerror_failures) - if formatted_url: - endpoint['url'] = formatted_url - else: - continue - except exception.MalformedEndpoint: - # this failure is already logged in format_url() - continue - - yield endpoint - - # TODO(davechen): If there is service with no endpoints, we should - # skip the service instead of keeping it in the catalog, - # see bug #1436704. 
- def make_v3_service(svc): - eps = list(make_v3_endpoints(svc.endpoints)) - service = {'endpoints': eps, 'id': svc.id, 'type': svc.type} - service['name'] = svc.extra.get('name', '') - return service - - return [make_v3_service(svc) for svc in services] - - @sql.handle_conflicts(conflict_type='project_endpoint') - def add_endpoint_to_project(self, endpoint_id, project_id): - with sql.session_for_write() as session: - endpoint_filter_ref = ProjectEndpoint(endpoint_id=endpoint_id, - project_id=project_id) - session.add(endpoint_filter_ref) - - def _get_project_endpoint_ref(self, session, endpoint_id, project_id): - endpoint_filter_ref = session.query(ProjectEndpoint).get( - (endpoint_id, project_id)) - if endpoint_filter_ref is None: - msg = _('Endpoint %(endpoint_id)s not found in project ' - '%(project_id)s') % {'endpoint_id': endpoint_id, - 'project_id': project_id} - raise exception.NotFound(msg) - return endpoint_filter_ref - - def check_endpoint_in_project(self, endpoint_id, project_id): - with sql.session_for_read() as session: - self._get_project_endpoint_ref(session, endpoint_id, project_id) - - def remove_endpoint_from_project(self, endpoint_id, project_id): - with sql.session_for_write() as session: - endpoint_filter_ref = self._get_project_endpoint_ref( - session, endpoint_id, project_id) - session.delete(endpoint_filter_ref) - - def list_endpoints_for_project(self, project_id): - with sql.session_for_read() as session: - query = session.query(ProjectEndpoint) - query = query.filter_by(project_id=project_id) - endpoint_filter_refs = query.all() - return [ref.to_dict() for ref in endpoint_filter_refs] - - def list_projects_for_endpoint(self, endpoint_id): - with sql.session_for_read() as session: - query = session.query(ProjectEndpoint) - query = query.filter_by(endpoint_id=endpoint_id) - endpoint_filter_refs = query.all() - return [ref.to_dict() for ref in endpoint_filter_refs] - - def delete_association_by_endpoint(self, endpoint_id): - with 
sql.session_for_write() as session: - query = session.query(ProjectEndpoint) - query = query.filter_by(endpoint_id=endpoint_id) - query.delete(synchronize_session=False) - - def delete_association_by_project(self, project_id): - with sql.session_for_write() as session: - query = session.query(ProjectEndpoint) - query = query.filter_by(project_id=project_id) - query.delete(synchronize_session=False) - - def create_endpoint_group(self, endpoint_group_id, endpoint_group): - with sql.session_for_write() as session: - endpoint_group_ref = EndpointGroup.from_dict(endpoint_group) - session.add(endpoint_group_ref) - return endpoint_group_ref.to_dict() - - def _get_endpoint_group(self, session, endpoint_group_id): - endpoint_group_ref = session.query(EndpointGroup).get( - endpoint_group_id) - if endpoint_group_ref is None: - raise exception.EndpointGroupNotFound( - endpoint_group_id=endpoint_group_id) - return endpoint_group_ref - - def get_endpoint_group(self, endpoint_group_id): - with sql.session_for_read() as session: - endpoint_group_ref = self._get_endpoint_group(session, - endpoint_group_id) - return endpoint_group_ref.to_dict() - - def update_endpoint_group(self, endpoint_group_id, endpoint_group): - with sql.session_for_write() as session: - endpoint_group_ref = self._get_endpoint_group(session, - endpoint_group_id) - old_endpoint_group = endpoint_group_ref.to_dict() - old_endpoint_group.update(endpoint_group) - new_endpoint_group = EndpointGroup.from_dict(old_endpoint_group) - for attr in EndpointGroup.mutable_attributes: - setattr(endpoint_group_ref, attr, - getattr(new_endpoint_group, attr)) - return endpoint_group_ref.to_dict() - - def delete_endpoint_group(self, endpoint_group_id): - with sql.session_for_write() as session: - endpoint_group_ref = self._get_endpoint_group(session, - endpoint_group_id) - self._delete_endpoint_group_association_by_endpoint_group( - session, endpoint_group_id) - session.delete(endpoint_group_ref) - - def 
get_endpoint_group_in_project(self, endpoint_group_id, project_id): - with sql.session_for_read() as session: - ref = self._get_endpoint_group_in_project(session, - endpoint_group_id, - project_id) - return ref.to_dict() - - @sql.handle_conflicts(conflict_type='project_endpoint_group') - def add_endpoint_group_to_project(self, endpoint_group_id, project_id): - with sql.session_for_write() as session: - # Create a new Project Endpoint group entity - endpoint_group_project_ref = ProjectEndpointGroupMembership( - endpoint_group_id=endpoint_group_id, project_id=project_id) - session.add(endpoint_group_project_ref) - - def _get_endpoint_group_in_project(self, session, - endpoint_group_id, project_id): - endpoint_group_project_ref = session.query( - ProjectEndpointGroupMembership).get((endpoint_group_id, - project_id)) - if endpoint_group_project_ref is None: - msg = _('Endpoint Group Project Association not found') - raise exception.NotFound(msg) - else: - return endpoint_group_project_ref - - def list_endpoint_groups(self): - with sql.session_for_read() as session: - query = session.query(EndpointGroup) - endpoint_group_refs = query.all() - return [e.to_dict() for e in endpoint_group_refs] - - def list_endpoint_groups_for_project(self, project_id): - with sql.session_for_read() as session: - query = session.query(ProjectEndpointGroupMembership) - query = query.filter_by(project_id=project_id) - endpoint_group_refs = query.all() - return [ref.to_dict() for ref in endpoint_group_refs] - - def remove_endpoint_group_from_project(self, endpoint_group_id, - project_id): - with sql.session_for_write() as session: - endpoint_group_project_ref = self._get_endpoint_group_in_project( - session, endpoint_group_id, project_id) - session.delete(endpoint_group_project_ref) - - def list_projects_associated_with_endpoint_group(self, endpoint_group_id): - with sql.session_for_read() as session: - query = session.query(ProjectEndpointGroupMembership) - query = 
query.filter_by(endpoint_group_id=endpoint_group_id) - endpoint_group_refs = query.all() - return [ref.to_dict() for ref in endpoint_group_refs] - - def _delete_endpoint_group_association_by_endpoint_group( - self, session, endpoint_group_id): - query = session.query(ProjectEndpointGroupMembership) - query = query.filter_by(endpoint_group_id=endpoint_group_id) - query.delete() - - def delete_endpoint_group_association_by_project(self, project_id): - with sql.session_for_write() as session: - query = session.query(ProjectEndpointGroupMembership) - query = query.filter_by(project_id=project_id) - query.delete() - - -class ProjectEndpoint(sql.ModelBase, sql.ModelDictMixin): - """project-endpoint relationship table.""" - - __tablename__ = 'project_endpoint' - attributes = ['endpoint_id', 'project_id'] - endpoint_id = sql.Column(sql.String(64), - primary_key=True, - nullable=False) - project_id = sql.Column(sql.String(64), - primary_key=True, - nullable=False) - - -class EndpointGroup(sql.ModelBase, sql.ModelDictMixin): - """Endpoint Groups table.""" - - __tablename__ = 'endpoint_group' - attributes = ['id', 'name', 'description', 'filters'] - mutable_attributes = frozenset(['name', 'description', 'filters']) - id = sql.Column(sql.String(64), primary_key=True) - name = sql.Column(sql.String(255), nullable=False) - description = sql.Column(sql.Text, nullable=True) - filters = sql.Column(sql.JsonBlob(), nullable=False) - - -class ProjectEndpointGroupMembership(sql.ModelBase, sql.ModelDictMixin): - """Project to Endpoint group relationship table.""" - - __tablename__ = 'project_endpoint_group' - attributes = ['endpoint_group_id', 'project_id'] - endpoint_group_id = sql.Column(sql.String(64), - sql.ForeignKey('endpoint_group.id'), - nullable=False) - project_id = sql.Column(sql.String(64), nullable=False) - __table_args__ = (sql.PrimaryKeyConstraint('endpoint_group_id', - 'project_id'),) diff --git a/keystone-moon/keystone/catalog/backends/templated.py 
b/keystone-moon/keystone/catalog/backends/templated.py deleted file mode 100644 index 2e80fd32..00000000 --- a/keystone-moon/keystone/catalog/backends/templated.py +++ /dev/null @@ -1,298 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import itertools -import os.path - -from oslo_config import cfg -from oslo_log import log -import six - -from keystone.catalog import core -from keystone import exception -from keystone.i18n import _LC - - -LOG = log.getLogger(__name__) - -CONF = cfg.CONF - - -def parse_templates(template_lines): - o = {} - for line in template_lines: - if ' = ' not in line: - continue - - k, v = line.strip().split(' = ') - if not k.startswith('catalog.'): - continue - - parts = k.split('.') - - region = parts[1] - # NOTE(termie): object-store insists on having a dash - service = parts[2].replace('_', '-') - key = parts[3] - - region_ref = o.get(region, {}) - service_ref = region_ref.get(service, {}) - service_ref[key] = v - - region_ref[service] = service_ref - o[region] = region_ref - - return o - - -class Catalog(core.Driver): - """A backend that generates endpoints for the Catalog based on templates. - - It is usually configured via config entries that look like: - - catalog.$REGION.$SERVICE.$key = $value - - and is stored in a similar looking hierarchy. 
Where a value can contain - values to be interpolated by standard python string interpolation that look - like (the % is replaced by a $ due to paste attempting to interpolate on - its own: - - http://localhost:$(public_port)s/ - - When expanding the template it will pass in a dict made up of the conf - instance plus a few additional key-values, notably tenant_id and user_id. - - It does not care what the keys and values are but it is worth noting that - keystone_compat will expect certain keys to be there so that it can munge - them into the output format keystone expects. These keys are: - - name - the name of the service, most likely repeated for all services of - the same type, across regions. - - adminURL - the url of the admin endpoint - - publicURL - the url of the public endpoint - - internalURL - the url of the internal endpoint - - """ - - def __init__(self, templates=None): - super(Catalog, self).__init__() - if templates: - self.templates = templates - else: - template_file = CONF.catalog.template_file - if not os.path.exists(template_file): - template_file = CONF.find_file(template_file) - self._load_templates(template_file) - - def _load_templates(self, template_file): - try: - with open(template_file) as f: - self.templates = parse_templates(f) - except IOError: - LOG.critical(_LC('Unable to open template file %s'), template_file) - raise - - # region crud - - def create_region(self, region_ref): - raise exception.NotImplemented() - - def list_regions(self, hints): - return [{'id': region_id, 'description': '', 'parent_region_id': ''} - for region_id in self.templates] - - def get_region(self, region_id): - if region_id in self.templates: - return {'id': region_id, 'description': '', 'parent_region_id': ''} - raise exception.RegionNotFound(region_id=region_id) - - def update_region(self, region_id, region_ref): - raise exception.NotImplemented() - - def delete_region(self, region_id): - raise exception.NotImplemented() - - # service crud - - def 
create_service(self, service_id, service_ref): - raise exception.NotImplemented() - - def _list_services(self, hints): - for region_ref in six.itervalues(self.templates): - for service_type, service_ref in six.iteritems(region_ref): - yield { - 'id': service_type, - 'enabled': True, - 'name': service_ref.get('name', ''), - 'description': service_ref.get('description', ''), - 'type': service_type, - } - - def list_services(self, hints): - return list(self._list_services(hints=None)) - - def get_service(self, service_id): - for service in self._list_services(hints=None): - if service['id'] == service_id: - return service - raise exception.ServiceNotFound(service_id=service_id) - - def update_service(self, service_id, service_ref): - raise exception.NotImplemented() - - def delete_service(self, service_id): - raise exception.NotImplemented() - - # endpoint crud - - def create_endpoint(self, endpoint_id, endpoint_ref): - raise exception.NotImplemented() - - def _list_endpoints(self): - for region_id, region_ref in six.iteritems(self.templates): - for service_type, service_ref in six.iteritems(region_ref): - for key in service_ref: - if key.endswith('URL'): - interface = key[:-3] - endpoint_id = ('%s-%s-%s' % - (region_id, service_type, interface)) - yield { - 'id': endpoint_id, - 'service_id': service_type, - 'interface': interface, - 'url': service_ref[key], - 'legacy_endpoint_id': None, - 'region_id': region_id, - 'enabled': True, - } - - def list_endpoints(self, hints): - return list(self._list_endpoints()) - - def get_endpoint(self, endpoint_id): - for endpoint in self._list_endpoints(): - if endpoint['id'] == endpoint_id: - return endpoint - raise exception.EndpointNotFound(endpoint_id=endpoint_id) - - def update_endpoint(self, endpoint_id, endpoint_ref): - raise exception.NotImplemented() - - def delete_endpoint(self, endpoint_id): - raise exception.NotImplemented() - - def get_catalog(self, user_id, tenant_id): - """Retrieve and format the V2 service catalog. 
- - :param user_id: The id of the user who has been authenticated for - creating service catalog. - :param tenant_id: The id of the project. 'tenant_id' will be None in - the case this being called to create a catalog to go in a domain - scoped token. In this case, any endpoint that requires a tenant_id - as part of their URL will be skipped. - - :returns: A nested dict representing the service catalog or an - empty dict. - - """ - substitutions = dict( - itertools.chain(CONF.items(), CONF.eventlet_server.items())) - substitutions.update({'user_id': user_id}) - silent_keyerror_failures = [] - if tenant_id: - substitutions.update({ - 'tenant_id': tenant_id, - 'project_id': tenant_id, - }) - else: - silent_keyerror_failures = ['tenant_id', 'project_id', ] - - catalog = {} - # TODO(davechen): If there is service with no endpoints, we should - # skip the service instead of keeping it in the catalog. - # see bug #1436704. - for region, region_ref in self.templates.items(): - catalog[region] = {} - for service, service_ref in region_ref.items(): - service_data = {} - try: - for k, v in service_ref.items(): - formatted_value = core.format_url( - v, substitutions, - silent_keyerror_failures=silent_keyerror_failures) - if formatted_value: - service_data[k] = formatted_value - except exception.MalformedEndpoint: - continue # this failure is already logged in format_url() - catalog[region][service] = service_data - - return catalog - - def add_endpoint_to_project(self, endpoint_id, project_id): - raise exception.NotImplemented() - - def remove_endpoint_from_project(self, endpoint_id, project_id): - raise exception.NotImplemented() - - def check_endpoint_in_project(self, endpoint_id, project_id): - raise exception.NotImplemented() - - def list_endpoints_for_project(self, project_id): - raise exception.NotImplemented() - - def list_projects_for_endpoint(self, endpoint_id): - raise exception.NotImplemented() - - def delete_association_by_endpoint(self, endpoint_id): - raise 
exception.NotImplemented() - - def delete_association_by_project(self, project_id): - raise exception.NotImplemented() - - def create_endpoint_group(self, endpoint_group): - raise exception.NotImplemented() - - def get_endpoint_group(self, endpoint_group_id): - raise exception.NotImplemented() - - def update_endpoint_group(self, endpoint_group_id, endpoint_group): - raise exception.NotImplemented() - - def delete_endpoint_group(self, endpoint_group_id): - raise exception.NotImplemented() - - def add_endpoint_group_to_project(self, endpoint_group_id, project_id): - raise exception.NotImplemented() - - def get_endpoint_group_in_project(self, endpoint_group_id, project_id): - raise exception.NotImplemented() - - def list_endpoint_groups(self): - raise exception.NotImplemented() - - def list_endpoint_groups_for_project(self, project_id): - raise exception.NotImplemented() - - def list_projects_associated_with_endpoint_group(self, endpoint_group_id): - raise exception.NotImplemented() - - def remove_endpoint_group_from_project(self, endpoint_group_id, - project_id): - raise exception.NotImplemented() - - def delete_endpoint_group_association_by_project(self, project_id): - raise exception.NotImplemented() diff --git a/keystone-moon/keystone/catalog/controllers.py b/keystone-moon/keystone/catalog/controllers.py deleted file mode 100644 index fc64c922..00000000 --- a/keystone-moon/keystone/catalog/controllers.py +++ /dev/null @@ -1,615 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# Copyright 2012 Canonical Ltd. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -import uuid - -import six - -from keystone.catalog import core -from keystone.catalog import schema -from keystone.common import controller -from keystone.common import dependency -from keystone.common import validation -from keystone.common import wsgi -from keystone import exception -from keystone.i18n import _ -from keystone import notifications -from keystone import resource - - -INTERFACES = ['public', 'internal', 'admin'] - - -@dependency.requires('catalog_api') -class Service(controller.V2Controller): - - @controller.v2_deprecated - def get_services(self, context): - self.assert_admin(context) - service_list = self.catalog_api.list_services() - return {'OS-KSADM:services': service_list} - - @controller.v2_deprecated - def get_service(self, context, service_id): - self.assert_admin(context) - service_ref = self.catalog_api.get_service(service_id) - return {'OS-KSADM:service': service_ref} - - @controller.v2_deprecated - def delete_service(self, context, service_id): - self.assert_admin(context) - initiator = notifications._get_request_audit_info(context) - self.catalog_api.delete_service(service_id, initiator) - - @controller.v2_deprecated - def create_service(self, context, OS_KSADM_service): - self.assert_admin(context) - service_id = uuid.uuid4().hex - service_ref = OS_KSADM_service.copy() - service_ref['id'] = service_id - initiator = notifications._get_request_audit_info(context) - new_service_ref = self.catalog_api.create_service( - service_id, service_ref, initiator) - return {'OS-KSADM:service': new_service_ref} - - -@dependency.requires('catalog_api') -class Endpoint(controller.V2Controller): - - @controller.v2_deprecated - def get_endpoints(self, context): - """Merge matching v3 endpoint refs into legacy refs.""" - self.assert_admin(context) - legacy_endpoints = {} - v3_endpoints = {} - for endpoint in self.catalog_api.list_endpoints(): - if 
not endpoint.get('legacy_endpoint_id'): # pure v3 endpoint - # tell endpoints apart by the combination of - # service_id and region_id. - # NOTE(muyu): in theory, it's possible that there are more than - # one endpoint of one service, one region and one interface, - # but in practice, it makes no sense because only one will be - # used. - key = (endpoint['service_id'], endpoint['region_id']) - v3_endpoints.setdefault(key, []).append(endpoint) - else: # legacy endpoint - if endpoint['legacy_endpoint_id'] not in legacy_endpoints: - legacy_ep = endpoint.copy() - legacy_ep['id'] = legacy_ep.pop('legacy_endpoint_id') - legacy_ep.pop('interface') - legacy_ep.pop('url') - legacy_ep['region'] = legacy_ep.pop('region_id') - - legacy_endpoints[endpoint['legacy_endpoint_id']] = ( - legacy_ep) - else: - legacy_ep = ( - legacy_endpoints[endpoint['legacy_endpoint_id']]) - - # add the legacy endpoint with an interface url - legacy_ep['%surl' % endpoint['interface']] = endpoint['url'] - - # convert collected v3 endpoints into v2 endpoints - for endpoints in v3_endpoints.values(): - legacy_ep = {} - # For v3 endpoints in the same group, contents of extra attributes - # can be different, which may cause confusion if a random one is - # used. So only necessary attributes are used here. - # It's different for legacy v2 endpoints, which are created - # with the same "extra" value when being migrated. - for key in ('service_id', 'enabled'): - legacy_ep[key] = endpoints[0][key] - legacy_ep['region'] = endpoints[0]['region_id'] - for endpoint in endpoints: - # Public URL is required for v2 endpoints, so the generated v2 - # endpoint uses public endpoint's id as its id, which can also - # be an indicator whether a public v3 endpoint is present. - # It's safe to do so is also because that there is no v2 API to - # get an endpoint by endpoint ID. 
- if endpoint['interface'] == 'public': - legacy_ep['id'] = endpoint['id'] - legacy_ep['%surl' % endpoint['interface']] = endpoint['url'] - - # this means there is no public URL of this group of v3 endpoints - if 'id' not in legacy_ep: - continue - legacy_endpoints[legacy_ep['id']] = legacy_ep - return {'endpoints': list(legacy_endpoints.values())} - - @controller.v2_deprecated - def create_endpoint(self, context, endpoint): - """Create three v3 endpoint refs based on a legacy ref.""" - self.assert_admin(context) - - # according to the v2 spec publicurl is mandatory - self._require_attribute(endpoint, 'publicurl') - # service_id is necessary - self._require_attribute(endpoint, 'service_id') - - # we should check publicurl, adminurl, internalurl - # if invalid, we should raise an exception to reject - # the request - for interface in INTERFACES: - interface_url = endpoint.get(interface + 'url') - if interface_url: - core.check_endpoint_url(interface_url) - - initiator = notifications._get_request_audit_info(context) - - if endpoint.get('region') is not None: - try: - self.catalog_api.get_region(endpoint['region']) - except exception.RegionNotFound: - region = dict(id=endpoint['region']) - self.catalog_api.create_region(region, initiator) - - legacy_endpoint_ref = endpoint.copy() - - urls = {} - for i in INTERFACES: - # remove all urls so they aren't persisted them more than once - url = '%surl' % i - if endpoint.get(url): - # valid urls need to be persisted - urls[i] = endpoint.pop(url) - elif url in endpoint: - # null or empty urls can be discarded - endpoint.pop(url) - legacy_endpoint_ref.pop(url) - - legacy_endpoint_id = uuid.uuid4().hex - for interface, url in urls.items(): - endpoint_ref = endpoint.copy() - endpoint_ref['id'] = uuid.uuid4().hex - endpoint_ref['legacy_endpoint_id'] = legacy_endpoint_id - endpoint_ref['interface'] = interface - endpoint_ref['url'] = url - endpoint_ref['region_id'] = endpoint_ref.pop('region') - 
self.catalog_api.create_endpoint(endpoint_ref['id'], endpoint_ref, - initiator) - - legacy_endpoint_ref['id'] = legacy_endpoint_id - return {'endpoint': legacy_endpoint_ref} - - @controller.v2_deprecated - def delete_endpoint(self, context, endpoint_id): - """Delete up to three v3 endpoint refs based on a legacy ref ID.""" - self.assert_admin(context) - initiator = notifications._get_request_audit_info(context) - - deleted_at_least_one = False - for endpoint in self.catalog_api.list_endpoints(): - if endpoint['legacy_endpoint_id'] == endpoint_id: - self.catalog_api.delete_endpoint(endpoint['id'], initiator) - deleted_at_least_one = True - - if not deleted_at_least_one: - raise exception.EndpointNotFound(endpoint_id=endpoint_id) - - -@dependency.requires('catalog_api') -class RegionV3(controller.V3Controller): - collection_name = 'regions' - member_name = 'region' - - def create_region_with_id(self, context, region_id, region): - """Create a region with a user-specified ID. - - This method is unprotected because it depends on ``self.create_region`` - to enforce policy. 
- """ - if 'id' in region and region_id != region['id']: - raise exception.ValidationError( - _('Conflicting region IDs specified: ' - '"%(url_id)s" != "%(ref_id)s"') % { - 'url_id': region_id, - 'ref_id': region['id']}) - region['id'] = region_id - return self.create_region(context, region) - - @controller.protected() - @validation.validated(schema.region_create, 'region') - def create_region(self, context, region): - ref = self._normalize_dict(region) - - if not ref.get('id'): - ref = self._assign_unique_id(ref) - - initiator = notifications._get_request_audit_info(context) - ref = self.catalog_api.create_region(ref, initiator) - return wsgi.render_response( - RegionV3.wrap_member(context, ref), - status=(201, 'Created')) - - @controller.filterprotected('parent_region_id') - def list_regions(self, context, filters): - hints = RegionV3.build_driver_hints(context, filters) - refs = self.catalog_api.list_regions(hints) - return RegionV3.wrap_collection(context, refs, hints=hints) - - @controller.protected() - def get_region(self, context, region_id): - ref = self.catalog_api.get_region(region_id) - return RegionV3.wrap_member(context, ref) - - @controller.protected() - @validation.validated(schema.region_update, 'region') - def update_region(self, context, region_id, region): - self._require_matching_id(region_id, region) - initiator = notifications._get_request_audit_info(context) - ref = self.catalog_api.update_region(region_id, region, initiator) - return RegionV3.wrap_member(context, ref) - - @controller.protected() - def delete_region(self, context, region_id): - initiator = notifications._get_request_audit_info(context) - return self.catalog_api.delete_region(region_id, initiator) - - -@dependency.requires('catalog_api') -class ServiceV3(controller.V3Controller): - collection_name = 'services' - member_name = 'service' - - def __init__(self): - super(ServiceV3, self).__init__() - self.get_member_from_driver = self.catalog_api.get_service - - 
@controller.protected() - @validation.validated(schema.service_create, 'service') - def create_service(self, context, service): - ref = self._assign_unique_id(self._normalize_dict(service)) - initiator = notifications._get_request_audit_info(context) - ref = self.catalog_api.create_service(ref['id'], ref, initiator) - return ServiceV3.wrap_member(context, ref) - - @controller.filterprotected('type', 'name') - def list_services(self, context, filters): - hints = ServiceV3.build_driver_hints(context, filters) - refs = self.catalog_api.list_services(hints=hints) - return ServiceV3.wrap_collection(context, refs, hints=hints) - - @controller.protected() - def get_service(self, context, service_id): - ref = self.catalog_api.get_service(service_id) - return ServiceV3.wrap_member(context, ref) - - @controller.protected() - @validation.validated(schema.service_update, 'service') - def update_service(self, context, service_id, service): - self._require_matching_id(service_id, service) - initiator = notifications._get_request_audit_info(context) - ref = self.catalog_api.update_service(service_id, service, initiator) - return ServiceV3.wrap_member(context, ref) - - @controller.protected() - def delete_service(self, context, service_id): - initiator = notifications._get_request_audit_info(context) - return self.catalog_api.delete_service(service_id, initiator) - - -@dependency.requires('catalog_api') -class EndpointV3(controller.V3Controller): - collection_name = 'endpoints' - member_name = 'endpoint' - - def __init__(self): - super(EndpointV3, self).__init__() - self.get_member_from_driver = self.catalog_api.get_endpoint - - @classmethod - def filter_endpoint(cls, ref): - if 'legacy_endpoint_id' in ref: - ref.pop('legacy_endpoint_id') - ref['region'] = ref['region_id'] - return ref - - @classmethod - def wrap_member(cls, context, ref): - ref = cls.filter_endpoint(ref) - return super(EndpointV3, cls).wrap_member(context, ref) - - def _validate_endpoint_region(self, endpoint, 
context=None): - """Ensure the region for the endpoint exists. - - If 'region_id' is used to specify the region, then we will let the - manager/driver take care of this. If, however, 'region' is used, - then for backward compatibility, we will auto-create the region. - - """ - if (endpoint.get('region_id') is None and - endpoint.get('region') is not None): - # To maintain backward compatibility with clients that are - # using the v3 API in the same way as they used the v2 API, - # create the endpoint region, if that region does not exist - # in keystone. - endpoint['region_id'] = endpoint.pop('region') - try: - self.catalog_api.get_region(endpoint['region_id']) - except exception.RegionNotFound: - region = dict(id=endpoint['region_id']) - initiator = notifications._get_request_audit_info(context) - self.catalog_api.create_region(region, initiator) - - return endpoint - - @controller.protected() - @validation.validated(schema.endpoint_create, 'endpoint') - def create_endpoint(self, context, endpoint): - core.check_endpoint_url(endpoint['url']) - ref = self._assign_unique_id(self._normalize_dict(endpoint)) - ref = self._validate_endpoint_region(ref, context) - initiator = notifications._get_request_audit_info(context) - ref = self.catalog_api.create_endpoint(ref['id'], ref, initiator) - return EndpointV3.wrap_member(context, ref) - - @controller.filterprotected('interface', 'service_id', 'region_id') - def list_endpoints(self, context, filters): - hints = EndpointV3.build_driver_hints(context, filters) - refs = self.catalog_api.list_endpoints(hints=hints) - return EndpointV3.wrap_collection(context, refs, hints=hints) - - @controller.protected() - def get_endpoint(self, context, endpoint_id): - ref = self.catalog_api.get_endpoint(endpoint_id) - return EndpointV3.wrap_member(context, ref) - - @controller.protected() - @validation.validated(schema.endpoint_update, 'endpoint') - def update_endpoint(self, context, endpoint_id, endpoint): - 
self._require_matching_id(endpoint_id, endpoint) - - endpoint = self._validate_endpoint_region(endpoint.copy(), context) - - initiator = notifications._get_request_audit_info(context) - ref = self.catalog_api.update_endpoint(endpoint_id, endpoint, - initiator) - return EndpointV3.wrap_member(context, ref) - - @controller.protected() - def delete_endpoint(self, context, endpoint_id): - initiator = notifications._get_request_audit_info(context) - return self.catalog_api.delete_endpoint(endpoint_id, initiator) - - -@dependency.requires('catalog_api', 'resource_api') -class EndpointFilterV3Controller(controller.V3Controller): - - def __init__(self): - super(EndpointFilterV3Controller, self).__init__() - notifications.register_event_callback( - notifications.ACTIONS.deleted, 'project', - self._on_project_or_endpoint_delete) - notifications.register_event_callback( - notifications.ACTIONS.deleted, 'endpoint', - self._on_project_or_endpoint_delete) - - def _on_project_or_endpoint_delete(self, service, resource_type, operation, - payload): - project_or_endpoint_id = payload['resource_info'] - if resource_type == 'project': - self.catalog_api.delete_association_by_project( - project_or_endpoint_id) - else: - self.catalog_api.delete_association_by_endpoint( - project_or_endpoint_id) - - @controller.protected() - def add_endpoint_to_project(self, context, project_id, endpoint_id): - """Establishes an association between an endpoint and a project.""" - # NOTE(gyee): we just need to make sure endpoint and project exist - # first. We don't really care whether if project is disabled. - # The relationship can still be established even with a disabled - # project as there are no security implications. 
- self.catalog_api.get_endpoint(endpoint_id) - self.resource_api.get_project(project_id) - self.catalog_api.add_endpoint_to_project(endpoint_id, - project_id) - - @controller.protected() - def check_endpoint_in_project(self, context, project_id, endpoint_id): - """Verifies endpoint is currently associated with given project.""" - self.catalog_api.get_endpoint(endpoint_id) - self.resource_api.get_project(project_id) - self.catalog_api.check_endpoint_in_project(endpoint_id, - project_id) - - @controller.protected() - def list_endpoints_for_project(self, context, project_id): - """List all endpoints currently associated with a given project.""" - self.resource_api.get_project(project_id) - filtered_endpoints = self.catalog_api.list_endpoints_for_project( - project_id) - - return EndpointV3.wrap_collection( - context, [v for v in six.itervalues(filtered_endpoints)]) - - @controller.protected() - def remove_endpoint_from_project(self, context, project_id, endpoint_id): - """Remove the endpoint from the association with given project.""" - self.catalog_api.remove_endpoint_from_project(endpoint_id, - project_id) - - @controller.protected() - def list_projects_for_endpoint(self, context, endpoint_id): - """Return a list of projects associated with the endpoint.""" - self.catalog_api.get_endpoint(endpoint_id) - refs = self.catalog_api.list_projects_for_endpoint(endpoint_id) - - projects = [self.resource_api.get_project( - ref['project_id']) for ref in refs] - return resource.controllers.ProjectV3.wrap_collection(context, - projects) - - -@dependency.requires('catalog_api', 'resource_api') -class EndpointGroupV3Controller(controller.V3Controller): - collection_name = 'endpoint_groups' - member_name = 'endpoint_group' - - VALID_FILTER_KEYS = ['service_id', 'region_id', 'interface'] - - def __init__(self): - super(EndpointGroupV3Controller, self).__init__() - - @classmethod - def base_url(cls, context, path=None): - """Construct a path and pass it to V3Controller.base_url 
method.""" - path = '/OS-EP-FILTER/' + cls.collection_name - return super(EndpointGroupV3Controller, cls).base_url(context, - path=path) - - @controller.protected() - @validation.validated(schema.endpoint_group_create, 'endpoint_group') - def create_endpoint_group(self, context, endpoint_group): - """Creates an Endpoint Group with the associated filters.""" - ref = self._assign_unique_id(self._normalize_dict(endpoint_group)) - self._require_attribute(ref, 'filters') - self._require_valid_filter(ref) - ref = self.catalog_api.create_endpoint_group(ref['id'], ref) - return EndpointGroupV3Controller.wrap_member(context, ref) - - def _require_valid_filter(self, endpoint_group): - filters = endpoint_group.get('filters') - for key in six.iterkeys(filters): - if key not in self.VALID_FILTER_KEYS: - raise exception.ValidationError( - attribute=self._valid_filter_keys(), - target='endpoint_group') - - def _valid_filter_keys(self): - return ' or '.join(self.VALID_FILTER_KEYS) - - @controller.protected() - def get_endpoint_group(self, context, endpoint_group_id): - """Retrieve the endpoint group associated with the id if exists.""" - ref = self.catalog_api.get_endpoint_group(endpoint_group_id) - return EndpointGroupV3Controller.wrap_member( - context, ref) - - @controller.protected() - @validation.validated(schema.endpoint_group_update, 'endpoint_group') - def update_endpoint_group(self, context, endpoint_group_id, - endpoint_group): - """Update fixed values and/or extend the filters.""" - if 'filters' in endpoint_group: - self._require_valid_filter(endpoint_group) - ref = self.catalog_api.update_endpoint_group(endpoint_group_id, - endpoint_group) - return EndpointGroupV3Controller.wrap_member( - context, ref) - - @controller.protected() - def delete_endpoint_group(self, context, endpoint_group_id): - """Delete endpoint_group.""" - self.catalog_api.delete_endpoint_group(endpoint_group_id) - - @controller.protected() - def list_endpoint_groups(self, context): - """List all 
endpoint groups.""" - refs = self.catalog_api.list_endpoint_groups() - return EndpointGroupV3Controller.wrap_collection( - context, refs) - - @controller.protected() - def list_endpoint_groups_for_project(self, context, project_id): - """List all endpoint groups associated with a given project.""" - return EndpointGroupV3Controller.wrap_collection( - context, - self.catalog_api.get_endpoint_groups_for_project(project_id)) - - @controller.protected() - def list_projects_associated_with_endpoint_group(self, - context, - endpoint_group_id): - """List all projects associated with endpoint group.""" - endpoint_group_refs = (self.catalog_api. - list_projects_associated_with_endpoint_group( - endpoint_group_id)) - projects = [] - for endpoint_group_ref in endpoint_group_refs: - project = self.resource_api.get_project( - endpoint_group_ref['project_id']) - if project: - projects.append(project) - return resource.controllers.ProjectV3.wrap_collection(context, - projects) - - @controller.protected() - def list_endpoints_associated_with_endpoint_group(self, - context, - endpoint_group_id): - """List all the endpoints filtered by a specific endpoint group.""" - filtered_endpoints = (self.catalog_api. - get_endpoints_filtered_by_endpoint_group( - endpoint_group_id)) - return EndpointV3.wrap_collection(context, filtered_endpoints) - - -@dependency.requires('catalog_api', 'resource_api') -class ProjectEndpointGroupV3Controller(controller.V3Controller): - collection_name = 'project_endpoint_groups' - member_name = 'project_endpoint_group' - - def __init__(self): - super(ProjectEndpointGroupV3Controller, self).__init__() - notifications.register_event_callback( - notifications.ACTIONS.deleted, 'project', - self._on_project_delete) - - def _on_project_delete(self, service, resource_type, - operation, payload): - project_id = payload['resource_info'] - (self.catalog_api. 
- delete_endpoint_group_association_by_project( - project_id)) - - @controller.protected() - def get_endpoint_group_in_project(self, context, endpoint_group_id, - project_id): - """Retrieve the endpoint group associated with the id if exists.""" - self.resource_api.get_project(project_id) - self.catalog_api.get_endpoint_group(endpoint_group_id) - ref = self.catalog_api.get_endpoint_group_in_project( - endpoint_group_id, project_id) - return ProjectEndpointGroupV3Controller.wrap_member( - context, ref) - - @controller.protected() - def add_endpoint_group_to_project(self, context, endpoint_group_id, - project_id): - """Creates an association between an endpoint group and project.""" - self.resource_api.get_project(project_id) - self.catalog_api.get_endpoint_group(endpoint_group_id) - self.catalog_api.add_endpoint_group_to_project( - endpoint_group_id, project_id) - - @controller.protected() - def remove_endpoint_group_from_project(self, context, endpoint_group_id, - project_id): - """Remove the endpoint group from associated project.""" - self.resource_api.get_project(project_id) - self.catalog_api.get_endpoint_group(endpoint_group_id) - self.catalog_api.remove_endpoint_group_from_project( - endpoint_group_id, project_id) - - @classmethod - def _add_self_referential_link(cls, context, ref): - url = ('/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' - '/projects/%(project_id)s' % { - 'endpoint_group_id': ref['endpoint_group_id'], - 'project_id': ref['project_id']}) - ref.setdefault('links', {}) - ref['links']['self'] = url diff --git a/keystone-moon/keystone/catalog/core.py b/keystone-moon/keystone/catalog/core.py deleted file mode 100644 index 384a9b2b..00000000 --- a/keystone-moon/keystone/catalog/core.py +++ /dev/null @@ -1,894 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# Copyright 2012 Canonical Ltd. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Main entry point into the Catalog service.""" - -import abc -import itertools - -from oslo_cache import core as oslo_cache -from oslo_config import cfg -from oslo_log import log -import six - -from keystone.common import cache -from keystone.common import dependency -from keystone.common import driver_hints -from keystone.common import manager -from keystone.common import utils -from keystone import exception -from keystone.i18n import _ -from keystone.i18n import _LE -from keystone import notifications - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) -WHITELISTED_PROPERTIES = [ - 'tenant_id', 'project_id', 'user_id', - 'public_bind_host', 'admin_bind_host', - 'compute_host', 'admin_port', 'public_port', - 'public_endpoint', 'admin_endpoint', ] - -# This is a general cache region for catalog administration (CRUD operations). -MEMOIZE = cache.get_memoization_decorator(group='catalog') - -# This builds a discrete cache region dedicated to complete service catalogs -# computed for a given user + project pair. Any write operation to create, -# modify or delete elements of the service catalog should invalidate this -# entire cache region. -COMPUTED_CATALOG_REGION = oslo_cache.create_region() -MEMOIZE_COMPUTED_CATALOG = cache.get_memoization_decorator( - group='catalog', - region=COMPUTED_CATALOG_REGION) - - -def format_url(url, substitutions, silent_keyerror_failures=None): - """Formats a user-defined URL with the given substitutions. 
- - :param string url: the URL to be formatted - :param dict substitutions: the dictionary used for substitution - :param list silent_keyerror_failures: keys for which we should be silent - if there is a KeyError exception on substitution attempt - :returns: a formatted URL - - """ - substitutions = utils.WhiteListedItemFilter( - WHITELISTED_PROPERTIES, - substitutions) - allow_keyerror = silent_keyerror_failures or [] - try: - result = url.replace('$(', '%(') % substitutions - except AttributeError: - LOG.error(_LE('Malformed endpoint - %(url)r is not a string'), - {"url": url}) - raise exception.MalformedEndpoint(endpoint=url) - except KeyError as e: - if not e.args or e.args[0] not in allow_keyerror: - LOG.error(_LE("Malformed endpoint %(url)s - unknown key " - "%(keyerror)s"), - {"url": url, - "keyerror": e}) - raise exception.MalformedEndpoint(endpoint=url) - else: - result = None - except TypeError as e: - LOG.error(_LE("Malformed endpoint '%(url)s'. The following type error " - "occurred during string substitution: %(typeerror)s"), - {"url": url, - "typeerror": e}) - raise exception.MalformedEndpoint(endpoint=url) - except ValueError as e: - LOG.error(_LE("Malformed endpoint %s - incomplete format " - "(are you missing a type notifier ?)"), url) - raise exception.MalformedEndpoint(endpoint=url) - return result - - -def check_endpoint_url(url): - """Check substitution of url. - - The invalid urls are as follows: - urls with substitutions that is not in the whitelist - - Check the substitutions in the URL to make sure they are valid - and on the whitelist. 
- - :param str url: the URL to validate - :rtype: None - :raises keystone.exception.URLValidationError: if the URL is invalid - """ - # check whether the property in the path is exactly the same - # with that in the whitelist below - substitutions = dict(zip(WHITELISTED_PROPERTIES, itertools.repeat(''))) - try: - url.replace('$(', '%(') % substitutions - except (KeyError, TypeError, ValueError): - raise exception.URLValidationError(url) - - -@dependency.provider('catalog_api') -@dependency.requires('resource_api') -class Manager(manager.Manager): - """Default pivot point for the Catalog backend. - - See :mod:`keystone.common.manager.Manager` for more details on how this - dynamically calls the backend. - - """ - - driver_namespace = 'keystone.catalog' - - _ENDPOINT = 'endpoint' - _SERVICE = 'service' - _REGION = 'region' - - def __init__(self): - super(Manager, self).__init__(CONF.catalog.driver) - - def create_region(self, region_ref, initiator=None): - # Check duplicate ID - try: - self.get_region(region_ref['id']) - except exception.RegionNotFound: # nosec - # A region with the same id doesn't exist already, good. - pass - else: - msg = _('Duplicate ID, %s.') % region_ref['id'] - raise exception.Conflict(type='region', details=msg) - - # NOTE(lbragstad,dstanek): The description column of the region - # database cannot be null. So if the user doesn't pass in a - # description or passes in a null description then set it to an - # empty string. 
- if region_ref.get('description') is None: - region_ref['description'] = '' - try: - ret = self.driver.create_region(region_ref) - except exception.NotFound: - parent_region_id = region_ref.get('parent_region_id') - raise exception.RegionNotFound(region_id=parent_region_id) - - notifications.Audit.created(self._REGION, ret['id'], initiator) - COMPUTED_CATALOG_REGION.invalidate() - return ret - - @MEMOIZE - def get_region(self, region_id): - try: - return self.driver.get_region(region_id) - except exception.NotFound: - raise exception.RegionNotFound(region_id=region_id) - - def update_region(self, region_id, region_ref, initiator=None): - # NOTE(lbragstad,dstanek): The description column of the region - # database cannot be null. So if the user passes in a null - # description set it to an empty string. - if 'description' in region_ref and region_ref['description'] is None: - region_ref['description'] = '' - ref = self.driver.update_region(region_id, region_ref) - notifications.Audit.updated(self._REGION, region_id, initiator) - self.get_region.invalidate(self, region_id) - COMPUTED_CATALOG_REGION.invalidate() - return ref - - def delete_region(self, region_id, initiator=None): - try: - ret = self.driver.delete_region(region_id) - notifications.Audit.deleted(self._REGION, region_id, initiator) - self.get_region.invalidate(self, region_id) - COMPUTED_CATALOG_REGION.invalidate() - return ret - except exception.NotFound: - raise exception.RegionNotFound(region_id=region_id) - - @manager.response_truncated - def list_regions(self, hints=None): - return self.driver.list_regions(hints or driver_hints.Hints()) - - def create_service(self, service_id, service_ref, initiator=None): - service_ref.setdefault('enabled', True) - service_ref.setdefault('name', '') - ref = self.driver.create_service(service_id, service_ref) - notifications.Audit.created(self._SERVICE, service_id, initiator) - COMPUTED_CATALOG_REGION.invalidate() - return ref - - @MEMOIZE - def get_service(self, 
service_id): - try: - return self.driver.get_service(service_id) - except exception.NotFound: - raise exception.ServiceNotFound(service_id=service_id) - - def update_service(self, service_id, service_ref, initiator=None): - ref = self.driver.update_service(service_id, service_ref) - notifications.Audit.updated(self._SERVICE, service_id, initiator) - self.get_service.invalidate(self, service_id) - COMPUTED_CATALOG_REGION.invalidate() - return ref - - def delete_service(self, service_id, initiator=None): - try: - endpoints = self.list_endpoints() - ret = self.driver.delete_service(service_id) - notifications.Audit.deleted(self._SERVICE, service_id, initiator) - self.get_service.invalidate(self, service_id) - for endpoint in endpoints: - if endpoint['service_id'] == service_id: - self.get_endpoint.invalidate(self, endpoint['id']) - COMPUTED_CATALOG_REGION.invalidate() - return ret - except exception.NotFound: - raise exception.ServiceNotFound(service_id=service_id) - - @manager.response_truncated - def list_services(self, hints=None): - return self.driver.list_services(hints or driver_hints.Hints()) - - def _assert_region_exists(self, region_id): - try: - if region_id is not None: - self.get_region(region_id) - except exception.RegionNotFound: - raise exception.ValidationError(attribute='endpoint region_id', - target='region table') - - def _assert_service_exists(self, service_id): - try: - if service_id is not None: - self.get_service(service_id) - except exception.ServiceNotFound: - raise exception.ValidationError(attribute='endpoint service_id', - target='service table') - - def create_endpoint(self, endpoint_id, endpoint_ref, initiator=None): - self._assert_region_exists(endpoint_ref.get('region_id')) - self._assert_service_exists(endpoint_ref['service_id']) - ref = self.driver.create_endpoint(endpoint_id, endpoint_ref) - - notifications.Audit.created(self._ENDPOINT, endpoint_id, initiator) - COMPUTED_CATALOG_REGION.invalidate() - return ref - - def 
update_endpoint(self, endpoint_id, endpoint_ref, initiator=None): - self._assert_region_exists(endpoint_ref.get('region_id')) - self._assert_service_exists(endpoint_ref.get('service_id')) - ref = self.driver.update_endpoint(endpoint_id, endpoint_ref) - notifications.Audit.updated(self._ENDPOINT, endpoint_id, initiator) - self.get_endpoint.invalidate(self, endpoint_id) - COMPUTED_CATALOG_REGION.invalidate() - return ref - - def delete_endpoint(self, endpoint_id, initiator=None): - try: - ret = self.driver.delete_endpoint(endpoint_id) - notifications.Audit.deleted(self._ENDPOINT, endpoint_id, initiator) - self.get_endpoint.invalidate(self, endpoint_id) - COMPUTED_CATALOG_REGION.invalidate() - return ret - except exception.NotFound: - raise exception.EndpointNotFound(endpoint_id=endpoint_id) - - @MEMOIZE - def get_endpoint(self, endpoint_id): - try: - return self.driver.get_endpoint(endpoint_id) - except exception.NotFound: - raise exception.EndpointNotFound(endpoint_id=endpoint_id) - - @manager.response_truncated - def list_endpoints(self, hints=None): - return self.driver.list_endpoints(hints or driver_hints.Hints()) - - @MEMOIZE_COMPUTED_CATALOG - def get_catalog(self, user_id, tenant_id): - try: - return self.driver.get_catalog(user_id, tenant_id) - except exception.NotFound: - raise exception.NotFound('Catalog not found for user and tenant') - - @MEMOIZE_COMPUTED_CATALOG - def get_v3_catalog(self, user_id, tenant_id): - return self.driver.get_v3_catalog(user_id, tenant_id) - - def add_endpoint_to_project(self, endpoint_id, project_id): - self.driver.add_endpoint_to_project(endpoint_id, project_id) - COMPUTED_CATALOG_REGION.invalidate() - - def remove_endpoint_from_project(self, endpoint_id, project_id): - self.driver.remove_endpoint_from_project(endpoint_id, project_id) - COMPUTED_CATALOG_REGION.invalidate() - - def add_endpoint_group_to_project(self, endpoint_group_id, project_id): - self.driver.add_endpoint_group_to_project( - endpoint_group_id, project_id) - 
COMPUTED_CATALOG_REGION.invalidate() - - def remove_endpoint_group_from_project(self, endpoint_group_id, - project_id): - self.driver.remove_endpoint_group_from_project( - endpoint_group_id, project_id) - COMPUTED_CATALOG_REGION.invalidate() - - def get_endpoint_groups_for_project(self, project_id): - # recover the project endpoint group memberships and for each - # membership recover the endpoint group - self.resource_api.get_project(project_id) - try: - refs = self.list_endpoint_groups_for_project(project_id) - endpoint_groups = [self.get_endpoint_group( - ref['endpoint_group_id']) for ref in refs] - return endpoint_groups - except exception.EndpointGroupNotFound: - return [] - - def get_endpoints_filtered_by_endpoint_group(self, endpoint_group_id): - endpoints = self.list_endpoints() - filters = self.get_endpoint_group(endpoint_group_id)['filters'] - filtered_endpoints = [] - - for endpoint in endpoints: - is_candidate = True - for key, value in filters.items(): - if endpoint[key] != value: - is_candidate = False - break - if is_candidate: - filtered_endpoints.append(endpoint) - return filtered_endpoints - - def list_endpoints_for_project(self, project_id): - """List all endpoints associated with a project. - - :param project_id: project identifier to check - :type project_id: string - :returns: a list of endpoint ids or an empty list. - - """ - refs = self.driver.list_endpoints_for_project(project_id) - filtered_endpoints = {} - for ref in refs: - try: - endpoint = self.get_endpoint(ref['endpoint_id']) - filtered_endpoints.update({ref['endpoint_id']: endpoint}) - except exception.EndpointNotFound: - # remove bad reference from association - self.remove_endpoint_from_project(ref['endpoint_id'], - project_id) - - # need to recover endpoint_groups associated with project - # then for each endpoint group return the endpoints. 
- endpoint_groups = self.get_endpoint_groups_for_project(project_id) - for endpoint_group in endpoint_groups: - endpoint_refs = self.get_endpoints_filtered_by_endpoint_group( - endpoint_group['id']) - # now check if any endpoints for current endpoint group are not - # contained in the list of filtered endpoints - for endpoint_ref in endpoint_refs: - if endpoint_ref['id'] not in filtered_endpoints: - filtered_endpoints[endpoint_ref['id']] = endpoint_ref - - return filtered_endpoints - - -@six.add_metaclass(abc.ABCMeta) -class CatalogDriverV8(object): - """Interface description for the Catalog driver.""" - - def _get_list_limit(self): - return CONF.catalog.list_limit or CONF.list_limit - - def _ensure_no_circle_in_hierarchical_regions(self, region_ref): - if region_ref.get('parent_region_id') is None: - return - - root_region_id = region_ref['id'] - parent_region_id = region_ref['parent_region_id'] - - while parent_region_id: - # NOTE(wanghong): check before getting parent region can ensure no - # self circle - if parent_region_id == root_region_id: - raise exception.CircularRegionHierarchyError( - parent_region_id=parent_region_id) - parent_region = self.get_region(parent_region_id) - parent_region_id = parent_region.get('parent_region_id') - - @abc.abstractmethod - def create_region(self, region_ref): - """Creates a new region. - - :raises keystone.exception.Conflict: If the region already exists. - :raises keystone.exception.RegionNotFound: If the parent region - is invalid. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_regions(self, hints): - """List all regions. - - :param hints: contains the list of filters yet to be satisfied. - Any filters satisfied here will be removed so that - the caller will know if any filters remain. - - :returns: list of region_refs or an empty list. 
- - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_region(self, region_id): - """Get region by id. - - :returns: region_ref dict - :raises keystone.exception.RegionNotFound: If the region doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def update_region(self, region_id, region_ref): - """Update region by id. - - :returns: region_ref dict - :raises keystone.exception.RegionNotFound: If the region doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_region(self, region_id): - """Deletes an existing region. - - :raises keystone.exception.RegionNotFound: If the region doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def create_service(self, service_id, service_ref): - """Creates a new service. - - :raises keystone.exception.Conflict: If a duplicate service exists. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_services(self, hints): - """List all services. - - :param hints: contains the list of filters yet to be satisfied. - Any filters satisfied here will be removed so that - the caller will know if any filters remain. - - :returns: list of service_refs or an empty list. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_service(self, service_id): - """Get service by id. - - :returns: service_ref dict - :raises keystone.exception.ServiceNotFound: If the service doesn't - exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def update_service(self, service_id, service_ref): - """Update service by id. - - :returns: service_ref dict - :raises keystone.exception.ServiceNotFound: If the service doesn't - exist. 
- - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_service(self, service_id): - """Deletes an existing service. - - :raises keystone.exception.ServiceNotFound: If the service doesn't - exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def create_endpoint(self, endpoint_id, endpoint_ref): - """Creates a new endpoint for a service. - - :raises keystone.exception.Conflict: If a duplicate endpoint exists. - :raises keystone.exception.ServiceNotFound: If the service doesn't - exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_endpoint(self, endpoint_id): - """Get endpoint by id. - - :returns: endpoint_ref dict - :raises keystone.exception.EndpointNotFound: If the endpoint doesn't - exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_endpoints(self, hints): - """List all endpoints. - - :param hints: contains the list of filters yet to be satisfied. - Any filters satisfied here will be removed so that - the caller will know if any filters remain. - - :returns: list of endpoint_refs or an empty list. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def update_endpoint(self, endpoint_id, endpoint_ref): - """Get endpoint by id. - - :returns: endpoint_ref dict - :raises keystone.exception.EndpointNotFound: If the endpoint doesn't - exist. - :raises keystone.exception.ServiceNotFound: If the service doesn't - exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_endpoint(self, endpoint_id): - """Deletes an endpoint for a service. - - :raises keystone.exception.EndpointNotFound: If the endpoint doesn't - exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_catalog(self, user_id, tenant_id): - """Retrieve and format the current service catalog. 
- - Example:: - - { 'RegionOne': - {'compute': { - 'adminURL': u'http://host:8774/v1.1/tenantid', - 'internalURL': u'http://host:8774/v1.1/tenant_id', - 'name': 'Compute Service', - 'publicURL': u'http://host:8774/v1.1/tenantid'}, - 'ec2': { - 'adminURL': 'http://host:8773/services/Admin', - 'internalURL': 'http://host:8773/services/Cloud', - 'name': 'EC2 Service', - 'publicURL': 'http://host:8773/services/Cloud'}} - - :returns: A nested dict representing the service catalog or an - empty dict. - :raises keystone.exception.NotFound: If the endpoint doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - def get_v3_catalog(self, user_id, tenant_id): - """Retrieve and format the current V3 service catalog. - - The default implementation builds the V3 catalog from the V2 catalog. - - Example:: - - [ - { - "endpoints": [ - { - "interface": "public", - "id": "--endpoint-id--", - "region": "RegionOne", - "url": "http://external:8776/v1/--project-id--" - }, - { - "interface": "internal", - "id": "--endpoint-id--", - "region": "RegionOne", - "url": "http://internal:8776/v1/--project-id--" - }], - "id": "--service-id--", - "type": "volume" - }] - - :returns: A list representing the service catalog or an empty list - :raises keystone.exception.NotFound: If the endpoint doesn't exist. - - """ - v2_catalog = self.get_catalog(user_id, tenant_id) - v3_catalog = [] - - for region_name, region in v2_catalog.items(): - for service_type, service in region.items(): - service_v3 = { - 'type': service_type, - 'endpoints': [] - } - - for attr, value in service.items(): - # Attributes that end in URL are interfaces. In the V2 - # catalog, these are internalURL, publicURL, and adminURL. 
- # For example, .publicURL= in the V2 - # catalog becomes the V3 interface for the service: - # { 'interface': 'public', 'url': '', 'region': - # 'region: '' } - if attr.endswith('URL'): - v3_interface = attr[:-len('URL')] - service_v3['endpoints'].append({ - 'interface': v3_interface, - 'region': region_name, - 'url': value, - }) - continue - - # Other attributes are copied to the service. - service_v3[attr] = value - - v3_catalog.append(service_v3) - - return v3_catalog - - @abc.abstractmethod - def add_endpoint_to_project(self, endpoint_id, project_id): - """Create an endpoint to project association. - - :param endpoint_id: identity of endpoint to associate - :type endpoint_id: string - :param project_id: identity of the project to be associated with - :type project_id: string - :raises: keystone.exception.Conflict: If the endpoint was already - added to project. - :returns: None. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def remove_endpoint_from_project(self, endpoint_id, project_id): - """Removes an endpoint to project association. - - :param endpoint_id: identity of endpoint to remove - :type endpoint_id: string - :param project_id: identity of the project associated with - :type project_id: string - :raises keystone.exception.NotFound: If the endpoint was not found - in the project. - :returns: None. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def check_endpoint_in_project(self, endpoint_id, project_id): - """Checks if an endpoint is associated with a project. - - :param endpoint_id: identity of endpoint to check - :type endpoint_id: string - :param project_id: identity of the project associated with - :type project_id: string - :raises keystone.exception.NotFound: If the endpoint was not found - in the project. - :returns: None. 
- - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_endpoints_for_project(self, project_id): - """List all endpoints associated with a project. - - :param project_id: identity of the project to check - :type project_id: string - :returns: a list of identity endpoint ids or an empty list. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_projects_for_endpoint(self, endpoint_id): - """List all projects associated with an endpoint. - - :param endpoint_id: identity of endpoint to check - :type endpoint_id: string - :returns: a list of projects or an empty list. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_association_by_endpoint(self, endpoint_id): - """Removes all the endpoints to project association with endpoint. - - :param endpoint_id: identity of endpoint to check - :type endpoint_id: string - :returns: None - - """ - raise exception.NotImplemented() - - @abc.abstractmethod - def delete_association_by_project(self, project_id): - """Removes all the endpoints to project association with project. - - :param project_id: identity of the project to check - :type project_id: string - :returns: None - - """ - raise exception.NotImplemented() - - @abc.abstractmethod - def create_endpoint_group(self, endpoint_group): - """Create an endpoint group. - - :param endpoint_group: endpoint group to create - :type endpoint_group: dictionary - :raises: keystone.exception.Conflict: If a duplicate endpoint group - already exists. - :returns: an endpoint group representation. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_endpoint_group(self, endpoint_group_id): - """Get an endpoint group. - - :param endpoint_group_id: identity of endpoint group to retrieve - :type endpoint_group_id: string - :raises keystone.exception.NotFound: If the endpoint group was not - found. 
- :returns: an endpoint group representation. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def update_endpoint_group(self, endpoint_group_id, endpoint_group): - """Update an endpoint group. - - :param endpoint_group_id: identity of endpoint group to retrieve - :type endpoint_group_id: string - :param endpoint_group: A full or partial endpoint_group - :type endpoint_group: dictionary - :raises keystone.exception.NotFound: If the endpoint group was not - found. - :returns: an endpoint group representation. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_endpoint_group(self, endpoint_group_id): - """Delete an endpoint group. - - :param endpoint_group_id: identity of endpoint group to delete - :type endpoint_group_id: string - :raises keystone.exception.NotFound: If the endpoint group was not - found. - :returns: None. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def add_endpoint_group_to_project(self, endpoint_group_id, project_id): - """Adds an endpoint group to project association. - - :param endpoint_group_id: identity of endpoint to associate - :type endpoint_group_id: string - :param project_id: identity of project to associate - :type project_id: string - :raises keystone.exception.Conflict: If the endpoint group was already - added to the project. - :returns: None. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_endpoint_group_in_project(self, endpoint_group_id, project_id): - """Get endpoint group to project association. - - :param endpoint_group_id: identity of endpoint group to retrieve - :type endpoint_group_id: string - :param project_id: identity of project to associate - :type project_id: string - :raises keystone.exception.NotFound: If the endpoint group to the - project association was not found. - :returns: a project endpoint group representation. 
- - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_endpoint_groups(self): - """List all endpoint groups. - - :returns: None. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_endpoint_groups_for_project(self, project_id): - """List all endpoint group to project associations for a project. - - :param project_id: identity of project to associate - :type project_id: string - :returns: None. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_projects_associated_with_endpoint_group(self, endpoint_group_id): - """List all projects associated with endpoint group. - - :param endpoint_group_id: identity of endpoint to associate - :type endpoint_group_id: string - :returns: None. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def remove_endpoint_group_from_project(self, endpoint_group_id, - project_id): - """Remove an endpoint to project association. - - :param endpoint_group_id: identity of endpoint to associate - :type endpoint_group_id: string - :param project_id: identity of project to associate - :type project_id: string - :raises keystone.exception.NotFound: If endpoint group project - association was not found. - :returns: None. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_endpoint_group_association_by_project(self, project_id): - """Remove endpoint group to project associations. 
- - :param project_id: identity of the project to check - :type project_id: string - :returns: None - - """ - raise exception.NotImplemented() # pragma: no cover - -Driver = manager.create_legacy_driver(CatalogDriverV8) diff --git a/keystone-moon/keystone/catalog/routers.py b/keystone-moon/keystone/catalog/routers.py deleted file mode 100644 index 8c6e96f0..00000000 --- a/keystone-moon/keystone/catalog/routers.py +++ /dev/null @@ -1,182 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import functools - -from keystone.catalog import controllers -from keystone.common import json_home -from keystone.common import router -from keystone.common import wsgi - - -build_resource_relation = functools.partial( - json_home.build_v3_extension_resource_relation, - extension_name='OS-EP-FILTER', extension_version='1.0') - -build_parameter_relation = functools.partial( - json_home.build_v3_extension_parameter_relation, - extension_name='OS-EP-FILTER', extension_version='1.0') - -ENDPOINT_GROUP_PARAMETER_RELATION = build_parameter_relation( - parameter_name='endpoint_group_id') - - -class Routers(wsgi.RoutersBase): - """API for the keystone catalog. 
- - The API Endpoint Filter looks like:: - - PUT /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id} - GET /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id} - HEAD /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id} - DELETE /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id} - GET /OS-EP-FILTER/endpoints/{endpoint_id}/projects - GET /OS-EP-FILTER/projects/{project_id}/endpoints - GET /OS-EP-FILTER/projects/{project_id}/endpoint_groups - - GET /OS-EP-FILTER/endpoint_groups - POST /OS-EP-FILTER/endpoint_groups - GET /OS-EP-FILTER/endpoint_groups/{endpoint_group_id} - HEAD /OS-EP-FILTER/endpoint_groups/{endpoint_group_id} - PATCH /OS-EP-FILTER/endpoint_groups/{endpoint_group_id} - DELETE /OS-EP-FILTER/endpoint_groups/{endpoint_group_id} - - GET /OS-EP-FILTER/endpoint_groups/{endpoint_group_id}/projects - GET /OS-EP-FILTER/endpoint_groups/{endpoint_group_id}/endpoints - - PUT /OS-EP-FILTER/endpoint_groups/{endpoint_group}/projects/ - {project_id} - GET /OS-EP-FILTER/endpoint_groups/{endpoint_group}/projects/ - {project_id} - HEAD /OS-EP-FILTER/endpoint_groups/{endpoint_group}/projects/ - {project_id} - DELETE /OS-EP-FILTER/endpoint_groups/{endpoint_group}/projects/ - {project_id} - - """ - - PATH_PREFIX = '/OS-EP-FILTER' - PATH_PROJECT_ENDPOINT = '/projects/{project_id}/endpoints/{endpoint_id}' - PATH_ENDPOINT_GROUPS = '/endpoint_groups/{endpoint_group_id}' - PATH_ENDPOINT_GROUP_PROJECTS = PATH_ENDPOINT_GROUPS + ( - '/projects/{project_id}') - - def append_v3_routers(self, mapper, routers): - regions_controller = controllers.RegionV3() - endpoint_filter_controller = controllers.EndpointFilterV3Controller() - endpoint_group_controller = controllers.EndpointGroupV3Controller() - project_endpoint_group_controller = ( - controllers.ProjectEndpointGroupV3Controller()) - routers.append(router.Router(regions_controller, - 'regions', 'region', - resource_descriptions=self.v3_resources)) - - # Need to add an additional route to support PUT 
/regions/{region_id} - mapper.connect( - '/regions/{region_id}', - controller=regions_controller, - action='create_region_with_id', - conditions=dict(method=['PUT'])) - - routers.append(router.Router(controllers.ServiceV3(), - 'services', 'service', - resource_descriptions=self.v3_resources)) - routers.append(router.Router(controllers.EndpointV3(), - 'endpoints', 'endpoint', - resource_descriptions=self.v3_resources)) - - self._add_resource( - mapper, endpoint_filter_controller, - path=self.PATH_PREFIX + '/endpoints/{endpoint_id}/projects', - get_action='list_projects_for_endpoint', - rel=build_resource_relation(resource_name='endpoint_projects'), - path_vars={ - 'endpoint_id': json_home.Parameters.ENDPOINT_ID, - }) - self._add_resource( - mapper, endpoint_filter_controller, - path=self.PATH_PREFIX + self.PATH_PROJECT_ENDPOINT, - get_head_action='check_endpoint_in_project', - put_action='add_endpoint_to_project', - delete_action='remove_endpoint_from_project', - rel=build_resource_relation(resource_name='project_endpoint'), - path_vars={ - 'endpoint_id': json_home.Parameters.ENDPOINT_ID, - 'project_id': json_home.Parameters.PROJECT_ID, - }) - self._add_resource( - mapper, endpoint_filter_controller, - path=self.PATH_PREFIX + '/projects/{project_id}/endpoints', - get_action='list_endpoints_for_project', - rel=build_resource_relation(resource_name='project_endpoints'), - path_vars={ - 'project_id': json_home.Parameters.PROJECT_ID, - }) - self._add_resource( - mapper, endpoint_group_controller, - path=self.PATH_PREFIX + '/projects/{project_id}/endpoint_groups', - get_action='list_endpoint_groups_for_project', - rel=build_resource_relation( - resource_name='project_endpoint_groups'), - path_vars={ - 'project_id': json_home.Parameters.PROJECT_ID, - }) - self._add_resource( - mapper, endpoint_group_controller, - path=self.PATH_PREFIX + '/endpoint_groups', - get_action='list_endpoint_groups', - post_action='create_endpoint_group', - 
rel=build_resource_relation(resource_name='endpoint_groups')) - self._add_resource( - mapper, endpoint_group_controller, - path=self.PATH_PREFIX + self.PATH_ENDPOINT_GROUPS, - get_head_action='get_endpoint_group', - patch_action='update_endpoint_group', - delete_action='delete_endpoint_group', - rel=build_resource_relation(resource_name='endpoint_group'), - path_vars={ - 'endpoint_group_id': ENDPOINT_GROUP_PARAMETER_RELATION - }) - self._add_resource( - mapper, project_endpoint_group_controller, - path=self.PATH_PREFIX + self.PATH_ENDPOINT_GROUP_PROJECTS, - get_head_action='get_endpoint_group_in_project', - put_action='add_endpoint_group_to_project', - delete_action='remove_endpoint_group_from_project', - rel=build_resource_relation( - resource_name='endpoint_group_to_project_association'), - path_vars={ - 'project_id': json_home.Parameters.PROJECT_ID, - 'endpoint_group_id': ENDPOINT_GROUP_PARAMETER_RELATION - }) - self._add_resource( - mapper, endpoint_group_controller, - path=self.PATH_PREFIX + self.PATH_ENDPOINT_GROUPS + ( - '/projects'), - get_action='list_projects_associated_with_endpoint_group', - rel=build_resource_relation( - resource_name='projects_associated_with_endpoint_group'), - path_vars={ - 'endpoint_group_id': ENDPOINT_GROUP_PARAMETER_RELATION - }) - self._add_resource( - mapper, endpoint_group_controller, - path=self.PATH_PREFIX + self.PATH_ENDPOINT_GROUPS + ( - '/endpoints'), - get_action='list_endpoints_associated_with_endpoint_group', - rel=build_resource_relation( - resource_name='endpoints_in_endpoint_group'), - path_vars={ - 'endpoint_group_id': ENDPOINT_GROUP_PARAMETER_RELATION - }) diff --git a/keystone-moon/keystone/catalog/schema.py b/keystone-moon/keystone/catalog/schema.py deleted file mode 100644 index b9643131..00000000 --- a/keystone-moon/keystone/catalog/schema.py +++ /dev/null @@ -1,119 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.common import validation -from keystone.common.validation import parameter_types - - -_region_properties = { - 'description': { - 'type': ['string', 'null'], - }, - # NOTE(lbragstad): Regions use ID differently. The user can specify the ID - # or it will be generated automatically. - 'id': { - 'type': 'string' - }, - 'parent_region_id': { - 'type': ['string', 'null'] - } -} - -region_create = { - 'type': 'object', - 'properties': _region_properties, - 'additionalProperties': True - # NOTE(lbragstad): No parameters are required for creating regions. -} - -region_update = { - 'type': 'object', - 'properties': _region_properties, - 'minProperties': 1, - 'additionalProperties': True -} - -_service_properties = { - 'enabled': parameter_types.boolean, - 'name': parameter_types.name, - 'type': { - 'type': 'string', - 'minLength': 1, - 'maxLength': 255 - } -} - -service_create = { - 'type': 'object', - 'properties': _service_properties, - 'required': ['type'], - 'additionalProperties': True, -} - -service_update = { - 'type': 'object', - 'properties': _service_properties, - 'minProperties': 1, - 'additionalProperties': True -} - -_endpoint_properties = { - 'enabled': parameter_types.boolean, - 'interface': { - 'type': 'string', - 'enum': ['admin', 'internal', 'public'] - }, - 'region_id': { - 'type': 'string' - }, - 'region': { - 'type': 'string' - }, - 'service_id': { - 'type': 'string' - }, - 'url': parameter_types.url -} - -endpoint_create = { - 'type': 'object', - 'properties': _endpoint_properties, - 'required': ['interface', 'service_id', 
'url'], - 'additionalProperties': True -} - -endpoint_update = { - 'type': 'object', - 'properties': _endpoint_properties, - 'minProperties': 1, - 'additionalProperties': True -} - -_endpoint_group_properties = { - 'description': validation.nullable(parameter_types.description), - 'filters': { - 'type': 'object' - }, - 'name': parameter_types.name -} - -endpoint_group_create = { - 'type': 'object', - 'properties': _endpoint_group_properties, - 'required': ['name', 'filters'] -} - -endpoint_group_update = { - 'type': 'object', - 'properties': _endpoint_group_properties, - 'minProperties': 1 -} diff --git a/keystone-moon/keystone/clean.py b/keystone-moon/keystone/clean.py deleted file mode 100644 index 38564e0b..00000000 --- a/keystone-moon/keystone/clean.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import six - -from keystone import exception -from keystone.i18n import _ - - -def check_length(property_name, value, min_length=1, max_length=64): - if len(value) < min_length: - if min_length == 1: - msg = _("%s cannot be empty.") % property_name - else: - msg = (_("%(property_name)s cannot be less than " - "%(min_length)s characters.") % dict( - property_name=property_name, min_length=min_length)) - raise exception.ValidationError(msg) - if len(value) > max_length: - msg = (_("%(property_name)s should not be greater than " - "%(max_length)s characters.") % dict( - property_name=property_name, max_length=max_length)) - - raise exception.ValidationError(msg) - - -def check_type(property_name, value, expected_type, display_expected_type): - if not isinstance(value, expected_type): - msg = (_("%(property_name)s is not a " - "%(display_expected_type)s") % dict( - property_name=property_name, - display_expected_type=display_expected_type)) - raise exception.ValidationError(msg) - - -def check_enabled(property_name, enabled): - # Allow int and it's subclass bool - check_type('%s enabled' % property_name, enabled, int, 'boolean') - return bool(enabled) - - -def check_name(property_name, name, min_length=1, max_length=64): - check_type('%s name' % property_name, name, six.string_types, - 'str or unicode') - name = name.strip() - check_length('%s name' % property_name, name, - min_length=min_length, max_length=max_length) - return name - - -def domain_name(name): - return check_name('Domain', name) - - -def domain_enabled(enabled): - return check_enabled('Domain', enabled) - - -def project_name(name): - return check_name('Project', name) - - -def project_enabled(enabled): - return check_enabled('Project', enabled) - - -def user_name(name): - return check_name('User', name, max_length=255) - - -def user_enabled(enabled): - return check_enabled('User', enabled) - - -def group_name(name): - return check_name('Group', name) diff --git a/keystone-moon/keystone/cli.py 
b/keystone-moon/keystone/cli.py deleted file mode 100644 index b5fff136..00000000 --- a/keystone-moon/keystone/cli.py +++ /dev/null @@ -1,596 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from __future__ import absolute_import -from __future__ import print_function - -import os - -from oslo_config import cfg -from oslo_log import log -import pbr.version - -from keystone import assignment -from keystone.common import driver_hints -from keystone.common import openssl -from keystone.common import sql -from keystone.common.sql import migration_helpers -from keystone.common import utils -from keystone import config -from keystone import exception -from keystone.i18n import _, _LW -from keystone import identity -from keystone import resource -from keystone import token -from keystone.token.providers.fernet import utils as fernet - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -class BaseApp(object): - - name = None - - @classmethod - def add_argument_parser(cls, subparsers): - parser = subparsers.add_parser(cls.name, help=cls.__doc__) - parser.set_defaults(cmd_class=cls) - return parser - - -class DbSync(BaseApp): - """Sync the database.""" - - name = 'db_sync' - - @classmethod - def add_argument_parser(cls, subparsers): - parser = super(DbSync, cls).add_argument_parser(subparsers) - parser.add_argument('version', default=None, nargs='?', - help=('Migrate the database up to a specified ' - 'version. 
If not provided, db_sync will ' - 'migrate the database to the latest known ' - 'version.')) - parser.add_argument('--extension', default=None, - help=('Migrate the database for the specified ' - 'extension. If not provided, db_sync will ' - 'migrate the common repository.')) - - return parser - - @staticmethod - def main(): - version = CONF.command.version - extension = CONF.command.extension - migration_helpers.sync_database_to_version(extension, version) - - -class DbVersion(BaseApp): - """Print the current migration version of the database.""" - - name = 'db_version' - - @classmethod - def add_argument_parser(cls, subparsers): - parser = super(DbVersion, cls).add_argument_parser(subparsers) - parser.add_argument('--extension', default=None, - help=('Print the migration version of the ' - 'database for the specified extension. If ' - 'not provided, print it for the common ' - 'repository.')) - - @staticmethod - def main(): - extension = CONF.command.extension - migration_helpers.print_db_version(extension) - - -class BasePermissionsSetup(BaseApp): - """Common user/group setup for file permissions.""" - - @classmethod - def add_argument_parser(cls, subparsers): - parser = super(BasePermissionsSetup, - cls).add_argument_parser(subparsers) - running_as_root = (os.geteuid() == 0) - parser.add_argument('--keystone-user', required=running_as_root) - parser.add_argument('--keystone-group', required=running_as_root) - return parser - - @staticmethod - def get_user_group(): - keystone_user_id = None - keystone_group_id = None - - try: - a = CONF.command.keystone_user - if a: - keystone_user_id = utils.get_unix_user(a)[0] - except KeyError: - raise ValueError("Unknown user '%s' in --keystone-user" % a) - - try: - a = CONF.command.keystone_group - if a: - keystone_group_id = utils.get_unix_group(a)[0] - except KeyError: - raise ValueError("Unknown group '%s' in --keystone-group" % a) - - return keystone_user_id, keystone_group_id - - -class 
BaseCertificateSetup(BasePermissionsSetup): - """Provides common options for certificate setup.""" - - @classmethod - def add_argument_parser(cls, subparsers): - parser = super(BaseCertificateSetup, - cls).add_argument_parser(subparsers) - parser.add_argument('--rebuild', default=False, action='store_true', - help=('Rebuild certificate files: erase previous ' - 'files and regenerate them.')) - return parser - - -class PKISetup(BaseCertificateSetup): - """Set up Key pairs and certificates for token signing and verification. - - This is NOT intended for production use, see Keystone Configuration - documentation for details. - """ - - name = 'pki_setup' - - @classmethod - def main(cls): - LOG.warn(_LW('keystone-manage pki_setup is not recommended for ' - 'production use.')) - keystone_user_id, keystone_group_id = cls.get_user_group() - conf_pki = openssl.ConfigurePKI(keystone_user_id, keystone_group_id, - rebuild=CONF.command.rebuild) - conf_pki.run() - - -class SSLSetup(BaseCertificateSetup): - """Create key pairs and certificates for HTTPS connections. - - This is NOT intended for production use, see Keystone Configuration - documentation for details. - """ - - name = 'ssl_setup' - - @classmethod - def main(cls): - LOG.warn(_LW('keystone-manage ssl_setup is not recommended for ' - 'production use.')) - keystone_user_id, keystone_group_id = cls.get_user_group() - conf_ssl = openssl.ConfigureSSL(keystone_user_id, keystone_group_id, - rebuild=CONF.command.rebuild) - conf_ssl.run() - - -class FernetSetup(BasePermissionsSetup): - """Setup a key repository for Fernet tokens. - - This also creates a primary key used for both creating and validating - Keystone Lightweight tokens. To improve security, you should rotate your - keys (using keystone-manage fernet_rotate, for example). 
- - """ - - name = 'fernet_setup' - - @classmethod - def main(cls): - keystone_user_id, keystone_group_id = cls.get_user_group() - fernet.create_key_directory(keystone_user_id, keystone_group_id) - if fernet.validate_key_repository(): - fernet.initialize_key_repository( - keystone_user_id, keystone_group_id) - - -class FernetRotate(BasePermissionsSetup): - """Rotate Fernet encryption keys. - - This assumes you have already run keystone-manage fernet_setup. - - A new primary key is placed into rotation, which is used for new tokens. - The old primary key is demoted to secondary, which can then still be used - for validating tokens. Excess secondary keys (beyond [fernet_tokens] - max_active_keys) are revoked. Revoked keys are permanently deleted. A new - staged key will be created and used to validate tokens. The next time key - rotation takes place, the staged key will be put into rotation as the - primary key. - - Rotating keys too frequently, or with [fernet_tokens] max_active_keys set - too low, will cause tokens to become invalid prior to their expiration. 
- - """ - - name = 'fernet_rotate' - - @classmethod - def main(cls): - keystone_user_id, keystone_group_id = cls.get_user_group() - if fernet.validate_key_repository(): - fernet.rotate_keys(keystone_user_id, keystone_group_id) - - -class TokenFlush(BaseApp): - """Flush expired tokens from the backend.""" - - name = 'token_flush' - - @classmethod - def main(cls): - token_manager = token.persistence.PersistenceManager() - token_manager.driver.flush_expired_tokens() - - -class MappingPurge(BaseApp): - """Purge the mapping table.""" - - name = 'mapping_purge' - - @classmethod - def add_argument_parser(cls, subparsers): - parser = super(MappingPurge, cls).add_argument_parser(subparsers) - parser.add_argument('--all', default=False, action='store_true', - help=('Purge all mappings.')) - parser.add_argument('--domain-name', default=None, - help=('Purge any mappings for the domain ' - 'specified.')) - parser.add_argument('--public-id', default=None, - help=('Purge the mapping for the Public ID ' - 'specified.')) - parser.add_argument('--local-id', default=None, - help=('Purge the mappings for the Local ID ' - 'specified.')) - parser.add_argument('--type', default=None, choices=['user', 'group'], - help=('Purge any mappings for the type ' - 'specified.')) - return parser - - @staticmethod - def main(): - def validate_options(): - # NOTE(henry-nash); It would be nice to use the argparse automated - # checking for this validation, but the only way I can see doing - # that is to make the default (i.e. if no optional parameters - # are specified) to purge all mappings - and that sounds too - # dangerous as a default. So we use it in a slightly - # unconventional way, where all parameters are optional, but you - # must specify at least one. 
- if (CONF.command.all is False and - CONF.command.domain_name is None and - CONF.command.public_id is None and - CONF.command.local_id is None and - CONF.command.type is None): - raise ValueError(_('At least one option must be provided')) - - if (CONF.command.all is True and - (CONF.command.domain_name is not None or - CONF.command.public_id is not None or - CONF.command.local_id is not None or - CONF.command.type is not None)): - raise ValueError(_('--all option cannot be mixed with ' - 'other options')) - - def get_domain_id(name): - try: - identity.Manager() - # init assignment manager to avoid KeyError in resource.core - assignment.Manager() - resource_manager = resource.Manager() - return resource_manager.driver.get_domain_by_name(name)['id'] - except KeyError: - raise ValueError(_("Unknown domain '%(name)s' specified by " - "--domain-name") % {'name': name}) - - validate_options() - # Now that we have validated the options, we know that at least one - # option has been specified, and if it was the --all option then this - # was the only option specified. - # - # The mapping dict is used to filter which mappings are purged, so - # leaving it empty means purge them all - mapping = {} - if CONF.command.domain_name is not None: - mapping['domain_id'] = get_domain_id(CONF.command.domain_name) - if CONF.command.public_id is not None: - mapping['public_id'] = CONF.command.public_id - if CONF.command.local_id is not None: - mapping['local_id'] = CONF.command.local_id - if CONF.command.type is not None: - mapping['type'] = CONF.command.type - - mapping_manager = identity.MappingManager() - mapping_manager.driver.purge_mappings(mapping) - - -DOMAIN_CONF_FHEAD = 'keystone.' -DOMAIN_CONF_FTAIL = '.conf' - - -class DomainConfigUploadFiles(object): - - def __init__(self): - super(DomainConfigUploadFiles, self).__init__() - self.load_backends() - - def load_backends(self): - """Load the backends needed for uploading domain configs. 
- - We only need the resource and domain_config managers, but there are - some dependencies which mean we have to load the assignment and - identity managers as well. - - The order of loading the backends is important, since the resource - manager depends on the assignment manager, which in turn depends on - the identity manager. - - """ - identity.Manager() - assignment.Manager() - self.resource_manager = resource.Manager() - self.domain_config_manager = resource.DomainConfigManager() - - def valid_options(self): - """Validate the options, returning True if they are indeed valid. - - It would be nice to use the argparse automated checking for this - validation, but the only way I can see doing that is to make the - default (i.e. if no optional parameters are specified) to upload - all configuration files - and that sounds too dangerous as a - default. So we use it in a slightly unconventional way, where all - parameters are optional, but you must specify at least one. - - """ - if (CONF.command.all is False and - CONF.command.domain_name is None): - print(_('At least one option must be provided, use either ' - '--all or --domain-name')) - raise ValueError - - if (CONF.command.all is True and - CONF.command.domain_name is not None): - print(_('The --all option cannot be used with ' - 'the --domain-name option')) - raise ValueError - - def upload_config_to_database(self, file_name, domain_name): - """Upload a single config file to the database. 
- - :param file_name: the file containing the config options - :param domain_name: the domain name - - :raises: ValueError: the domain does not exist or already has domain - specific configurations defined - :raises: Exceptions from oslo config: there is an issue with options - defined in the config file or its - format - - The caller of this method should catch the errors raised and handle - appropriately in order that the best UX experience can be provided for - both the case of when a user has asked for a specific config file to - be uploaded, as well as all config files in a directory. - - """ - try: - domain_ref = ( - self.resource_manager.driver.get_domain_by_name(domain_name)) - except exception.DomainNotFound: - print(_('Invalid domain name: %(domain)s found in config file ' - 'name: %(file)s - ignoring this file.') % { - 'domain': domain_name, - 'file': file_name}) - raise ValueError - - if self.domain_config_manager.get_config_with_sensitive_info( - domain_ref['id']): - print(_('Domain: %(domain)s already has a configuration ' - 'defined - ignoring file: %(file)s.') % { - 'domain': domain_name, - 'file': file_name}) - raise ValueError - - sections = {} - try: - parser = cfg.ConfigParser(file_name, sections) - parser.parse() - except Exception: - # We explicitly don't try and differentiate the error cases, in - # order to keep the code in this tool more robust as oslo.config - # changes. - print(_('Error parsing configuration file for domain: %(domain)s, ' - 'file: %(file)s.') % { - 'domain': domain_name, - 'file': file_name}) - raise - - for group in sections: - for option in sections[group]: - sections[group][option] = sections[group][option][0] - self.domain_config_manager.create_config(domain_ref['id'], sections) - - def upload_configs_to_database(self, file_name, domain_name): - """Upload configs from file and load into database. - - This method will be called repeatedly for all the config files in the - config directory. 
To provide a better UX, we differentiate the error - handling in this case (versus when the user has asked for a single - config file to be uploaded). - - """ - try: - self.upload_config_to_database(file_name, domain_name) - except ValueError: - # We've already given all the info we can in a message, so carry - # on to the next one - pass - except Exception: - # Some other error occurred relating to this specific config file - # or domain. Since we are trying to upload all the config files, - # we'll continue and hide this exception. However, we tell the - # user how to get more info about this error by re-running with - # just the domain at fault. When we run in single-domain mode we - # will NOT hide the exception. - print(_('To get a more detailed information on this error, re-run ' - 'this command for the specific domain, i.e.: ' - 'keystone-manage domain_config_upload --domain-name %s') % - domain_name) - pass - - def read_domain_configs_from_files(self): - """Read configs from file(s) and load into database. - - The command line parameters have already been parsed and the CONF - command option will have been set. It is either set to the name of an - explicit domain, or it's None to indicate that we want all domain - config files. 
- - """ - domain_name = CONF.command.domain_name - conf_dir = CONF.identity.domain_config_dir - if not os.path.exists(conf_dir): - print(_('Unable to locate domain config directory: %s') % conf_dir) - raise ValueError - - if domain_name: - # Request is to upload the configs for just one domain - fname = DOMAIN_CONF_FHEAD + domain_name + DOMAIN_CONF_FTAIL - self.upload_config_to_database( - os.path.join(conf_dir, fname), domain_name) - return - - # Request is to transfer all config files, so let's read all the - # files in the config directory, and transfer those that match the - # filename pattern of 'keystone..conf' - for r, d, f in os.walk(conf_dir): - for fname in f: - if (fname.startswith(DOMAIN_CONF_FHEAD) and - fname.endswith(DOMAIN_CONF_FTAIL)): - if fname.count('.') >= 2: - self.upload_configs_to_database( - os.path.join(r, fname), - fname[len(DOMAIN_CONF_FHEAD): - -len(DOMAIN_CONF_FTAIL)]) - else: - LOG.warn(_LW('Ignoring file (%s) while scanning ' - 'domain config directory'), fname) - - def run(self): - # First off, let's just check we can talk to the domain database - try: - self.resource_manager.driver.list_domains(driver_hints.Hints()) - except Exception: - # It is likely that there is some SQL or other backend error - # related to set up - print(_('Unable to access the keystone database, please check it ' - 'is configured correctly.')) - raise - - try: - self.valid_options() - self.read_domain_configs_from_files() - except ValueError: - # We will already have printed out a nice message, so indicate - # to caller the non-success error code to be used. 
- return 1 - - -class DomainConfigUpload(BaseApp): - """Upload the domain specific configuration files to the database.""" - - name = 'domain_config_upload' - - @classmethod - def add_argument_parser(cls, subparsers): - parser = super(DomainConfigUpload, cls).add_argument_parser(subparsers) - parser.add_argument('--all', default=False, action='store_true', - help='Upload contents of all domain specific ' - 'configuration files. Either use this option ' - 'or use the --domain-name option to choose a ' - 'specific domain.') - parser.add_argument('--domain-name', default=None, - help='Upload contents of the specific ' - 'configuration file for the given domain. ' - 'Either use this option or use the --all ' - 'option to upload contents for all domains.') - return parser - - @staticmethod - def main(): - dcu = DomainConfigUploadFiles() - status = dcu.run() - if status is not None: - exit(status) - - -class SamlIdentityProviderMetadata(BaseApp): - """Generate Identity Provider metadata.""" - - name = 'saml_idp_metadata' - - @staticmethod - def main(): - # NOTE(marek-denis): Since federation is currently an extension import - # corresponding modules only when they are really going to be used. 
- from keystone.contrib.federation import idp - metadata = idp.MetadataGenerator().generate_metadata() - print(metadata.to_string()) - - -CMDS = [ - DbSync, - DbVersion, - DomainConfigUpload, - FernetRotate, - FernetSetup, - MappingPurge, - PKISetup, - SamlIdentityProviderMetadata, - SSLSetup, - TokenFlush, -] - - -def add_command_parsers(subparsers): - for cmd in CMDS: - cmd.add_argument_parser(subparsers) - - -command_opt = cfg.SubCommandOpt('command', - title='Commands', - help='Available commands', - handler=add_command_parsers) - - -def main(argv=None, config_files=None): - CONF.register_cli_opt(command_opt) - - config.configure() - sql.initialize() - config.set_default_for_default_log_levels() - - CONF(args=argv[1:], - project='keystone', - version=pbr.version.VersionInfo('keystone').version_string(), - usage='%(prog)s [' + '|'.join([cmd.name for cmd in CMDS]) + ']', - default_config_files=config_files) - config.setup_logging() - CONF.command.cmd_class.main() diff --git a/keystone-moon/keystone/cmd/__init__.py b/keystone-moon/keystone/cmd/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/cmd/all.py b/keystone-moon/keystone/cmd/all.py deleted file mode 100644 index c583accd..00000000 --- a/keystone-moon/keystone/cmd/all.py +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import os -import sys - - -# If ../../keystone/__init__.py exists, add ../../ to Python search path, so -# that it will override what happens to be installed in -# /usr/(local/)lib/python... -possible_topdir = os.path.normpath(os.path.join(os.path.abspath(__file__), - os.pardir, - os.pardir, - os.pardir)) -if os.path.exists(os.path.join(possible_topdir, - 'keystone', - '__init__.py')): - sys.path.insert(0, possible_topdir) - - -from keystone.server import eventlet as eventlet_server - - -# entry point. -def main(): - eventlet_server.run(possible_topdir) diff --git a/keystone-moon/keystone/cmd/cli.py b/keystone-moon/keystone/cmd/cli.py deleted file mode 100644 index f95007e0..00000000 --- a/keystone-moon/keystone/cmd/cli.py +++ /dev/null @@ -1,992 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from __future__ import absolute_import -from __future__ import print_function - -import os -import sys -import uuid - -from oslo_config import cfg -from oslo_log import log -from oslo_log import versionutils -from oslo_serialization import jsonutils -import pbr.version - -from keystone.common import config -from keystone.common import driver_hints -from keystone.common import openssl -from keystone.common import sql -from keystone.common.sql import migration_helpers -from keystone.common import utils -from keystone import exception -from keystone.federation import idp -from keystone.federation import utils as mapping_engine -from keystone.i18n import _, _LW, _LI -from keystone.server import backends -from keystone import token - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -class BaseApp(object): - - name = None - - @classmethod - def add_argument_parser(cls, subparsers): - parser = subparsers.add_parser(cls.name, help=cls.__doc__) - parser.set_defaults(cmd_class=cls) - return parser - - -class BootStrap(BaseApp): - """Perform the basic bootstrap process""" - - name = "bootstrap" - - def __init__(self): - self.load_backends() - self.project_id = uuid.uuid4().hex - self.role_id = uuid.uuid4().hex - self.service_id = None - self.service_name = None - self.username = None - self.project_name = None - self.role_name = None - self.password = None - self.public_url = None - self.internal_url = None - self.admin_url = None - self.region_id = None - self.endpoints = {} - - @classmethod - def add_argument_parser(cls, subparsers): - parser = super(BootStrap, cls).add_argument_parser(subparsers) - parser.add_argument('--bootstrap-username', default='admin', - metavar='OS_BOOTSTRAP_USERNAME', - help=('The username of the initial keystone ' - 'user during bootstrap process.')) - # NOTE(morganfainberg): See below for ENV Variable that can be used - # in lieu of the command-line arguments. 
- parser.add_argument('--bootstrap-password', default=None, - metavar='OS_BOOTSTRAP_PASSWORD', - help='The bootstrap user password') - parser.add_argument('--bootstrap-project-name', default='admin', - metavar='OS_BOOTSTRAP_PROJECT_NAME', - help=('The initial project created during the ' - 'keystone bootstrap process.')) - parser.add_argument('--bootstrap-role-name', default='admin', - metavar='OS_BOOTSTRAP_ROLE_NAME', - help=('The initial role-name created during the ' - 'keystone bootstrap process.')) - parser.add_argument('--bootstrap-service-name', default='keystone', - metavar='OS_BOOTSTRAP_SERVICE_NAME', - help=('The initial name for the initial identity ' - 'service created during the keystone ' - 'bootstrap process.')) - parser.add_argument('--bootstrap-admin-url', - metavar='OS_BOOTSTRAP_ADMIN_URL', - help=('The initial identity admin url created ' - 'during the keystone bootstrap process. ' - 'e.g. http://127.0.0.1:35357/v2.0')) - parser.add_argument('--bootstrap-public-url', - metavar='OS_BOOTSTRAP_PUBLIC_URL', - help=('The initial identity public url created ' - 'during the keystone bootstrap process. ' - 'e.g. http://127.0.0.1:5000/v2.0')) - parser.add_argument('--bootstrap-internal-url', - metavar='OS_BOOTSTRAP_INTERNAL_URL', - help=('The initial identity internal url created ' - 'during the keystone bootstrap process. ' - 'e.g. 
http://127.0.0.1:5000/v2.0')) - parser.add_argument('--bootstrap-region-id', - metavar='OS_BOOTSTRAP_REGION_ID', - help=('The initial region_id endpoints will be ' - 'placed in during the keystone bootstrap ' - 'process.')) - return parser - - def load_backends(self): - drivers = backends.load_backends() - self.resource_manager = drivers['resource_api'] - self.identity_manager = drivers['identity_api'] - self.assignment_manager = drivers['assignment_api'] - self.catalog_manager = drivers['catalog_api'] - self.role_manager = drivers['role_api'] - - def _get_config(self): - self.username = ( - os.environ.get('OS_BOOTSTRAP_USERNAME') or - CONF.command.bootstrap_username) - self.project_name = ( - os.environ.get('OS_BOOTSTRAP_PROJECT_NAME') or - CONF.command.bootstrap_project_name) - self.role_name = ( - os.environ.get('OS_BOOTSTRAP_ROLE_NAME') or - CONF.command.bootstrap_role_name) - self.password = ( - os.environ.get('OS_BOOTSTRAP_PASSWORD') or - CONF.command.bootstrap_password) - self.service_name = ( - os.environ.get('OS_BOOTSTRAP_SERVICE_NAME') or - CONF.command.bootstrap_service_name) - self.admin_url = ( - os.environ.get('OS_BOOTSTRAP_ADMIN_URL') or - CONF.command.bootstrap_admin_url) - self.public_url = ( - os.environ.get('OS_BOOTSTRAP_PUBLIC_URL') or - CONF.command.bootstrap_public_url) - self.internal_url = ( - os.environ.get('OS_BOOTSTRAP_INTERNAL_URL') or - CONF.command.bootstrap_internal_url) - self.region_id = ( - os.environ.get('OS_BOOTSTRAP_REGION_ID') or - CONF.command.bootstrap_region_id) - - def do_bootstrap(self): - """Perform the bootstrap actions. - - Create bootstrap user, project, and role so that CMS, humans, or - scripts can continue to perform initial setup (domains, projects, - services, endpoints, etc) of Keystone when standing up a new - deployment. 
- """ - self._get_config() - - if self.password is None: - print(_('Either --bootstrap-password argument or ' - 'OS_BOOTSTRAP_PASSWORD must be set.')) - raise ValueError - - # NOTE(morganfainberg): Ensure the default domain is in-fact created - default_domain = { - 'id': CONF.identity.default_domain_id, - 'name': 'Default', - 'enabled': True, - 'description': 'The default domain' - } - try: - self.resource_manager.create_domain( - domain_id=default_domain['id'], - domain=default_domain) - LOG.info(_LI('Created domain %s'), default_domain['id']) - except exception.Conflict: - # NOTE(morganfainberg): Domain already exists, continue on. - LOG.info(_LI('Domain %s already exists, skipping creation.'), - default_domain['id']) - - try: - self.resource_manager.create_project( - project_id=self.project_id, - project={'enabled': True, - 'id': self.project_id, - 'domain_id': default_domain['id'], - 'description': 'Bootstrap project for initializing ' - 'the cloud.', - 'name': self.project_name} - ) - LOG.info(_LI('Created project %s'), self.project_name) - except exception.Conflict: - LOG.info(_LI('Project %s already exists, skipping creation.'), - self.project_name) - project = self.resource_manager.get_project_by_name( - self.project_name, default_domain['id']) - self.project_id = project['id'] - - # NOTE(morganfainberg): Do not create the user if it already exists. - try: - user = self.identity_manager.get_user_by_name(self.username, - default_domain['id']) - LOG.info(_LI('User %s already exists, skipping creation.'), - self.username) - except exception.UserNotFound: - user = self.identity_manager.create_user( - user_ref={'name': self.username, - 'enabled': True, - 'domain_id': default_domain['id'], - 'password': self.password - } - ) - LOG.info(_LI('Created user %s'), self.username) - - # NOTE(morganfainberg): Do not create the role if it already exists. 
- try: - self.role_manager.create_role( - role_id=self.role_id, - role={'name': self.role_name, - 'id': self.role_id}, - ) - LOG.info(_LI('Created Role %s'), self.role_name) - except exception.Conflict: - LOG.info(_LI('Role %s exists, skipping creation.'), self.role_name) - # NOTE(davechen): There is no backend method to get the role - # by name, so build the hints to list the roles and filter by - # name instead. - hints = driver_hints.Hints() - hints.add_filter('name', self.role_name) - role = self.role_manager.list_roles(hints) - self.role_id = role[0]['id'] - - # NOTE(morganfainberg): Handle the case that the role assignment has - # already occurred. - try: - self.assignment_manager.add_role_to_user_and_project( - user_id=user['id'], - tenant_id=self.project_id, - role_id=self.role_id - ) - LOG.info(_LI('Granted %(role)s on %(project)s to user' - ' %(username)s.'), - {'role': self.role_name, - 'project': self.project_name, - 'username': self.username}) - except exception.Conflict: - LOG.info(_LI('User %(username)s already has %(role)s on ' - '%(project)s.'), - {'username': self.username, - 'role': self.role_name, - 'project': self.project_name}) - - if self.region_id: - try: - self.catalog_manager.create_region( - region_ref={'id': self.region_id} - ) - LOG.info(_LI('Created Region %s'), self.region_id) - except exception.Conflict: - LOG.info(_LI('Region %s exists, skipping creation.'), - self.region_id) - - if self.public_url or self.admin_url or self.internal_url: - hints = driver_hints.Hints() - hints.add_filter('type', 'identity') - services = self.catalog_manager.list_services(hints) - - if services: - service_ref = services[0] - - hints = driver_hints.Hints() - hints.add_filter('service_id', service_ref['id']) - if self.region_id: - hints.add_filter('region_id', self.region_id) - - endpoints = self.catalog_manager.list_endpoints(hints) - else: - service_ref = {'id': uuid.uuid4().hex, - 'name': self.service_name, - 'type': 'identity', - 'enabled': True} - 
- self.catalog_manager.create_service( - service_id=service_ref['id'], - service_ref=service_ref) - - endpoints = [] - - self.service_id = service_ref['id'] - - available_interfaces = {e['interface']: e for e in endpoints} - expected_endpoints = {'public': self.public_url, - 'internal': self.internal_url, - 'admin': self.admin_url} - - for interface, url in expected_endpoints.items(): - if not url: - # not specified to bootstrap command - continue - - try: - endpoint_ref = available_interfaces[interface] - except KeyError: - endpoint_ref = {'id': uuid.uuid4().hex, - 'interface': interface, - 'url': url, - 'service_id': self.service_id, - 'enabled': True} - - if self.region_id: - endpoint_ref['region_id'] = self.region_id - - self.catalog_manager.create_endpoint( - endpoint_id=endpoint_ref['id'], - endpoint_ref=endpoint_ref) - - LOG.info(_LI('Created %(interface)s endpoint %(url)s'), - {'interface': interface, 'url': url}) - else: - # NOTE(jamielennox): electing not to update existing - # endpoints here. There may be call to do so in future. - LOG.info(_LI('Skipping %s endpoint as already created'), - interface) - - self.endpoints[interface] = endpoint_ref['id'] - - @classmethod - def main(cls): - klass = cls() - klass.do_bootstrap() - - -class DbSync(BaseApp): - """Sync the database.""" - - name = 'db_sync' - - @classmethod - def add_argument_parser(cls, subparsers): - parser = super(DbSync, cls).add_argument_parser(subparsers) - parser.add_argument('version', default=None, nargs='?', - help=('Migrate the database up to a specified ' - 'version. If not provided, db_sync will ' - 'migrate the database to the latest known ' - 'version. Schema downgrades are not ' - 'supported.')) - parser.add_argument('--extension', default=None, - help=('Migrate the database for the specified ' - 'extension. 
If not provided, db_sync will ' - 'migrate the common repository.')) - - return parser - - @staticmethod - def main(): - version = CONF.command.version - extension = CONF.command.extension - migration_helpers.sync_database_to_version(extension, version) - - -class DbVersion(BaseApp): - """Print the current migration version of the database.""" - - name = 'db_version' - - @classmethod - def add_argument_parser(cls, subparsers): - parser = super(DbVersion, cls).add_argument_parser(subparsers) - parser.add_argument('--extension', default=None, - help=('Print the migration version of the ' - 'database for the specified extension. If ' - 'not provided, print it for the common ' - 'repository.')) - - @staticmethod - def main(): - extension = CONF.command.extension - migration_helpers.print_db_version(extension) - - -class BasePermissionsSetup(BaseApp): - """Common user/group setup for file permissions.""" - - @classmethod - def add_argument_parser(cls, subparsers): - parser = super(BasePermissionsSetup, - cls).add_argument_parser(subparsers) - running_as_root = (os.geteuid() == 0) - parser.add_argument('--keystone-user', required=running_as_root) - parser.add_argument('--keystone-group', required=running_as_root) - return parser - - @staticmethod - def get_user_group(): - keystone_user_id = None - keystone_group_id = None - - try: - a = CONF.command.keystone_user - if a: - keystone_user_id = utils.get_unix_user(a)[0] - except KeyError: - raise ValueError("Unknown user '%s' in --keystone-user" % a) - - try: - a = CONF.command.keystone_group - if a: - keystone_group_id = utils.get_unix_group(a)[0] - except KeyError: - raise ValueError("Unknown group '%s' in --keystone-group" % a) - - return keystone_user_id, keystone_group_id - - -class BaseCertificateSetup(BasePermissionsSetup): - """Provides common options for certificate setup.""" - - @classmethod - def add_argument_parser(cls, subparsers): - parser = super(BaseCertificateSetup, - cls).add_argument_parser(subparsers) - 
parser.add_argument('--rebuild', default=False, action='store_true', - help=('Rebuild certificate files: erase previous ' - 'files and regenerate them.')) - return parser - - -class PKISetup(BaseCertificateSetup): - """Set up Key pairs and certificates for token signing and verification. - - This is NOT intended for production use, see Keystone Configuration - documentation for details. As of the Mitaka release, this command has - been DEPRECATED and may be removed in the 'O' release. - """ - - name = 'pki_setup' - - @classmethod - def main(cls): - versionutils.report_deprecated_feature( - LOG, - _LW("keystone-manage pki_setup is deprecated as of Mitaka in " - "favor of not using PKI tokens and may be removed in 'O' " - "release.")) - LOG.warning(_LW('keystone-manage pki_setup is not recommended for ' - 'production use.')) - keystone_user_id, keystone_group_id = cls.get_user_group() - conf_pki = openssl.ConfigurePKI(keystone_user_id, keystone_group_id, - rebuild=CONF.command.rebuild) - conf_pki.run() - - -class SSLSetup(BaseCertificateSetup): - """Create key pairs and certificates for HTTPS connections. - - This is NOT intended for production use, see Keystone Configuration - documentation for details. - """ - - name = 'ssl_setup' - - @classmethod - def main(cls): - LOG.warning(_LW('keystone-manage ssl_setup is not recommended for ' - 'production use.')) - keystone_user_id, keystone_group_id = cls.get_user_group() - conf_ssl = openssl.ConfigureSSL(keystone_user_id, keystone_group_id, - rebuild=CONF.command.rebuild) - conf_ssl.run() - - -class FernetSetup(BasePermissionsSetup): - """Setup a key repository for Fernet tokens. - - This also creates a primary key used for both creating and validating - Fernet tokens. To improve security, you should rotate your keys (using - keystone-manage fernet_rotate, for example). 
- - """ - - name = 'fernet_setup' - - @classmethod - def main(cls): - from keystone.token.providers.fernet import utils as fernet - - keystone_user_id, keystone_group_id = cls.get_user_group() - fernet.create_key_directory(keystone_user_id, keystone_group_id) - if fernet.validate_key_repository(requires_write=True): - fernet.initialize_key_repository( - keystone_user_id, keystone_group_id) - - -class FernetRotate(BasePermissionsSetup): - """Rotate Fernet encryption keys. - - This assumes you have already run keystone-manage fernet_setup. - - A new primary key is placed into rotation, which is used for new tokens. - The old primary key is demoted to secondary, which can then still be used - for validating tokens. Excess secondary keys (beyond [fernet_tokens] - max_active_keys) are revoked. Revoked keys are permanently deleted. A new - staged key will be created and used to validate tokens. The next time key - rotation takes place, the staged key will be put into rotation as the - primary key. - - Rotating keys too frequently, or with [fernet_tokens] max_active_keys set - too low, will cause tokens to become invalid prior to their expiration. 
- - """ - - name = 'fernet_rotate' - - @classmethod - def main(cls): - from keystone.token.providers.fernet import utils as fernet - - keystone_user_id, keystone_group_id = cls.get_user_group() - if fernet.validate_key_repository(requires_write=True): - fernet.rotate_keys(keystone_user_id, keystone_group_id) - - -class TokenFlush(BaseApp): - """Flush expired tokens from the backend.""" - - name = 'token_flush' - - @classmethod - def main(cls): - token_manager = token.persistence.PersistenceManager() - token_manager.flush_expired_tokens() - - -class MappingPurge(BaseApp): - """Purge the mapping table.""" - - name = 'mapping_purge' - - @classmethod - def add_argument_parser(cls, subparsers): - parser = super(MappingPurge, cls).add_argument_parser(subparsers) - parser.add_argument('--all', default=False, action='store_true', - help=('Purge all mappings.')) - parser.add_argument('--domain-name', default=None, - help=('Purge any mappings for the domain ' - 'specified.')) - parser.add_argument('--public-id', default=None, - help=('Purge the mapping for the Public ID ' - 'specified.')) - parser.add_argument('--local-id', default=None, - help=('Purge the mappings for the Local ID ' - 'specified.')) - parser.add_argument('--type', default=None, choices=['user', 'group'], - help=('Purge any mappings for the type ' - 'specified.')) - return parser - - @staticmethod - def main(): - def validate_options(): - # NOTE(henry-nash): It would be nice to use the argparse automated - # checking for this validation, but the only way I can see doing - # that is to make the default (i.e. if no optional parameters - # are specified) to purge all mappings - and that sounds too - # dangerous as a default. So we use it in a slightly - # unconventional way, where all parameters are optional, but you - # must specify at least one. 
- if (CONF.command.all is False and - CONF.command.domain_name is None and - CONF.command.public_id is None and - CONF.command.local_id is None and - CONF.command.type is None): - raise ValueError(_('At least one option must be provided')) - - if (CONF.command.all is True and - (CONF.command.domain_name is not None or - CONF.command.public_id is not None or - CONF.command.local_id is not None or - CONF.command.type is not None)): - raise ValueError(_('--all option cannot be mixed with ' - 'other options')) - - def get_domain_id(name): - try: - return resource_manager.get_domain_by_name(name)['id'] - except KeyError: - raise ValueError(_("Unknown domain '%(name)s' specified by " - "--domain-name") % {'name': name}) - - validate_options() - drivers = backends.load_backends() - resource_manager = drivers['resource_api'] - mapping_manager = drivers['id_mapping_api'] - - # Now that we have validated the options, we know that at least one - # option has been specified, and if it was the --all option then this - # was the only option specified. - # - # The mapping dict is used to filter which mappings are purged, so - # leaving it empty means purge them all - mapping = {} - if CONF.command.domain_name is not None: - mapping['domain_id'] = get_domain_id(CONF.command.domain_name) - if CONF.command.public_id is not None: - mapping['public_id'] = CONF.command.public_id - if CONF.command.local_id is not None: - mapping['local_id'] = CONF.command.local_id - if CONF.command.type is not None: - mapping['type'] = CONF.command.type - - mapping_manager.purge_mappings(mapping) - - -DOMAIN_CONF_FHEAD = 'keystone.' -DOMAIN_CONF_FTAIL = '.conf' - - -def _domain_config_finder(conf_dir): - """Return a generator of all domain config files found in a directory. - - Donmain configs match the filename pattern of - 'keystone..conf'. 
- - :returns: generator yeilding (filename, domain_name) tuples - """ - LOG.info(_LI('Scanning %r for domain config files'), conf_dir) - for r, d, f in os.walk(conf_dir): - for fname in f: - if (fname.startswith(DOMAIN_CONF_FHEAD) and - fname.endswith(DOMAIN_CONF_FTAIL)): - if fname.count('.') >= 2: - domain_name = fname[len(DOMAIN_CONF_FHEAD): - -len(DOMAIN_CONF_FTAIL)] - yield (os.path.join(r, fname), domain_name) - continue - - LOG.warning(_LW('Ignoring file (%s) while scanning ' - 'domain config directory'), fname) - - -class DomainConfigUploadFiles(object): - - def __init__(self, domain_config_finder=_domain_config_finder): - super(DomainConfigUploadFiles, self).__init__() - self.load_backends() - self._domain_config_finder = domain_config_finder - - def load_backends(self): - drivers = backends.load_backends() - self.resource_manager = drivers['resource_api'] - self.domain_config_manager = drivers['domain_config_api'] - - def valid_options(self): - """Validate the options, returning True if they are indeed valid. - - It would be nice to use the argparse automated checking for this - validation, but the only way I can see doing that is to make the - default (i.e. if no optional parameters are specified) to upload - all configuration files - and that sounds too dangerous as a - default. So we use it in a slightly unconventional way, where all - parameters are optional, but you must specify at least one. - - """ - if (CONF.command.all is False and - CONF.command.domain_name is None): - print(_('At least one option must be provided, use either ' - '--all or --domain-name')) - raise ValueError - - if (CONF.command.all is True and - CONF.command.domain_name is not None): - print(_('The --all option cannot be used with ' - 'the --domain-name option')) - raise ValueError - - def upload_config_to_database(self, file_name, domain_name): - """Upload a single config file to the database. 
- - :param file_name: the file containing the config options - :param domain_name: the domain name - - :raises ValueError: the domain does not exist or already has domain - specific configurations defined. - :raises Exceptions from oslo config: there is an issue with options - defined in the config file or its format. - - The caller of this method should catch the errors raised and handle - appropriately in order that the best UX experience can be provided for - both the case of when a user has asked for a specific config file to - be uploaded, as well as all config files in a directory. - - """ - try: - domain_ref = ( - self.resource_manager.get_domain_by_name(domain_name)) - except exception.DomainNotFound: - print(_('Invalid domain name: %(domain)s found in config file ' - 'name: %(file)s - ignoring this file.') % { - 'domain': domain_name, - 'file': file_name}) - raise ValueError - - if self.domain_config_manager.get_config_with_sensitive_info( - domain_ref['id']): - print(_('Domain: %(domain)s already has a configuration ' - 'defined - ignoring file: %(file)s.') % { - 'domain': domain_name, - 'file': file_name}) - raise ValueError - - sections = {} - try: - parser = cfg.ConfigParser(file_name, sections) - parser.parse() - except Exception: - # We explicitly don't try and differentiate the error cases, in - # order to keep the code in this tool more robust as oslo.config - # changes. - print(_('Error parsing configuration file for domain: %(domain)s, ' - 'file: %(file)s.') % { - 'domain': domain_name, - 'file': file_name}) - raise - - for group in sections: - for option in sections[group]: - sections[group][option] = sections[group][option][0] - self.domain_config_manager.create_config(domain_ref['id'], sections) - - def upload_configs_to_database(self, file_name, domain_name): - """Upload configs from file and load into database. - - This method will be called repeatedly for all the config files in the - config directory. 
To provide a better UX, we differentiate the error - handling in this case (versus when the user has asked for a single - config file to be uploaded). - - """ - try: - self.upload_config_to_database(file_name, domain_name) - except ValueError: # nosec - # We've already given all the info we can in a message, so carry - # on to the next one - pass - except Exception: - # Some other error occurred relating to this specific config file - # or domain. Since we are trying to upload all the config files, - # we'll continue and hide this exception. However, we tell the - # user how to get more info about this error by re-running with - # just the domain at fault. When we run in single-domain mode we - # will NOT hide the exception. - print(_('To get a more detailed information on this error, re-run ' - 'this command for the specific domain, i.e.: ' - 'keystone-manage domain_config_upload --domain-name %s') % - domain_name) - pass - - def read_domain_configs_from_files(self): - """Read configs from file(s) and load into database. - - The command line parameters have already been parsed and the CONF - command option will have been set. It is either set to the name of an - explicit domain, or it's None to indicate that we want all domain - config files. 
- - """ - domain_name = CONF.command.domain_name - conf_dir = CONF.identity.domain_config_dir - if not os.path.exists(conf_dir): - print(_('Unable to locate domain config directory: %s') % conf_dir) - raise ValueError - - if domain_name: - # Request is to upload the configs for just one domain - fname = DOMAIN_CONF_FHEAD + domain_name + DOMAIN_CONF_FTAIL - self.upload_config_to_database( - os.path.join(conf_dir, fname), domain_name) - return - - for filename, domain_name in self._domain_config_finder(conf_dir): - self.upload_configs_to_database(filename, domain_name) - - def run(self): - # First off, let's just check we can talk to the domain database - try: - self.resource_manager.list_domains(driver_hints.Hints()) - except Exception: - # It is likely that there is some SQL or other backend error - # related to set up - print(_('Unable to access the keystone database, please check it ' - 'is configured correctly.')) - raise - - try: - self.valid_options() - self.read_domain_configs_from_files() - except ValueError: - # We will already have printed out a nice message, so indicate - # to caller the non-success error code to be used. - return 1 - - -class DomainConfigUpload(BaseApp): - """Upload the domain specific configuration files to the database.""" - - name = 'domain_config_upload' - - @classmethod - def add_argument_parser(cls, subparsers): - parser = super(DomainConfigUpload, cls).add_argument_parser(subparsers) - parser.add_argument('--all', default=False, action='store_true', - help='Upload contents of all domain specific ' - 'configuration files. Either use this option ' - 'or use the --domain-name option to choose a ' - 'specific domain.') - parser.add_argument('--domain-name', default=None, - help='Upload contents of the specific ' - 'configuration file for the given domain. 
' - 'Either use this option or use the --all ' - 'option to upload contents for all domains.') - return parser - - @staticmethod - def main(): - dcu = DomainConfigUploadFiles() - status = dcu.run() - if status is not None: - sys.exit(status) - - -class SamlIdentityProviderMetadata(BaseApp): - """Generate Identity Provider metadata.""" - - name = 'saml_idp_metadata' - - @staticmethod - def main(): - metadata = idp.MetadataGenerator().generate_metadata() - print(metadata.to_string()) - - -class MappingEngineTester(BaseApp): - """Execute mapping engine locally.""" - - name = 'mapping_engine' - - @staticmethod - def read_rules(path): - try: - with open(path) as file: - return jsonutils.load(file) - except ValueError as e: - raise SystemExit(_('Error while parsing rules ' - '%(path)s: %(err)s') % {'path': path, 'err': e}) - - @staticmethod - def read_file(path): - try: - with open(path) as file: - return file.read().strip() - except IOError as e: - raise SystemExit(_("Error while opening file " - "%(path)s: %(err)s") % {'path': path, 'err': e}) - - @staticmethod - def normalize_assertion(assertion): - def split(line): - try: - k, v = line.split(':', 1) - return k.strip(), v.strip() - except ValueError as e: - msg = _("Error while parsing line: '%(line)s': %(err)s") - raise SystemExit(msg % {'line': line, 'err': e}) - assertion = assertion.split('\n') - assertion_dict = {} - prefix = CONF.command.prefix - for line in assertion: - k, v = split(line) - if prefix: - if k.startswith(prefix): - assertion_dict[k] = v - else: - assertion_dict[k] = v - return assertion_dict - - @staticmethod - def normalize_rules(rules): - if isinstance(rules, list): - return {'rules': rules} - else: - return rules - - @classmethod - def main(cls): - if not CONF.command.engine_debug: - mapping_engine.LOG.logger.setLevel('WARN') - - rules = MappingEngineTester.read_rules(CONF.command.rules) - rules = MappingEngineTester.normalize_rules(rules) - mapping_engine.validate_mapping_structure(rules) - - 
assertion = MappingEngineTester.read_file(CONF.command.input) - assertion = MappingEngineTester.normalize_assertion(assertion) - rp = mapping_engine.RuleProcessor(rules['rules']) - print(jsonutils.dumps(rp.process(assertion), indent=2)) - - @classmethod - def add_argument_parser(cls, subparsers): - parser = super(MappingEngineTester, - cls).add_argument_parser(subparsers) - - parser.add_argument('--rules', default=None, required=True, - help=("Path to the file with " - "rules to be executed. " - "Content must be a proper JSON structure, " - "with a top-level key 'rules' and " - "corresponding value being a list.")) - parser.add_argument('--input', default=None, required=True, - help=("Path to the file with input attributes. " - "The content consists of ':' separated " - "parameter names and their values. " - "There is only one key-value pair per line. " - "A ';' in the value is a separator and then " - "a value is treated as a list. Example:\n " - "EMAIL: me@example.com\n" - "LOGIN: me\n" - "GROUPS: group1;group2;group3")) - parser.add_argument('--prefix', default=None, - help=("A prefix used for each environment " - "variable in the assertion. 
For example, " - "all environment variables may have the " - "prefix ASDF_.")) - parser.add_argument('--engine-debug', - default=False, action="store_true", - help=("Enable debug messages from the mapping " - "engine.")) - - -CMDS = [ - BootStrap, - DbSync, - DbVersion, - DomainConfigUpload, - FernetRotate, - FernetSetup, - MappingPurge, - MappingEngineTester, - PKISetup, - SamlIdentityProviderMetadata, - SSLSetup, - TokenFlush, -] - - -def add_command_parsers(subparsers): - for cmd in CMDS: - cmd.add_argument_parser(subparsers) - - -command_opt = cfg.SubCommandOpt('command', - title='Commands', - help='Available commands', - handler=add_command_parsers) - - -def main(argv=None, config_files=None): - CONF.register_cli_opt(command_opt) - - config.configure() - sql.initialize() - config.set_default_for_default_log_levels() - - CONF(args=argv[1:], - project='keystone', - version=pbr.version.VersionInfo('keystone').version_string(), - usage='%(prog)s [' + '|'.join([cmd.name for cmd in CMDS]) + ']', - default_config_files=config_files) - config.setup_logging() - CONF.command.cmd_class.main() diff --git a/keystone-moon/keystone/cmd/manage.py b/keystone-moon/keystone/cmd/manage.py deleted file mode 100644 index 707c9933..00000000 --- a/keystone-moon/keystone/cmd/manage.py +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import os -import sys - -# If ../../keystone/__init__.py exists, add ../../ to Python search path, so -# that it will override what happens to be installed in -# /usr/(local/)lib/python... -possible_topdir = os.path.normpath(os.path.join(os.path.abspath(__file__), - os.pardir, - os.pardir, - os.pardir)) -if os.path.exists(os.path.join(possible_topdir, - 'keystone', - '__init__.py')): - sys.path.insert(0, possible_topdir) - -from keystone.cmd import cli -from keystone.common import environment - - -# entry point. -def main(): - environment.use_stdlib() - - dev_conf = os.path.join(possible_topdir, - 'etc', - 'keystone.conf') - config_files = None - if os.path.exists(dev_conf): - config_files = [dev_conf] - - cli.main(argv=sys.argv, config_files=config_files) diff --git a/keystone-moon/keystone/common/__init__.py b/keystone-moon/keystone/common/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/common/authorization.py b/keystone-moon/keystone/common/authorization.py deleted file mode 100644 index 414b9525..00000000 --- a/keystone-moon/keystone/common/authorization.py +++ /dev/null @@ -1,111 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2011 - 2012 Justin Santa Barbara -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_log import log - -from keystone import exception -from keystone.i18n import _, _LW -from keystone.models import token_model - - -AUTH_CONTEXT_ENV = 'KEYSTONE_AUTH_CONTEXT' -"""Environment variable used to convey the Keystone auth context. - -Auth context is essentially the user credential used for policy enforcement. -It is a dictionary with the following attributes: - -* ``token``: Token from the request -* ``user_id``: user ID of the principal -* ``user_domain_id`` (optional): Domain ID of the principal if the principal - has a domain. -* ``project_id`` (optional): project ID of the scoped project if auth is - project-scoped -* ``project_domain_id`` (optional): Domain ID of the scoped project if auth is - project-scoped. -* ``domain_id`` (optional): domain ID of the scoped domain if auth is - domain-scoped -* ``domain_name`` (optional): domain name of the scoped domain if auth is - domain-scoped -* ``is_delegated_auth``: True if this is delegated (via trust or oauth) -* ``trust_id``: Trust ID if trust-scoped, or None -* ``trustor_id``: Trustor ID if trust-scoped, or None -* ``trustee_id``: Trustee ID if trust-scoped, or None -* ``consumer_id``: OAuth consumer ID, or None -* ``access_token_id``: OAuth access token ID, or None -* ``roles`` (optional): list of role names for the given scope -* ``group_ids`` (optional): list of group IDs for which the API user has - membership if token was for a federated user - -""" - -LOG = log.getLogger(__name__) - - -def token_to_auth_context(token): - if not isinstance(token, token_model.KeystoneToken): - raise exception.UnexpectedError(_('token reference must be a ' - 'KeystoneToken type, got: %s') % - type(token)) - auth_context = {'token': token, - 'is_delegated_auth': False} - try: - auth_context['user_id'] = token.user_id - except KeyError: - LOG.warning(_LW('RBAC: Invalid user data in token')) - raise exception.Unauthorized() - auth_context['user_domain_id'] = token.user_domain_id - - if token.project_scoped: 
- auth_context['project_id'] = token.project_id - auth_context['project_domain_id'] = token.project_domain_id - elif token.domain_scoped: - auth_context['domain_id'] = token.domain_id - auth_context['domain_name'] = token.domain_name - else: - LOG.debug('RBAC: Proceeding without project or domain scope') - - if token.trust_scoped: - auth_context['is_delegated_auth'] = True - auth_context['trust_id'] = token.trust_id - auth_context['trustor_id'] = token.trustor_user_id - auth_context['trustee_id'] = token.trustee_user_id - else: - # NOTE(lbragstad): These variables will already be set to None but we - # add the else statement here for readability. - auth_context['trust_id'] = None - auth_context['trustor_id'] = None - auth_context['trustee_id'] = None - - roles = token.role_names - if roles: - auth_context['roles'] = roles - - if token.oauth_scoped: - auth_context['is_delegated_auth'] = True - auth_context['consumer_id'] = token.oauth_consumer_id - auth_context['access_token_id'] = token.oauth_access_token_id - else: - # NOTE(lbragstad): These variables will already be set to None but we - # add the else statement here for readability. - auth_context['consumer_id'] = None - auth_context['access_token_id'] = None - - if token.is_federated_user: - auth_context['group_ids'] = token.federation_group_ids - - return auth_context diff --git a/keystone-moon/keystone/common/base64utils.py b/keystone-moon/keystone/common/base64utils.py deleted file mode 100644 index d19eade7..00000000 --- a/keystone-moon/keystone/common/base64utils.py +++ /dev/null @@ -1,401 +0,0 @@ -# Copyright 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" - -Python provides the base64 module as a core module but this is mostly -limited to encoding and decoding base64 and it's variants. It is often -useful to be able to perform other operations on base64 text. This -module is meant to be used in conjunction with the core base64 module. - -Standardized base64 is defined in -RFC-4648 "The Base16, Base32, and Base64 Data Encodings". - -This module provides the following base64 utility functionality: - - * tests if text is valid base64 - * filter formatting from base64 - * convert base64 between different alphabets - * Handle padding issues - - test if base64 is padded - - removes padding - - restores padding - * wraps base64 text into formatted blocks - - via iterator - - return formatted string - -""" - -import re -import string - -import six -from six.moves import urllib - -from keystone.i18n import _ - - -class InvalidBase64Error(ValueError): - pass - -base64_alphabet_re = re.compile(r'^[^A-Za-z0-9+/=]+$') -base64url_alphabet_re = re.compile(r'^[^A-Za-z0-9---_=]+$') - -base64_non_alphabet_re = re.compile(r'[^A-Za-z0-9+/=]+') -base64url_non_alphabet_re = re.compile(r'[^A-Za-z0-9---_=]+') - -_strip_formatting_re = re.compile(r'\s+') - -if six.PY2: - str_ = string -else: - str_ = str - -_base64_to_base64url_trans = str_.maketrans('+/', '-_') -_base64url_to_base64_trans = str_.maketrans('-_', '+/') - - -def _check_padding_length(pad): - if len(pad) != 1: - raise ValueError(_('pad must be single character')) - - -def is_valid_base64(text): - """Test if input text can be base64 decoded. 
- - :param text: input base64 text - :type text: string - :returns: bool -- True if text can be decoded as base64, False otherwise - """ - - text = filter_formatting(text) - - if base64_non_alphabet_re.search(text): - return False - - try: - return base64_is_padded(text) - except InvalidBase64Error: - return False - - -def is_valid_base64url(text): - """Test if input text can be base64url decoded. - - :param text: input base64 text - :type text: string - :returns: bool -- True if text can be decoded as base64url, - False otherwise - """ - - text = filter_formatting(text) - - if base64url_non_alphabet_re.search(text): - return False - - try: - return base64_is_padded(text) - except InvalidBase64Error: - return False - - -def filter_formatting(text): - """Return base64 text without any formatting, just the base64. - - Base64 text is often formatted with whitespace, line endings, - etc. This function strips out any formatting, the result will - contain only base64 characters. - - Note, this function does not filter out all non-base64 alphabet - characters, it only removes characters used for formatting. - - :param text: input text to filter - :type text: string - :returns: string -- filtered text without formatting - """ - return _strip_formatting_re.sub('', text) - - -def base64_to_base64url(text): - """Convert base64 text to base64url text. - - base64url text is designed to be safe for use in file names and - URL's. It is defined in RFC-4648 Section 5. - - base64url differs from base64 in the last two alphabet characters - at index 62 and 63, these are sometimes referred as the - altchars. The '+' character at index 62 is replaced by '-' - (hyphen) and the '/' character at index 63 is replaced by '_' - (underscore). - - This function only translates the altchars, non-alphabet - characters are not filtered out. - - WARNING:: - - base64url continues to use the '=' pad character which is NOT URL - safe. 
RFC-4648 suggests two alternate methods to deal with this: - - percent-encode - percent-encode the pad character (e.g. '=' becomes - '%3D'). This makes the base64url text fully safe. But - percent-encoding has the downside of requiring - percent-decoding prior to feeding the base64url text into a - base64url decoder since most base64url decoders do not - recognize %3D as a pad character and most decoders require - correct padding. - - no-padding - padding is not strictly necessary to decode base64 or - base64url text, the pad can be computed from the input text - length. However many decoders demand padding and will consider - non-padded text to be malformed. If one wants to omit the - trailing pad character(s) for use in URL's it can be added back - using the base64_assure_padding() function. - - This function makes no decisions about which padding methodology to - use. One can either call base64_strip_padding() to remove any pad - characters (restoring later with base64_assure_padding()) or call - base64url_percent_encode() to percent-encode the pad characters. - - :param text: input base64 text - :type text: string - :returns: string -- base64url text - """ - return text.translate(_base64_to_base64url_trans) - - -def base64url_to_base64(text): - """Convert base64url text to base64 text. - - See base64_to_base64url() for a description of base64url text and - it's issues. - - This function does NOT handle percent-encoded pad characters, they - will be left intact. If the input base64url text is - percent-encoded you should call - - :param text: text in base64url alphabet - :type text: string - :returns: string -- text in base64 alphabet - - """ - return text.translate(_base64url_to_base64_trans) - - -def base64_is_padded(text, pad='='): - """Test if the text is base64 padded. - - The input text must be in a base64 alphabet. The pad must be a - single character. If the text has been percent-encoded (e.g. 
pad - is the string '%3D') you must convert the text back to a base64 - alphabet (e.g. if percent-encoded use the function - base64url_percent_decode()). - - :param text: text containing ONLY characters in a base64 alphabet - :type text: string - :param pad: pad character (must be single character) (default: '=') - :type pad: string - :returns: bool -- True if padded, False otherwise - :raises: ValueError, InvalidBase64Error - """ - - _check_padding_length(pad) - - text_len = len(text) - if text_len > 0 and text_len % 4 == 0: - pad_index = text.find(pad) - if pad_index >= 0 and pad_index < text_len - 2: - raise InvalidBase64Error(_('text is multiple of 4, ' - 'but pad "%s" occurs before ' - '2nd to last char') % pad) - if pad_index == text_len - 2 and text[-1] != pad: - raise InvalidBase64Error(_('text is multiple of 4, ' - 'but pad "%s" occurs before ' - 'non-pad last char') % pad) - return True - - if text.find(pad) >= 0: - raise InvalidBase64Error(_('text is not a multiple of 4, ' - 'but contains pad "%s"') % pad) - return False - - -def base64url_percent_encode(text): - """Percent-encode base64url padding. - - The input text should only contain base64url alphabet - characters. Any non-base64url alphabet characters will also be - subject to percent-encoding. - - :param text: text containing ONLY characters in the base64url alphabet - :type text: string - :returns: string -- percent-encoded base64url text - :raises: InvalidBase64Error - """ - - if len(text) % 4 != 0: - raise InvalidBase64Error(_('padded base64url text must be ' - 'multiple of 4 characters')) - - return urllib.parse.quote(text) - - -def base64url_percent_decode(text): - """Percent-decode base64url padding. - - The input text should only contain base64url alphabet - characters and the percent-encoded pad character. Any other - percent-encoded characters will be subject to percent-decoding. 
- - :param text: base64url alphabet text - :type text: string - :returns: string -- percent-decoded base64url text - """ - - decoded_text = urllib.parse.unquote(text) - - if len(decoded_text) % 4 != 0: - raise InvalidBase64Error(_('padded base64url text must be ' - 'multiple of 4 characters')) - - return decoded_text - - -def base64_strip_padding(text, pad='='): - """Remove padding from input base64 text. - - :param text: text containing ONLY characters in a base64 alphabet - :type text: string - :param pad: pad character (must be single character) (default: '=') - :type pad: string - :returns: string -- base64 text without padding - :raises: ValueError - """ - _check_padding_length(pad) - - # Can't be padded if text is less than 4 characters. - if len(text) < 4: - return text - - if text[-1] == pad: - if text[-2] == pad: - return text[0:-2] - else: - return text[0:-1] - else: - return text - - -def base64_assure_padding(text, pad='='): - """Assure the input text ends with padding. - - Base64 text is normally expected to be a multiple of 4 - characters. Each 4 character base64 sequence produces 3 octets of - binary data. If the binary data is not a multiple of 3 the base64 - text is padded at the end with a pad character such that it is - always a multiple of 4. Padding is ignored and does not alter the - binary data nor it's length. - - In some circumstances it is desirable to omit the padding - character due to transport encoding conflicts. Base64 text can - still be correctly decoded if the length of the base64 text - (consisting only of characters in the desired base64 alphabet) is - known, padding is not absolutely necessary. - - Some base64 decoders demand correct padding or one may wish to - format RFC compliant base64, this function performs this action. - - Input is assumed to consist only of members of a base64 - alphabet (i.e no whitespace). Iteration yields a sequence of lines. - The line does NOT terminate with a line ending. 
- - Use the filter_formatting() function to assure the input text - contains only the members of the alphabet. - - If the text ends with the pad it is assumed to already be - padded. Otherwise the binary length is computed from the input - text length and correct number of pad characters are appended. - - :param text: text containing ONLY characters in a base64 alphabet - :type text: string - :param pad: pad character (must be single character) (default: '=') - :type pad: string - :returns: string -- input base64 text with padding - :raises: ValueError - """ - _check_padding_length(pad) - - if text.endswith(pad): - return text - - n = len(text) % 4 - if n == 0: - return text - - n = 4 - n - padding = pad * n - return text + padding - - -def base64_wrap_iter(text, width=64): - """Fold text into lines of text with max line length. - - Input is assumed to consist only of members of a base64 - alphabet (i.e no whitespace). Iteration yields a sequence of lines. - The line does NOT terminate with a line ending. - - Use the filter_formatting() function to assure the input text - contains only the members of the alphabet. - - :param text: text containing ONLY characters in a base64 alphabet - :type text: string - :param width: number of characters in each wrapped line (default: 64) - :type width: int - :returns: generator -- sequence of lines of base64 text. - """ - - text = six.text_type(text) - for x in six.moves.range(0, len(text), width): - yield text[x:x + width] - - -def base64_wrap(text, width=64): - """Fold text into lines of text with max line length. - - Input is assumed to consist only of members of a base64 - alphabet (i.e no whitespace). Fold the text into lines whose - line length is width chars long, terminate each line with line - ending (default is '\\n'). Return the wrapped text as a single - string. - - Use the filter_formatting() function to assure the input text - contains only the members of the alphabet. 
- - :param text: text containing ONLY characters in a base64 alphabet - :type text: string - :param width: number of characters in each wrapped line (default: 64) - :type width: int - :returns: string -- wrapped text. - """ - - buf = six.StringIO() - - for line in base64_wrap_iter(text, width): - buf.write(line) - buf.write(u'\n') - - text = buf.getvalue() - buf.close() - return text diff --git a/keystone-moon/keystone/common/cache/__init__.py b/keystone-moon/keystone/common/cache/__init__.py deleted file mode 100644 index 49502399..00000000 --- a/keystone-moon/keystone/common/cache/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright 2013 Metacloud -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.common.cache.core import * # noqa diff --git a/keystone-moon/keystone/common/cache/_context_cache.py b/keystone-moon/keystone/common/cache/_context_cache.py deleted file mode 100644 index 3895ca1f..00000000 --- a/keystone-moon/keystone/common/cache/_context_cache.py +++ /dev/null @@ -1,129 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -"""A dogpile.cache proxy that caches objects in the request local cache.""" -from dogpile.cache import api -from dogpile.cache import proxy -from oslo_context import context as oslo_context -from oslo_serialization import msgpackutils - -from keystone.models import revoke_model - - -class _RevokeModelHandler(object): - # NOTE(morganfainberg): There needs to be reserved "registry" entries set - # in oslo_serialization for application-specific handlers. We picked 127 - # here since it's waaaaaay far out before oslo_serialization will use it. - identity = 127 - handles = (revoke_model.RevokeTree,) - - def __init__(self, registry): - self._registry = registry - - def serialize(self, obj): - return msgpackutils.dumps(obj.revoke_map, - registry=self._registry) - - def deserialize(self, data): - revoke_map = msgpackutils.loads(data, registry=self._registry) - revoke_tree = revoke_model.RevokeTree() - revoke_tree.revoke_map = revoke_map - return revoke_tree - - -# Register our new handler. -_registry = msgpackutils.default_registry -_registry.frozen = False -_registry.register(_RevokeModelHandler(registry=_registry)) -_registry.frozen = True - - -class _ResponseCacheProxy(proxy.ProxyBackend): - - __key_pfx = '_request_cache_%s' - - def _get_request_context(self): - # Return the current context or a new/empty context. - return oslo_context.get_current() or oslo_context.RequestContext() - - def _get_request_key(self, key): - return self.__key_pfx % key - - def _set_local_cache(self, key, value, ctx=None): - # Set a serialized version of the returned value in local cache for - # subsequent calls to the memoized method. 
- if not ctx: - ctx = self._get_request_context() - serialize = {'payload': value.payload, 'metadata': value.metadata} - setattr(ctx, self._get_request_key(key), msgpackutils.dumps(serialize)) - ctx.update_store() - - def _get_local_cache(self, key): - # Return the version from our local request cache if it exists. - ctx = self._get_request_context() - try: - value = getattr(ctx, self._get_request_key(key)) - except AttributeError: - return api.NO_VALUE - - value = msgpackutils.loads(value) - return api.CachedValue(payload=value['payload'], - metadata=value['metadata']) - - def _delete_local_cache(self, key): - # On invalidate/delete remove the value from the local request cache - ctx = self._get_request_context() - try: - delattr(ctx, self._get_request_key(key)) - ctx.update_store() - except AttributeError: # nosec - # NOTE(morganfainberg): We will simply pass here, this value has - # not been cached locally in the request. - pass - - def get(self, key): - value = self._get_local_cache(key) - if value is api.NO_VALUE: - value = self.proxied.get(key) - if value is not api.NO_VALUE: - self._set_local_cache(key, value) - return value - - def set(self, key, value): - self._set_local_cache(key, value) - self.proxied.set(key, value) - - def delete(self, key): - self._delete_local_cache(key) - self.proxied.delete(key) - - def get_multi(self, keys): - values = {} - for key in keys: - v = self._get_local_cache(key) - if v is not api.NO_VALUE: - values[key] = v - query_keys = set(keys).difference(set(values.keys())) - values.update(dict( - zip(query_keys, self.proxied.get_multi(query_keys)))) - return [values[k] for k in keys] - - def set_multi(self, mapping): - ctx = self._get_request_context() - for k, v in mapping.items(): - self._set_local_cache(k, v, ctx) - self.proxied.set_multi(mapping) - - def delete_multi(self, keys): - for k in keys: - self._delete_local_cache(k) - self.proxied.delete_multi(keys) diff --git a/keystone-moon/keystone/common/cache/_memcache_pool.py 
b/keystone-moon/keystone/common/cache/_memcache_pool.py deleted file mode 100644 index 2bfcc3bb..00000000 --- a/keystone-moon/keystone/common/cache/_memcache_pool.py +++ /dev/null @@ -1,244 +0,0 @@ -# Copyright 2014 Mirantis Inc -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Thread-safe connection pool for python-memcached.""" - -# NOTE(yorik-sar): this file is copied between keystone and keystonemiddleware -# and should be kept in sync until we can use external library for this. - -import collections -import contextlib -import itertools -import logging -import threading -import time - -import memcache -from oslo_log import log -from six.moves import queue, zip - -from keystone import exception -from keystone.i18n import _ - - -LOG = log.getLogger(__name__) - - -class _MemcacheClient(memcache.Client): - """Thread global memcache client - - As client is inherited from threading.local we have to restore object - methods overloaded by threading.local so we can reuse clients in - different threads - """ - __delattr__ = object.__delattr__ - __getattribute__ = object.__getattribute__ - __new__ = object.__new__ - __setattr__ = object.__setattr__ - - def __del__(self): - pass - - -_PoolItem = collections.namedtuple('_PoolItem', ['ttl', 'connection']) - - -class ConnectionPool(queue.Queue): - """Base connection pool class - - This class implements the basic connection pool logic as an abstract base - class. 
- """ - def __init__(self, maxsize, unused_timeout, conn_get_timeout=None): - """Initialize the connection pool. - - :param maxsize: maximum number of client connections for the pool - :type maxsize: int - :param unused_timeout: idle time to live for unused clients (in - seconds). If a client connection object has been - in the pool and idle for longer than the - unused_timeout, it will be reaped. This is to - ensure resources are released as utilization - goes down. - :type unused_timeout: int - :param conn_get_timeout: maximum time in seconds to wait for a - connection. If set to `None` timeout is - indefinite. - :type conn_get_timeout: int - """ - # super() cannot be used here because Queue in stdlib is an - # old-style class - queue.Queue.__init__(self, maxsize) - self._unused_timeout = unused_timeout - self._connection_get_timeout = conn_get_timeout - self._acquired = 0 - - def _create_connection(self): - """Returns a connection instance. - - This is called when the pool needs another instance created. - - :returns: a new connection instance - - """ - raise NotImplementedError - - def _destroy_connection(self, conn): - """Destroy and cleanup a connection instance. - - This is called when the pool wishes to get rid of an existing - connection. This is the opportunity for a subclass to free up - resources and cleaup after itself. 
- - :param conn: the connection object to destroy - - """ - raise NotImplementedError - - def _debug_logger(self, msg, *args, **kwargs): - if LOG.isEnabledFor(logging.DEBUG): - thread_id = threading.current_thread().ident - args = (id(self), thread_id) + args - prefix = 'Memcached pool %s, thread %s: ' - LOG.debug(prefix + msg, *args, **kwargs) - - @contextlib.contextmanager - def acquire(self): - self._debug_logger('Acquiring connection') - try: - conn = self.get(timeout=self._connection_get_timeout) - except queue.Empty: - raise exception.UnexpectedError( - _('Unable to get a connection from pool id %(id)s after ' - '%(seconds)s seconds.') % - {'id': id(self), 'seconds': self._connection_get_timeout}) - self._debug_logger('Acquired connection %s', id(conn)) - try: - yield conn - finally: - self._debug_logger('Releasing connection %s', id(conn)) - self._drop_expired_connections() - try: - # super() cannot be used here because Queue in stdlib is an - # old-style class - queue.Queue.put(self, conn, block=False) - except queue.Full: - self._debug_logger('Reaping exceeding connection %s', id(conn)) - self._destroy_connection(conn) - - def _qsize(self): - if self.maxsize: - return self.maxsize - self._acquired - else: - # A value indicating there is always a free connection - # if maxsize is None or 0 - return 1 - - # NOTE(dstanek): stdlib and eventlet Queue implementations - # have different names for the qsize method. This ensures - # that we override both of them. 
- if not hasattr(queue.Queue, '_qsize'): - qsize = _qsize - - def _get(self): - if self.queue: - conn = self.queue.pop().connection - else: - conn = self._create_connection() - self._acquired += 1 - return conn - - def _drop_expired_connections(self): - """Drop all expired connections from the right end of the queue.""" - now = time.time() - while self.queue and self.queue[0].ttl < now: - conn = self.queue.popleft().connection - self._debug_logger('Reaping connection %s', id(conn)) - self._destroy_connection(conn) - - def _put(self, conn): - self.queue.append(_PoolItem( - ttl=time.time() + self._unused_timeout, - connection=conn, - )) - self._acquired -= 1 - - -class MemcacheClientPool(ConnectionPool): - def __init__(self, urls, arguments, **kwargs): - # super() cannot be used here because Queue in stdlib is an - # old-style class - ConnectionPool.__init__(self, **kwargs) - self.urls = urls - self._arguments = arguments - # NOTE(morganfainberg): The host objects expect an int for the - # deaduntil value. Initialize this at 0 for each host with 0 indicating - # the host is not dead. - self._hosts_deaduntil = [0] * len(urls) - - def _create_connection(self): - return _MemcacheClient(self.urls, **self._arguments) - - def _destroy_connection(self, conn): - conn.disconnect_all() - - def _get(self): - # super() cannot be used here because Queue in stdlib is an - # old-style class - conn = ConnectionPool._get(self) - try: - # Propagate host state known to us to this client's list - now = time.time() - for deaduntil, host in zip(self._hosts_deaduntil, conn.servers): - if deaduntil > now and host.deaduntil <= now: - host.mark_dead('propagating death mark from the pool') - host.deaduntil = deaduntil - except Exception: - # We need to be sure that connection doesn't leak from the pool. - # This code runs before we enter context manager's try-finally - # block, so we need to explicitly release it here. 
- # super() cannot be used here because Queue in stdlib is an - # old-style class - ConnectionPool._put(self, conn) - raise - return conn - - def _put(self, conn): - try: - # If this client found that one of the hosts is dead, mark it as - # such in our internal list - now = time.time() - for i, host in zip(itertools.count(), conn.servers): - deaduntil = self._hosts_deaduntil[i] - # Do nothing if we already know this host is dead - if deaduntil <= now: - if host.deaduntil > now: - self._hosts_deaduntil[i] = host.deaduntil - self._debug_logger( - 'Marked host %s dead until %s', - self.urls[i], host.deaduntil) - else: - self._hosts_deaduntil[i] = 0 - # If all hosts are dead we should forget that they're dead. This - # way we won't get completely shut off until dead_retry seconds - # pass, but will be checking servers as frequent as we can (over - # way smaller socket_timeout) - if all(deaduntil > now for deaduntil in self._hosts_deaduntil): - self._debug_logger('All hosts are dead. Marking them as live.') - self._hosts_deaduntil[:] = [0] * len(self._hosts_deaduntil) - finally: - # super() cannot be used here because Queue in stdlib is an - # old-style class - ConnectionPool._put(self, conn) diff --git a/keystone-moon/keystone/common/cache/backends/__init__.py b/keystone-moon/keystone/common/cache/backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/common/cache/backends/memcache_pool.py b/keystone-moon/keystone/common/cache/backends/memcache_pool.py deleted file mode 100644 index bbe4785a..00000000 --- a/keystone-moon/keystone/common/cache/backends/memcache_pool.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright 2014 Mirantis Inc -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""This module is deprecated.""" - -from oslo_cache.backends import memcache_pool -from oslo_log import versionutils - - -@versionutils.deprecated( - versionutils.deprecated.MITAKA, - what='keystone.cache.memcache_pool backend', - in_favor_of='oslo_cache.memcache_pool backend', - remove_in=+1) -class PooledMemcachedBackend(memcache_pool.PooledMemcachedBackend): - pass diff --git a/keystone-moon/keystone/common/cache/backends/mongo.py b/keystone-moon/keystone/common/cache/backends/mongo.py deleted file mode 100644 index 861aefed..00000000 --- a/keystone-moon/keystone/common/cache/backends/mongo.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright 2014 Hewlett-Packard Development Company, L.P. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_cache.backends import mongo -from oslo_log import versionutils - - -@versionutils.deprecated( - versionutils.deprecated.MITAKA, - what='keystone.cache.mongo backend', - in_favor_of='oslo_cache.mongo backend', - remove_in=+1) -class MongoCacheBackend(mongo.MongoCacheBackend): - pass diff --git a/keystone-moon/keystone/common/cache/backends/noop.py b/keystone-moon/keystone/common/cache/backends/noop.py deleted file mode 100644 index eda06ec8..00000000 --- a/keystone-moon/keystone/common/cache/backends/noop.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2013 Metacloud -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from dogpile.cache import api -from oslo_log import versionutils - - -NO_VALUE = api.NO_VALUE - - -@versionutils.deprecated( - versionutils.deprecated.MITAKA, - what='keystone.common.cache.noop backend', - in_favor_of="dogpile.cache's Null backend", - remove_in=+1) -class NoopCacheBackend(api.CacheBackend): - """A no op backend as a default caching backend. - - The no op backend is provided as the default caching backend for keystone - to ensure that ``dogpile.cache.memory`` is not used in any real-world - circumstances unintentionally. ``dogpile.cache.memory`` does not have a - mechanism to cleanup it's internal dict and therefore could cause run-away - memory utilization. 
- """ - - def __init__(self, *args): - return - - def get(self, key): - return NO_VALUE - - def get_multi(self, keys): - return [NO_VALUE for x in keys] - - def set(self, key, value): - return - - def set_multi(self, mapping): - return - - def delete(self, key): - return - - def delete_multi(self, keys): - return diff --git a/keystone-moon/keystone/common/cache/core.py b/keystone-moon/keystone/common/cache/core.py deleted file mode 100644 index 6bb0af51..00000000 --- a/keystone-moon/keystone/common/cache/core.py +++ /dev/null @@ -1,124 +0,0 @@ -# Copyright 2013 Metacloud -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Keystone Caching Layer Implementation.""" -import dogpile.cache -from dogpile.cache import api -from oslo_cache import core as cache -from oslo_config import cfg - -from keystone.common.cache import _context_cache - - -CONF = cfg.CONF -CACHE_REGION = cache.create_region() - - -def configure_cache(region=None): - if region is None: - region = CACHE_REGION - # NOTE(morganfainberg): running cache.configure_cache_region() - # sets region.is_configured, this must be captured before - # cache.configure_cache_region is called. - configured = region.is_configured - cache.configure_cache_region(CONF, region) - # Only wrap the region if it was not configured. This should be pushed - # to oslo_cache lib somehow. 
- if not configured: - region.wrap(_context_cache._ResponseCacheProxy) - - -def get_memoization_decorator(group, expiration_group=None, region=None): - if region is None: - region = CACHE_REGION - return cache.get_memoization_decorator(CONF, region, group, - expiration_group=expiration_group) - - -# NOTE(stevemar): When memcache_pool, mongo and noop backends are removed -# we no longer need to register the backends here. -dogpile.cache.register_backend( - 'keystone.common.cache.noop', - 'keystone.common.cache.backends.noop', - 'NoopCacheBackend') - -dogpile.cache.register_backend( - 'keystone.cache.mongo', - 'keystone.common.cache.backends.mongo', - 'MongoCacheBackend') - -dogpile.cache.register_backend( - 'keystone.cache.memcache_pool', - 'keystone.common.cache.backends.memcache_pool', - 'PooledMemcachedBackend') - - -# TODO(morganfainberg): Move this logic up into oslo.cache directly -# so we can handle region-wide invalidations or alternatively propose -# a fix to dogpile.cache to make region-wide invalidates possible to -# work across distributed processes. 
-class _RegionInvalidator(object): - - def __init__(self, region, region_name): - self.region = region - self.region_name = region_name - region_key = '_RegionExpiration.%(type)s.%(region_name)s' - self.soft_region_key = region_key % {'type': 'soft', - 'region_name': self.region_name} - self.hard_region_key = region_key % {'type': 'hard', - 'region_name': self.region_name} - - @property - def hard_invalidated(self): - invalidated = self.region.backend.get(self.hard_region_key) - if invalidated is not api.NO_VALUE: - return invalidated.payload - return None - - @hard_invalidated.setter - def hard_invalidated(self, value): - self.region.set(self.hard_region_key, value) - - @hard_invalidated.deleter - def hard_invalidated(self): - self.region.delete(self.hard_region_key) - - @property - def soft_invalidated(self): - invalidated = self.region.backend.get(self.soft_region_key) - if invalidated is not api.NO_VALUE: - return invalidated.payload - return None - - @soft_invalidated.setter - def soft_invalidated(self, value): - self.region.set(self.soft_region_key, value) - - @soft_invalidated.deleter - def soft_invalidated(self): - self.region.delete(self.soft_region_key) - - -def apply_invalidation_patch(region, region_name): - """Patch the region interfaces to ensure we share the expiration time. - - This method is used to patch region.invalidate, region._hard_invalidated, - and region._soft_invalidated. - """ - # Patch the region object. This logic needs to be moved up into dogpile - # itself. Patching the internal interfaces, unfortunately, is the only - # way to handle this at the moment. 
- invalidator = _RegionInvalidator(region=region, region_name=region_name) - setattr(region, '_hard_invalidated', invalidator.hard_invalidated) - setattr(region, '_soft_invalidated', invalidator.soft_invalidated) diff --git a/keystone-moon/keystone/common/clean.py b/keystone-moon/keystone/common/clean.py deleted file mode 100644 index 38564e0b..00000000 --- a/keystone-moon/keystone/common/clean.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import six - -from keystone import exception -from keystone.i18n import _ - - -def check_length(property_name, value, min_length=1, max_length=64): - if len(value) < min_length: - if min_length == 1: - msg = _("%s cannot be empty.") % property_name - else: - msg = (_("%(property_name)s cannot be less than " - "%(min_length)s characters.") % dict( - property_name=property_name, min_length=min_length)) - raise exception.ValidationError(msg) - if len(value) > max_length: - msg = (_("%(property_name)s should not be greater than " - "%(max_length)s characters.") % dict( - property_name=property_name, max_length=max_length)) - - raise exception.ValidationError(msg) - - -def check_type(property_name, value, expected_type, display_expected_type): - if not isinstance(value, expected_type): - msg = (_("%(property_name)s is not a " - "%(display_expected_type)s") % dict( - property_name=property_name, - display_expected_type=display_expected_type)) - raise exception.ValidationError(msg) - - -def check_enabled(property_name, enabled): - # Allow int and it's subclass bool - check_type('%s enabled' % property_name, enabled, int, 'boolean') - return bool(enabled) - - -def check_name(property_name, name, min_length=1, max_length=64): - check_type('%s name' % property_name, name, six.string_types, - 'str or unicode') - name = name.strip() - check_length('%s name' % property_name, name, - min_length=min_length, max_length=max_length) - return name - - -def domain_name(name): - return check_name('Domain', name) - - -def domain_enabled(enabled): - return check_enabled('Domain', enabled) - - -def project_name(name): - return check_name('Project', name) - - -def project_enabled(enabled): - return check_enabled('Project', enabled) - - -def user_name(name): - return check_name('User', name, max_length=255) - - -def user_enabled(enabled): - return check_enabled('User', enabled) - - -def group_name(name): - return check_name('Group', name) diff --git 
a/keystone-moon/keystone/common/config.py b/keystone-moon/keystone/common/config.py deleted file mode 100644 index 56f419b6..00000000 --- a/keystone-moon/keystone/common/config.py +++ /dev/null @@ -1,1259 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import logging -import os - -from oslo_cache import core as cache -from oslo_config import cfg -from oslo_log import log -import oslo_messaging -from oslo_middleware import cors -import passlib.utils - -from keystone import exception - - -_DEFAULT_AUTH_METHODS = ['external', 'password', 'token', 'oauth1'] -_CERTFILE = '/etc/keystone/ssl/certs/signing_cert.pem' -_KEYFILE = '/etc/keystone/ssl/private/signing_key.pem' -_SSO_CALLBACK = '/etc/keystone/sso_callback_template.html' - -_DEPRECATE_PKI_MSG = ('PKI token support has been deprecated in the M ' - 'release and will be removed in the O release. Fernet ' - 'or UUID tokens are recommended.') - -_DEPRECATE_INHERIT_MSG = ('The option to enable the OS-INHERIT extension has ' - 'been deprecated in the M release and will be ' - 'removed in the O release. The OS-INHERIT extension ' - 'will be enabled by default.') - -_DEPRECATE_EP_MSG = ('The option to enable the OS-ENDPOINT-POLICY extension ' - 'has been deprecated in the M release and will be ' - 'removed in the O release. 
The OS-ENDPOINT-POLICY ' - 'extension will be enabled by default.') - - -FILE_OPTIONS = { - None: [ - cfg.StrOpt('admin_token', secret=True, default=None, - help='A "shared secret" that can be used to bootstrap ' - 'Keystone. This "token" does not represent a user, ' - 'and carries no explicit authorization. If set ' - 'to `None`, the value is ignored and the ' - '`admin_token` log in mechanism is effectively ' - 'disabled. To completely disable `admin_token` ' - 'in production (highly recommended), remove ' - 'AdminTokenAuthMiddleware from your paste ' - 'application pipelines (for example, in ' - 'keystone-paste.ini).'), - cfg.StrOpt('public_endpoint', - help='The base public endpoint URL for Keystone that is ' - 'advertised to clients (NOTE: this does NOT affect ' - 'how Keystone listens for connections). ' - 'Defaults to the base host URL of the request. E.g. a ' - 'request to http://server:5000/v3/users will ' - 'default to http://server:5000. You should only need ' - 'to set this value if the base URL contains a path ' - '(e.g. /prefix/v3) or the endpoint should be found ' - 'on a different server.'), - cfg.StrOpt('admin_endpoint', - help='The base admin endpoint URL for Keystone that is ' - 'advertised to clients (NOTE: this does NOT affect ' - 'how Keystone listens for connections). ' - 'Defaults to the base host URL of the request. E.g. a ' - 'request to http://server:35357/v3/users will ' - 'default to http://server:35357. You should only need ' - 'to set this value if the base URL contains a path ' - '(e.g. /prefix/v3) or the endpoint should be found ' - 'on a different server.'), - cfg.IntOpt('max_project_tree_depth', default=5, - help='Maximum depth of the project hierarchy, excluding ' - 'the project acting as a domain at the top of the ' - 'hierarchy. 
WARNING: setting it to a large value may ' - 'adversely impact performance.'), - cfg.IntOpt('max_param_size', default=64, - help='Limit the sizes of user & project ID/names.'), - # we allow tokens to be a bit larger to accommodate PKI - cfg.IntOpt('max_token_size', default=8192, - help='Similar to max_param_size, but provides an ' - 'exception for token values.'), - cfg.StrOpt('member_role_id', - default='9fe2ff9ee4384b1894a90878d3e92bab', - help='Similar to the member_role_name option, this ' - 'represents the default role ID used to associate ' - 'users with their default projects in the v2 API. ' - 'This will be used as the explicit role where one is ' - 'not specified by the v2 API.'), - cfg.StrOpt('member_role_name', default='_member_', - help='This is the role name used in combination with the ' - 'member_role_id option; see that option for more ' - 'detail.'), - # NOTE(lbragstad/morganfainberg): This value of 10k was - # measured as having an approximate 30% clock-time savings - # over the old default of 40k. The passlib default is not - # static and grows over time to constantly approximate ~300ms - # of CPU time to hash; this was considered too high. This - # value still exceeds the glibc default of 5k. - cfg.IntOpt('crypt_strength', default=10000, min=1000, max=100000, - help='The value passed as the keyword "rounds" to ' - 'passlib\'s encrypt method.'), - cfg.IntOpt('list_limit', - help='The maximum number of entities that will be ' - 'returned in a collection, with no limit set by ' - 'default. This global limit may be then overridden ' - 'for a specific driver, by specifying a list_limit ' - 'in the appropriate section (e.g. [assignment]).'), - cfg.BoolOpt('domain_id_immutable', default=True, - help='Set this to false if you want to enable the ' - 'ability for user, group and project entities ' - 'to be moved between domains by updating their ' - 'domain_id. 
Allowing such movement is not ' - 'recommended if the scope of a domain admin is being ' - 'restricted by use of an appropriate policy file ' - '(see policy.v3cloudsample as an example). This ' - 'ability is deprecated and will be removed in a ' - 'future release.', - deprecated_for_removal=True), - cfg.BoolOpt('strict_password_check', default=False, - help='If set to true, strict password length checking is ' - 'performed for password manipulation. If a password ' - 'exceeds the maximum length, the operation will fail ' - 'with an HTTP 403 Forbidden error. If set to false, ' - 'passwords are automatically truncated to the ' - 'maximum length.'), - cfg.StrOpt('secure_proxy_ssl_header', default='HTTP_X_FORWARDED_PROTO', - help='The HTTP header used to determine the scheme for the ' - 'original request, even if it was removed by an SSL ' - 'terminating proxy.'), - cfg.BoolOpt('insecure_debug', default=False, - help='If set to true the server will return information ' - 'in the response that may allow an unauthenticated ' - 'or authenticated user to get more information than ' - 'normal, such as why authentication failed. This may ' - 'be useful for debugging but is insecure.'), - ], - 'identity': [ - cfg.StrOpt('default_domain_id', default='default', - help='This references the domain to use for all ' - 'Identity API v2 requests (which are not aware of ' - 'domains). A domain with this ID will be created ' - 'for you by keystone-manage db_sync in migration ' - '008. The domain referenced by this ID cannot be ' - 'deleted on the v3 API, to prevent accidentally ' - 'breaking the v2 API. 
There is nothing special about ' - 'this domain, other than the fact that it must ' - 'exist to order to maintain support for your v2 ' - 'clients.'), - cfg.BoolOpt('domain_specific_drivers_enabled', - default=False, - help='A subset (or all) of domains can have their own ' - 'identity driver, each with their own partial ' - 'configuration options, stored in either the ' - 'resource backend or in a file in a domain ' - 'configuration directory (depending on the setting ' - 'of domain_configurations_from_database). Only ' - 'values specific to the domain need to be specified ' - 'in this manner. This feature is disabled by ' - 'default; set to true to enable.'), - cfg.BoolOpt('domain_configurations_from_database', - default=False, - help='Extract the domain specific configuration options ' - 'from the resource backend where they have been ' - 'stored with the domain data. This feature is ' - 'disabled by default (in which case the domain ' - 'specific options will be loaded from files in the ' - 'domain configuration directory); set to true to ' - 'enable.'), - cfg.StrOpt('domain_config_dir', - default='/etc/keystone/domains', - help='Path for Keystone to locate the domain specific ' - 'identity configuration files if ' - 'domain_specific_drivers_enabled is set to true.'), - cfg.StrOpt('driver', - default='sql', - help='Entrypoint for the identity backend driver in the ' - 'keystone.identity namespace. Supplied drivers are ' - 'ldap and sql.'), - cfg.BoolOpt('caching', default=True, - help='Toggle for identity caching. This has no ' - 'effect unless global caching is enabled.'), - cfg.IntOpt('cache_time', default=600, - help='Time to cache identity data (in seconds). 
This has ' - 'no effect unless global and identity caching are ' - 'enabled.'), - cfg.IntOpt('max_password_length', default=4096, - max=passlib.utils.MAX_PASSWORD_SIZE, - help='Maximum supported length for user passwords; ' - 'decrease to improve performance.'), - cfg.IntOpt('list_limit', - help='Maximum number of entities that will be returned in ' - 'an identity collection.'), - ], - 'identity_mapping': [ - cfg.StrOpt('driver', - default='sql', - help='Entrypoint for the identity mapping backend driver ' - 'in the keystone.identity.id_mapping namespace.'), - cfg.StrOpt('generator', - default='sha256', - help='Entrypoint for the public ID generator for user and ' - 'group entities in the keystone.identity.id_generator ' - 'namespace. The Keystone identity mapper only ' - 'supports generators that produce no more than 64 ' - 'characters.'), - cfg.BoolOpt('backward_compatible_ids', - default=True, - help='The format of user and group IDs changed ' - 'in Juno for backends that do not generate UUIDs ' - '(e.g. LDAP), with keystone providing a hash mapping ' - 'to the underlying attribute in LDAP. By default ' - 'this mapping is disabled, which ensures that ' - 'existing IDs will not change. Even when the ' - 'mapping is enabled by using domain specific ' - 'drivers, any users and groups from the default ' - 'domain being handled by LDAP will still not be ' - 'mapped to ensure their IDs remain backward ' - 'compatible. Setting this value to False will ' - 'enable the mapping for even the default LDAP ' - 'driver. It is only safe to do this if you do not ' - 'already have assignments for users and ' - 'groups from the default LDAP domain, and it is ' - 'acceptable for Keystone to provide the different ' - 'IDs to clients than it did previously. 
Typically ' - 'this means that the only time you can set this ' - 'value to False is when configuring a fresh ' - 'installation.'), - ], - 'shadow_users': [ - cfg.StrOpt('driver', - default='sql', - help='Entrypoint for the shadow users backend driver ' - 'in the keystone.identity.shadow_users namespace.'), - ], - 'trust': [ - cfg.BoolOpt('enabled', default=True, - help='Delegation and impersonation features can be ' - 'optionally disabled.'), - cfg.BoolOpt('allow_redelegation', default=False, - help='Enable redelegation feature.'), - cfg.IntOpt('max_redelegation_count', default=3, - help='Maximum depth of trust redelegation.'), - cfg.StrOpt('driver', - default='sql', - help='Entrypoint for the trust backend driver in the ' - 'keystone.trust namespace.')], - 'os_inherit': [ - cfg.BoolOpt('enabled', default=True, - deprecated_for_removal=True, - deprecated_reason=_DEPRECATE_INHERIT_MSG, - help='role-assignment inheritance to projects from ' - 'owning domain or from projects higher in the ' - 'hierarchy can be optionally disabled. In the ' - 'future, this option will be removed and the ' - 'hierarchy will be always enabled.'), - ], - 'fernet_tokens': [ - cfg.StrOpt('key_repository', - default='/etc/keystone/fernet-keys/', - help='Directory containing Fernet token keys.'), - cfg.IntOpt('max_active_keys', - default=3, - help='This controls how many keys are held in rotation by ' - 'keystone-manage fernet_rotate before they are ' - 'discarded. The default value of 3 means that ' - 'keystone will maintain one staged key, one primary ' - 'key, and one secondary key. Increasing this value ' - 'means that additional secondary keys will be kept in ' - 'the rotation.'), - ], - 'token': [ - cfg.ListOpt('bind', default=[], - help='External auth mechanisms that should add bind ' - 'information to token, e.g., kerberos,x509.'), - cfg.StrOpt('enforce_token_bind', default='permissive', - help='Enforcement policy on tokens presented to Keystone ' - 'with bind information. 
One of disabled, permissive, ' - 'strict, required or a specifically required bind ' - 'mode, e.g., kerberos or x509 to require binding to ' - 'that authentication.'), - cfg.IntOpt('expiration', default=3600, - help='Amount of time a token should remain valid ' - '(in seconds).'), - cfg.StrOpt('provider', - default='uuid', - help='Controls the token construction, validation, and ' - 'revocation operations. Entrypoint in the ' - 'keystone.token.provider namespace. Core providers ' - 'are [fernet|pkiz|pki|uuid].'), - cfg.StrOpt('driver', - default='sql', - help='Entrypoint for the token persistence backend driver ' - 'in the keystone.token.persistence namespace. ' - 'Supplied drivers are kvs, memcache, memcache_pool, ' - 'and sql.'), - cfg.BoolOpt('caching', default=True, - help='Toggle for token system caching. This has no ' - 'effect unless global caching is enabled.'), - cfg.IntOpt('cache_time', - help='Time to cache tokens (in seconds). This has no ' - 'effect unless global and token caching are ' - 'enabled.'), - cfg.BoolOpt('revoke_by_id', default=True, - help='Revoke token by token identifier. Setting ' - 'revoke_by_id to true enables various forms of ' - 'enumerating tokens, e.g. `list tokens for user`. ' - 'These enumerations are processed to determine the ' - 'list of tokens to revoke. Only disable if you are ' - 'switching to using the Revoke extension with a ' - 'backend other than KVS, which stores events in memory.'), - cfg.BoolOpt('allow_rescope_scoped_token', default=True, - help='Allow rescoping of scoped token. Setting ' - 'allow_rescoped_scoped_token to false prevents a user ' - 'from exchanging a scoped token for any other token.'), - cfg.StrOpt('hash_algorithm', default='md5', - deprecated_for_removal=True, - deprecated_reason=_DEPRECATE_PKI_MSG, - help='The hash algorithm to use for PKI tokens. This can ' - 'be set to any algorithm that hashlib supports. 
' - 'WARNING: Before changing this value, the auth_token ' - 'middleware must be configured with the ' - 'hash_algorithms, otherwise token revocation will ' - 'not be processed correctly.'), - cfg.BoolOpt('infer_roles', default=True, - help='Add roles to token that are not explicitly added, ' - 'but that are linked implicitly to other roles.'), - ], - 'revoke': [ - cfg.StrOpt('driver', - default='sql', - help='Entrypoint for an implementation of the backend for ' - 'persisting revocation events in the keystone.revoke ' - 'namespace. Supplied drivers are kvs and sql.'), - cfg.IntOpt('expiration_buffer', default=1800, - help='This value (calculated in seconds) is added to token ' - 'expiration before a revocation event may be removed ' - 'from the backend.'), - cfg.BoolOpt('caching', default=True, - help='Toggle for revocation event caching. This has no ' - 'effect unless global caching is enabled.'), - cfg.IntOpt('cache_time', default=3600, - help='Time to cache the revocation list and the revocation ' - 'events (in seconds). This has no effect unless ' - 'global and token caching are enabled.', - deprecated_opts=[cfg.DeprecatedOpt( - 'revocation_cache_time', group='token')]), - ], - 'ssl': [ - cfg.StrOpt('ca_key', - default='/etc/keystone/ssl/private/cakey.pem', - help='Path of the CA key file for SSL.'), - cfg.IntOpt('key_size', default=1024, min=1024, - help='SSL key length (in bits) (auto generated ' - 'certificate).'), - cfg.IntOpt('valid_days', default=3650, - help='Days the certificate is valid for once signed ' - '(auto generated certificate).'), - cfg.StrOpt('cert_subject', - default='/C=US/ST=Unset/L=Unset/O=Unset/CN=localhost', - help='SSL certificate subject (auto generated ' - 'certificate).'), - ], - 'signing': [ - cfg.StrOpt('certfile', - default=_CERTFILE, - deprecated_for_removal=True, - deprecated_reason=_DEPRECATE_PKI_MSG, - help='Path of the certfile for token signing. 
For ' - 'non-production environments, you may be interested ' - 'in using `keystone-manage pki_setup` to generate ' - 'self-signed certificates.'), - cfg.StrOpt('keyfile', - default=_KEYFILE, - deprecated_for_removal=True, - deprecated_reason=_DEPRECATE_PKI_MSG, - help='Path of the keyfile for token signing.'), - cfg.StrOpt('ca_certs', - deprecated_for_removal=True, - deprecated_reason=_DEPRECATE_PKI_MSG, - default='/etc/keystone/ssl/certs/ca.pem', - help='Path of the CA for token signing.'), - cfg.StrOpt('ca_key', - default='/etc/keystone/ssl/private/cakey.pem', - deprecated_for_removal=True, - deprecated_reason=_DEPRECATE_PKI_MSG, - help='Path of the CA key for token signing.'), - cfg.IntOpt('key_size', default=2048, min=1024, - deprecated_for_removal=True, - deprecated_reason=_DEPRECATE_PKI_MSG, - help='Key size (in bits) for token signing cert ' - '(auto generated certificate).'), - cfg.IntOpt('valid_days', default=3650, - deprecated_for_removal=True, - deprecated_reason=_DEPRECATE_PKI_MSG, - help='Days the token signing cert is valid for ' - '(auto generated certificate).'), - cfg.StrOpt('cert_subject', - deprecated_for_removal=True, - deprecated_reason=_DEPRECATE_PKI_MSG, - default=('/C=US/ST=Unset/L=Unset/O=Unset/' - 'CN=www.example.com'), - help='Certificate subject (auto generated certificate) for ' - 'token signing.'), - ], - 'assignment': [ - cfg.StrOpt('driver', - help='Entrypoint for the assignment backend driver in the ' - 'keystone.assignment namespace. Only an SQL driver is ' - 'supplied. 
If an assignment driver is not ' - 'specified, the identity driver will choose the ' - 'assignment driver (driver selection based on ' - '`[identity]/driver` option is deprecated and will be ' - 'removed in the "O" release).'), - cfg.ListOpt('prohibited_implied_role', default=['admin'], - help='A list of role names which are prohibited from ' - 'being an implied role.'), - ], - 'resource': [ - cfg.StrOpt('driver', - help='Entrypoint for the resource backend driver in the ' - 'keystone.resource namespace. Only an SQL driver is ' - 'supplied. If a resource driver is not specified, ' - 'the assignment driver will choose the resource ' - 'driver.'), - cfg.BoolOpt('caching', default=True, - deprecated_opts=[cfg.DeprecatedOpt('caching', - group='assignment')], - help='Toggle for resource caching. This has no effect ' - 'unless global caching is enabled.'), - cfg.IntOpt('cache_time', - deprecated_opts=[cfg.DeprecatedOpt('cache_time', - group='assignment')], - help='TTL (in seconds) to cache resource data. This has ' - 'no effect unless global caching is enabled.'), - cfg.IntOpt('list_limit', - deprecated_opts=[cfg.DeprecatedOpt('list_limit', - group='assignment')], - help='Maximum number of entities that will be returned ' - 'in a resource collection.'), - cfg.StrOpt('admin_project_domain_name', - help='Name of the domain that owns the ' - '`admin_project_name`. Defaults to None.'), - cfg.StrOpt('admin_project_name', - help='Special project for performing administrative ' - 'operations on remote services. Tokens scoped to ' - 'this project will contain the key/value ' - '`is_admin_project=true`. Defaults to None.'), - cfg.StrOpt('project_name_url_safe', - choices=['off', 'new', 'strict'], default='off', - help='Whether the names of projects are restricted from ' - 'containing url reserved characters. If set to new, ' - 'attempts to create or update a project with a url ' - 'unsafe name will return an error. 
In addition, if ' - 'set to strict, attempts to scope a token using ' - 'an unsafe project name will return an error.'), - cfg.StrOpt('domain_name_url_safe', - choices=['off', 'new', 'strict'], default='off', - help='Whether the names of domains are restricted from ' - 'containing url reserved characters. If set to new, ' - 'attempts to create or update a domain with a url ' - 'unsafe name will return an error. In addition, if ' - 'set to strict, attempts to scope a token using a ' - 'domain name which is unsafe will return an error.'), - ], - 'domain_config': [ - cfg.StrOpt('driver', - default='sql', - help='Entrypoint for the domain config backend driver in ' - 'the keystone.resource.domain_config namespace.'), - cfg.BoolOpt('caching', default=True, - help='Toggle for domain config caching. This has no ' - 'effect unless global caching is enabled.'), - cfg.IntOpt('cache_time', default=300, - help='TTL (in seconds) to cache domain config data. This ' - 'has no effect unless domain config caching is ' - 'enabled.'), - ], - 'role': [ - # The role driver has no default for backward compatibility reasons. - # If role driver is not specified, the assignment driver chooses - # the backend - cfg.StrOpt('driver', - help='Entrypoint for the role backend driver in the ' - 'keystone.role namespace. Supplied drivers are ldap ' - 'and sql.'), - cfg.BoolOpt('caching', default=True, - help='Toggle for role caching. This has no effect ' - 'unless global caching is enabled.'), - cfg.IntOpt('cache_time', - help='TTL (in seconds) to cache role data. 
This has ' - 'no effect unless global caching is enabled.'), - cfg.IntOpt('list_limit', - help='Maximum number of entities that will be returned ' - 'in a role collection.'), - ], - 'credential': [ - cfg.StrOpt('driver', - default='sql', - help='Entrypoint for the credential backend driver in the ' - 'keystone.credential namespace.'), - ], - 'oauth1': [ - cfg.StrOpt('driver', - default='sql', - help='Entrypoint for the OAuth backend driver in the ' - 'keystone.oauth1 namespace.'), - cfg.IntOpt('request_token_duration', default=28800, - help='Duration (in seconds) for the OAuth Request Token.'), - cfg.IntOpt('access_token_duration', default=86400, - help='Duration (in seconds) for the OAuth Access Token.'), - ], - 'federation': [ - cfg.StrOpt('driver', - default='sql', - help='Entrypoint for the federation backend driver in the ' - 'keystone.federation namespace.'), - cfg.StrOpt('assertion_prefix', default='', - help='Value to be used when filtering assertion parameters ' - 'from the environment.'), - cfg.StrOpt('remote_id_attribute', - help='Value to be used to obtain the entity ID of the ' - 'Identity Provider from the environment (e.g. if ' - 'using the mod_shib plugin this value is ' - '`Shib-Identity-Provider`).'), - cfg.StrOpt('federated_domain_name', default='Federated', - help='A domain name that is reserved to allow federated ' - 'ephemeral users to have a domain concept. Note that ' - 'an admin will not be able to create a domain with ' - 'this name or update an existing domain to this ' - 'name. You are not advised to change this value ' - 'unless you really have to.'), - cfg.MultiStrOpt('trusted_dashboard', default=[], - help='A list of trusted dashboard hosts. Before ' - 'accepting a Single Sign-On request to return a ' - 'token, the origin host must be a member of the ' - 'trusted_dashboard list. This configuration ' - 'option may be repeated for multiple values. 
' - 'For example: ' - 'trusted_dashboard=http://acme.com/auth/websso ' - 'trusted_dashboard=http://beta.com/auth/websso'), - cfg.StrOpt('sso_callback_template', default=_SSO_CALLBACK, - help='Location of Single Sign-On callback handler, will ' - 'return a token to a trusted dashboard host.'), - ], - 'policy': [ - cfg.StrOpt('driver', - default='sql', - help='Entrypoint for the policy backend driver in the ' - 'keystone.policy namespace. Supplied drivers are ' - 'rules and sql.'), - cfg.IntOpt('list_limit', - help='Maximum number of entities that will be returned ' - 'in a policy collection.'), - ], - 'endpoint_filter': [ - cfg.StrOpt('driver', - default='sql', - help='Entrypoint for the endpoint filter backend driver in ' - 'the keystone.endpoint_filter namespace.'), - cfg.BoolOpt('return_all_endpoints_if_no_filter', default=True, - help='Toggle to return all active endpoints if no filter ' - 'exists.'), - ], - 'endpoint_policy': [ - cfg.BoolOpt('enabled', - default=True, - deprecated_for_removal=True, - deprecated_reason=_DEPRECATE_EP_MSG, - help='Enable endpoint_policy functionality.'), - cfg.StrOpt('driver', - default='sql', - help='Entrypoint for the endpoint policy backend driver in ' - 'the keystone.endpoint_policy namespace.'), - ], - 'ldap': [ - cfg.StrOpt('url', default='ldap://localhost', - help='URL(s) for connecting to the LDAP server. Multiple ' - 'LDAP URLs may be specified as a comma separated ' - 'string. The first URL to successfully bind is used ' - 'for the connection.'), - cfg.StrOpt('user', - help='User BindDN to query the LDAP server.'), - cfg.StrOpt('password', secret=True, - help='Password for the BindDN to query the LDAP server.'), - cfg.StrOpt('suffix', default='cn=example,cn=com', - help='LDAP server suffix'), - cfg.BoolOpt('use_dumb_member', default=False, - help='If true, will add a dummy member to groups. 
This is ' - 'required if the objectclass for groups requires the ' - '"member" attribute.'), - cfg.StrOpt('dumb_member', default='cn=dumb,dc=nonexistent', - help='DN of the "dummy member" to use when ' - '"use_dumb_member" is enabled.'), - cfg.BoolOpt('allow_subtree_delete', default=False, - help='Delete subtrees using the subtree delete control. ' - 'Only enable this option if your LDAP server ' - 'supports subtree deletion.'), - cfg.StrOpt('query_scope', default='one', - choices=['one', 'sub'], - help='The LDAP scope for queries, "one" represents ' - 'oneLevel/singleLevel and "sub" represents ' - 'subtree/wholeSubtree options.'), - cfg.IntOpt('page_size', default=0, - help='Maximum results per page; a value of zero ("0") ' - 'disables paging.'), - cfg.StrOpt('alias_dereferencing', default='default', - choices=['never', 'searching', 'always', 'finding', - 'default'], - help='The LDAP dereferencing option for queries. The ' - '"default" option falls back to using default ' - 'dereferencing configured by your ldap.conf.'), - cfg.IntOpt('debug_level', - help='Sets the LDAP debugging level for LDAP calls. ' - 'A value of 0 means that debugging is not enabled. ' - 'This value is a bitmask, consult your LDAP ' - 'documentation for possible values.'), - cfg.BoolOpt('chase_referrals', - help='Override the system\'s default referral chasing ' - 'behavior for queries.'), - cfg.StrOpt('user_tree_dn', - help='Search base for users. ' - 'Defaults to the suffix value.'), - cfg.StrOpt('user_filter', - help='LDAP search filter for users.'), - cfg.StrOpt('user_objectclass', default='inetOrgPerson', - help='LDAP objectclass for users.'), - cfg.StrOpt('user_id_attribute', default='cn', - help='LDAP attribute mapped to user id. 
' - 'WARNING: must not be a multivalued attribute.'), - cfg.StrOpt('user_name_attribute', default='sn', - help='LDAP attribute mapped to user name.'), - cfg.StrOpt('user_description_attribute', default='description', - help='LDAP attribute mapped to user description.'), - cfg.StrOpt('user_mail_attribute', default='mail', - help='LDAP attribute mapped to user email.'), - cfg.StrOpt('user_pass_attribute', default='userPassword', - help='LDAP attribute mapped to password.'), - cfg.StrOpt('user_enabled_attribute', default='enabled', - help='LDAP attribute mapped to user enabled flag.'), - cfg.BoolOpt('user_enabled_invert', default=False, - help='Invert the meaning of the boolean enabled values. ' - 'Some LDAP servers use a boolean lock attribute ' - 'where "true" means an account is disabled. Setting ' - '"user_enabled_invert = true" will allow these lock ' - 'attributes to be used. This setting will have no ' - 'effect if "user_enabled_mask" or ' - '"user_enabled_emulation" settings are in use.'), - cfg.IntOpt('user_enabled_mask', default=0, - help='Bitmask integer to indicate the bit that the enabled ' - 'value is stored in if the LDAP server represents ' - '"enabled" as a bit on an integer rather than a ' - 'boolean. A value of "0" indicates the mask is not ' - 'used. If this is not set to "0" the typical value ' - 'is "2". This is typically used when ' - '"user_enabled_attribute = userAccountControl".'), - cfg.StrOpt('user_enabled_default', default='True', - help='Default value to enable users. This should match an ' - 'appropriate int value if the LDAP server uses ' - 'non-boolean (bitmask) values to indicate if a user ' - 'is enabled or disabled. If this is not set to "True" ' - 'the typical value is "512". 
This is typically used ' - 'when "user_enabled_attribute = userAccountControl".'), - cfg.ListOpt('user_attribute_ignore', - default=['default_project_id'], - help='List of attributes stripped off the user on ' - 'update.'), - cfg.StrOpt('user_default_project_id_attribute', - help='LDAP attribute mapped to default_project_id for ' - 'users.'), - cfg.BoolOpt('user_allow_create', default=True, - deprecated_for_removal=True, - deprecated_reason="Write support for Identity LDAP " - "backends has been deprecated in the M " - "release and will be removed in the O " - "release.", - help='Allow user creation in LDAP backend.'), - cfg.BoolOpt('user_allow_update', default=True, - deprecated_for_removal=True, - deprecated_reason="Write support for Identity LDAP " - "backends has been deprecated in the M " - "release and will be removed in the O " - "release.", - help='Allow user updates in LDAP backend.'), - cfg.BoolOpt('user_allow_delete', default=True, - deprecated_for_removal=True, - deprecated_reason="Write support for Identity LDAP " - "backends has been deprecated in the M " - "release and will be removed in the O " - "release.", - help='Allow user deletion in LDAP backend.'), - cfg.BoolOpt('user_enabled_emulation', default=False, - help='If true, Keystone uses an alternative method to ' - 'determine if a user is enabled or not by checking ' - 'if they are a member of the ' - '"user_enabled_emulation_dn" group.'), - cfg.StrOpt('user_enabled_emulation_dn', - help='DN of the group entry to hold enabled users when ' - 'using enabled emulation.'), - cfg.BoolOpt('user_enabled_emulation_use_group_config', default=False, - help='Use the "group_member_attribute" and ' - '"group_objectclass" settings to determine ' - 'membership in the emulated enabled group.'), - cfg.ListOpt('user_additional_attribute_mapping', - default=[], - help='List of additional LDAP attributes used for mapping ' - 'additional attribute mappings for users. 
Attribute ' - 'mapping format is :, where ' - 'ldap_attr is the attribute in the LDAP entry and ' - 'user_attr is the Identity API attribute.'), - cfg.StrOpt('group_tree_dn', - help='Search base for groups. ' - 'Defaults to the suffix value.'), - cfg.StrOpt('group_filter', - help='LDAP search filter for groups.'), - cfg.StrOpt('group_objectclass', default='groupOfNames', - help='LDAP objectclass for groups.'), - cfg.StrOpt('group_id_attribute', default='cn', - help='LDAP attribute mapped to group id.'), - cfg.StrOpt('group_name_attribute', default='ou', - help='LDAP attribute mapped to group name.'), - cfg.StrOpt('group_member_attribute', default='member', - help='LDAP attribute mapped to show group membership.'), - cfg.StrOpt('group_desc_attribute', default='description', - help='LDAP attribute mapped to group description.'), - cfg.ListOpt('group_attribute_ignore', default=[], - help='List of attributes stripped off the group on ' - 'update.'), - cfg.BoolOpt('group_allow_create', default=True, - deprecated_for_removal=True, - deprecated_reason="Write support for Identity LDAP " - "backends has been deprecated in the M " - "release and will be removed in the O " - "release.", - help='Allow group creation in LDAP backend.'), - cfg.BoolOpt('group_allow_update', default=True, - deprecated_for_removal=True, - deprecated_reason="Write support for Identity LDAP " - "backends has been deprecated in the M " - "release and will be removed in the O " - "release.", - help='Allow group update in LDAP backend.'), - cfg.BoolOpt('group_allow_delete', default=True, - deprecated_for_removal=True, - deprecated_reason="Write support for Identity LDAP " - "backends has been deprecated in the M " - "release and will be removed in the O " - "release.", - help='Allow group deletion in LDAP backend.'), - cfg.ListOpt('group_additional_attribute_mapping', - default=[], - help='Additional attribute mappings for groups. 
Attribute ' - 'mapping format is :, where ' - 'ldap_attr is the attribute in the LDAP entry and ' - 'user_attr is the Identity API attribute.'), - - cfg.StrOpt('tls_cacertfile', - help='CA certificate file path for communicating with ' - 'LDAP servers.'), - cfg.StrOpt('tls_cacertdir', - help='CA certificate directory path for communicating with ' - 'LDAP servers.'), - cfg.BoolOpt('use_tls', default=False, - help='Enable TLS for communicating with LDAP servers.'), - cfg.StrOpt('tls_req_cert', default='demand', - choices=['demand', 'never', 'allow'], - help='Specifies what checks to perform on client ' - 'certificates in an incoming TLS session.'), - cfg.BoolOpt('use_pool', default=True, - help='Enable LDAP connection pooling.'), - cfg.IntOpt('pool_size', default=10, - help='Connection pool size.'), - cfg.IntOpt('pool_retry_max', default=3, - help='Maximum count of reconnect trials.'), - cfg.FloatOpt('pool_retry_delay', default=0.1, - help='Time span in seconds to wait between two ' - 'reconnect trials.'), - cfg.IntOpt('pool_connection_timeout', default=-1, - help='Connector timeout in seconds. Value -1 indicates ' - 'indefinite wait for response.'), - cfg.IntOpt('pool_connection_lifetime', default=600, - help='Connection lifetime in seconds.'), - cfg.BoolOpt('use_auth_pool', default=True, - help='Enable LDAP connection pooling for end user ' - 'authentication. If use_pool is disabled, then this ' - 'setting is meaningless and is not used at all.'), - cfg.IntOpt('auth_pool_size', default=100, - help='End user auth connection pool size.'), - cfg.IntOpt('auth_pool_connection_lifetime', default=60, - help='End user auth connection lifetime in seconds.'), - cfg.BoolOpt('group_members_are_ids', default=False, - help='If the members of the group objectclass are user ' - 'IDs rather than DNs, set this to true. 
This is the ' - 'case when using posixGroup as the group ' - 'objectclass and OpenDirectory.'), - ], - 'auth': [ - cfg.ListOpt('methods', default=_DEFAULT_AUTH_METHODS, - help='Allowed authentication methods.'), - cfg.StrOpt('password', # nosec : This is the name of the plugin, not - # a password that needs to be protected. - help='Entrypoint for the password auth plugin module in ' - 'the keystone.auth.password namespace.'), - cfg.StrOpt('token', - help='Entrypoint for the token auth plugin module in the ' - 'keystone.auth.token namespace.'), - # deals with REMOTE_USER authentication - cfg.StrOpt('external', - help='Entrypoint for the external (REMOTE_USER) auth ' - 'plugin module in the keystone.auth.external ' - 'namespace. Supplied drivers are DefaultDomain and ' - 'Domain. The default driver is DefaultDomain.'), - cfg.StrOpt('oauth1', - help='Entrypoint for the oAuth1.0 auth plugin module in ' - 'the keystone.auth.oauth1 namespace.'), - ], - 'tokenless_auth': [ - cfg.MultiStrOpt('trusted_issuer', default=[], - help='The list of trusted issuers to further filter ' - 'the certificates that are allowed to ' - 'participate in the X.509 tokenless ' - 'authorization. If the option is absent then ' - 'no certificates will be allowed. ' - 'The naming format for the attributes of a ' - 'Distinguished Name(DN) must be separated by a ' - 'comma and contain no spaces. This configuration ' - 'option may be repeated for multiple values. ' - 'For example: ' - 'trusted_issuer=CN=john,OU=keystone,O=openstack ' - 'trusted_issuer=CN=mary,OU=eng,O=abc'), - cfg.StrOpt('protocol', default='x509', - help='The protocol name for the X.509 tokenless ' - 'authorization along with the option issuer_attribute ' - 'below can look up its corresponding mapping.'), - cfg.StrOpt('issuer_attribute', default='SSL_CLIENT_I_DN', - help='The issuer attribute that is served as an IdP ID ' - 'for the X.509 tokenless authorization along with ' - 'the protocol to look up its corresponding mapping. 
' - 'It is the environment variable in the WSGI ' - 'environment that references to the issuer of the ' - 'client certificate.'), - ], - 'paste_deploy': [ - cfg.StrOpt('config_file', default='keystone-paste.ini', - help='Name of the paste configuration file that defines ' - 'the available pipelines.'), - ], - 'memcache': [ - cfg.ListOpt('servers', default=['localhost:11211'], - help='Memcache servers in the format of "host:port".'), - cfg.IntOpt('dead_retry', - default=5 * 60, - help='Number of seconds memcached server is considered dead' - ' before it is tried again. This is used by the key ' - 'value store system (e.g. token ' - 'pooled memcached persistence backend).'), - cfg.IntOpt('socket_timeout', - default=3, - help='Timeout in seconds for every call to a server. This ' - 'is used by the key value store system (e.g. token ' - 'pooled memcached persistence backend).'), - cfg.IntOpt('pool_maxsize', - default=10, - help='Max total number of open connections to every' - ' memcached server. This is used by the key value ' - 'store system (e.g. token pooled memcached ' - 'persistence backend).'), - cfg.IntOpt('pool_unused_timeout', - default=60, - help='Number of seconds a connection to memcached is held' - ' unused in the pool before it is closed. This is used' - ' by the key value store system (e.g. token pooled ' - 'memcached persistence backend).'), - cfg.IntOpt('pool_connection_get_timeout', - default=10, - help='Number of seconds that an operation will wait to get ' - 'a memcache client connection. This is used by the ' - 'key value store system (e.g. token pooled memcached ' - 'persistence backend).'), - ], - 'catalog': [ - cfg.StrOpt('template_file', - default='default_catalog.templates', - help='Catalog template file name for use with the ' - 'template catalog backend.'), - cfg.StrOpt('driver', - default='sql', - help='Entrypoint for the catalog backend driver in the ' - 'keystone.catalog namespace. 
Supplied drivers are ' - 'kvs, sql, templated, and endpoint_filter.sql'), - cfg.BoolOpt('caching', default=True, - help='Toggle for catalog caching. This has no ' - 'effect unless global caching is enabled.'), - cfg.IntOpt('cache_time', - help='Time to cache catalog data (in seconds). This has no ' - 'effect unless global and catalog caching are ' - 'enabled.'), - cfg.IntOpt('list_limit', - help='Maximum number of entities that will be returned ' - 'in a catalog collection.'), - ], - 'kvs': [ - cfg.ListOpt('backends', default=[], - help='Extra dogpile.cache backend modules to register ' - 'with the dogpile.cache library.'), - cfg.StrOpt('config_prefix', default='keystone.kvs', - help='Prefix for building the configuration dictionary ' - 'for the KVS region. This should not need to be ' - 'changed unless there is another dogpile.cache ' - 'region with the same configuration name.'), - cfg.BoolOpt('enable_key_mangler', default=True, - help='Toggle to disable using a key-mangling function ' - 'to ensure fixed length keys. This is toggle-able ' - 'for debugging purposes, it is highly recommended ' - 'to always leave this set to true.'), - cfg.IntOpt('default_lock_timeout', default=5, - help='Default lock timeout (in seconds) for distributed ' - 'locking.'), - ], - 'saml': [ - cfg.IntOpt('assertion_expiration_time', default=3600, - help='Default TTL, in seconds, for any generated SAML ' - 'assertion created by Keystone.'), - cfg.StrOpt('xmlsec1_binary', - default='xmlsec1', - help='Binary to be called for XML signing. Install the ' - 'appropriate package, specify absolute path or adjust ' - 'your PATH environment variable if the binary cannot ' - 'be found.'), - cfg.StrOpt('certfile', - default=_CERTFILE, - help='Path of the certfile for SAML signing. For ' - 'non-production environments, you may be interested ' - 'in using `keystone-manage pki_setup` to generate ' - 'self-signed certificates. 
Note, the path cannot ' - 'contain a comma.'), - cfg.StrOpt('keyfile', - default=_KEYFILE, - help='Path of the keyfile for SAML signing. Note, the path ' - 'cannot contain a comma.'), - cfg.StrOpt('idp_entity_id', - help='Entity ID value for unique Identity Provider ' - 'identification. Usually FQDN is set with a suffix. ' - 'A value is required to generate IDP Metadata. ' - 'For example: https://keystone.example.com/v3/' - 'OS-FEDERATION/saml2/idp'), - cfg.StrOpt('idp_sso_endpoint', - help='Identity Provider Single-Sign-On service value, ' - 'required in the Identity Provider\'s metadata. ' - 'A value is required to generate IDP Metadata. ' - 'For example: https://keystone.example.com/v3/' - 'OS-FEDERATION/saml2/sso'), - cfg.StrOpt('idp_lang', default='en', - help='Language used by the organization.'), - cfg.StrOpt('idp_organization_name', - help='Organization name the installation belongs to.'), - cfg.StrOpt('idp_organization_display_name', - help='Organization name to be displayed.'), - cfg.StrOpt('idp_organization_url', - help='URL of the organization.'), - cfg.StrOpt('idp_contact_company', - help='Company of contact person.'), - cfg.StrOpt('idp_contact_name', - help='Given name of contact person'), - cfg.StrOpt('idp_contact_surname', - help='Surname of contact person.'), - cfg.StrOpt('idp_contact_email', - help='Email address of contact person.'), - cfg.StrOpt('idp_contact_telephone', - help='Telephone number of contact person.'), - cfg.StrOpt('idp_contact_type', default='other', - choices=['technical', 'support', 'administrative', - 'billing', 'other'], - help='The contact type describing the main point of ' - 'contact for the identity provider.'), - cfg.StrOpt('idp_metadata_path', - default='/etc/keystone/saml2_idp_metadata.xml', - help='Path to the Identity Provider Metadata file. 
' - 'This file should be generated with the ' - 'keystone-manage saml_idp_metadata command.'), - cfg.StrOpt('relay_state_prefix', - default='ss:mem:', - help='The prefix to use for the RelayState SAML ' - 'attribute, used when generating ECP wrapped ' - 'assertions.'), - ], - 'eventlet_server': [ - cfg.IntOpt('public_workers', - deprecated_name='public_workers', - deprecated_group='DEFAULT', - deprecated_for_removal=True, - help='The number of worker processes to serve the public ' - 'eventlet application. Defaults to number of CPUs ' - '(minimum of 2).'), - cfg.IntOpt('admin_workers', - deprecated_name='admin_workers', - deprecated_group='DEFAULT', - deprecated_for_removal=True, - help='The number of worker processes to serve the admin ' - 'eventlet application. Defaults to number of CPUs ' - '(minimum of 2).'), - cfg.StrOpt('public_bind_host', - default='0.0.0.0', # nosec : Bind to all interfaces by - # default for backwards compatibility. - deprecated_opts=[cfg.DeprecatedOpt('bind_host', - group='DEFAULT'), - cfg.DeprecatedOpt('public_bind_host', - group='DEFAULT'), ], - deprecated_for_removal=True, - help='The IP address of the network interface for the ' - 'public service to listen on.'), - cfg.PortOpt('public_port', default=5000, - deprecated_name='public_port', - deprecated_group='DEFAULT', - deprecated_for_removal=True, - help='The port number which the public service listens ' - 'on.'), - cfg.StrOpt('admin_bind_host', - default='0.0.0.0', # nosec : Bind to all interfaces by - # default for backwards compatibility. 
- deprecated_opts=[cfg.DeprecatedOpt('bind_host', - group='DEFAULT'), - cfg.DeprecatedOpt('admin_bind_host', - group='DEFAULT')], - deprecated_for_removal=True, - help='The IP address of the network interface for the ' - 'admin service to listen on.'), - cfg.PortOpt('admin_port', default=35357, - deprecated_name='admin_port', - deprecated_group='DEFAULT', - deprecated_for_removal=True, - help='The port number which the admin service listens ' - 'on.'), - cfg.BoolOpt('wsgi_keep_alive', default=True, - help='If set to false, disables keepalives on the server; ' - 'all connections will be closed after serving one ' - 'request.'), - cfg.IntOpt('client_socket_timeout', default=900, - help='Timeout for socket operations on a client ' - 'connection. If an incoming connection is idle for ' - 'this number of seconds it will be closed. A value ' - 'of "0" means wait forever.'), - cfg.BoolOpt('tcp_keepalive', default=False, - deprecated_name='tcp_keepalive', - deprecated_group='DEFAULT', - deprecated_for_removal=True, - help='Set this to true if you want to enable ' - 'TCP_KEEPALIVE on server sockets, i.e. sockets used ' - 'by the Keystone wsgi server for client ' - 'connections.'), - cfg.IntOpt('tcp_keepidle', - default=600, - deprecated_name='tcp_keepidle', - deprecated_group='DEFAULT', - deprecated_for_removal=True, - help='Sets the value of TCP_KEEPIDLE in seconds for each ' - 'server socket. Only applies if tcp_keepalive is ' - 'true. Ignored if system does not support it.'), - ], - 'eventlet_server_ssl': [ - cfg.BoolOpt('enable', default=False, deprecated_name='enable', - deprecated_group='ssl', - deprecated_for_removal=True, - help='Toggle for SSL support on the Keystone ' - 'eventlet servers.'), - cfg.StrOpt('certfile', - default='/etc/keystone/ssl/certs/keystone.pem', - deprecated_name='certfile', deprecated_group='ssl', - deprecated_for_removal=True, - help='Path of the certfile for SSL. 
For non-production ' - 'environments, you may be interested in using ' - '`keystone-manage ssl_setup` to generate self-signed ' - 'certificates.'), - cfg.StrOpt('keyfile', - default='/etc/keystone/ssl/private/keystonekey.pem', - deprecated_name='keyfile', deprecated_group='ssl', - deprecated_for_removal=True, - help='Path of the keyfile for SSL.'), - cfg.StrOpt('ca_certs', - default='/etc/keystone/ssl/certs/ca.pem', - deprecated_name='ca_certs', deprecated_group='ssl', - deprecated_for_removal=True, - help='Path of the CA cert file for SSL.'), - cfg.BoolOpt('cert_required', default=False, - deprecated_name='cert_required', deprecated_group='ssl', - deprecated_for_removal=True, - help='Require client certificate.'), - ], -} - - -CONF = cfg.CONF -oslo_messaging.set_transport_defaults(control_exchange='keystone') - - -def _register_auth_plugin_opt(conf, option): - conf.register_opt(option, group='auth') - - -def setup_authentication(conf=None): - # register any non-default auth methods here (used by extensions, etc) - if conf is None: - conf = CONF - for method_name in conf.auth.methods: - if method_name not in _DEFAULT_AUTH_METHODS: - option = cfg.StrOpt(method_name) - _register_auth_plugin_opt(conf, option) - - -def set_default_for_default_log_levels(): - """Set the default for the default_log_levels option for keystone. - - Keystone uses some packages that other OpenStack services don't use that do - logging. This will set the default_log_levels default level for those - packages. - - This function needs to be called before CONF(). - - """ - extra_log_level_defaults = [ - 'dogpile=INFO', - 'routes=INFO', - ] - - log.register_options(CONF) - log.set_defaults(default_log_levels=log.get_default_log_levels() + - extra_log_level_defaults) - - -def setup_logging(): - """Sets up logging for the keystone package.""" - log.setup(CONF, 'keystone') - logging.captureWarnings(True) - - -def find_paste_config(): - """Find Keystone's paste.deploy configuration file. 
- - Keystone's paste.deploy configuration file is specified in the - ``[paste_deploy]`` section of the main Keystone configuration file, - ``keystone.conf``. - - For example:: - - [paste_deploy] - config_file = keystone-paste.ini - - :returns: The selected configuration filename - :raises: exception.ConfigFileNotFound - - """ - if CONF.paste_deploy.config_file: - paste_config = CONF.paste_deploy.config_file - paste_config_value = paste_config - if not os.path.isabs(paste_config): - paste_config = CONF.find_file(paste_config) - elif CONF.config_file: - paste_config = CONF.config_file[0] - paste_config_value = paste_config - else: - # this provides backwards compatibility for keystone.conf files that - # still have the entire paste configuration included, rather than just - # a [paste_deploy] configuration section referring to an external file - paste_config = CONF.find_file('keystone.conf') - paste_config_value = 'keystone.conf' - if not paste_config or not os.path.exists(paste_config): - raise exception.ConfigFileNotFound(config_file=paste_config_value) - return paste_config - - -def configure(conf=None): - if conf is None: - conf = CONF - - conf.register_cli_opt( - cfg.BoolOpt('standard-threads', default=False, - help='Do not monkey-patch threading system modules.')) - conf.register_cli_opt( - cfg.StrOpt('pydev-debug-host', - help='Host to connect to for remote debugger.')) - conf.register_cli_opt( - cfg.PortOpt('pydev-debug-port', - help='Port to connect to for remote debugger.')) - - for section in FILE_OPTIONS: - for option in FILE_OPTIONS[section]: - if section: - conf.register_opt(option, group=section) - else: - conf.register_opt(option) - - # register any non-default auth methods here (used by extensions, etc) - setup_authentication(conf) - # add oslo.cache related config options - cache.configure(conf) - - -def list_opts(): - """Return a list of oslo_config options available in Keystone. 
- - The returned list includes all oslo_config options which are registered as - the "FILE_OPTIONS" in keystone.common.config. This list will not include - the options from the oslo-incubator library or any options registered - dynamically at run time. - - Each object in the list is a two element tuple. The first element of - each tuple is the name of the group under which the list of options in the - second element will be registered. A group name of None corresponds to the - [DEFAULT] group in config files. - - This function is also discoverable via the 'oslo_config.opts' entry point - under the 'keystone.config.opts' namespace. - - The purpose of this is to allow tools like the Oslo sample config file - generator to discover the options exposed to users by this library. - - :returns: a list of (group_name, opts) tuples - """ - return list(FILE_OPTIONS.items()) - - -def set_middleware_defaults(): - """Update default configuration options for oslo.middleware.""" - # CORS Defaults - # TODO(krotscheck): Update with https://review.openstack.org/#/c/285368/ - cfg.set_defaults(cors.CORS_OPTS, - allow_headers=['X-Auth-Token', - 'X-Openstack-Request-Id', - 'X-Subject-Token', - 'X-Project-Id', - 'X-Project-Name', - 'X-Project-Domain-Id', - 'X-Project-Domain-Name', - 'X-Domain-Id', - 'X-Domain-Name'], - expose_headers=['X-Auth-Token', - 'X-Openstack-Request-Id', - 'X-Subject-Token'], - allow_methods=['GET', - 'PUT', - 'POST', - 'DELETE', - 'PATCH'] - ) - - -def set_config_defaults(): - """Override all configuration default values for keystone.""" - set_default_for_default_log_levels() - set_middleware_defaults() diff --git a/keystone-moon/keystone/common/controller.py b/keystone-moon/keystone/common/controller.py deleted file mode 100644 index 8672525f..00000000 --- a/keystone-moon/keystone/common/controller.py +++ /dev/null @@ -1,835 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this 
file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import functools -import uuid - -from oslo_config import cfg -from oslo_log import log -from oslo_log import versionutils -from oslo_utils import strutils -import six - -from keystone.common import authorization -from keystone.common import dependency -from keystone.common import driver_hints -from keystone.common import utils -from keystone.common import wsgi -from keystone import exception -from keystone.i18n import _, _LW -from keystone.models import token_model - - -LOG = log.getLogger(__name__) -CONF = cfg.CONF - - -def v2_deprecated(f): - @six.wraps(f) - def wrapper(*args, **kwargs): - deprecated = versionutils.deprecated( - what=f.__name__ + ' of the v2 API', - as_of=versionutils.deprecated.MITAKA, - in_favor_of='a similar function in the v3 API', - remove_in=+4) - return deprecated(f) - return wrapper() - - -def v2_ec2_deprecated(f): - @six.wraps(f) - def wrapper(*args, **kwargs): - deprecated = versionutils.deprecated( - what=f.__name__ + ' of the v2 EC2 APIs', - as_of=versionutils.deprecated.MITAKA, - in_favor_of=('a similar function in the v3 Credential APIs'), - remove_in=0) - return deprecated(f) - return wrapper() - - -def v2_auth_deprecated(f): - @six.wraps(f) - def wrapper(*args, **kwargs): - deprecated = versionutils.deprecated( - what=f.__name__ + ' of the v2 Authentication APIs', - as_of=versionutils.deprecated.MITAKA, - in_favor_of=('a similar function in the v3 Authentication APIs'), - remove_in=0) - return deprecated(f) - return wrapper() - - -def _build_policy_check_credentials(self, 
action, context, kwargs): - kwargs_str = ', '.join(['%s=%s' % (k, kwargs[k]) for k in kwargs]) - kwargs_str = strutils.mask_password(kwargs_str) - - LOG.debug('RBAC: Authorizing %(action)s(%(kwargs)s)', { - 'action': action, - 'kwargs': kwargs_str}) - - # see if auth context has already been created. If so use it. - if ('environment' in context and - authorization.AUTH_CONTEXT_ENV in context['environment']): - LOG.debug('RBAC: using auth context from the request environment') - return context['environment'].get(authorization.AUTH_CONTEXT_ENV) - - # There is no current auth context, build it from the incoming token. - # TODO(morganfainberg): Collapse this logic with AuthContextMiddleware - # in a sane manner as this just mirrors the logic in AuthContextMiddleware - try: - LOG.debug('RBAC: building auth context from the incoming auth token') - token_ref = token_model.KeystoneToken( - token_id=context['token_id'], - token_data=self.token_provider_api.validate_token( - context['token_id'])) - # NOTE(jamielennox): whilst this maybe shouldn't be within this - # function it would otherwise need to reload the token_ref from - # backing store. - wsgi.validate_token_bind(context, token_ref) - except exception.TokenNotFound: - LOG.warning(_LW('RBAC: Invalid token')) - raise exception.Unauthorized() - - auth_context = authorization.token_to_auth_context(token_ref) - - return auth_context - - -def protected(callback=None): - """Wraps API calls with role based access controls (RBAC). - - This handles both the protection of the API parameters as well as any - target entities for single-entity API calls. - - More complex API calls (for example that deal with several different - entities) should pass in a callback function, that will be subsequently - called to check protection for these multiple entities. This callback - function should gather the appropriate entities needed and then call - check_protection() in the V3Controller class. 
- - """ - def wrapper(f): - @functools.wraps(f) - def inner(self, context, *args, **kwargs): - if 'is_admin' in context and context['is_admin']: - LOG.warning(_LW('RBAC: Bypassing authorization')) - elif callback is not None: - prep_info = {'f_name': f.__name__, - 'input_attr': kwargs} - callback(self, context, prep_info, *args, **kwargs) - else: - action = 'identity:%s' % f.__name__ - creds = _build_policy_check_credentials(self, action, - context, kwargs) - - policy_dict = {} - - # Check to see if we need to include the target entity in our - # policy checks. We deduce this by seeing if the class has - # specified a get_member() method and that kwargs contains the - # appropriate entity id. - if (hasattr(self, 'get_member_from_driver') and - self.get_member_from_driver is not None): - key = '%s_id' % self.member_name - if key in kwargs: - ref = self.get_member_from_driver(kwargs[key]) - policy_dict['target'] = {self.member_name: ref} - - # TODO(henry-nash): Move this entire code to a member - # method inside v3 Auth - if context.get('subject_token_id') is not None: - token_ref = token_model.KeystoneToken( - token_id=context['subject_token_id'], - token_data=self.token_provider_api.validate_token( - context['subject_token_id'])) - policy_dict.setdefault('target', {}) - policy_dict['target'].setdefault(self.member_name, {}) - policy_dict['target'][self.member_name]['user_id'] = ( - token_ref.user_id) - try: - user_domain_id = token_ref.user_domain_id - except exception.UnexpectedError: - user_domain_id = None - if user_domain_id: - policy_dict['target'][self.member_name].setdefault( - 'user', {}) - policy_dict['target'][self.member_name][ - 'user'].setdefault('domain', {}) - policy_dict['target'][self.member_name]['user'][ - 'domain']['id'] = ( - user_domain_id) - - # Add in the kwargs, which means that any entity provided as a - # parameter for calls like create and update will be included. 
- policy_dict.update(kwargs) - self.policy_api.enforce(creds, - action, - utils.flatten_dict(policy_dict)) - LOG.debug('RBAC: Authorization granted') - return f(self, context, *args, **kwargs) - return inner - return wrapper - - -def filterprotected(*filters, **callback): - """Wraps API list calls with role based access controls (RBAC). - - This handles both the protection of the API parameters as well as any - filters supplied. - - More complex API list calls (for example that need to examine the contents - of an entity referenced by one of the filters) should pass in a callback - function, that will be subsequently called to check protection for these - multiple entities. This callback function should gather the appropriate - entities needed and then call check_protection() in the V3Controller class. - - """ - def _filterprotected(f): - @functools.wraps(f) - def wrapper(self, context, **kwargs): - if not context['is_admin']: - # The target dict for the policy check will include: - # - # - Any query filter parameters - # - Data from the main url (which will be in the kwargs - # parameter), which although most of our APIs do not utilize, - # in theory you could have. - # - - # First build the dict of filter parameters - target = dict() - if filters: - for item in filters: - if item in context['query_string']: - target[item] = context['query_string'][item] - - LOG.debug('RBAC: Adding query filter params (%s)', ( - ', '.join(['%s=%s' % (item, target[item]) - for item in target]))) - - if 'callback' in callback and callback['callback'] is not None: - # A callback has been specified to load additional target - # data, so pass it the formal url params as well as the - # list of filters, so it can augment these and then call - # the check_protection() method. 
- prep_info = {'f_name': f.__name__, - 'input_attr': kwargs, - 'filter_attr': target} - callback['callback'](self, context, prep_info, **kwargs) - else: - # No callback, so we are going to check the protection here - action = 'identity:%s' % f.__name__ - creds = _build_policy_check_credentials(self, action, - context, kwargs) - # Add in any formal url parameters - for key in kwargs: - target[key] = kwargs[key] - - self.policy_api.enforce(creds, - action, - utils.flatten_dict(target)) - - LOG.debug('RBAC: Authorization granted') - else: - LOG.warning(_LW('RBAC: Bypassing authorization')) - return f(self, context, filters, **kwargs) - return wrapper - return _filterprotected - - -class V2Controller(wsgi.Application): - """Base controller class for Identity API v2.""" - - def _normalize_domain_id(self, context, ref): - """Fill in domain_id since v2 calls are not domain-aware. - - This will overwrite any domain_id that was inadvertently - specified in the v2 call. - - """ - ref['domain_id'] = CONF.identity.default_domain_id - return ref - - @staticmethod - def filter_domain_id(ref): - """Remove domain_id since v2 calls are not domain-aware.""" - ref.pop('domain_id', None) - return ref - - @staticmethod - def filter_domain(ref): - """Remove domain since v2 calls are not domain-aware.""" - ref.pop('domain', None) - return ref - - @staticmethod - def filter_project_parent_id(ref): - """Remove parent_id since v2 calls are not hierarchy-aware.""" - ref.pop('parent_id', None) - return ref - - @staticmethod - def filter_is_domain(ref): - """Remove is_domain field since v2 calls are not domain-aware.""" - ref.pop('is_domain', None) - return ref - - @staticmethod - def normalize_username_in_response(ref): - """Adds username to outgoing user refs to match the v2 spec. - - Internally we use `name` to represent a user's name. The v2 spec - requires the use of `username` instead. 
- - """ - if 'username' not in ref and 'name' in ref: - ref['username'] = ref['name'] - return ref - - @staticmethod - def normalize_username_in_request(ref): - """Adds name in incoming user refs to match the v2 spec. - - Internally we use `name` to represent a user's name. The v2 spec - requires the use of `username` instead. - - """ - if 'name' not in ref and 'username' in ref: - ref['name'] = ref.pop('username') - return ref - - @staticmethod - def v3_to_v2_user(ref): - """Convert a user_ref from v3 to v2 compatible. - - * v2.0 users are not domain aware, and should have domain_id removed - * v2.0 users expect the use of tenantId instead of default_project_id - * v2.0 users have a username attribute - - If ref is a list type, we will iterate through each element and do the - conversion. - """ - def _format_default_project_id(ref): - """Convert default_project_id to tenantId for v2 calls.""" - default_project_id = ref.pop('default_project_id', None) - if default_project_id is not None: - ref['tenantId'] = default_project_id - elif 'tenantId' in ref: - # NOTE(morganfainberg): To avoid v2.0 confusion if somehow a - # tenantId property sneaks its way into the extra blob on the - # user, we remove it here. If default_project_id is set, we - # would override it in either case. - del ref['tenantId'] - - def _normalize_and_filter_user_properties(ref): - """Run through the various filter/normalization methods.""" - _format_default_project_id(ref) - V2Controller.filter_domain(ref) - V2Controller.filter_domain_id(ref) - V2Controller.normalize_username_in_response(ref) - return ref - - if isinstance(ref, dict): - return _normalize_and_filter_user_properties(ref) - elif isinstance(ref, list): - return [_normalize_and_filter_user_properties(x) for x in ref] - else: - raise ValueError(_('Expected dict or list: %s') % type(ref)) - - @staticmethod - def v3_to_v2_project(ref): - """Convert a project_ref from v3 to v2. 
- - * v2.0 projects are not domain aware, and should have domain_id removed - * v2.0 projects are not hierarchy aware, and should have parent_id - removed - - This method should only be applied to project_refs being returned from - the v2.0 controller(s). - - If ref is a list type, we will iterate through each element and do the - conversion. - """ - def _filter_project_properties(ref): - """Run through the various filter methods.""" - V2Controller.filter_domain_id(ref) - V2Controller.filter_project_parent_id(ref) - V2Controller.filter_is_domain(ref) - return ref - - if isinstance(ref, dict): - return _filter_project_properties(ref) - elif isinstance(ref, list): - return [_filter_project_properties(x) for x in ref] - else: - raise ValueError(_('Expected dict or list: %s') % type(ref)) - - def format_project_list(self, tenant_refs, **kwargs): - """Format a v2 style project list, including marker/limits.""" - marker = kwargs.get('marker') - first_index = 0 - if marker is not None: - for (marker_index, tenant) in enumerate(tenant_refs): - if tenant['id'] == marker: - # we start pagination after the marker - first_index = marker_index + 1 - break - else: - msg = _('Marker could not be found') - raise exception.ValidationError(message=msg) - - limit = kwargs.get('limit') - last_index = None - if limit is not None: - try: - limit = int(limit) - if limit < 0: - raise AssertionError() - except (ValueError, AssertionError): - msg = _('Invalid limit value') - raise exception.ValidationError(message=msg) - last_index = first_index + limit - - tenant_refs = tenant_refs[first_index:last_index] - - for x in tenant_refs: - if 'enabled' not in x: - x['enabled'] = True - o = {'tenants': tenant_refs, - 'tenants_links': []} - return o - - -@dependency.requires('policy_api', 'token_provider_api') -class V3Controller(wsgi.Application): - """Base controller class for Identity API v3. 
- - Child classes should set the ``collection_name`` and ``member_name`` class - attributes, representing the collection of entities they are exposing to - the API. This is required for supporting self-referential links, - pagination, etc. - - Class parameters: - - * `_public_parameters` - set of parameters that are exposed to the user. - Usually used by cls.filter_params() - - """ - - collection_name = 'entities' - member_name = 'entity' - get_member_from_driver = None - - @classmethod - def base_url(cls, context, path=None): - endpoint = super(V3Controller, cls).base_url(context, 'public') - if not path: - path = cls.collection_name - - return '%s/%s/%s' % (endpoint, 'v3', path.lstrip('/')) - - def get_auth_context(self, context): - # TODO(dolphm): this method of accessing the auth context is terrible, - # but context needs to be refactored to always have reasonable values. - env_context = context.get('environment', {}) - return env_context.get(authorization.AUTH_CONTEXT_ENV, {}) - - @classmethod - def full_url(cls, context, path=None): - url = cls.base_url(context, path) - if context['environment'].get('QUERY_STRING'): - url = '%s?%s' % (url, context['environment']['QUERY_STRING']) - - return url - - @classmethod - def query_filter_is_true(cls, filter_value): - """Determine if bool query param is 'True'. 
- - We treat this the same way as we do for policy - enforcement: - - {bool_param}=0 is treated as False - - Any other value is considered to be equivalent to - True, including the absence of a value - - """ - if (isinstance(filter_value, six.string_types) and - filter_value == '0'): - val = False - else: - val = True - return val - - @classmethod - def _add_self_referential_link(cls, context, ref): - ref.setdefault('links', {}) - ref['links']['self'] = cls.base_url(context) + '/' + ref['id'] - - @classmethod - def wrap_member(cls, context, ref): - cls._add_self_referential_link(context, ref) - return {cls.member_name: ref} - - @classmethod - def wrap_collection(cls, context, refs, hints=None): - """Wrap a collection, checking for filtering and pagination. - - Returns the wrapped collection, which includes: - - Executing any filtering not already carried out - - Truncate to a set limit if necessary - - Adds 'self' links in every member - - Adds 'next', 'self' and 'prev' links for the whole collection. - - :param context: the current context, containing the original url path - and query string - :param refs: the list of members of the collection - :param hints: list hints, containing any relevant filters and limit. - Any filters already satisfied by managers will have been - removed - """ - # Check if there are any filters in hints that were not - # handled by the drivers. The driver will not have paginated or - # limited the output if it found there were filters it was unable to - # handle. 
- - if hints is not None: - refs = cls.filter_by_attributes(refs, hints) - - list_limited, refs = cls.limit(refs, hints) - - for ref in refs: - cls.wrap_member(context, ref) - - container = {cls.collection_name: refs} - container['links'] = { - 'next': None, - 'self': cls.full_url(context, path=context['path']), - 'previous': None} - - if list_limited: - container['truncated'] = True - - return container - - @classmethod - def limit(cls, refs, hints): - """Limits a list of entities. - - The underlying driver layer may have already truncated the collection - for us, but in case it was unable to handle truncation we check here. - - :param refs: the list of members of the collection - :param hints: hints, containing, among other things, the limit - requested - - :returns: boolean indicating whether the list was truncated, as well - as the list of (truncated if necessary) entities. - - """ - NOT_LIMITED = False - LIMITED = True - - if hints is None or hints.limit is None: - # No truncation was requested - return NOT_LIMITED, refs - - if hints.limit.get('truncated', False): - # The driver did truncate the list - return LIMITED, refs - - if len(refs) > hints.limit['limit']: - # The driver layer wasn't able to truncate it for us, so we must - # do it here - return LIMITED, refs[:hints.limit['limit']] - - return NOT_LIMITED, refs - - @classmethod - def filter_by_attributes(cls, refs, hints): - """Filters a list of references by filter values.""" - def _attr_match(ref_attr, val_attr): - """Matches attributes allowing for booleans as strings. - - We test explicitly for a value that defines it as 'False', - which also means that the existence of the attribute with - no value implies 'True' - - """ - if type(ref_attr) is bool: - return ref_attr == utils.attr_as_boolean(val_attr) - else: - return ref_attr == val_attr - - def _inexact_attr_match(filter, ref): - """Applies an inexact filter to a result dict. 
- - :param filter: the filter in question - :param ref: the dict to check - - :returns: True if there is a match - - """ - comparator = filter['comparator'] - key = filter['name'] - - if key in ref: - filter_value = filter['value'] - target_value = ref[key] - if not filter['case_sensitive']: - # We only support inexact filters on strings so - # it's OK to use lower() - filter_value = filter_value.lower() - target_value = target_value.lower() - - if comparator == 'contains': - return (filter_value in target_value) - elif comparator == 'startswith': - return target_value.startswith(filter_value) - elif comparator == 'endswith': - return target_value.endswith(filter_value) - else: - # We silently ignore unsupported filters - return True - - return False - - for filter in hints.filters: - if filter['comparator'] == 'equals': - attr = filter['name'] - value = filter['value'] - refs = [r for r in refs if _attr_match( - utils.flatten_dict(r).get(attr), value)] - else: - # It might be an inexact filter - refs = [r for r in refs if _inexact_attr_match( - filter, r)] - - return refs - - @classmethod - def build_driver_hints(cls, context, supported_filters): - """Build list hints based on the context query string. - - :param context: contains the query_string from which any list hints can - be extracted - :param supported_filters: list of filters supported, so ignore any - keys in query_dict that are not in this list. - - """ - query_dict = context['query_string'] - hints = driver_hints.Hints() - - if query_dict is None: - return hints - - for key in query_dict: - # Check if this is an exact filter - if supported_filters is None or key in supported_filters: - hints.add_filter(key, query_dict[key]) - continue - - # Check if it is an inexact filter - for valid_key in supported_filters: - # See if this entry in query_dict matches a known key with an - # inexact suffix added. 
If it doesn't match, then that just - # means that there is no inexact filter for that key in this - # query. - if not key.startswith(valid_key + '__'): - continue - - base_key, comparator = key.split('__', 1) - - # We map the query-style inexact of, for example: - # - # {'email__contains', 'myISP'} - # - # into a list directive add filter call parameters of: - # - # name = 'email' - # value = 'myISP' - # comparator = 'contains' - # case_sensitive = True - - case_sensitive = True - if comparator.startswith('i'): - case_sensitive = False - comparator = comparator[1:] - hints.add_filter(base_key, query_dict[key], - comparator=comparator, - case_sensitive=case_sensitive) - - # NOTE(henry-nash): If we were to support pagination, we would pull any - # pagination directives out of the query_dict here, and add them into - # the hints list. - return hints - - def _require_matching_id(self, value, ref): - """Ensures the value matches the reference's ID, if any.""" - if 'id' in ref and ref['id'] != value: - raise exception.ValidationError('Cannot change ID') - - def _require_matching_domain_id(self, ref_id, ref, get_member): - """Ensure the current domain ID matches the reference one, if any. - - Provided we want domain IDs to be immutable, check whether any - domain_id specified in the ref dictionary matches the existing - domain_id for this entity. - - :param ref_id: the ID of the entity - :param ref: the dictionary of new values proposed for this entity - :param get_member: The member function to call to get the current - entity - :raises: :class:`keystone.exception.ValidationError` - - """ - # TODO(henry-nash): It might be safer and more efficient to do this - # check in the managers affected, so look to migrate this check to - # there in the future. 
- if CONF.domain_id_immutable and 'domain_id' in ref: - existing_ref = get_member(ref_id) - if ref['domain_id'] != existing_ref['domain_id']: - raise exception.ValidationError(_('Cannot change Domain ID')) - - def _assign_unique_id(self, ref): - """Generates and assigns a unique identifier to a reference.""" - ref = ref.copy() - ref['id'] = uuid.uuid4().hex - return ref - - def _get_domain_id_for_list_request(self, context): - """Get the domain_id for a v3 list call. - - If we running with multiple domain drivers, then the caller must - specify a domain_id either as a filter or as part of the token scope. - - """ - if not CONF.identity.domain_specific_drivers_enabled: - # We don't need to specify a domain ID in this case - return - - if context['query_string'].get('domain_id') is not None: - return context['query_string'].get('domain_id') - - token_ref = utils.get_token_ref(context) - - if token_ref.domain_scoped: - return token_ref.domain_id - elif token_ref.project_scoped: - return token_ref.project_domain_id - else: - LOG.warning( - _LW('No domain information specified as part of list request')) - raise exception.Unauthorized() - - def _get_domain_id_from_token(self, context): - """Get the domain_id for a v3 create call. - - In the case of a v3 create entity call that does not specify a domain - ID, the spec says that we should use the domain scoping from the token - being used. - - """ - try: - token_ref = utils.get_token_ref(context) - except exception.Unauthorized: - if context.get('is_admin'): - raise exception.ValidationError( - _('You have tried to create a resource using the admin ' - 'token. 
As this token is not within a domain you must ' - 'explicitly include a domain for this resource to ' - 'belong to.')) - raise - - if token_ref.domain_scoped: - return token_ref.domain_id - else: - # TODO(henry-nash): We should issue an exception here since if - # a v3 call does not explicitly specify the domain_id in the - # entity, it should be using a domain scoped token. However, - # the current tempest heat tests issue a v3 call without this. - # This is raised as bug #1283539. Once this is fixed, we - # should remove the line below and replace it with an error. - # - # Ahead of actually changing the code to raise an exception, we - # issue a deprecation warning. - versionutils.report_deprecated_feature( - LOG, - _LW('Not specifying a domain during a create user, group or ' - 'project call, and relying on falling back to the ' - 'default domain, is deprecated as of Liberty and will be ' - 'removed in the N release. Specify the domain explicitly ' - 'or use a domain-scoped token')) - return CONF.identity.default_domain_id - - def _normalize_domain_id(self, context, ref): - """Fill in domain_id if not specified in a v3 call.""" - if not ref.get('domain_id'): - ref['domain_id'] = self._get_domain_id_from_token(context) - return ref - - @staticmethod - def filter_domain_id(ref): - """Override v2 filter to let domain_id out for v3 calls.""" - return ref - - def check_protection(self, context, prep_info, target_attr=None): - """Provide call protection for complex target attributes. - - As well as including the standard parameters from the original API - call (which is passed in prep_info), this call will add in any - additional entities or attributes (passed in target_attr), so that - they can be referenced by policy rules. 
- - """ - if 'is_admin' in context and context['is_admin']: - LOG.warning(_LW('RBAC: Bypassing authorization')) - else: - action = 'identity:%s' % prep_info['f_name'] - # TODO(henry-nash) need to log the target attributes as well - creds = _build_policy_check_credentials(self, action, - context, - prep_info['input_attr']) - # Build the dict the policy engine will check against from both the - # parameters passed into the call we are protecting (which was - # stored in the prep_info by protected()), plus the target - # attributes provided. - policy_dict = {} - if target_attr: - policy_dict = {'target': target_attr} - policy_dict.update(prep_info['input_attr']) - if 'filter_attr' in prep_info: - policy_dict.update(prep_info['filter_attr']) - self.policy_api.enforce(creds, - action, - utils.flatten_dict(policy_dict)) - LOG.debug('RBAC: Authorization granted') - - @classmethod - def filter_params(cls, ref): - """Remove unspecified parameters from the dictionary. - - This function removes unspecified parameters from the dictionary. - This method checks only root-level keys from a ref dictionary. - - :param ref: a dictionary representing deserialized response to be - serialized - """ - ref_keys = set(ref.keys()) - blocked_keys = ref_keys - cls._public_parameters - for blocked_param in blocked_keys: - del ref[blocked_param] - return ref diff --git a/keystone-moon/keystone/common/dependency.py b/keystone-moon/keystone/common/dependency.py deleted file mode 100644 index d52a1ec5..00000000 --- a/keystone-moon/keystone/common/dependency.py +++ /dev/null @@ -1,230 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""This module provides support for dependency injection. - -Providers are registered via the ``@provider()`` decorator, and dependencies on -them are registered with ``@requires()``. Providers are available to their -consumers via an attribute. See the documentation for the individual functions -for more detail. - -See also: - - https://en.wikipedia.org/wiki/Dependency_injection - -""" - -import traceback - -from keystone.i18n import _ - - -_REGISTRY = {} - -_future_dependencies = {} -_factories = {} - - -def _set_provider(name, provider): - _original_provider, where_registered = _REGISTRY.get(name, (None, None)) - if where_registered: - raise Exception('%s already has a registered provider, at\n%s' % - (name, ''.join(where_registered))) - _REGISTRY[name] = (provider, traceback.format_stack()) - - -GET_REQUIRED = object() -GET_OPTIONAL = object() - - -def get_provider(name, optional=GET_REQUIRED): - if optional is GET_REQUIRED: - return _REGISTRY[name][0] - return _REGISTRY.get(name, (None, None))[0] - - -class UnresolvableDependencyException(Exception): - """Raised when a required dependency is not resolvable. - - See ``resolve_future_dependencies()`` for more details. - - """ - - def __init__(self, name, targets): - msg = _('Unregistered dependency: %(name)s for %(targets)s') % { - 'name': name, 'targets': targets} - super(UnresolvableDependencyException, self).__init__(msg) - - -def provider(name): - """A class decorator used to register providers. 
- - When ``@provider()`` is used to decorate a class, members of that class - will register themselves as providers for the named dependency. As an - example, In the code fragment:: - - @dependency.provider('foo_api') - class Foo: - def __init__(self): - ... - - ... - - foo = Foo() - - The object ``foo`` will be registered as a provider for ``foo_api``. No - more than one such instance should be created; additional instances will - replace the previous ones, possibly resulting in different instances being - used by different consumers. - - """ - def wrapper(cls): - def wrapped(init): - def __wrapped_init__(self, *args, **kwargs): - """Initialize the wrapped object and add it to the registry.""" - init(self, *args, **kwargs) - _set_provider(name, self) - resolve_future_dependencies(__provider_name=name) - - return __wrapped_init__ - - cls.__init__ = wrapped(cls.__init__) - _factories[name] = cls - return cls - return wrapper - - -def _process_dependencies(obj): - # Any dependencies that can be resolved immediately are resolved. - # Dependencies that cannot be resolved immediately are stored for - # resolution in resolve_future_dependencies. - - def process(obj, attr_name, unresolved_in_out): - for dependency in getattr(obj, attr_name, []): - if dependency not in _REGISTRY: - # We don't know about this dependency, so save it for later. - unresolved_in_out.setdefault(dependency, []).append(obj) - continue - - setattr(obj, dependency, get_provider(dependency)) - - process(obj, '_dependencies', _future_dependencies) - - -def requires(*dependencies): - """A class decorator used to inject providers into consumers. - - The required providers will be made available to instances of the decorated - class via an attribute with the same name as the provider. For example, in - the code fragment:: - - @dependency.requires('foo_api', 'bar_api') - class FooBarClient: - def __init__(self): - ... - - ... 
- - client = FooBarClient() - - The object ``client`` will have attributes named ``foo_api`` and - ``bar_api``, which are instances of the named providers. - - Objects must not rely on the existence of these attributes until after - ``resolve_future_dependencies()`` has been called; they may not exist - beforehand. - - Dependencies registered via ``@required()`` must have providers; if not, - an ``UnresolvableDependencyException`` will be raised when - ``resolve_future_dependencies()`` is called. - - """ - def wrapper(self, *args, **kwargs): - """Inject each dependency from the registry.""" - self.__wrapped_init__(*args, **kwargs) - _process_dependencies(self) - - def wrapped(cls): - """Note the required dependencies on the object for later injection. - - The dependencies of the parent class are combined with that of the - child class to create a new set of dependencies. - - """ - existing_dependencies = getattr(cls, '_dependencies', set()) - cls._dependencies = existing_dependencies.union(dependencies) - if not hasattr(cls, '__wrapped_init__'): - cls.__wrapped_init__ = cls.__init__ - cls.__init__ = wrapper - return cls - - return wrapped - - -def resolve_future_dependencies(__provider_name=None): - """Forces injection of all dependencies. - - Before this function is called, circular dependencies may not have been - injected. This function should be called only once, after all global - providers are registered. If an object needs to be created after this - call, it must not have circular dependencies. - - If any required dependencies are unresolvable, this function will raise an - ``UnresolvableDependencyException``. - - Outside of this module, this function should be called with no arguments; - the optional argument, ``__provider_name`` is used internally, and should - be treated as an implementation detail. - - """ - new_providers = dict() - if __provider_name: - # A provider was registered, so take care of any objects depending on - # it. 
- targets = _future_dependencies.pop(__provider_name, []) - - for target in targets: - setattr(target, __provider_name, get_provider(__provider_name)) - - return - - # Resolve future dependencies, raises UnresolvableDependencyException if - # there's no provider registered. - try: - for dependency, targets in _future_dependencies.copy().items(): - if dependency not in _REGISTRY: - # a Class was registered that could fulfill the dependency, but - # it has not yet been initialized. - factory = _factories.get(dependency) - if factory: - provider = factory() - new_providers[dependency] = provider - else: - raise UnresolvableDependencyException(dependency, targets) - - for target in targets: - setattr(target, dependency, get_provider(dependency)) - finally: - _future_dependencies.clear() - return new_providers - - -def reset(): - """Reset the registry of providers. - - This is useful for unit testing to ensure that tests don't use providers - from previous tests. - """ - _REGISTRY.clear() - _future_dependencies.clear() diff --git a/keystone-moon/keystone/common/driver_hints.py b/keystone-moon/keystone/common/driver_hints.py deleted file mode 100644 index e7c2f2ef..00000000 --- a/keystone-moon/keystone/common/driver_hints.py +++ /dev/null @@ -1,115 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# Copyright 2013 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import functools - -from keystone import exception -from keystone.i18n import _ - - -def truncated(f): - """Ensure list truncation is detected in Driver list entity methods. - - This is designed to wrap Driver list_{entity} methods in order to - calculate if the resultant list has been truncated. Provided a limit dict - is found in the hints list, we increment the limit by one so as to ask the - wrapped function for one more entity than the limit, and then once the list - has been generated, we check to see if the original limit has been - exceeded, in which case we truncate back to that limit and set the - 'truncated' boolean to 'true' in the hints limit dict. - - """ - @functools.wraps(f) - def wrapper(self, hints, *args, **kwargs): - if not hasattr(hints, 'limit'): - raise exception.UnexpectedError( - _('Cannot truncate a driver call without hints list as ' - 'first parameter after self ')) - - if hints.limit is None: - return f(self, hints, *args, **kwargs) - - # A limit is set, so ask for one more entry than we need - list_limit = hints.limit['limit'] - hints.set_limit(list_limit + 1) - ref_list = f(self, hints, *args, **kwargs) - - # If we got more than the original limit then trim back the list and - # mark it truncated. In both cases, make sure we set the limit back - # to its original value. - if len(ref_list) > list_limit: - hints.set_limit(list_limit, truncated=True) - return ref_list[:list_limit] - else: - hints.set_limit(list_limit) - return ref_list - return wrapper - - -class Hints(object): - """Encapsulate driver hints for listing entities. - - Hints are modifiers that affect the return of entities from a - list_ operation. They are typically passed to a driver to give - direction as to what filtering, pagination or list limiting actions are - being requested. - - It is optional for a driver to action some or all of the list hints, - but any filters that it does satisfy must be marked as such by calling - removing the filter from the list. 
- - A Hint object contains filters, which is a list of dicts that can be - accessed publicly. Also it contains a dict called limit, which will - indicate the amount of data we want to limit our listing to. - - If the filter is discovered to never match, then `cannot_match` can be set - to indicate that there will not be any matches and the backend work can be - short-circuited. - - Each filter term consists of: - - * ``name``: the name of the attribute being matched - * ``value``: the value against which it is being matched - * ``comparator``: the operation, which can be one of ``equals``, - ``contains``, ``startswith`` or ``endswith`` - * ``case_sensitive``: whether any comparison should take account of - case - * ``type``: will always be 'filter' - - """ - - def __init__(self): - self.limit = None - self.filters = list() - self.cannot_match = False - - def add_filter(self, name, value, comparator='equals', - case_sensitive=False): - """Adds a filter to the filters list, which is publicly accessible.""" - self.filters.append({'name': name, 'value': value, - 'comparator': comparator, - 'case_sensitive': case_sensitive, - 'type': 'filter'}) - - def get_exact_filter_by_name(self, name): - """Return a filter key and value if exact filter exists for name.""" - for entry in self.filters: - if (entry['type'] == 'filter' and entry['name'] == name and - entry['comparator'] == 'equals'): - return entry - - def set_limit(self, limit, truncated=False): - """Set a limit to indicate the list should be truncated.""" - self.limit = {'limit': limit, 'type': 'limit', 'truncated': truncated} diff --git a/keystone-moon/keystone/common/environment/__init__.py b/keystone-moon/keystone/common/environment/__init__.py deleted file mode 100644 index 6748f115..00000000 --- a/keystone-moon/keystone/common/environment/__init__.py +++ /dev/null @@ -1,102 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this 
file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import functools -import os - -from oslo_log import log - - -LOG = log.getLogger(__name__) - - -__all__ = ('Server', 'httplib', 'subprocess') - -_configured = False - -Server = None -httplib = None -subprocess = None - - -def configure_once(name): - """Ensure that environment configuration is only run once. - - If environment is reconfigured in the same way then it is ignored. - It is an error to attempt to reconfigure environment in a different way. - """ - def decorator(func): - @functools.wraps(func) - def wrapper(*args, **kwargs): - global _configured - if _configured: - if _configured == name: - return - else: - raise SystemError("Environment has already been " - "configured as %s" % _configured) - - LOG.debug("Environment configured as: %s", name) - _configured = name - return func(*args, **kwargs) - - return wrapper - return decorator - - -@configure_once('eventlet') -def use_eventlet(monkeypatch_thread=None): - global httplib, subprocess, Server - - # This must be set before the initial import of eventlet because if - # dnspython is present in your environment then eventlet monkeypatches - # socket.getaddrinfo() with an implementation which doesn't work for IPv6. 
- os.environ['EVENTLET_NO_GREENDNS'] = 'yes' - - import eventlet - from eventlet.green import httplib as _httplib - from eventlet.green import subprocess as _subprocess - - from keystone.common.environment import eventlet_server - - if monkeypatch_thread is None: - monkeypatch_thread = not os.getenv('STANDARD_THREADS') - - # Raise the default from 8192 to accommodate large tokens - eventlet.wsgi.MAX_HEADER_LINE = 16384 - - # NOTE(ldbragst): Explicitly declare what should be monkey patched and - # what shouldn't. Doing this allows for more readable code when - # understanding Eventlet in Keystone. The following is a complete list - # of what is monkey patched instead of passing all=False and then passing - # module=True to monkey patch a specific module. - eventlet.patcher.monkey_patch(os=False, select=True, socket=True, - thread=monkeypatch_thread, time=True, - psycopg=False, MySQLdb=False) - - Server = eventlet_server.Server - httplib = _httplib - subprocess = _subprocess - - -@configure_once('stdlib') -def use_stdlib(): - global httplib, subprocess - - import six.moves.http_client as _httplib - import subprocess as _subprocess # nosec : This is used in .federation.idp - # and .common.openssl. See there. - - httplib = _httplib - subprocess = _subprocess diff --git a/keystone-moon/keystone/common/environment/eventlet_server.py b/keystone-moon/keystone/common/environment/eventlet_server.py deleted file mode 100644 index 430ca3e4..00000000 --- a/keystone-moon/keystone/common/environment/eventlet_server.py +++ /dev/null @@ -1,212 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2010 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import errno -import re -import socket -import ssl -import sys - -import eventlet -import eventlet.wsgi -import greenlet -from oslo_config import cfg -from oslo_log import log -from oslo_service import service - -from keystone.i18n import _LE, _LI - - -CONF = cfg.CONF - - -LOG = log.getLogger(__name__) - -# The size of a pool that is used to spawn a single green thread in which -# a wsgi server is then started. The size of one is enough, because in case -# of several workers the parent process forks and each child gets a copy -# of a pool, which does not include any greenthread object as the spawn is -# done after the fork. -POOL_SIZE = 1 - - -class EventletFilteringLogger(object): - # NOTE(morganfainberg): This logger is designed to filter out specific - # Tracebacks to limit the amount of data that eventlet can log. In the - # case of broken sockets (EPIPE and ECONNRESET), we are seeing a huge - # volume of data being written to the logs due to ~14 lines+ per traceback. - # The traceback in these cases are, at best, useful for limited debugging - # cases. 
- def __init__(self, logger, level=log.INFO): - self.logger = logger - self.level = level - self.regex = re.compile(r'errno (%d|%d)' % - (errno.EPIPE, errno.ECONNRESET), re.IGNORECASE) - - def write(self, msg): - m = self.regex.search(msg) - if m: - self.logger.log(log.logging.DEBUG, 'Error(%s) writing to socket.', - m.group(1)) - else: - self.logger.log(self.level, msg.rstrip()) - - -class Server(service.ServiceBase): - """Server class to manage multiple WSGI sockets and applications.""" - - def __init__(self, application, host=None, port=None, keepalive=False, - keepidle=None): - self.application = application - self.host = host or '0.0.0.0' # nosec : Bind to all interfaces by - # default for backwards compatibility. - self.port = port or 0 - # Pool for a green thread in which wsgi server will be running - self.pool = eventlet.GreenPool(POOL_SIZE) - self.socket_info = {} - self.greenthread = None - self.do_ssl = False - self.cert_required = False - self.keepalive = keepalive - self.keepidle = keepidle - self.socket = None - - def listen(self, key=None, backlog=128): - """Create and start listening on socket. - - Call before forking worker processes. - - Raises Exception if this has already been called. - """ - # TODO(dims): eventlet's green dns/socket module does not actually - # support IPv6 in getaddrinfo(). We need to get around this in the - # future or monitor upstream for a fix. 
- # Please refer below link - # (https://bitbucket.org/eventlet/eventlet/ - # src/e0f578180d7d82d2ed3d8a96d520103503c524ec/eventlet/support/ - # greendns.py?at=0.12#cl-163) - info = socket.getaddrinfo(self.host, - self.port, - socket.AF_UNSPEC, - socket.SOCK_STREAM)[0] - - try: - self.socket = eventlet.listen(info[-1], family=info[0], - backlog=backlog) - except EnvironmentError: - LOG.error(_LE("Could not bind to %(host)s:%(port)s"), - {'host': self.host, 'port': self.port}) - raise - - LOG.info(_LI('Starting %(arg0)s on %(host)s:%(port)s'), - {'arg0': sys.argv[0], - 'host': self.host, - 'port': self.port}) - - def start(self, key=None, backlog=128): - """Run a WSGI server with the given application.""" - if self.socket is None: - self.listen(key=key, backlog=backlog) - - dup_socket = self.socket.dup() - if key: - self.socket_info[key] = self.socket.getsockname() - # SSL is enabled - if self.do_ssl: - if self.cert_required: - cert_reqs = ssl.CERT_REQUIRED - else: - cert_reqs = ssl.CERT_NONE - - dup_socket = eventlet.wrap_ssl(dup_socket, certfile=self.certfile, - keyfile=self.keyfile, - server_side=True, - cert_reqs=cert_reqs, - ca_certs=self.ca_certs) - - # Optionally enable keepalive on the wsgi socket. - if self.keepalive: - dup_socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) - - if self.keepidle is not None: - if hasattr(socket, 'TCP_KEEPIDLE'): - dup_socket.setsockopt(socket.IPPROTO_TCP, - socket.TCP_KEEPIDLE, - self.keepidle) - else: - LOG.warning("System does not support TCP_KEEPIDLE but " - "tcp_keepidle has been set. 
Ignoring.") - - self.greenthread = self.pool.spawn(self._run, - self.application, - dup_socket) - - def set_ssl(self, certfile, keyfile=None, ca_certs=None, - cert_required=True): - self.certfile = certfile - self.keyfile = keyfile - self.ca_certs = ca_certs - self.cert_required = cert_required - self.do_ssl = True - - def stop(self): - if self.greenthread is not None: - self.greenthread.kill() - - def wait(self): - """Wait until all servers have completed running.""" - try: - self.pool.waitall() - except KeyboardInterrupt: # nosec - # If CTRL-C, just break out of the loop. - pass - except greenlet.GreenletExit: # nosec - # If exiting, break out of the loop. - pass - - def reset(self): - """Required by the service interface. - - The service interface is used by the launcher when receiving a - SIGHUP. The service interface is defined in - oslo_service.service.Service. - - Keystone does not need to do anything here. - """ - pass - - def _run(self, application, socket): - """Start a WSGI server with a new green thread pool.""" - logger = log.getLogger('eventlet.wsgi.server') - - # NOTE(dolph): [eventlet_server] client_socket_timeout is required to - # be an integer in keystone.conf, but in order to make - # eventlet.wsgi.server() wait forever, we pass None instead of 0. 
- socket_timeout = CONF.eventlet_server.client_socket_timeout or None - - try: - eventlet.wsgi.server( - socket, application, log=EventletFilteringLogger(logger), - debug=False, keepalive=CONF.eventlet_server.wsgi_keep_alive, - socket_timeout=socket_timeout) - except greenlet.GreenletExit: # nosec - # Wait until all servers have completed running - pass - except Exception: - LOG.exception(_LE('Server error')) - raise diff --git a/keystone-moon/keystone/common/extension.py b/keystone-moon/keystone/common/extension.py deleted file mode 100644 index be5de631..00000000 --- a/keystone-moon/keystone/common/extension.py +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -ADMIN_EXTENSIONS = {} -PUBLIC_EXTENSIONS = {} - - -def register_admin_extension(url_prefix, extension_data): - """Register extension with collection of admin extensions. - - Extensions register the information here that will show - up in the /extensions page as a way to indicate that the extension is - active. - - url_prefix: unique key for the extension that will appear in the - urls generated by the extension. - - extension_data is a dictionary. 
The expected fields are: - 'name': short, human readable name of the extension - 'namespace': xml namespace - 'alias': identifier for the extension - 'updated': date the extension was last updated - 'description': text description of the extension - 'links': hyperlinks to documents describing the extension - - """ - ADMIN_EXTENSIONS[url_prefix] = extension_data - - -def register_public_extension(url_prefix, extension_data): - """Same as register_admin_extension but for public extensions.""" - PUBLIC_EXTENSIONS[url_prefix] = extension_data diff --git a/keystone-moon/keystone/common/json_home.py b/keystone-moon/keystone/common/json_home.py deleted file mode 100644 index 6876f8af..00000000 --- a/keystone-moon/keystone/common/json_home.py +++ /dev/null @@ -1,86 +0,0 @@ -# Copyright 2014 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- - -from keystone import exception -from keystone.i18n import _ - - -def build_v3_resource_relation(resource_name): - return ('http://docs.openstack.org/api/openstack-identity/3/rel/%s' % - resource_name) - - -def build_v3_extension_resource_relation(extension_name, extension_version, - resource_name): - return ( - 'http://docs.openstack.org/api/openstack-identity/3/ext/%s/%s/rel/%s' % - (extension_name, extension_version, resource_name)) - - -def build_v3_parameter_relation(parameter_name): - return ('http://docs.openstack.org/api/openstack-identity/3/param/%s' % - parameter_name) - - -def build_v3_extension_parameter_relation(extension_name, extension_version, - parameter_name): - return ( - 'http://docs.openstack.org/api/openstack-identity/3/ext/%s/%s/param/' - '%s' % (extension_name, extension_version, parameter_name)) - - -class Parameters(object): - """Relationships for Common parameters.""" - - DOMAIN_ID = build_v3_parameter_relation('domain_id') - ENDPOINT_ID = build_v3_parameter_relation('endpoint_id') - GROUP_ID = build_v3_parameter_relation('group_id') - POLICY_ID = build_v3_parameter_relation('policy_id') - PROJECT_ID = build_v3_parameter_relation('project_id') - REGION_ID = build_v3_parameter_relation('region_id') - ROLE_ID = build_v3_parameter_relation('role_id') - SERVICE_ID = build_v3_parameter_relation('service_id') - USER_ID = build_v3_parameter_relation('user_id') - - -class Status(object): - """Status values supported.""" - - DEPRECATED = 'deprecated' - EXPERIMENTAL = 'experimental' - STABLE = 'stable' - - @classmethod - def update_resource_data(cls, resource_data, status): - if status is cls.STABLE: - # We currently do not add a status if the resource is stable, the - # absence of the status property can be taken as meaning that the - # resource is stable. 
- return - if status is cls.DEPRECATED or status is cls.EXPERIMENTAL: - resource_data['hints'] = {'status': status} - return - - raise exception.Error(message=_( - 'Unexpected status requested for JSON Home response, %s') % status) - - -def translate_urls(json_home, new_prefix): - """Given a JSON Home document, sticks new_prefix on each of the urls.""" - for dummy_rel, resource in json_home['resources'].items(): - if 'href' in resource: - resource['href'] = new_prefix + resource['href'] - elif 'href-template' in resource: - resource['href-template'] = new_prefix + resource['href-template'] diff --git a/keystone-moon/keystone/common/kvs/__init__.py b/keystone-moon/keystone/common/kvs/__init__.py deleted file mode 100644 index 354bbd8a..00000000 --- a/keystone-moon/keystone/common/kvs/__init__.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright 2013 Metacloud, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from dogpile.cache import region - -from keystone.common.kvs.core import * # noqa - - -# NOTE(morganfainberg): Provided backends are registered here in the __init__ -# for the kvs system. Any out-of-tree backends should be registered via the -# ``backends`` option in the ``[kvs]`` section of the Keystone configuration -# file. 
-region.register_backend( - 'openstack.kvs.Memory', - 'keystone.common.kvs.backends.inmemdb', - 'MemoryBackend') - -region.register_backend( - 'openstack.kvs.Memcached', - 'keystone.common.kvs.backends.memcached', - 'MemcachedBackend') diff --git a/keystone-moon/keystone/common/kvs/backends/__init__.py b/keystone-moon/keystone/common/kvs/backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/common/kvs/backends/inmemdb.py b/keystone-moon/keystone/common/kvs/backends/inmemdb.py deleted file mode 100644 index 379b54bf..00000000 --- a/keystone-moon/keystone/common/kvs/backends/inmemdb.py +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright 2013 Metacloud, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Keystone In-Memory Dogpile.cache backend implementation.""" - -import copy - -from dogpile.cache import api - - -NO_VALUE = api.NO_VALUE - - -class MemoryBackend(api.CacheBackend): - """A backend that uses a plain dictionary. - - There is no size management, and values which are placed into the - dictionary will remain until explicitly removed. Note that Dogpile's - expiration of items is based on timestamps and does not remove them from - the cache. 
- - E.g.:: - - from dogpile.cache import make_region - - region = make_region().configure( - 'keystone.common.kvs.Memory' - ) - """ - - def __init__(self, arguments): - self._db = {} - - def _isolate_value(self, value): - if value is not NO_VALUE: - return copy.deepcopy(value) - return value - - def get(self, key): - return self._isolate_value(self._db.get(key, NO_VALUE)) - - def get_multi(self, keys): - return [self.get(key) for key in keys] - - def set(self, key, value): - self._db[key] = self._isolate_value(value) - - def set_multi(self, mapping): - for key, value in mapping.items(): - self.set(key, value) - - def delete(self, key): - self._db.pop(key, None) - - def delete_multi(self, keys): - for key in keys: - self.delete(key) diff --git a/keystone-moon/keystone/common/kvs/backends/memcached.py b/keystone-moon/keystone/common/kvs/backends/memcached.py deleted file mode 100644 index a65cf877..00000000 --- a/keystone-moon/keystone/common/kvs/backends/memcached.py +++ /dev/null @@ -1,195 +0,0 @@ -# Copyright 2013 Metacloud, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Keystone Memcached dogpile.cache backend implementation.""" - -import random as _random -import time - -from dogpile.cache import api -from dogpile.cache.backends import memcached -from oslo_cache.backends import memcache_pool -from oslo_config import cfg -from six.moves import range - -from keystone import exception -from keystone.i18n import _ - - -CONF = cfg.CONF -NO_VALUE = api.NO_VALUE -random = _random.SystemRandom() - -VALID_DOGPILE_BACKENDS = dict( - pylibmc=memcached.PylibmcBackend, - bmemcached=memcached.BMemcachedBackend, - memcached=memcached.MemcachedBackend, - pooled_memcached=memcache_pool.PooledMemcachedBackend) - - -class MemcachedLock(object): - """Simple distributed lock using memcached. - - This is an adaptation of the lock featured at - http://amix.dk/blog/post/19386 - - """ - - def __init__(self, client_fn, key, lock_timeout, max_lock_attempts): - self.client_fn = client_fn - self.key = "_lock" + key - self.lock_timeout = lock_timeout - self.max_lock_attempts = max_lock_attempts - - def acquire(self, wait=True): - client = self.client_fn() - for i in range(self.max_lock_attempts): - if client.add(self.key, 1, self.lock_timeout): - return True - elif not wait: - return False - else: - sleep_time = random.random() # nosec : random is not used for - # crypto or security, it's just the time to delay between - # retries. - time.sleep(sleep_time) - raise exception.UnexpectedError( - _('Maximum lock attempts on %s occurred.') % self.key) - - def release(self): - client = self.client_fn() - client.delete(self.key) - - -class MemcachedBackend(object): - """Pivot point to leverage the various dogpile.cache memcached backends. - - To specify a specific dogpile.cache memcached backend, pass the argument - `memcached_backend` set to one of the provided memcached backends (at this - time `memcached`, `bmemcached`, `pylibmc` and `pooled_memcached` are - valid). 
- """ - - def __init__(self, arguments): - self._key_mangler = None - self.raw_no_expiry_keys = set(arguments.pop('no_expiry_keys', set())) - self.no_expiry_hashed_keys = set() - - self.lock_timeout = arguments.pop('lock_timeout', None) - self.max_lock_attempts = arguments.pop('max_lock_attempts', 15) - # NOTE(morganfainberg): Remove distributed locking from the arguments - # passed to the "real" backend if it exists. - arguments.pop('distributed_lock', None) - backend = arguments.pop('memcached_backend', None) - if 'url' not in arguments: - # FIXME(morganfainberg): Log deprecation warning for old-style - # configuration once full dict_config style configuration for - # KVS backends is supported. For now use the current memcache - # section of the configuration. - arguments['url'] = CONF.memcache.servers - - if backend is None: - # NOTE(morganfainberg): Use the basic memcached backend if nothing - # else is supplied. - self.driver = VALID_DOGPILE_BACKENDS['memcached'](arguments) - else: - if backend not in VALID_DOGPILE_BACKENDS: - raise ValueError( - _('Backend `%(backend)s` is not a valid memcached ' - 'backend. Valid backends: %(backend_list)s') % - {'backend': backend, - 'backend_list': ','.join(VALID_DOGPILE_BACKENDS.keys())}) - else: - self.driver = VALID_DOGPILE_BACKENDS[backend](arguments) - - def __getattr__(self, name): - """Forward calls to the underlying driver.""" - f = getattr(self.driver, name) - setattr(self, name, f) - return f - - def _get_set_arguments_driver_attr(self, exclude_expiry=False): - - # NOTE(morganfainberg): Shallow copy the .set_arguments dict to - # ensure no changes cause the values to change in the instance - # variable. 
- set_arguments = getattr(self.driver, 'set_arguments', {}).copy() - - if exclude_expiry: - # NOTE(morganfainberg): Explicitly strip out the 'time' key/value - # from the set_arguments in the case that this key isn't meant - # to expire - set_arguments.pop('time', None) - return set_arguments - - def set(self, key, value): - mapping = {key: value} - self.set_multi(mapping) - - def set_multi(self, mapping): - mapping_keys = set(mapping.keys()) - no_expiry_keys = mapping_keys.intersection(self.no_expiry_hashed_keys) - has_expiry_keys = mapping_keys.difference(self.no_expiry_hashed_keys) - - if no_expiry_keys: - # NOTE(morganfainberg): For keys that have expiry excluded, - # bypass the backend and directly call the client. Bypass directly - # to the client is required as the 'set_arguments' are applied to - # all ``set`` and ``set_multi`` calls by the driver, by calling - # the client directly it is possible to exclude the ``time`` - # argument to the memcached server. - new_mapping = {k: mapping[k] for k in no_expiry_keys} - set_arguments = self._get_set_arguments_driver_attr( - exclude_expiry=True) - self.driver.client.set_multi(new_mapping, **set_arguments) - - if has_expiry_keys: - new_mapping = {k: mapping[k] for k in has_expiry_keys} - self.driver.set_multi(new_mapping) - - @classmethod - def from_config_dict(cls, config_dict, prefix): - prefix_len = len(prefix) - return cls( - {key[prefix_len:]: config_dict[key] for key in config_dict - if key.startswith(prefix)}) - - @property - def key_mangler(self): - if self._key_mangler is None: - self._key_mangler = self.driver.key_mangler - return self._key_mangler - - @key_mangler.setter - def key_mangler(self, key_mangler): - if callable(key_mangler): - self._key_mangler = key_mangler - self._rehash_keys() - elif key_mangler is None: - # NOTE(morganfainberg): Set the hashed key map to the unhashed - # list since we no longer have a key_mangler. 
- self._key_mangler = None - self.no_expiry_hashed_keys = self.raw_no_expiry_keys - else: - raise TypeError(_('`key_mangler` functions must be callable.')) - - def _rehash_keys(self): - no_expire = set() - for key in self.raw_no_expiry_keys: - no_expire.add(self._key_mangler(key)) - self.no_expiry_hashed_keys = no_expire - - def get_mutex(self, key): - return MemcachedLock(lambda: self.driver.client, key, - self.lock_timeout, self.max_lock_attempts) diff --git a/keystone-moon/keystone/common/kvs/core.py b/keystone-moon/keystone/common/kvs/core.py deleted file mode 100644 index 064825f8..00000000 --- a/keystone-moon/keystone/common/kvs/core.py +++ /dev/null @@ -1,450 +0,0 @@ -# Copyright 2013 Metacloud, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import contextlib -import threading -import time -import weakref - -from dogpile.cache import api -from dogpile.cache import proxy -from dogpile.cache import region -from dogpile.cache import util as dogpile_util -from dogpile.core import nameregistry -from oslo_config import cfg -from oslo_log import log -from oslo_utils import importutils -from oslo_utils import reflection - -from keystone import exception -from keystone.i18n import _ -from keystone.i18n import _LI -from keystone.i18n import _LW - - -__all__ = ('KeyValueStore', 'KeyValueStoreLock', 'LockTimeout', - 'get_key_value_store') - - -BACKENDS_REGISTERED = False -CONF = cfg.CONF -KEY_VALUE_STORE_REGISTRY = weakref.WeakValueDictionary() -LOCK_WINDOW = 1 -LOG = log.getLogger(__name__) -NO_VALUE = api.NO_VALUE - - -def _register_backends(): - # NOTE(morganfainberg): This function exists to ensure we do not try and - # register the backends prior to the configuration object being fully - # available. We also need to ensure we do not register a given backend - # more than one time. All backends will be prefixed with openstack.kvs - # as the "short" name to reference them for configuration purposes. This - # function is used in addition to the pre-registered backends in the - # __init__ file for the KVS system. - global BACKENDS_REGISTERED - - if not BACKENDS_REGISTERED: - prefix = 'openstack.kvs.%s' - for backend in CONF.kvs.backends: - module, cls = backend.rsplit('.', 1) - backend_name = prefix % cls - LOG.debug(('Registering Dogpile Backend %(backend_path)s as ' - '%(backend_name)s'), - {'backend_path': backend, 'backend_name': backend_name}) - region.register_backend(backend_name, module, cls) - BACKENDS_REGISTERED = True - - -def sha1_mangle_key(key): - """Wrapper for dogpile's sha1_mangle_key. - - Taken from oslo_cache.core._sha1_mangle_key - - dogpile's sha1_mangle_key function expects an encoded string, so we - should take steps to properly handle multiple inputs before passing - the key through. 
- """ - try: - key = key.encode('utf-8', errors='xmlcharrefreplace') - except (UnicodeError, AttributeError): # nosec - # NOTE(stevemar): if encoding fails just continue anyway. - pass - return dogpile_util.sha1_mangle_key(key) - - -class LockTimeout(exception.UnexpectedError): - debug_message_format = _('Lock Timeout occurred for key, %(target)s') - - -class KeyValueStore(object): - """Basic KVS manager object to support Keystone Key-Value-Store systems. - - This manager also supports the concept of locking a given key resource to - allow for a guaranteed atomic transaction to the backend. - """ - - def __init__(self, kvs_region): - self.locking = True - self._lock_timeout = 0 - self._region = kvs_region - self._security_strategy = None - self._secret_key = None - self._lock_registry = nameregistry.NameRegistry(self._create_mutex) - - def configure(self, backing_store, key_mangler=None, proxy_list=None, - locking=True, **region_config_args): - """Configure the KeyValueStore instance. - - :param backing_store: dogpile.cache short name of the region backend - :param key_mangler: key_mangler function - :param proxy_list: list of proxy classes to apply to the region - :param locking: boolean that allows disabling of locking mechanism for - this instantiation - :param region_config_args: key-word args passed to the dogpile.cache - backend for configuration - """ - if self.is_configured: - # NOTE(morganfainberg): It is a bad idea to reconfigure a backend, - # there are a lot of pitfalls and potential memory leaks that could - # occur. By far the best approach is to re-create the KVS object - # with the new configuration. - raise RuntimeError(_('KVS region %s is already configured. 
' - 'Cannot reconfigure.') % self._region.name) - - self.locking = locking - self._lock_timeout = region_config_args.pop( - 'lock_timeout', CONF.kvs.default_lock_timeout) - self._configure_region(backing_store, **region_config_args) - self._set_key_mangler(key_mangler) - self._apply_region_proxy(proxy_list) - - @property - def is_configured(self): - return 'backend' in self._region.__dict__ - - def _apply_region_proxy(self, proxy_list): - if isinstance(proxy_list, list): - proxies = [] - - for item in proxy_list: - if isinstance(item, str): - LOG.debug('Importing class %s as KVS proxy.', item) - pxy = importutils.import_class(item) - else: - pxy = item - - if issubclass(pxy, proxy.ProxyBackend): - proxies.append(pxy) - else: - pxy_cls_name = reflection.get_class_name( - pxy, fully_qualified=False) - LOG.warning(_LW('%s is not a dogpile.proxy.ProxyBackend'), - pxy_cls_name) - - for proxy_cls in reversed(proxies): - proxy_cls_name = reflection.get_class_name( - proxy_cls, fully_qualified=False) - LOG.info(_LI('Adding proxy \'%(proxy)s\' to KVS %(name)s.'), - {'proxy': proxy_cls_name, - 'name': self._region.name}) - self._region.wrap(proxy_cls) - - def _assert_configured(self): - if'backend' not in self._region.__dict__: - raise exception.UnexpectedError(_('Key Value Store not ' - 'configured: %s'), - self._region.name) - - def _set_keymangler_on_backend(self, key_mangler): - try: - self._region.backend.key_mangler = key_mangler - except Exception as e: - # NOTE(morganfainberg): The setting of the key_mangler on the - # backend is used to allow the backend to - # calculate a hashed key value as needed. Not all backends - # require the ability to calculate hashed keys. If the - # backend does not support/require this feature log a - # debug line and move on otherwise raise the proper exception. - # Support of the feature is implied by the existence of the - # 'raw_no_expiry_keys' attribute. 
- if not hasattr(self._region.backend, 'raw_no_expiry_keys'): - LOG.debug(('Non-expiring keys not supported/required by ' - '%(region)s backend; unable to set ' - 'key_mangler for backend: %(err)s'), - {'region': self._region.name, 'err': e}) - else: - raise - - def _set_key_mangler(self, key_mangler): - # Set the key_mangler that is appropriate for the given region being - # configured here. The key_mangler function is called prior to storing - # the value(s) in the backend. This is to help prevent collisions and - # limit issues such as memcache's limited cache_key size. - use_backend_key_mangler = getattr(self._region.backend, - 'use_backend_key_mangler', False) - if ((key_mangler is None or use_backend_key_mangler) and - (self._region.backend.key_mangler is not None)): - # NOTE(morganfainberg): Use the configured key_mangler as a first - # choice. Second choice would be the key_mangler defined by the - # backend itself. Finally, fall back to the defaults. The one - # exception is if the backend defines `use_backend_key_mangler` - # as True, which indicates the backend's key_mangler should be - # the first choice. - key_mangler = self._region.backend.key_mangler - - if CONF.kvs.enable_key_mangler: - if key_mangler is not None: - msg = _LI('Using %(func)s as KVS region %(name)s key_mangler') - if callable(key_mangler): - self._region.key_mangler = key_mangler - LOG.info(msg, {'func': key_mangler.__name__, - 'name': self._region.name}) - else: - # NOTE(morganfainberg): We failed to set the key_mangler, - # we should error out here to ensure we aren't causing - # key-length or collision issues. - raise exception.ValidationError( - _('`key_mangler` option must be a function reference')) - else: - msg = _LI('Using default keystone.common.kvs.sha1_mangle_key ' - 'as KVS region %s key_mangler') - LOG.info(msg, self._region.name) - # NOTE(morganfainberg): Use 'default' keymangler to ensure - # that unless explicitly changed, we mangle keys. 
This helps - # to limit unintended cases of exceeding cache-key in backends - # such as memcache. - self._region.key_mangler = sha1_mangle_key - self._set_keymangler_on_backend(self._region.key_mangler) - else: - LOG.info(_LI('KVS region %s key_mangler disabled.'), - self._region.name) - self._set_keymangler_on_backend(None) - - def _configure_region(self, backend, **config_args): - prefix = CONF.kvs.config_prefix - conf_dict = {} - conf_dict['%s.backend' % prefix] = backend - - if 'distributed_lock' not in config_args: - config_args['distributed_lock'] = True - - config_args['lock_timeout'] = self._lock_timeout - - # NOTE(morganfainberg): To mitigate race conditions on comparing - # the timeout and current time on the lock mutex, we are building - # in a static 1 second overlap where the lock will still be valid - # in the backend but not from the perspective of the context - # manager. Since we must develop to the lowest-common-denominator - # when it comes to the backends, memcache's cache store is not more - # refined than 1 second, therefore we must build in at least a 1 - # second overlap. `lock_timeout` of 0 means locks never expire. - if config_args['lock_timeout'] > 0: - config_args['lock_timeout'] += LOCK_WINDOW - - for argument, value in config_args.items(): - arg_key = '.'.join([prefix, 'arguments', argument]) - conf_dict[arg_key] = value - - LOG.debug('KVS region configuration for %(name)s: %(config)r', - {'name': self._region.name, 'config': conf_dict}) - self._region.configure_from_config(conf_dict, '%s.' 
% prefix) - - def _mutex(self, key): - return self._lock_registry.get(key) - - def _create_mutex(self, key): - mutex = self._region.backend.get_mutex(key) - if mutex is not None: - return mutex - else: - return self._LockWrapper(lock_timeout=self._lock_timeout) - - class _LockWrapper(object): - """weakref-capable threading.Lock wrapper.""" - - def __init__(self, lock_timeout): - self.lock = threading.Lock() - self.lock_timeout = lock_timeout - - def acquire(self, wait=True): - return self.lock.acquire(wait) - - def release(self): - self.lock.release() - - def get(self, key): - """Get a single value from the KVS backend.""" - self._assert_configured() - value = self._region.get(key) - if value is NO_VALUE: - raise exception.NotFound(target=key) - return value - - def get_multi(self, keys): - """Get multiple values in a single call from the KVS backend.""" - self._assert_configured() - values = self._region.get_multi(keys) - not_found = [] - for index, key in enumerate(keys): - if values[index] is NO_VALUE: - not_found.append(key) - if not_found: - # NOTE(morganfainberg): If any of the multi-get values are non- - # existent, we should raise a NotFound error to mimic the .get() - # method's behavior. In all cases the internal dogpile NO_VALUE - # should be masked from the consumer of the KeyValueStore. - raise exception.NotFound(target=not_found) - return values - - def set(self, key, value, lock=None): - """Set a single value in the KVS backend.""" - self._assert_configured() - with self._action_with_lock(key, lock): - self._region.set(key, value) - - def set_multi(self, mapping): - """Set multiple key/value pairs in the KVS backend at once. - - Like delete_multi, this call does not serialize through the - KeyValueStoreLock mechanism (locking cannot occur on more than one - key in a given context without significant deadlock potential). 
- """ - self._assert_configured() - self._region.set_multi(mapping) - - def delete(self, key, lock=None): - """Delete a single key from the KVS backend. - - This method will raise NotFound if the key doesn't exist. The get and - delete are done in a single transaction (via KeyValueStoreLock - mechanism). - """ - self._assert_configured() - - with self._action_with_lock(key, lock): - self.get(key) - self._region.delete(key) - - def delete_multi(self, keys): - """Delete multiple keys from the KVS backend in a single call. - - Like set_multi, this call does not serialize through the - KeyValueStoreLock mechanism (locking cannot occur on more than one - key in a given context without significant deadlock potential). - """ - self._assert_configured() - self._region.delete_multi(keys) - - def get_lock(self, key): - """Get a write lock on the KVS value referenced by `key`. - - The ability to get a context manager to pass into the set/delete - methods allows for a single-transaction to occur while guaranteeing the - backing store will not change between the start of the 'lock' and the - end. Lock timeout is fixed to the KeyValueStore configured lock - timeout. - """ - self._assert_configured() - return KeyValueStoreLock(self._mutex(key), key, self.locking, - self._lock_timeout) - - @contextlib.contextmanager - def _action_with_lock(self, key, lock=None): - """Wrapper context manager. - - Validates and handles the lock and lock timeout if passed in. - """ - if not isinstance(lock, KeyValueStoreLock): - # NOTE(morganfainberg): Locking only matters if a lock is passed in - # to this method. If lock isn't a KeyValueStoreLock, treat this as - # if no locking needs to occur. 
- yield - else: - if not lock.key == key: - raise ValueError(_('Lock key must match target key: %(lock)s ' - '!= %(target)s') % - {'lock': lock.key, 'target': key}) - if not lock.active: - raise exception.ValidationError(_('Must be called within an ' - 'active lock context.')) - if not lock.expired: - yield - else: - raise LockTimeout(target=key) - - -class KeyValueStoreLock(object): - """Basic KeyValueStoreLock context manager. - - Hooks into the dogpile.cache backend mutex allowing for distributed locking - on resources. This is only a write lock, and will not prevent reads from - occurring. - """ - - def __init__(self, mutex, key, locking_enabled=True, lock_timeout=0): - self.mutex = mutex - self.key = key - self.enabled = locking_enabled - self.lock_timeout = lock_timeout - self.active = False - self.acquire_time = 0 - - def acquire(self): - if self.enabled: - self.mutex.acquire() - LOG.debug('KVS lock acquired for: %s', self.key) - self.active = True - self.acquire_time = time.time() - return self - - __enter__ = acquire - - @property - def expired(self): - if self.lock_timeout: - calculated = time.time() - self.acquire_time + LOCK_WINDOW - return calculated > self.lock_timeout - else: - return False - - def release(self): - if self.enabled: - self.mutex.release() - if not self.expired: - LOG.debug('KVS lock released for: %s', self.key) - else: - LOG.warning(_LW('KVS lock released (timeout reached) for: %s'), - self.key) - - def __exit__(self, exc_type, exc_val, exc_tb): - self.release() - - -def get_key_value_store(name, kvs_region=None): - """Retrieve key value store. - - Instantiate a new :class:`.KeyValueStore` or return a previous - instantiation that has the same name. 
- """ - global KEY_VALUE_STORE_REGISTRY - - _register_backends() - key_value_store = KEY_VALUE_STORE_REGISTRY.get(name) - if key_value_store is None: - if kvs_region is None: - kvs_region = region.make_region(name=name) - key_value_store = KeyValueStore(kvs_region) - KEY_VALUE_STORE_REGISTRY[name] = key_value_store - return key_value_store diff --git a/keystone-moon/keystone/common/kvs/legacy.py b/keystone-moon/keystone/common/kvs/legacy.py deleted file mode 100644 index 7e27d97f..00000000 --- a/keystone-moon/keystone/common/kvs/legacy.py +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_log import versionutils - -from keystone import exception - - -class DictKvs(dict): - def get(self, key, default=None): - try: - if isinstance(self[key], dict): - return self[key].copy() - else: - return self[key][:] - except KeyError: - if default is not None: - return default - raise exception.NotFound(target=key) - - def set(self, key, value): - if isinstance(value, dict): - self[key] = value.copy() - else: - self[key] = value[:] - - def delete(self, key): - """Deletes an item, returning True on success, False otherwise.""" - try: - del self[key] - except KeyError: - raise exception.NotFound(target=key) - - -INMEMDB = DictKvs() - - -class Base(object): - @versionutils.deprecated(versionutils.deprecated.ICEHOUSE, - in_favor_of='keystone.common.kvs.KeyValueStore', - remove_in=+2, - what='keystone.common.kvs.Base') - def __init__(self, db=None): - if db is None: - db = INMEMDB - elif isinstance(db, DictKvs): - db = db - elif isinstance(db, dict): - db = DictKvs(db) - self.db = db diff --git a/keystone-moon/keystone/common/ldap/__init__.py b/keystone-moon/keystone/common/ldap/__init__.py deleted file mode 100644 index ab5bf4d0..00000000 --- a/keystone-moon/keystone/common/ldap/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone.common.ldap.core import * # noqa diff --git a/keystone-moon/keystone/common/ldap/core.py b/keystone-moon/keystone/common/ldap/core.py deleted file mode 100644 index d94aa04c..00000000 --- a/keystone-moon/keystone/common/ldap/core.py +++ /dev/null @@ -1,1955 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import abc -import codecs -import functools -import os.path -import re -import sys -import weakref - -import ldap.controls -import ldap.filter -import ldappool -from oslo_log import log -from oslo_utils import reflection -import six -from six.moves import map, zip - -from keystone.common import driver_hints -from keystone import exception -from keystone.i18n import _ -from keystone.i18n import _LW - - -LOG = log.getLogger(__name__) - -LDAP_VALUES = {'TRUE': True, 'FALSE': False} -CONTROL_TREEDELETE = '1.2.840.113556.1.4.805' -LDAP_SCOPES = {'one': ldap.SCOPE_ONELEVEL, - 'sub': ldap.SCOPE_SUBTREE} -LDAP_DEREF = {'always': ldap.DEREF_ALWAYS, - 'default': None, - 'finding': ldap.DEREF_FINDING, - 'never': ldap.DEREF_NEVER, - 'searching': ldap.DEREF_SEARCHING} -LDAP_TLS_CERTS = {'never': ldap.OPT_X_TLS_NEVER, - 'demand': ldap.OPT_X_TLS_DEMAND, - 'allow': ldap.OPT_X_TLS_ALLOW} - - -# RFC 4511 (The LDAP Protocol) defines a list containing only the OID '1.1' to -# indicate that no attributes should be returned besides the DN. 
-DN_ONLY = ['1.1'] - -_utf8_encoder = codecs.getencoder('utf-8') - - -def utf8_encode(value): - """Encode a basestring to UTF-8. - - If the string is unicode encode it to UTF-8, if the string is - str then assume it's already encoded. Otherwise raise a TypeError. - - :param value: A basestring - :returns: UTF-8 encoded version of value - :raises TypeError: If value is not basestring - """ - if isinstance(value, six.text_type): - return _utf8_encoder(value)[0] - elif isinstance(value, six.binary_type): - return value - else: - value_cls_name = reflection.get_class_name( - value, fully_qualified=False) - raise TypeError("value must be basestring, " - "not %s" % value_cls_name) - -_utf8_decoder = codecs.getdecoder('utf-8') - - -def utf8_decode(value): - """Decode a from UTF-8 into unicode. - - If the value is a binary string assume it's UTF-8 encoded and decode - it into a unicode string. Otherwise convert the value from its - type into a unicode string. - - :param value: value to be returned as unicode - :returns: value as unicode - :raises UnicodeDecodeError: for invalid UTF-8 encoding - """ - if isinstance(value, six.binary_type): - return _utf8_decoder(value)[0] - return six.text_type(value) - - -def py2ldap(val): - """Type convert a Python value to a type accepted by LDAP (unicode). - - The LDAP API only accepts strings for values therefore convert - the value's type to a unicode string. A subsequent type conversion - will encode the unicode as UTF-8 as required by the python-ldap API, - but for now we just want a string representation of the value. - - :param val: The value to convert to a LDAP string representation - :returns: unicode string representation of value. 
- """ - if isinstance(val, bool): - return u'TRUE' if val else u'FALSE' - else: - return six.text_type(val) - - -def enabled2py(val): - """Similar to ldap2py, only useful for the enabled attribute.""" - try: - return LDAP_VALUES[val] - except KeyError: # nosec - # It wasn't a boolean value, will try as an int instead. - pass - try: - return int(val) - except ValueError: # nosec - # It wasn't an int either, will try as utf8 instead. - pass - return utf8_decode(val) - - -def ldap2py(val): - """Convert an LDAP formatted value to Python type used by OpenStack. - - Virtually all LDAP values are stored as UTF-8 encoded strings. - OpenStack prefers values which are unicode friendly. - - :param val: LDAP formatted value - :returns: val converted to preferred Python type - """ - return utf8_decode(val) - - -def convert_ldap_result(ldap_result): - """Convert LDAP search result to Python types used by OpenStack. - - Each result tuple is of the form (dn, attrs), where dn is a string - containing the DN (distinguished name) of the entry, and attrs is - a dictionary containing the attributes associated with the - entry. The keys of attrs are strings, and the associated values - are lists of strings. - - OpenStack wants to use Python types of its choosing. Strings will - be unicode, truth values boolean, whole numbers int's, etc. DN's will - also be decoded from UTF-8 to unicode. - - :param ldap_result: LDAP search result - :returns: list of 2-tuples containing (dn, attrs) where dn is unicode - and attrs is a dict whose values are type converted to - OpenStack preferred types. 
- """ - py_result = [] - at_least_one_referral = False - for dn, attrs in ldap_result: - ldap_attrs = {} - if dn is None: - # this is a Referral object, rather than an Entry object - at_least_one_referral = True - continue - - for kind, values in attrs.items(): - try: - val2py = enabled2py if kind == 'enabled' else ldap2py - ldap_attrs[kind] = [val2py(x) for x in values] - except UnicodeDecodeError: - LOG.debug('Unable to decode value for attribute %s', kind) - - py_result.append((utf8_decode(dn), ldap_attrs)) - if at_least_one_referral: - LOG.debug(('Referrals were returned and ignored. Enable referral ' - 'chasing in keystone.conf via [ldap] chase_referrals')) - - return py_result - - -def safe_iter(attrs): - if attrs is None: - return - elif isinstance(attrs, list): - for e in attrs: - yield e - else: - yield attrs - - -def parse_deref(opt): - try: - return LDAP_DEREF[opt] - except KeyError: - raise ValueError(_('Invalid LDAP deref option: %(option)s. ' - 'Choose one of: %(options)s') % - {'option': opt, - 'options': ', '.join(LDAP_DEREF.keys()), }) - - -def parse_tls_cert(opt): - try: - return LDAP_TLS_CERTS[opt] - except KeyError: - raise ValueError(_( - 'Invalid LDAP TLS certs option: %(option)s. ' - 'Choose one of: %(options)s') % { - 'option': opt, - 'options': ', '.join(LDAP_TLS_CERTS.keys())}) - - -def ldap_scope(scope): - try: - return LDAP_SCOPES[scope] - except KeyError: - raise ValueError( - _('Invalid LDAP scope: %(scope)s. Choose one of: %(options)s') % { - 'scope': scope, - 'options': ', '.join(LDAP_SCOPES.keys())}) - - -def prep_case_insensitive(value): - """Prepare a string for case-insensitive comparison. - - This is defined in RFC4518. For simplicity, all this function does is - lowercase all the characters, strip leading and trailing whitespace, - and compress sequences of spaces to a single space. 
- """ - value = re.sub(r'\s+', ' ', value.strip().lower()) - return value - - -def is_ava_value_equal(attribute_type, val1, val2): - """Returns True if and only if the AVAs are equal. - - When comparing AVAs, the equality matching rule for the attribute type - should be taken into consideration. For simplicity, this implementation - does a case-insensitive comparison. - - Note that this function uses prep_case_insenstive so the limitations of - that function apply here. - - """ - return prep_case_insensitive(val1) == prep_case_insensitive(val2) - - -def is_rdn_equal(rdn1, rdn2): - """Returns True if and only if the RDNs are equal. - - * RDNs must have the same number of AVAs. - * Each AVA of the RDNs must be the equal for the same attribute type. The - order isn't significant. Note that an attribute type will only be in one - AVA in an RDN, otherwise the DN wouldn't be valid. - * Attribute types aren't case sensitive. Note that attribute type - comparison is more complicated than implemented. This function only - compares case-insentive. The code should handle multiple names for an - attribute type (e.g., cn, commonName, and 2.5.4.3 are the same). - - Note that this function uses is_ava_value_equal to compare AVAs so the - limitations of that function apply here. - - """ - if len(rdn1) != len(rdn2): - return False - - for attr_type_1, val1, dummy in rdn1: - found = False - for attr_type_2, val2, dummy in rdn2: - if attr_type_1.lower() != attr_type_2.lower(): - continue - - found = True - if not is_ava_value_equal(attr_type_1, val1, val2): - return False - break - if not found: - return False - - return True - - -def is_dn_equal(dn1, dn2): - """Returns True if and only if the DNs are equal. - - Two DNs are equal if they've got the same number of RDNs and if the RDNs - are the same at each position. See RFC4517. - - Note that this function uses is_rdn_equal to compare RDNs so the - limitations of that function apply here. 
- - :param dn1: Either a string DN or a DN parsed by ldap.dn.str2dn. - :param dn2: Either a string DN or a DN parsed by ldap.dn.str2dn. - - """ - if not isinstance(dn1, list): - dn1 = ldap.dn.str2dn(utf8_encode(dn1)) - if not isinstance(dn2, list): - dn2 = ldap.dn.str2dn(utf8_encode(dn2)) - - if len(dn1) != len(dn2): - return False - - for rdn1, rdn2 in zip(dn1, dn2): - if not is_rdn_equal(rdn1, rdn2): - return False - return True - - -def dn_startswith(descendant_dn, dn): - """Returns True if and only if the descendant_dn is under the dn. - - :param descendant_dn: Either a string DN or a DN parsed by ldap.dn.str2dn. - :param dn: Either a string DN or a DN parsed by ldap.dn.str2dn. - - """ - if not isinstance(descendant_dn, list): - descendant_dn = ldap.dn.str2dn(utf8_encode(descendant_dn)) - if not isinstance(dn, list): - dn = ldap.dn.str2dn(utf8_encode(dn)) - - if len(descendant_dn) <= len(dn): - return False - - # Use the last len(dn) RDNs. - return is_dn_equal(descendant_dn[-len(dn):], dn) - - -@six.add_metaclass(abc.ABCMeta) -class LDAPHandler(object): - """Abstract class which defines methods for a LDAP API provider. - - Native Keystone values cannot be passed directly into and from the - python-ldap API. Type conversion must occur at the LDAP API - boudary, examples of type conversions are: - - * booleans map to the strings 'TRUE' and 'FALSE' - - * integer values map to their string representation. - - * unicode strings are encoded in UTF-8 - - In addition to handling type conversions at the API boundary we - have the requirement to support more than one LDAP API - provider. Currently we have: - - * python-ldap, this is the standard LDAP API for Python, it - requires access to a live LDAP server. - - * Fake LDAP which emulates python-ldap. This is used for - testing without requiring a live LDAP server. - - To support these requirements we need a layer that performs type - conversions and then calls another LDAP API which is configurable - (e.g. 
either python-ldap or the fake emulation). - - We have an additional constraint at the time of this writing due to - limitations in the logging module. The logging module is not - capable of accepting UTF-8 encoded strings, it will throw an - encoding exception. Therefore all logging MUST be performed prior - to UTF-8 conversion. This means no logging can be performed in the - ldap APIs that implement the python-ldap API because those APIs - are defined to accept only UTF-8 strings. Thus the layer which - performs type conversions must also do the logging. We do the type - conversions in two steps, once to convert all Python types to - unicode strings, then log, then convert the unicode strings to - UTF-8. - - There are a variety of ways one could accomplish this, we elect to - use a chaining technique whereby instances of this class simply - call the next member in the chain via the "conn" attribute. The - chain is constructed by passing in an existing instance of this - class as the conn attribute when the class is instantiated. - - Here is a brief explanation of why other possible approaches were - not used: - - subclassing - - To perform the wrapping operations in the correct order - the type convesion class would have to subclass each of - the API providers. This is awkward, doubles the number of - classes, and does not scale well. It requires the type - conversion class to be aware of all possible API - providers. - - decorators - - Decorators provide an elegant solution to wrap methods and - would be an ideal way to perform type conversions before - calling the wrapped function and then converting the - values returned from the wrapped function. However - decorators need to be aware of the method signature, it - has to know what input parameters need conversion and how - to convert the result. For an API like python-ldap which - has a large number of different method signatures it would - require a large number of specialized - decorators. 
Experience has shown it's very easy to apply - the wrong decorator due to the inherent complexity and - tendency to cut-n-paste code. Another option is to - parameterize the decorator to make it "smart". Experience - has shown such decorators become insanely complicated and - difficult to understand and debug. Also decorators tend to - hide what's really going on when a method is called, the - operations being performed are not visible when looking at - the implemation of a decorated method, this too experience - has shown leads to mistakes. - - Chaining simplifies both wrapping to perform type conversion as - well as the substitution of alternative API providers. One simply - creates a new instance of the API interface and insert it at the - front of the chain. Type conversions are explicit and obvious. - - If a new method needs to be added to the API interface one adds it - to the abstract class definition. Should one miss adding the new - method to any derivations of the abstract class the code will fail - to load and run making it impossible to forget updating all the - derived classes. 
- - """ - - @abc.abstractmethod - def __init__(self, conn=None): - self.conn = conn - - @abc.abstractmethod - def connect(self, url, page_size=0, alias_dereferencing=None, - use_tls=False, tls_cacertfile=None, tls_cacertdir=None, - tls_req_cert='demand', chase_referrals=None, debug_level=None, - use_pool=None, pool_size=None, pool_retry_max=None, - pool_retry_delay=None, pool_conn_timeout=None, - pool_conn_lifetime=None): - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def set_option(self, option, invalue): - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_option(self, option): - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def simple_bind_s(self, who='', cred='', - serverctrls=None, clientctrls=None): - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def unbind_s(self): - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def add_s(self, dn, modlist): - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def search_s(self, base, scope, - filterstr='(objectClass=*)', attrlist=None, attrsonly=0): - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def search_ext(self, base, scope, - filterstr='(objectClass=*)', attrlist=None, attrsonly=0, - serverctrls=None, clientctrls=None, - timeout=-1, sizelimit=0): - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def result3(self, msgid=ldap.RES_ANY, all=1, timeout=None, - resp_ctrl_classes=None): - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def modify_s(self, dn, modlist): - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_s(self, dn): - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_ext_s(self, dn, serverctrls=None, clientctrls=None): - raise exception.NotImplemented() # 
pragma: no cover - - -class PythonLDAPHandler(LDAPHandler): - """LDAPHandler implementation which calls the python-ldap API. - - Note, the python-ldap API requires all string values to be UTF-8 encoded. - The KeystoneLDAPHandler enforces this prior to invoking the methods in this - class. - - """ - - def __init__(self, conn=None): - super(PythonLDAPHandler, self).__init__(conn=conn) - - def connect(self, url, page_size=0, alias_dereferencing=None, - use_tls=False, tls_cacertfile=None, tls_cacertdir=None, - tls_req_cert='demand', chase_referrals=None, debug_level=None, - use_pool=None, pool_size=None, pool_retry_max=None, - pool_retry_delay=None, pool_conn_timeout=None, - pool_conn_lifetime=None): - - _common_ldap_initialization(url=url, - use_tls=use_tls, - tls_cacertfile=tls_cacertfile, - tls_cacertdir=tls_cacertdir, - tls_req_cert=tls_req_cert, - debug_level=debug_level) - - self.conn = ldap.initialize(url) - self.conn.protocol_version = ldap.VERSION3 - - if alias_dereferencing is not None: - self.conn.set_option(ldap.OPT_DEREF, alias_dereferencing) - self.page_size = page_size - - if use_tls: - self.conn.start_tls_s() - - if chase_referrals is not None: - self.conn.set_option(ldap.OPT_REFERRALS, int(chase_referrals)) - - def set_option(self, option, invalue): - return self.conn.set_option(option, invalue) - - def get_option(self, option): - return self.conn.get_option(option) - - def simple_bind_s(self, who='', cred='', - serverctrls=None, clientctrls=None): - return self.conn.simple_bind_s(who, cred, serverctrls, clientctrls) - - def unbind_s(self): - return self.conn.unbind_s() - - def add_s(self, dn, modlist): - return self.conn.add_s(dn, modlist) - - def search_s(self, base, scope, - filterstr='(objectClass=*)', attrlist=None, attrsonly=0): - return self.conn.search_s(base, scope, filterstr, - attrlist, attrsonly) - - def search_ext(self, base, scope, - filterstr='(objectClass=*)', attrlist=None, attrsonly=0, - serverctrls=None, clientctrls=None, - 
timeout=-1, sizelimit=0): - return self.conn.search_ext(base, scope, - filterstr, attrlist, attrsonly, - serverctrls, clientctrls, - timeout, sizelimit) - - def result3(self, msgid=ldap.RES_ANY, all=1, timeout=None, - resp_ctrl_classes=None): - # The resp_ctrl_classes parameter is a recent addition to the - # API. It defaults to None. We do not anticipate using it. - # To run with older versions of python-ldap we do not pass it. - return self.conn.result3(msgid, all, timeout) - - def modify_s(self, dn, modlist): - return self.conn.modify_s(dn, modlist) - - def delete_s(self, dn): - return self.conn.delete_s(dn) - - def delete_ext_s(self, dn, serverctrls=None, clientctrls=None): - return self.conn.delete_ext_s(dn, serverctrls, clientctrls) - - -def _common_ldap_initialization(url, use_tls=False, tls_cacertfile=None, - tls_cacertdir=None, tls_req_cert=None, - debug_level=None): - """LDAP initialization for PythonLDAPHandler and PooledLDAPHandler.""" - LOG.debug("LDAP init: url=%s", url) - LOG.debug('LDAP init: use_tls=%s tls_cacertfile=%s tls_cacertdir=%s ' - 'tls_req_cert=%s tls_avail=%s', - use_tls, tls_cacertfile, tls_cacertdir, - tls_req_cert, ldap.TLS_AVAIL) - - if debug_level is not None: - ldap.set_option(ldap.OPT_DEBUG_LEVEL, debug_level) - - using_ldaps = url.lower().startswith("ldaps") - - if use_tls and using_ldaps: - raise AssertionError(_('Invalid TLS / LDAPS combination')) - - # The certificate trust options apply for both LDAPS and TLS. - if use_tls or using_ldaps: - if not ldap.TLS_AVAIL: - raise ValueError(_('Invalid LDAP TLS_AVAIL option: %s. TLS ' - 'not available') % ldap.TLS_AVAIL) - if tls_cacertfile: - # NOTE(topol) - # python ldap TLS does not verify CACERTFILE or CACERTDIR - # so we add some extra simple sanity check verification - # Also, setting these values globally (i.e. 
on the ldap object) - # works but these values are ignored when setting them on the - # connection - if not os.path.isfile(tls_cacertfile): - raise IOError(_("tls_cacertfile %s not found " - "or is not a file") % - tls_cacertfile) - ldap.set_option(ldap.OPT_X_TLS_CACERTFILE, tls_cacertfile) - elif tls_cacertdir: - # NOTE(topol) - # python ldap TLS does not verify CACERTFILE or CACERTDIR - # so we add some extra simple sanity check verification - # Also, setting these values globally (i.e. on the ldap object) - # works but these values are ignored when setting them on the - # connection - if not os.path.isdir(tls_cacertdir): - raise IOError(_("tls_cacertdir %s not found " - "or is not a directory") % - tls_cacertdir) - ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, tls_cacertdir) - if tls_req_cert in list(LDAP_TLS_CERTS.values()): - ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, tls_req_cert) - else: - LOG.debug("LDAP TLS: invalid TLS_REQUIRE_CERT Option=%s", - tls_req_cert) - - -class MsgId(list): - """Wrapper class to hold connection and msgid.""" - - pass - - -def use_conn_pool(func): - """Use this only for connection pool specific ldap API. - - This adds connection object to decorated API as next argument after self. - - """ - def wrapper(self, *args, **kwargs): - # assert isinstance(self, PooledLDAPHandler) - with self._get_pool_connection() as conn: - self._apply_options(conn) - return func(self, conn, *args, **kwargs) - return wrapper - - -class PooledLDAPHandler(LDAPHandler): - """LDAPHandler implementation which uses pooled connection manager. - - Pool specific configuration is defined in [ldap] section. - All other LDAP configuration is still used from [ldap] section - - Keystone LDAP authentication logic authenticates an end user using its DN - and password via LDAP bind to establish supplied password is correct. 
- This can fill up the pool quickly (as pool re-uses existing connection - based on its bind data) and would not leave space in pool for connection - re-use for other LDAP operations. - Now a separate pool can be established for those requests when related flag - 'use_auth_pool' is enabled. That pool can have its own size and - connection lifetime. Other pool attributes are shared between those pools. - If 'use_pool' is disabled, then 'use_auth_pool' does not matter. - If 'use_auth_pool' is not enabled, then connection pooling is not used for - those LDAP operations. - - Note, the python-ldap API requires all string values to be UTF-8 - encoded. The KeystoneLDAPHandler enforces this prior to invoking - the methods in this class. - - """ - - # Added here to allow override for testing - Connector = ldappool.StateConnector - auth_pool_prefix = 'auth_pool_' - - connection_pools = {} # static connector pool dict - - def __init__(self, conn=None, use_auth_pool=False): - super(PooledLDAPHandler, self).__init__(conn=conn) - self.who = '' - self.cred = '' - self.conn_options = {} # connection specific options - self.page_size = None - self.use_auth_pool = use_auth_pool - self.conn_pool = None - - def connect(self, url, page_size=0, alias_dereferencing=None, - use_tls=False, tls_cacertfile=None, tls_cacertdir=None, - tls_req_cert='demand', chase_referrals=None, debug_level=None, - use_pool=None, pool_size=None, pool_retry_max=None, - pool_retry_delay=None, pool_conn_timeout=None, - pool_conn_lifetime=None): - - _common_ldap_initialization(url=url, - use_tls=use_tls, - tls_cacertfile=tls_cacertfile, - tls_cacertdir=tls_cacertdir, - tls_req_cert=tls_req_cert, - debug_level=debug_level) - - self.page_size = page_size - - # Following two options are not added in common initialization as they - # need to follow a sequence in PythonLDAPHandler code. 
- if alias_dereferencing is not None: - self.set_option(ldap.OPT_DEREF, alias_dereferencing) - if chase_referrals is not None: - self.set_option(ldap.OPT_REFERRALS, int(chase_referrals)) - - if self.use_auth_pool: # separate pool when use_auth_pool enabled - pool_url = self.auth_pool_prefix + url - else: - pool_url = url - try: - self.conn_pool = self.connection_pools[pool_url] - except KeyError: - self.conn_pool = ldappool.ConnectionManager( - url, - size=pool_size, - retry_max=pool_retry_max, - retry_delay=pool_retry_delay, - timeout=pool_conn_timeout, - connector_cls=self.Connector, - use_tls=use_tls, - max_lifetime=pool_conn_lifetime) - self.connection_pools[pool_url] = self.conn_pool - - def set_option(self, option, invalue): - self.conn_options[option] = invalue - - def get_option(self, option): - value = self.conn_options.get(option) - # if option was not specified explicitly, then use connection default - # value for that option if there. - if value is None: - with self._get_pool_connection() as conn: - value = conn.get_option(option) - return value - - def _apply_options(self, conn): - # if connection has a lifetime, then it already has options specified - if conn.get_lifetime() > 30: - return - for option, invalue in self.conn_options.items(): - conn.set_option(option, invalue) - - def _get_pool_connection(self): - return self.conn_pool.connection(self.who, self.cred) - - def simple_bind_s(self, who='', cred='', - serverctrls=None, clientctrls=None): - # Not using use_conn_pool decorator here as this API takes cred as - # input. - self.who = who - self.cred = cred - with self._get_pool_connection() as conn: - self._apply_options(conn) - - def unbind_s(self): - # After connection generator is done `with` statement execution block - # connection is always released via finally block in ldappool. - # So this unbind is a no op. 
- pass - - @use_conn_pool - def add_s(self, conn, dn, modlist): - return conn.add_s(dn, modlist) - - @use_conn_pool - def search_s(self, conn, base, scope, - filterstr='(objectClass=*)', attrlist=None, attrsonly=0): - return conn.search_s(base, scope, filterstr, attrlist, - attrsonly) - - def search_ext(self, base, scope, - filterstr='(objectClass=*)', attrlist=None, attrsonly=0, - serverctrls=None, clientctrls=None, - timeout=-1, sizelimit=0): - """Asynchronous API to return a ``MsgId`` instance. - - The ``MsgId`` instance can be safely used in a call to ``result3()``. - - To work with ``result3()`` API in predictable manner, the same LDAP - connection is needed which originally provided the ``msgid``. So, this - method wraps the existing connection and ``msgid`` in a new ``MsgId`` - instance. The connection associated with ``search_ext`` is released - once last hard reference to the ``MsgId`` instance is freed. - - """ - conn_ctxt = self._get_pool_connection() - conn = conn_ctxt.__enter__() - try: - msgid = conn.search_ext(base, scope, - filterstr, attrlist, attrsonly, - serverctrls, clientctrls, - timeout, sizelimit) - except Exception: - conn_ctxt.__exit__(*sys.exc_info()) - raise - res = MsgId((conn, msgid)) - weakref.ref(res, functools.partial(conn_ctxt.__exit__, - None, None, None)) - return res - - def result3(self, msgid, all=1, timeout=None, - resp_ctrl_classes=None): - """This method is used to wait for and return result. - - This method returns the result of an operation previously initiated by - one of the LDAP asynchronous operation routines (eg search_ext()). It - returned an invocation identifier (a message id) upon successful - initiation of their operation. - - Input msgid is expected to be instance of class MsgId which has LDAP - session/connection used to execute search_ext and message idenfier. - - The connection associated with search_ext is released once last hard - reference to MsgId object is freed. 
This will happen when function - which requested msgId and used it in result3 exits. - - """ - conn, msg_id = msgid - return conn.result3(msg_id, all, timeout) - - @use_conn_pool - def modify_s(self, conn, dn, modlist): - return conn.modify_s(dn, modlist) - - @use_conn_pool - def delete_s(self, conn, dn): - return conn.delete_s(dn) - - @use_conn_pool - def delete_ext_s(self, conn, dn, serverctrls=None, clientctrls=None): - return conn.delete_ext_s(dn, serverctrls, clientctrls) - - -class KeystoneLDAPHandler(LDAPHandler): - """Convert data types and perform logging. - - This LDAP inteface wraps the python-ldap based interfaces. The - python-ldap interfaces require string values encoded in UTF-8. The - OpenStack logging framework at the time of this writing is not - capable of accepting strings encoded in UTF-8, the log functions - will throw decoding errors if a non-ascii character appears in a - string. - - Prior to the call Python data types are converted to a string - representation as required by the LDAP APIs. - - Then logging is performed so we can track what is being - sent/received from LDAP. Also the logging filters security - sensitive items (i.e. passwords). - - Then the string values are encoded into UTF-8. - - Then the LDAP API entry point is invoked. - - Data returned from the LDAP call is converted back from UTF-8 - encoded strings into the Python data type used internally in - OpenStack. 
- - """ - - def __init__(self, conn=None): - super(KeystoneLDAPHandler, self).__init__(conn=conn) - self.page_size = 0 - - def __enter__(self): - return self - - def _disable_paging(self): - # Disable the pagination from now on - self.page_size = 0 - - def connect(self, url, page_size=0, alias_dereferencing=None, - use_tls=False, tls_cacertfile=None, tls_cacertdir=None, - tls_req_cert='demand', chase_referrals=None, debug_level=None, - use_pool=None, pool_size=None, - pool_retry_max=None, pool_retry_delay=None, - pool_conn_timeout=None, pool_conn_lifetime=None): - self.page_size = page_size - return self.conn.connect(url, page_size, alias_dereferencing, - use_tls, tls_cacertfile, tls_cacertdir, - tls_req_cert, chase_referrals, - debug_level=debug_level, - use_pool=use_pool, - pool_size=pool_size, - pool_retry_max=pool_retry_max, - pool_retry_delay=pool_retry_delay, - pool_conn_timeout=pool_conn_timeout, - pool_conn_lifetime=pool_conn_lifetime) - - def set_option(self, option, invalue): - return self.conn.set_option(option, invalue) - - def get_option(self, option): - return self.conn.get_option(option) - - def simple_bind_s(self, who='', cred='', - serverctrls=None, clientctrls=None): - LOG.debug("LDAP bind: who=%s", who) - who_utf8 = utf8_encode(who) - cred_utf8 = utf8_encode(cred) - return self.conn.simple_bind_s(who_utf8, cred_utf8, - serverctrls=serverctrls, - clientctrls=clientctrls) - - def unbind_s(self): - LOG.debug("LDAP unbind") - return self.conn.unbind_s() - - def add_s(self, dn, modlist): - ldap_attrs = [(kind, [py2ldap(x) for x in safe_iter(values)]) - for kind, values in modlist] - logging_attrs = [(kind, values - if kind != 'userPassword' - else ['****']) - for kind, values in ldap_attrs] - LOG.debug('LDAP add: dn=%s attrs=%s', - dn, logging_attrs) - dn_utf8 = utf8_encode(dn) - ldap_attrs_utf8 = [(kind, [utf8_encode(x) for x in safe_iter(values)]) - for kind, values in ldap_attrs] - return self.conn.add_s(dn_utf8, ldap_attrs_utf8) - - def 
search_s(self, base, scope, - filterstr='(objectClass=*)', attrlist=None, attrsonly=0): - # NOTE(morganfainberg): Remove "None" singletons from this list, which - # allows us to set mapped attributes to "None" as defaults in config. - # Without this filtering, the ldap query would raise a TypeError since - # attrlist is expected to be an iterable of strings. - if attrlist is not None: - attrlist = [attr for attr in attrlist if attr is not None] - LOG.debug('LDAP search: base=%s scope=%s filterstr=%s ' - 'attrs=%s attrsonly=%s', - base, scope, filterstr, attrlist, attrsonly) - if self.page_size: - ldap_result = self._paged_search_s(base, scope, - filterstr, attrlist) - else: - base_utf8 = utf8_encode(base) - filterstr_utf8 = utf8_encode(filterstr) - if attrlist is None: - attrlist_utf8 = None - else: - attrlist_utf8 = list(map(utf8_encode, attrlist)) - ldap_result = self.conn.search_s(base_utf8, scope, - filterstr_utf8, - attrlist_utf8, attrsonly) - - py_result = convert_ldap_result(ldap_result) - - return py_result - - def search_ext(self, base, scope, - filterstr='(objectClass=*)', attrlist=None, attrsonly=0, - serverctrls=None, clientctrls=None, - timeout=-1, sizelimit=0): - if attrlist is not None: - attrlist = [attr for attr in attrlist if attr is not None] - LOG.debug('LDAP search_ext: base=%s scope=%s filterstr=%s ' - 'attrs=%s attrsonly=%s ' - 'serverctrls=%s clientctrls=%s timeout=%s sizelimit=%s', - base, scope, filterstr, attrlist, attrsonly, - serverctrls, clientctrls, timeout, sizelimit) - return self.conn.search_ext(base, scope, - filterstr, attrlist, attrsonly, - serverctrls, clientctrls, - timeout, sizelimit) - - def _paged_search_s(self, base, scope, filterstr, attrlist=None): - res = [] - use_old_paging_api = False - # The API for the simple paged results control changed between - # python-ldap 2.3 and 2.4. We need to detect the capabilities - # of the python-ldap version we are using. 
- if hasattr(ldap, 'LDAP_CONTROL_PAGE_OID'): - use_old_paging_api = True - lc = ldap.controls.SimplePagedResultsControl( - controlType=ldap.LDAP_CONTROL_PAGE_OID, - criticality=True, - controlValue=(self.page_size, '')) - page_ctrl_oid = ldap.LDAP_CONTROL_PAGE_OID - else: - lc = ldap.controls.libldap.SimplePagedResultsControl( - criticality=True, - size=self.page_size, - cookie='') - page_ctrl_oid = ldap.controls.SimplePagedResultsControl.controlType - - base_utf8 = utf8_encode(base) - filterstr_utf8 = utf8_encode(filterstr) - if attrlist is None: - attrlist_utf8 = None - else: - attrlist = [attr for attr in attrlist if attr is not None] - attrlist_utf8 = list(map(utf8_encode, attrlist)) - msgid = self.conn.search_ext(base_utf8, - scope, - filterstr_utf8, - attrlist_utf8, - serverctrls=[lc]) - # Endless loop request pages on ldap server until it has no data - while True: - # Request to the ldap server a page with 'page_size' entries - rtype, rdata, rmsgid, serverctrls = self.conn.result3(msgid) - # Receive the data - res.extend(rdata) - pctrls = [c for c in serverctrls - if c.controlType == page_ctrl_oid] - if pctrls: - # LDAP server supports pagination - if use_old_paging_api: - est, cookie = pctrls[0].controlValue - lc.controlValue = (self.page_size, cookie) - else: - cookie = lc.cookie = pctrls[0].cookie - - if cookie: - # There is more data still on the server - # so we request another page - msgid = self.conn.search_ext(base_utf8, - scope, - filterstr_utf8, - attrlist_utf8, - serverctrls=[lc]) - else: - # Exit condition no more data on server - break - else: - LOG.warning(_LW('LDAP Server does not support paging. 
' - 'Disable paging in keystone.conf to ' - 'avoid this message.')) - self._disable_paging() - break - return res - - def result3(self, msgid=ldap.RES_ANY, all=1, timeout=None, - resp_ctrl_classes=None): - ldap_result = self.conn.result3(msgid, all, timeout, resp_ctrl_classes) - - LOG.debug('LDAP result3: msgid=%s all=%s timeout=%s ' - 'resp_ctrl_classes=%s ldap_result=%s', - msgid, all, timeout, resp_ctrl_classes, ldap_result) - - # ldap_result returned from result3 is a tuple of - # (rtype, rdata, rmsgid, serverctrls). We don't need use of these, - # except rdata. - rtype, rdata, rmsgid, serverctrls = ldap_result - py_result = convert_ldap_result(rdata) - return py_result - - def modify_s(self, dn, modlist): - ldap_modlist = [ - (op, kind, (None if values is None - else [py2ldap(x) for x in safe_iter(values)])) - for op, kind, values in modlist] - - logging_modlist = [(op, kind, (values if kind != 'userPassword' - else ['****'])) - for op, kind, values in ldap_modlist] - LOG.debug('LDAP modify: dn=%s modlist=%s', - dn, logging_modlist) - - dn_utf8 = utf8_encode(dn) - ldap_modlist_utf8 = [ - (op, kind, (None if values is None - else [utf8_encode(x) for x in safe_iter(values)])) - for op, kind, values in ldap_modlist] - return self.conn.modify_s(dn_utf8, ldap_modlist_utf8) - - def delete_s(self, dn): - LOG.debug("LDAP delete: dn=%s", dn) - dn_utf8 = utf8_encode(dn) - return self.conn.delete_s(dn_utf8) - - def delete_ext_s(self, dn, serverctrls=None, clientctrls=None): - LOG.debug('LDAP delete_ext: dn=%s serverctrls=%s clientctrls=%s', - dn, serverctrls, clientctrls) - dn_utf8 = utf8_encode(dn) - return self.conn.delete_ext_s(dn_utf8, serverctrls, clientctrls) - - def __exit__(self, exc_type, exc_val, exc_tb): - self.unbind_s() - - -_HANDLERS = {} - - -def register_handler(prefix, handler): - _HANDLERS[prefix] = handler - - -def _get_connection(conn_url, use_pool=False, use_auth_pool=False): - for prefix, handler in _HANDLERS.items(): - if 
conn_url.startswith(prefix): - return handler() - - if use_pool: - return PooledLDAPHandler(use_auth_pool=use_auth_pool) - else: - return PythonLDAPHandler() - - -def filter_entity(entity_ref): - """Filter out private items in an entity dict. - - :param entity_ref: the entity dictionary. The 'dn' field will be removed. - 'dn' is used in LDAP, but should not be returned to the user. This - value may be modified. - - :returns: entity_ref - - """ - if entity_ref: - entity_ref.pop('dn', None) - return entity_ref - - -class BaseLdap(object): - DEFAULT_OU = None - DEFAULT_STRUCTURAL_CLASSES = None - DEFAULT_ID_ATTR = 'cn' - DEFAULT_OBJECTCLASS = None - DEFAULT_FILTER = None - DEFAULT_EXTRA_ATTR_MAPPING = [] - DUMB_MEMBER_DN = 'cn=dumb,dc=nonexistent' - NotFound = None - notfound_arg = None - options_name = None - model = None - attribute_options_names = {} - immutable_attrs = [] - attribute_ignore = [] - tree_dn = None - - def __init__(self, conf): - self.LDAP_URL = conf.ldap.url - self.LDAP_USER = conf.ldap.user - self.LDAP_PASSWORD = conf.ldap.password - self.LDAP_SCOPE = ldap_scope(conf.ldap.query_scope) - self.alias_dereferencing = parse_deref(conf.ldap.alias_dereferencing) - self.page_size = conf.ldap.page_size - self.use_tls = conf.ldap.use_tls - self.tls_cacertfile = conf.ldap.tls_cacertfile - self.tls_cacertdir = conf.ldap.tls_cacertdir - self.tls_req_cert = parse_tls_cert(conf.ldap.tls_req_cert) - self.attribute_mapping = {} - self.chase_referrals = conf.ldap.chase_referrals - self.debug_level = conf.ldap.debug_level - - # LDAP Pool specific attribute - self.use_pool = conf.ldap.use_pool - self.pool_size = conf.ldap.pool_size - self.pool_retry_max = conf.ldap.pool_retry_max - self.pool_retry_delay = conf.ldap.pool_retry_delay - self.pool_conn_timeout = conf.ldap.pool_connection_timeout - self.pool_conn_lifetime = conf.ldap.pool_connection_lifetime - - # End user authentication pool specific config attributes - self.use_auth_pool = self.use_pool and 
conf.ldap.use_auth_pool - self.auth_pool_size = conf.ldap.auth_pool_size - self.auth_pool_conn_lifetime = conf.ldap.auth_pool_connection_lifetime - - if self.options_name is not None: - self.suffix = conf.ldap.suffix - dn = '%s_tree_dn' % self.options_name - self.tree_dn = (getattr(conf.ldap, dn) - or '%s,%s' % (self.DEFAULT_OU, self.suffix)) - - idatt = '%s_id_attribute' % self.options_name - self.id_attr = getattr(conf.ldap, idatt) or self.DEFAULT_ID_ATTR - - objclass = '%s_objectclass' % self.options_name - self.object_class = (getattr(conf.ldap, objclass) - or self.DEFAULT_OBJECTCLASS) - - for k, v in self.attribute_options_names.items(): - v = '%s_%s_attribute' % (self.options_name, v) - self.attribute_mapping[k] = getattr(conf.ldap, v) - - attr_mapping_opt = ('%s_additional_attribute_mapping' % - self.options_name) - attr_mapping = (getattr(conf.ldap, attr_mapping_opt) - or self.DEFAULT_EXTRA_ATTR_MAPPING) - self.extra_attr_mapping = self._parse_extra_attrs(attr_mapping) - - ldap_filter = '%s_filter' % self.options_name - self.ldap_filter = getattr(conf.ldap, - ldap_filter) or self.DEFAULT_FILTER - - allow_create = '%s_allow_create' % self.options_name - self.allow_create = getattr(conf.ldap, allow_create) - - allow_update = '%s_allow_update' % self.options_name - self.allow_update = getattr(conf.ldap, allow_update) - - allow_delete = '%s_allow_delete' % self.options_name - self.allow_delete = getattr(conf.ldap, allow_delete) - - member_attribute = '%s_member_attribute' % self.options_name - self.member_attribute = getattr(conf.ldap, member_attribute, None) - - self.structural_classes = self.DEFAULT_STRUCTURAL_CLASSES - - if self.notfound_arg is None: - self.notfound_arg = self.options_name + '_id' - - attribute_ignore = '%s_attribute_ignore' % self.options_name - self.attribute_ignore = getattr(conf.ldap, attribute_ignore) - - self.use_dumb_member = conf.ldap.use_dumb_member - self.dumb_member = (conf.ldap.dumb_member or - self.DUMB_MEMBER_DN) - - 
self.subtree_delete_enabled = conf.ldap.allow_subtree_delete - - def _not_found(self, object_id): - if self.NotFound is None: - return exception.NotFound(target=object_id) - else: - return self.NotFound(**{self.notfound_arg: object_id}) - - def _parse_extra_attrs(self, option_list): - mapping = {} - for item in option_list: - try: - ldap_attr, attr_map = item.split(':') - except Exception: - LOG.warning(_LW( - 'Invalid additional attribute mapping: "%s". ' - 'Format must be :'), - item) - continue - mapping[ldap_attr] = attr_map - return mapping - - def _is_dumb_member(self, member_dn): - """Checks that member is a dumb member. - - :param member_dn: DN of member to be checked. - """ - return (self.use_dumb_member - and is_dn_equal(member_dn, self.dumb_member)) - - def get_connection(self, user=None, password=None, end_user_auth=False): - use_pool = self.use_pool - pool_size = self.pool_size - pool_conn_lifetime = self.pool_conn_lifetime - - if end_user_auth: - if not self.use_auth_pool: - use_pool = False - else: - pool_size = self.auth_pool_size - pool_conn_lifetime = self.auth_pool_conn_lifetime - - conn = _get_connection(self.LDAP_URL, use_pool, - use_auth_pool=end_user_auth) - - conn = KeystoneLDAPHandler(conn=conn) - - conn.connect(self.LDAP_URL, - page_size=self.page_size, - alias_dereferencing=self.alias_dereferencing, - use_tls=self.use_tls, - tls_cacertfile=self.tls_cacertfile, - tls_cacertdir=self.tls_cacertdir, - tls_req_cert=self.tls_req_cert, - chase_referrals=self.chase_referrals, - debug_level=self.debug_level, - use_pool=use_pool, - pool_size=pool_size, - pool_retry_max=self.pool_retry_max, - pool_retry_delay=self.pool_retry_delay, - pool_conn_timeout=self.pool_conn_timeout, - pool_conn_lifetime=pool_conn_lifetime - ) - - if user is None: - user = self.LDAP_USER - - if password is None: - password = self.LDAP_PASSWORD - - # not all LDAP servers require authentication, so we don't bind - # if we don't have any user/pass - if user and password: - 
conn.simple_bind_s(user, password) - - return conn - - def _id_to_dn_string(self, object_id): - return u'%s=%s,%s' % (self.id_attr, - ldap.dn.escape_dn_chars( - six.text_type(object_id)), - self.tree_dn) - - def _id_to_dn(self, object_id): - if self.LDAP_SCOPE == ldap.SCOPE_ONELEVEL: - return self._id_to_dn_string(object_id) - with self.get_connection() as conn: - search_result = conn.search_s( - self.tree_dn, self.LDAP_SCOPE, - u'(&(%(id_attr)s=%(id)s)(objectclass=%(objclass)s))' % - {'id_attr': self.id_attr, - 'id': ldap.filter.escape_filter_chars( - six.text_type(object_id)), - 'objclass': self.object_class}, - attrlist=DN_ONLY) - if search_result: - dn, attrs = search_result[0] - return dn - else: - return self._id_to_dn_string(object_id) - - @staticmethod - def _dn_to_id(dn): - return utf8_decode(ldap.dn.str2dn(utf8_encode(dn))[0][0][1]) - - def _ldap_res_to_model(self, res): - # LDAP attribute names may be returned in a different case than - # they are defined in the mapping, so we need to check for keys - # in a case-insensitive way. We use the case specified in the - # mapping for the model to ensure we have a predictable way of - # retrieving values later. - lower_res = {k.lower(): v for k, v in res[1].items()} - - id_attrs = lower_res.get(self.id_attr.lower()) - if not id_attrs: - message = _('ID attribute %(id_attr)s not found in LDAP ' - 'object %(dn)s') % ({'id_attr': self.id_attr, - 'dn': res[0]}) - raise exception.NotFound(message=message) - if len(id_attrs) > 1: - # FIXME(gyee): if this is a multi-value attribute and it has - # multiple values, we can't use it as ID. Retain the dn_to_id - # logic here so it does not potentially break existing - # deployments. We need to fix our read-write LDAP logic so - # it does not get the ID from DN. - message = _LW('ID attribute %(id_attr)s for LDAP object %(dn)s ' - 'has multiple values and therefore cannot be used ' - 'as an ID. 
Will get the ID from DN instead') % ( - {'id_attr': self.id_attr, - 'dn': res[0]}) - LOG.warning(message) - id_val = self._dn_to_id(res[0]) - else: - id_val = id_attrs[0] - obj = self.model(id=id_val) - - for k in obj.known_keys: - if k in self.attribute_ignore: - continue - - try: - map_attr = self.attribute_mapping.get(k, k) - if map_attr is None: - # Ignore attributes that are mapped to None. - continue - - v = lower_res[map_attr.lower()] - except KeyError: # nosec - # Didn't find the attr, so don't add it. - pass - else: - try: - obj[k] = v[0] - except IndexError: - obj[k] = None - - return obj - - def check_allow_create(self): - if not self.allow_create: - action = _('LDAP %s create') % self.options_name - raise exception.ForbiddenAction(action=action) - - def check_allow_update(self): - if not self.allow_update: - action = _('LDAP %s update') % self.options_name - raise exception.ForbiddenAction(action=action) - - def check_allow_delete(self): - if not self.allow_delete: - action = _('LDAP %s delete') % self.options_name - raise exception.ForbiddenAction(action=action) - - def affirm_unique(self, values): - if values.get('name') is not None: - try: - self.get_by_name(values['name']) - except exception.NotFound: # nosec - # Didn't find it so it's unique, good. - pass - else: - raise exception.Conflict(type=self.options_name, - details=_('Duplicate name, %s.') % - values['name']) - - if values.get('id') is not None: - try: - self.get(values['id']) - except exception.NotFound: # nosec - # Didn't find it, so it's unique, good. 
- pass - else: - raise exception.Conflict(type=self.options_name, - details=_('Duplicate ID, %s.') % - values['id']) - - def create(self, values): - self.affirm_unique(values) - object_classes = self.structural_classes + [self.object_class] - attrs = [('objectClass', object_classes)] - for k, v in values.items(): - if k in self.attribute_ignore: - continue - if k == 'id': - # no need to check if v is None as 'id' will always have - # a value - attrs.append((self.id_attr, [v])) - elif v is not None: - attr_type = self.attribute_mapping.get(k, k) - if attr_type is not None: - attrs.append((attr_type, [v])) - extra_attrs = [attr for attr, name - in self.extra_attr_mapping.items() - if name == k] - for attr in extra_attrs: - attrs.append((attr, [v])) - - if 'groupOfNames' in object_classes and self.use_dumb_member: - attrs.append(('member', [self.dumb_member])) - with self.get_connection() as conn: - conn.add_s(self._id_to_dn(values['id']), attrs) - return values - - def _ldap_get(self, object_id, ldap_filter=None): - query = (u'(&(%(id_attr)s=%(id)s)' - u'%(filter)s' - u'(objectClass=%(object_class)s))' - % {'id_attr': self.id_attr, - 'id': ldap.filter.escape_filter_chars( - six.text_type(object_id)), - 'filter': (ldap_filter or self.ldap_filter or ''), - 'object_class': self.object_class}) - with self.get_connection() as conn: - try: - attrs = list(set(([self.id_attr] + - list(self.attribute_mapping.values()) + - list(self.extra_attr_mapping.keys())))) - res = conn.search_s(self.tree_dn, - self.LDAP_SCOPE, - query, - attrs) - except ldap.NO_SUCH_OBJECT: - return None - try: - return res[0] - except IndexError: - return None - - def _ldap_get_limited(self, base, scope, filterstr, attrlist, sizelimit): - with self.get_connection() as conn: - try: - control = ldap.controls.libldap.SimplePagedResultsControl( - criticality=True, - size=sizelimit, - cookie='') - msgid = conn.search_ext(base, scope, filterstr, attrlist, - serverctrls=[control]) - rdata = conn.result3(msgid) 
- return rdata - except ldap.NO_SUCH_OBJECT: - return [] - - @driver_hints.truncated - def _ldap_get_all(self, hints, ldap_filter=None): - query = u'(&%s(objectClass=%s)(%s=*))' % ( - ldap_filter or self.ldap_filter or '', - self.object_class, - self.id_attr) - sizelimit = 0 - attrs = list(set(([self.id_attr] + - list(self.attribute_mapping.values()) + - list(self.extra_attr_mapping.keys())))) - if hints.limit: - sizelimit = hints.limit['limit'] - return self._ldap_get_limited(self.tree_dn, - self.LDAP_SCOPE, - query, - attrs, - sizelimit) - with self.get_connection() as conn: - try: - return conn.search_s(self.tree_dn, - self.LDAP_SCOPE, - query, - attrs) - except ldap.NO_SUCH_OBJECT: - return [] - - def _ldap_get_list(self, search_base, scope, query_params=None, - attrlist=None): - query = u'(objectClass=%s)' % self.object_class - if query_params: - - def calc_filter(attrname, value): - val_esc = ldap.filter.escape_filter_chars(value) - return '(%s=%s)' % (attrname, val_esc) - - query = (u'(&%s%s)' % - (query, ''.join([calc_filter(k, v) for k, v in - query_params.items()]))) - with self.get_connection() as conn: - return conn.search_s(search_base, scope, query, attrlist) - - def get(self, object_id, ldap_filter=None): - res = self._ldap_get(object_id, ldap_filter) - if res is None: - raise self._not_found(object_id) - else: - return self._ldap_res_to_model(res) - - def get_by_name(self, name, ldap_filter=None): - query = (u'(%s=%s)' % (self.attribute_mapping['name'], - ldap.filter.escape_filter_chars( - six.text_type(name)))) - res = self.get_all(query) - try: - return res[0] - except IndexError: - raise self._not_found(name) - - def get_all(self, ldap_filter=None, hints=None): - hints = hints or driver_hints.Hints() - return [self._ldap_res_to_model(x) - for x in self._ldap_get_all(hints, ldap_filter)] - - def update(self, object_id, values, old_obj=None): - if old_obj is None: - old_obj = self.get(object_id) - - modlist = [] - for k, v in values.items(): - if k 
== 'id': - # id can't be modified. - continue - - if k in self.attribute_ignore: - - # Handle 'enabled' specially since can't disable if ignored. - if k == 'enabled' and (not v): - action = _("Disabling an entity where the 'enable' " - "attribute is ignored by configuration.") - raise exception.ForbiddenAction(action=action) - - continue - - # attribute value has not changed - if k in old_obj and old_obj[k] == v: - continue - - if k in self.immutable_attrs: - msg = (_("Cannot change %(option_name)s %(attr)s") % - {'option_name': self.options_name, 'attr': k}) - raise exception.ValidationError(msg) - - if v is None: - if old_obj.get(k) is not None: - modlist.append((ldap.MOD_DELETE, - self.attribute_mapping.get(k, k), - None)) - continue - - current_value = old_obj.get(k) - if current_value is None: - op = ldap.MOD_ADD - modlist.append((op, self.attribute_mapping.get(k, k), [v])) - elif current_value != v: - op = ldap.MOD_REPLACE - modlist.append((op, self.attribute_mapping.get(k, k), [v])) - - if modlist: - with self.get_connection() as conn: - try: - conn.modify_s(self._id_to_dn(object_id), modlist) - except ldap.NO_SUCH_OBJECT: - raise self._not_found(object_id) - - return self.get(object_id) - - def delete(self, object_id): - with self.get_connection() as conn: - try: - conn.delete_s(self._id_to_dn(object_id)) - except ldap.NO_SUCH_OBJECT: - raise self._not_found(object_id) - - def delete_tree(self, object_id): - tree_delete_control = ldap.controls.LDAPControl(CONTROL_TREEDELETE, - 0, - None) - with self.get_connection() as conn: - try: - conn.delete_ext_s(self._id_to_dn(object_id), - serverctrls=[tree_delete_control]) - except ldap.NO_SUCH_OBJECT: - raise self._not_found(object_id) - except ldap.NOT_ALLOWED_ON_NONLEAF: - # Most LDAP servers do not support the tree_delete_control. 
- # In these servers, the usual idiom is to first perform a - # search to get the entries to delete, then delete them in - # in order of child to parent, since LDAP forbids the - # deletion of a parent entry before deleting the children - # of that parent. The simplest way to do that is to delete - # the entries in order of the length of the DN, from longest - # to shortest DN. - dn = self._id_to_dn(object_id) - scope = ldap.SCOPE_SUBTREE - # With some directory servers, an entry with objectclass - # ldapsubentry will not be returned unless it is explicitly - # requested, by specifying the objectclass in the search - # filter. We must specify this, with objectclass=*, in an - # LDAP filter OR clause, in order to return all entries - filt = '(|(objectclass=*)(objectclass=ldapsubentry))' - # We only need the DNs of the entries. Since no attributes - # will be returned, we do not have to specify attrsonly=1. - entries = conn.search_s(dn, scope, filt, attrlist=DN_ONLY) - if entries: - for dn in sorted((e[0] for e in entries), - key=len, reverse=True): - conn.delete_s(dn) - else: - LOG.debug('No entries in LDAP subtree %s', dn) - - def add_member(self, member_dn, member_list_dn): - """Add member to the member list. - - :param member_dn: DN of member to be added. - :param member_list_dn: DN of group to which the - member will be added. - - :raises keystone.exception.Conflict: If the user was already a member. - :raises self.NotFound: If the group entry didn't exist. - """ - with self.get_connection() as conn: - try: - mod = (ldap.MOD_ADD, self.member_attribute, member_dn) - conn.modify_s(member_list_dn, [mod]) - except ldap.TYPE_OR_VALUE_EXISTS: - raise exception.Conflict(_('Member %(member)s ' - 'is already a member' - ' of group %(group)s') % { - 'member': member_dn, - 'group': member_list_dn}) - except ldap.NO_SUCH_OBJECT: - raise self._not_found(member_list_dn) - - def remove_member(self, member_dn, member_list_dn): - """Remove member from the member list. 
- - :param member_dn: DN of member to be removed. - :param member_list_dn: DN of group from which the - member will be removed. - - :raises self.NotFound: If the group entry didn't exist. - :raises ldap.NO_SUCH_ATTRIBUTE: If the user wasn't a member. - """ - with self.get_connection() as conn: - try: - mod = (ldap.MOD_DELETE, self.member_attribute, member_dn) - conn.modify_s(member_list_dn, [mod]) - except ldap.NO_SUCH_OBJECT: - raise self._not_found(member_list_dn) - - def _delete_tree_nodes(self, search_base, scope, query_params=None): - query = u'(objectClass=%s)' % self.object_class - if query_params: - query = (u'(&%s%s)' % - (query, ''.join(['(%s=%s)' - % (k, ldap.filter.escape_filter_chars(v)) - for k, v in - query_params.items()]))) - not_deleted_nodes = [] - with self.get_connection() as conn: - try: - nodes = conn.search_s(search_base, scope, query, - attrlist=DN_ONLY) - except ldap.NO_SUCH_OBJECT: - LOG.debug('Could not find entry with dn=%s', search_base) - raise self._not_found(self._dn_to_id(search_base)) - else: - for node_dn, _t in nodes: - try: - conn.delete_s(node_dn) - except ldap.NO_SUCH_OBJECT: - not_deleted_nodes.append(node_dn) - - if not_deleted_nodes: - LOG.warning(_LW("When deleting entries for %(search_base)s, " - "could not delete nonexistent entries " - "%(entries)s%(dots)s"), - {'search_base': search_base, - 'entries': not_deleted_nodes[:3], - 'dots': '...' if len(not_deleted_nodes) > 3 else ''}) - - def filter_query(self, hints, query=None): - """Applies filtering to a query. - - :param hints: contains the list of filters, which may be None, - indicating that there are no filters to be applied. - If it's not None, then any filters satisfied here will be - removed so that the caller will know if any filters - remain to be applied. - :param query: LDAP query into which to include filters - - :returns query: LDAP query, updated with any filters satisfied - - """ - def build_filter(filter_, hints): - """Build a filter for the query. 
- - :param filter_: the dict that describes this filter - :param hints: contains the list of filters yet to be satisfied. - - :returns query: LDAP query term to be added - - """ - ldap_attr = self.attribute_mapping[filter_['name']] - val_esc = ldap.filter.escape_filter_chars(filter_['value']) - - if filter_['case_sensitive']: - # NOTE(henry-nash): Although dependent on the schema being - # used, most LDAP attributes are configured with case - # insensitive matching rules, so we'll leave this to the - # controller to filter. - return - - if filter_['name'] == 'enabled': - # NOTE(henry-nash): Due to the different options for storing - # the enabled attribute (e,g, emulated or not), for now we - # don't try and filter this at the driver level - we simply - # leave the filter to be handled by the controller. It seems - # unlikley that this will cause a signifcant performance - # issue. - return - - # TODO(henry-nash): Currently there are no booleans (other than - # 'enabled' that is handled above) on which you can filter. If - # there were, we would need to add special handling here to - # convert the booleans values to 'TRUE' and 'FALSE'. To do that - # we would also need to know which filter keys were actually - # booleans (this is related to bug #1411478). - - if filter_['comparator'] == 'equals': - query_term = (u'(%(attr)s=%(val)s)' - % {'attr': ldap_attr, 'val': val_esc}) - elif filter_['comparator'] == 'contains': - query_term = (u'(%(attr)s=*%(val)s*)' - % {'attr': ldap_attr, 'val': val_esc}) - elif filter_['comparator'] == 'startswith': - query_term = (u'(%(attr)s=%(val)s*)' - % {'attr': ldap_attr, 'val': val_esc}) - elif filter_['comparator'] == 'endswith': - query_term = (u'(%(attr)s=*%(val)s)' - % {'attr': ldap_attr, 'val': val_esc}) - else: - # It's a filter we don't understand, so let the caller - # work out if they need to do something with it. 
- return - - return query_term - - if query is None: - # make sure query is a string so the ldap filter is properly - # constructed from filter_list later - query = '' - - if hints is None: - return query - - filter_list = [] - satisfied_filters = [] - - for filter_ in hints.filters: - if filter_['name'] not in self.attribute_mapping: - continue - new_filter = build_filter(filter_, hints) - if new_filter is not None: - filter_list.append(new_filter) - satisfied_filters.append(filter_) - - if filter_list: - query = u'(&%s%s)' % (query, ''.join(filter_list)) - - # Remove satisfied filters, then the caller will know remaining filters - for filter_ in satisfied_filters: - hints.filters.remove(filter_) - - return query - - -class EnabledEmuMixIn(BaseLdap): - """Emulates boolean 'enabled' attribute if turned on. - - Creates a group holding all enabled objects of this class, all missing - objects are considered disabled. - - Options: - - * $name_enabled_emulation - boolean, on/off - * $name_enabled_emulation_dn - DN of that group, default is - cn=enabled_${name}s,${tree_dn} - * $name_enabled_emulation_use_group_config - boolean, on/off - - Where ${name}s is the plural of self.options_name ('users' or 'tenants'), - ${tree_dn} is self.tree_dn. 
- """ - - DEFAULT_GROUP_OBJECTCLASS = 'groupOfNames' - DEFAULT_MEMBER_ATTRIBUTE = 'member' - - def __init__(self, conf): - super(EnabledEmuMixIn, self).__init__(conf) - enabled_emulation = '%s_enabled_emulation' % self.options_name - self.enabled_emulation = getattr(conf.ldap, enabled_emulation) - - enabled_emulation_dn = '%s_enabled_emulation_dn' % self.options_name - self.enabled_emulation_dn = getattr(conf.ldap, enabled_emulation_dn) - - use_group_config = ('%s_enabled_emulation_use_group_config' % - self.options_name) - self.use_group_config = getattr(conf.ldap, use_group_config) - - if not self.use_group_config: - self.member_attribute = self.DEFAULT_MEMBER_ATTRIBUTE - self.group_objectclass = self.DEFAULT_GROUP_OBJECTCLASS - else: - self.member_attribute = conf.ldap.group_member_attribute - self.group_objectclass = conf.ldap.group_objectclass - - if not self.enabled_emulation_dn: - naming_attr_name = 'cn' - naming_attr_value = 'enabled_%ss' % self.options_name - sub_vals = (naming_attr_name, naming_attr_value, self.tree_dn) - self.enabled_emulation_dn = '%s=%s,%s' % sub_vals - naming_attr = (naming_attr_name, [naming_attr_value]) - else: - # Extract the attribute name and value from the configured DN. 
- naming_dn = ldap.dn.str2dn(utf8_encode(self.enabled_emulation_dn)) - naming_rdn = naming_dn[0][0] - naming_attr = (utf8_decode(naming_rdn[0]), - utf8_decode(naming_rdn[1])) - self.enabled_emulation_naming_attr = naming_attr - - def _get_enabled(self, object_id, conn): - dn = self._id_to_dn(object_id) - query = '(%s=%s)' % (self.member_attribute, - ldap.filter.escape_filter_chars(dn)) - try: - enabled_value = conn.search_s(self.enabled_emulation_dn, - ldap.SCOPE_BASE, - query, attrlist=DN_ONLY) - except ldap.NO_SUCH_OBJECT: - return False - else: - return bool(enabled_value) - - def _add_enabled(self, object_id): - with self.get_connection() as conn: - if not self._get_enabled(object_id, conn): - modlist = [(ldap.MOD_ADD, - self.member_attribute, - [self._id_to_dn(object_id)])] - try: - conn.modify_s(self.enabled_emulation_dn, modlist) - except ldap.NO_SUCH_OBJECT: - attr_list = [('objectClass', [self.group_objectclass]), - (self.member_attribute, - [self._id_to_dn(object_id)]), - self.enabled_emulation_naming_attr] - if self.use_dumb_member: - attr_list[1][1].append(self.dumb_member) - conn.add_s(self.enabled_emulation_dn, attr_list) - - def _remove_enabled(self, object_id): - modlist = [(ldap.MOD_DELETE, - self.member_attribute, - [self._id_to_dn(object_id)])] - with self.get_connection() as conn: - try: - conn.modify_s(self.enabled_emulation_dn, modlist) - except (ldap.NO_SUCH_OBJECT, ldap.NO_SUCH_ATTRIBUTE): # nosec - # It's already gone, good. 
- pass - - def create(self, values): - if self.enabled_emulation: - enabled_value = values.pop('enabled', True) - ref = super(EnabledEmuMixIn, self).create(values) - if 'enabled' not in self.attribute_ignore: - if enabled_value: - self._add_enabled(ref['id']) - ref['enabled'] = enabled_value - return ref - else: - return super(EnabledEmuMixIn, self).create(values) - - def get(self, object_id, ldap_filter=None): - with self.get_connection() as conn: - ref = super(EnabledEmuMixIn, self).get(object_id, ldap_filter) - if ('enabled' not in self.attribute_ignore and - self.enabled_emulation): - ref['enabled'] = self._get_enabled(object_id, conn) - return ref - - def get_all(self, ldap_filter=None, hints=None): - hints = hints or driver_hints.Hints() - if 'enabled' not in self.attribute_ignore and self.enabled_emulation: - # had to copy BaseLdap.get_all here to ldap_filter by DN - tenant_list = [self._ldap_res_to_model(x) - for x in self._ldap_get_all(hints, ldap_filter) - if x[0] != self.enabled_emulation_dn] - with self.get_connection() as conn: - for tenant_ref in tenant_list: - tenant_ref['enabled'] = self._get_enabled( - tenant_ref['id'], conn) - return tenant_list - else: - return super(EnabledEmuMixIn, self).get_all(ldap_filter, hints) - - def update(self, object_id, values, old_obj=None): - if 'enabled' not in self.attribute_ignore and self.enabled_emulation: - data = values.copy() - enabled_value = data.pop('enabled', None) - ref = super(EnabledEmuMixIn, self).update(object_id, data, old_obj) - if enabled_value is not None: - if enabled_value: - self._add_enabled(object_id) - else: - self._remove_enabled(object_id) - ref['enabled'] = enabled_value - return ref - else: - return super(EnabledEmuMixIn, self).update( - object_id, values, old_obj) - - def delete(self, object_id): - if self.enabled_emulation: - self._remove_enabled(object_id) - super(EnabledEmuMixIn, self).delete(object_id) diff --git a/keystone-moon/keystone/common/manager.py 
b/keystone-moon/keystone/common/manager.py deleted file mode 100644 index 4ce9f2a6..00000000 --- a/keystone-moon/keystone/common/manager.py +++ /dev/null @@ -1,220 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import functools -import inspect -import time -import types - -from oslo_log import log -from oslo_log import versionutils -from oslo_utils import importutils -from oslo_utils import reflection -import six -import stevedore - -from keystone.i18n import _ - - -LOG = log.getLogger(__name__) - - -def response_truncated(f): - """Truncate the list returned by the wrapped function. - - This is designed to wrap Manager list_{entity} methods to ensure that - any list limits that are defined are passed to the driver layer. If a - hints list is provided, the wrapper will insert the relevant limit into - the hints so that the underlying driver call can try and honor it. If the - driver does truncate the response, it will update the 'truncated' attribute - in the 'limit' entry in the hints list, which enables the caller of this - function to know if truncation has taken place. If, however, the driver - layer is unable to perform truncation, the 'limit' entry is simply left in - the hints list for the caller to handle. - - A _get_list_limit() method is required to be present in the object class - hierarchy, which returns the limit for this backend to which we will - truncate. 
- - If a hints list is not provided in the arguments of the wrapped call then - any limits set in the config file are ignored. This allows internal use - of such wrapped methods where the entire data set is needed as input for - the calculations of some other API (e.g. get role assignments for a given - project). - - """ - @functools.wraps(f) - def wrapper(self, *args, **kwargs): - if kwargs.get('hints') is None: - return f(self, *args, **kwargs) - - list_limit = self.driver._get_list_limit() - if list_limit: - kwargs['hints'].set_limit(list_limit) - return f(self, *args, **kwargs) - return wrapper - - -def load_driver(namespace, driver_name, *args): - try: - driver_manager = stevedore.DriverManager(namespace, - driver_name, - invoke_on_load=True, - invoke_args=args) - return driver_manager.driver - except RuntimeError as e: - LOG.debug('Failed to load %r using stevedore: %s', driver_name, e) - # Ignore failure and continue on. - - driver = importutils.import_object(driver_name, *args) - - msg = (_( - 'Direct import of driver %(name)r is deprecated as of Liberty in ' - 'favor of its entrypoint from %(namespace)r and may be removed in ' - 'N.') % - {'name': driver_name, 'namespace': namespace}) - versionutils.report_deprecated_feature(LOG, msg) - - return driver - - -class _TraceMeta(type): - """A metaclass that, in trace mode, will log entry and exit of methods. - - This metaclass automatically wraps all methods on the class when - instantiated with a decorator that will log entry/exit from a method - when keystone is run in Trace log level. - """ - - @staticmethod - def wrapper(__f, __classname): - __argspec = inspect.getargspec(__f) - __fn_info = '%(module)s.%(classname)s.%(funcname)s' % { - 'module': inspect.getmodule(__f).__name__, - 'classname': __classname, - 'funcname': __f.__name__ - } - # NOTE(morganfainberg): Omit "cls" and "self" when printing trace logs - # the index can be calculated at wrap time rather than at runtime. 
- if __argspec.args and __argspec.args[0] in ('self', 'cls'): - __arg_idx = 1 - else: - __arg_idx = 0 - - @functools.wraps(__f) - def wrapped(*args, **kwargs): - __exc = None - __t = time.time() - __do_trace = LOG.logger.getEffectiveLevel() <= log.TRACE - __ret_val = None - try: - if __do_trace: - LOG.trace('CALL => %s', __fn_info) - __ret_val = __f(*args, **kwargs) - except Exception as e: # nosec - __exc = e - raise - finally: - if __do_trace: - __subst = { - 'run_time': (time.time() - __t), - 'passed_args': ', '.join([ - ', '.join([repr(a) - for a in args[__arg_idx:]]), - ', '.join(['%(k)s=%(v)r' % {'k': k, 'v': v} - for k, v in kwargs.items()]), - ]), - 'function': __fn_info, - 'exception': __exc, - 'ret_val': __ret_val, - } - if __exc is not None: - __msg = ('[%(run_time)ss] %(function)s ' - '(%(passed_args)s) => raised ' - '%(exception)r') - else: - # TODO(morganfainberg): find a way to indicate if this - # was a cache hit or cache miss. - __msg = ('[%(run_time)ss] %(function)s' - '(%(passed_args)s) => %(ret_val)r') - LOG.trace(__msg, __subst) - return __ret_val - return wrapped - - def __new__(meta, classname, bases, class_dict): - final_cls_dict = {} - for attr_name, attr in class_dict.items(): - # NOTE(morganfainberg): only wrap public instances and methods. - if (isinstance(attr, types.FunctionType) and - not attr_name.startswith('_')): - attr = _TraceMeta.wrapper(attr, classname) - final_cls_dict[attr_name] = attr - return type.__new__(meta, classname, bases, final_cls_dict) - - -@six.add_metaclass(_TraceMeta) -class Manager(object): - """Base class for intermediary request layer. - - The Manager layer exists to support additional logic that applies to all - or some of the methods exposed by a service that are not specific to the - HTTP interface. - - It also provides a stable entry point to dynamic backends. - - An example of a probable use case is logging all the calls. 
- - """ - - driver_namespace = None - - def __init__(self, driver_name): - self.driver = load_driver(self.driver_namespace, driver_name) - - def __getattr__(self, name): - """Forward calls to the underlying driver.""" - f = getattr(self.driver, name) - setattr(self, name, f) - return f - - -def create_legacy_driver(driver_class): - """Helper function to deprecate the original driver classes. - - The keystone.{subsystem}.Driver classes are deprecated in favor of the - new versioned classes. This function creates a new class based on a - versioned class and adds a deprecation message when it is used. - - This will allow existing custom drivers to work when the Driver class is - renamed to include a version. - - Example usage: - - Driver = create_legacy_driver(CatalogDriverV8) - - """ - module_name = driver_class.__module__ - class_name = reflection.get_class_name(driver_class) - - class Driver(driver_class): - - @versionutils.deprecated( - as_of=versionutils.deprecated.LIBERTY, - what='%s.Driver' % module_name, - in_favor_of=class_name, - remove_in=+2) - def __init__(self, *args, **kwargs): - super(Driver, self).__init__(*args, **kwargs) - - return Driver diff --git a/keystone-moon/keystone/common/models.py b/keystone-moon/keystone/common/models.py deleted file mode 100644 index de996522..00000000 --- a/keystone-moon/keystone/common/models.py +++ /dev/null @@ -1,196 +0,0 @@ -# Copyright (C) 2011 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Base model for keystone internal services - -Unless marked otherwise, all fields are strings. - -""" - - -class Model(dict): - """Base model class.""" - - def __hash__(self): - return self['id'].__hash__() - - @property - def known_keys(cls): - return cls.required_keys + cls.optional_keys - - -class Token(Model): - """Token object. - - Required keys: - id - expires (datetime) - - Optional keys: - user - tenant - metadata - trust_id - """ - - required_keys = ('id', 'expires') - optional_keys = ('extra',) - - -class Service(Model): - """Service object. - - Required keys: - id - type - name - - Optional keys: - """ - - required_keys = ('id', 'type', 'name') - optional_keys = tuple() - - -class Endpoint(Model): - """Endpoint object - - Required keys: - id - region - service_id - - Optional keys: - internalurl - publicurl - adminurl - """ - - required_keys = ('id', 'region', 'service_id') - optional_keys = ('internalurl', 'publicurl', 'adminurl') - - -class User(Model): - """User object. - - Required keys: - id - name - domain_id - - Optional keys: - password - description - email - enabled (bool, default True) - default_project_id - """ - - required_keys = ('id', 'name', 'domain_id') - optional_keys = ('password', 'description', 'email', 'enabled', - 'default_project_id') - - -class Group(Model): - """Group object. - - Required keys: - id - name - domain_id - - Optional keys: - - description - - """ - - required_keys = ('id', 'name', 'domain_id') - optional_keys = ('description',) - - -class Project(Model): - """Project object. - - Required keys: - id - name - domain_id - - Optional Keys: - description - enabled (bool, default True) - is_domain (bool, default False) - - """ - - required_keys = ('id', 'name', 'domain_id') - optional_keys = ('description', 'enabled', 'is_domain') - - -class Role(Model): - """Role object. 
- - Required keys: - id - name - - """ - - required_keys = ('id', 'name') - optional_keys = tuple() - - -class ImpliedRole(Model): - """ImpliedRole object. - - Required keys: - prior_role_id - implied_role_id - """ - - required_keys = ('prior_role_id', 'implied_role_id') - optional_keys = tuple() - - -class Trust(Model): - """Trust object. - - Required keys: - id - trustor_user_id - trustee_user_id - project_id - """ - - required_keys = ('id', 'trustor_user_id', 'trustee_user_id', 'project_id') - optional_keys = ('expires_at',) - - -class Domain(Model): - """Domain object. - - Required keys: - id - name - - Optional keys: - - description - enabled (bool, default True) - - """ - - required_keys = ('id', 'name') - optional_keys = ('description', 'enabled') diff --git a/keystone-moon/keystone/common/openssl.py b/keystone-moon/keystone/common/openssl.py deleted file mode 100644 index 0bea6d8e..00000000 --- a/keystone-moon/keystone/common/openssl.py +++ /dev/null @@ -1,337 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-# - -import os - -from oslo_config import cfg -from oslo_log import log - -from keystone.common import environment -from keystone.common import utils -from keystone.i18n import _LI, _LE, _LW - -LOG = log.getLogger(__name__) -CONF = cfg.CONF - -PUBLIC_DIR_PERMS = 0o755 # -rwxr-xr-x -PRIVATE_DIR_PERMS = 0o750 # -rwxr-x--- -PUBLIC_FILE_PERMS = 0o644 # -rw-r--r-- -PRIVATE_FILE_PERMS = 0o640 # -rw-r----- - - -def file_exists(file_path): - return os.path.exists(file_path) - - -class BaseCertificateConfigure(object): - """Create a certificate signing environment. - - This is based on a config section and reasonable OpenSSL defaults. - - """ - - def __init__(self, conf_obj, server_conf_obj, keystone_user, - keystone_group, rebuild, **kwargs): - self.conf_dir = os.path.dirname(server_conf_obj.ca_certs) - self.use_keystone_user = keystone_user - self.use_keystone_group = keystone_group - self.rebuild = rebuild - self.ssl_config_file_name = os.path.join(self.conf_dir, "openssl.conf") - self.request_file_name = os.path.join(self.conf_dir, "req.pem") - self.ssl_dictionary = {'conf_dir': self.conf_dir, - 'ca_cert': server_conf_obj.ca_certs, - 'default_md': 'default', - 'ssl_config': self.ssl_config_file_name, - 'ca_private_key': conf_obj.ca_key, - 'request_file': self.request_file_name, - 'signing_key': server_conf_obj.keyfile, - 'signing_cert': server_conf_obj.certfile, - 'key_size': int(conf_obj.key_size), - 'valid_days': int(conf_obj.valid_days), - 'cert_subject': conf_obj.cert_subject} - - try: - # OpenSSL 1.0 and newer support default_md = default, - # older versions do not - openssl_ver = environment.subprocess.check_output( # the arguments - # are hardcoded and just check the openssl version - ['openssl', 'version']) - if b'OpenSSL 0.' 
in openssl_ver: - self.ssl_dictionary['default_md'] = 'sha1' - except environment.subprocess.CalledProcessError: - LOG.warning(_LW('Failed to invoke ``openssl version``, ' - 'assuming is v1.0 or newer')) - self.ssl_dictionary.update(kwargs) - - def exec_command(self, command): - to_exec = [part % self.ssl_dictionary for part in command] - LOG.info(_LI('Running command - %s'), ' '.join(to_exec)) - try: - # NOTE(shaleh): use check_output instead of the simpler - # `check_call()` in order to log any output from an error. - environment.subprocess.check_output( # the arguments being passed - # in are defined in this file and trusted to build CAs, keys - # and certs - to_exec, - stderr=environment.subprocess.STDOUT) - except environment.subprocess.CalledProcessError as e: - LOG.error(_LE('Command %(to_exec)s exited with %(retcode)s ' - '- %(output)s'), - {'to_exec': to_exec, - 'retcode': e.returncode, - 'output': e.output}) - raise e - - def clean_up_existing_files(self): - files_to_clean = [self.ssl_dictionary['ca_private_key'], - self.ssl_dictionary['ca_cert'], - self.ssl_dictionary['signing_key'], - self.ssl_dictionary['signing_cert'], - ] - - existing_files = [] - - for file_path in files_to_clean: - if file_exists(file_path): - if self.rebuild: - # The file exists but the user wants to rebuild it, so blow - # it away - try: - os.remove(file_path) - except OSError as exc: - LOG.error(_LE('Failed to remove file %(file_path)r: ' - '%(error)s'), - {'file_path': file_path, - 'error': exc.strerror}) - raise - else: - existing_files.append(file_path) - - return existing_files - - def build_ssl_config_file(self): - utils.make_dirs(os.path.dirname(self.ssl_config_file_name), - mode=PUBLIC_DIR_PERMS, - user=self.use_keystone_user, - group=self.use_keystone_group, log=LOG) - if not file_exists(self.ssl_config_file_name): - with open(self.ssl_config_file_name, 'w') as ssl_config_file: - ssl_config_file.write(self.sslconfig % self.ssl_dictionary) - 
utils.set_permissions(self.ssl_config_file_name, - mode=PRIVATE_FILE_PERMS, - user=self.use_keystone_user, - group=self.use_keystone_group, log=LOG) - - index_file_name = os.path.join(self.conf_dir, 'index.txt') - if not file_exists(index_file_name): - with open(index_file_name, 'w') as index_file: - index_file.write('') - utils.set_permissions(index_file_name, - mode=PRIVATE_FILE_PERMS, - user=self.use_keystone_user, - group=self.use_keystone_group, log=LOG) - - serial_file_name = os.path.join(self.conf_dir, 'serial') - if not file_exists(serial_file_name): - with open(serial_file_name, 'w') as index_file: - index_file.write('01') - utils.set_permissions(serial_file_name, - mode=PRIVATE_FILE_PERMS, - user=self.use_keystone_user, - group=self.use_keystone_group, log=LOG) - - def build_ca_cert(self): - ca_key_file = self.ssl_dictionary['ca_private_key'] - utils.make_dirs(os.path.dirname(ca_key_file), - mode=PRIVATE_DIR_PERMS, - user=self.use_keystone_user, - group=self.use_keystone_group, log=LOG) - if not file_exists(ca_key_file): - self.exec_command(['openssl', 'genrsa', - '-out', '%(ca_private_key)s', - '%(key_size)d']) - utils.set_permissions(ca_key_file, - mode=PRIVATE_FILE_PERMS, - user=self.use_keystone_user, - group=self.use_keystone_group, log=LOG) - - ca_cert = self.ssl_dictionary['ca_cert'] - utils.make_dirs(os.path.dirname(ca_cert), - mode=PUBLIC_DIR_PERMS, - user=self.use_keystone_user, - group=self.use_keystone_group, log=LOG) - if not file_exists(ca_cert): - self.exec_command(['openssl', 'req', '-new', '-x509', - '-extensions', 'v3_ca', - '-key', '%(ca_private_key)s', - '-out', '%(ca_cert)s', - '-days', '%(valid_days)d', - '-config', '%(ssl_config)s', - '-subj', '%(cert_subject)s']) - utils.set_permissions(ca_cert, - mode=PUBLIC_FILE_PERMS, - user=self.use_keystone_user, - group=self.use_keystone_group, log=LOG) - - def build_private_key(self): - signing_keyfile = self.ssl_dictionary['signing_key'] - utils.make_dirs(os.path.dirname(signing_keyfile), - 
mode=PRIVATE_DIR_PERMS, - user=self.use_keystone_user, - group=self.use_keystone_group, log=LOG) - if not file_exists(signing_keyfile): - self.exec_command(['openssl', 'genrsa', '-out', '%(signing_key)s', - '%(key_size)d']) - utils.set_permissions(signing_keyfile, - mode=PRIVATE_FILE_PERMS, - user=self.use_keystone_user, - group=self.use_keystone_group, log=LOG) - - def build_signing_cert(self): - signing_cert = self.ssl_dictionary['signing_cert'] - - utils.make_dirs(os.path.dirname(signing_cert), - mode=PUBLIC_DIR_PERMS, - user=self.use_keystone_user, - group=self.use_keystone_group, log=LOG) - if not file_exists(signing_cert): - self.exec_command(['openssl', 'req', '-key', '%(signing_key)s', - '-new', '-out', '%(request_file)s', - '-config', '%(ssl_config)s', - '-subj', '%(cert_subject)s']) - - self.exec_command(['openssl', 'ca', '-batch', - '-out', '%(signing_cert)s', - '-config', '%(ssl_config)s', - '-days', '%(valid_days)dd', - '-cert', '%(ca_cert)s', - '-keyfile', '%(ca_private_key)s', - '-infiles', '%(request_file)s']) - - def run(self): - try: - existing_files = self.clean_up_existing_files() - except OSError: - print('An error occurred when rebuilding cert files.') - return - if existing_files: - print('The following cert files already exist, use --rebuild to ' - 'remove the existing files before regenerating:') - for f in existing_files: - print('%s already exists' % f) - return - - self.build_ssl_config_file() - self.build_ca_cert() - self.build_private_key() - self.build_signing_cert() - - -class ConfigurePKI(BaseCertificateConfigure): - """Generate files for PKI signing using OpenSSL. - - Signed tokens require a private key and signing certificate which itself - must be signed by a CA. 
This class generates them with workable defaults - if each of the files are not present - - """ - - def __init__(self, keystone_user, keystone_group, rebuild=False): - super(ConfigurePKI, self).__init__(CONF.signing, CONF.signing, - keystone_user, keystone_group, - rebuild=rebuild) - - -class ConfigureSSL(BaseCertificateConfigure): - """Generate files for HTTPS using OpenSSL. - - Creates a public/private key and certificates. If a CA is not given - one will be generated using provided arguments. - """ - - def __init__(self, keystone_user, keystone_group, rebuild=False): - super(ConfigureSSL, self).__init__(CONF.ssl, CONF.eventlet_server_ssl, - keystone_user, keystone_group, - rebuild=rebuild) - - -BaseCertificateConfigure.sslconfig = """ -# OpenSSL configuration file. -# - -# Establish working directory. - -dir = %(conf_dir)s - -[ ca ] -default_ca = CA_default - -[ CA_default ] -new_certs_dir = $dir -serial = $dir/serial -database = $dir/index.txt -default_days = 365 -default_md = %(default_md)s -preserve = no -email_in_dn = no -nameopt = default_ca -certopt = default_ca -policy = policy_anything -x509_extensions = usr_cert -unique_subject = no - -[ policy_anything ] -countryName = optional -stateOrProvinceName = optional -organizationName = optional -organizationalUnitName = optional -commonName = supplied -emailAddress = optional - -[ req ] -default_bits = 2048 # Size of keys -default_keyfile = key.pem # name of generated keys -string_mask = utf8only # permitted characters -distinguished_name = req_distinguished_name -req_extensions = v3_req -x509_extensions = v3_ca - -[ req_distinguished_name ] -countryName = Country Name (2 letter code) -countryName_min = 2 -countryName_max = 2 -stateOrProvinceName = State or Province Name (full name) -localityName = Locality Name (city, district) -0.organizationName = Organization Name (company) -organizationalUnitName = Organizational Unit Name (department, division) -commonName = Common Name (hostname, IP, or your name) 
-commonName_max = 64 -emailAddress = Email Address -emailAddress_max = 64 - -[ v3_ca ] -basicConstraints = CA:TRUE -subjectKeyIdentifier = hash -authorityKeyIdentifier = keyid:always,issuer - -[ v3_req ] -basicConstraints = CA:FALSE -keyUsage = nonRepudiation, digitalSignature, keyEncipherment - -[ usr_cert ] -basicConstraints = CA:FALSE -subjectKeyIdentifier = hash -authorityKeyIdentifier = keyid:always -""" diff --git a/keystone-moon/keystone/common/pemutils.py b/keystone-moon/keystone/common/pemutils.py deleted file mode 100755 index ddbe05cf..00000000 --- a/keystone-moon/keystone/common/pemutils.py +++ /dev/null @@ -1,509 +0,0 @@ -# Copyright 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -""" -PEM formatted data is used frequently in conjunction with X509 PKI as -a data exchange mechanism for binary data. The acronym PEM stands for -Privacy Enhanced Mail as defined in RFC-1421. Contrary to expectation -the PEM format in common use has little to do with RFC-1421. Instead -what we know as PEM format grew out of the need for a data exchange -mechanism largely by the influence of OpenSSL. Other X509 -implementations have adopted it. - -Unfortunately PEM format has never been officially standarized. It's -basic format is as follows: - -1) A header consisting of 5 hyphens followed by the word BEGIN and a -single space. Then an upper case string describing the contents of the -PEM block, this is followed by 5 hyphens and a newline. 
- -2) Binary data (typically in DER ASN.1 format) encoded in base64. The -base64 text is line wrapped so that each line of base64 is 64 -characters long and terminated with a newline. The last line of base64 -text may be less than 64 characters. The content and format of the -binary data is entirely dependent upon the type of data announced in -the header and footer. - -3) A footer in the exact same as the header except the word BEGIN is -replaced by END. The content name in both the header and footer should -exactly match. - -The above is called a PEM block. It is permissible for multiple PEM -blocks to appear in a single file or block of text. This is often used -when specifying multiple X509 certificates. - -An example PEM block for a certificate is: - ------BEGIN CERTIFICATE----- -MIIC0TCCAjqgAwIBAgIJANsHKV73HYOwMA0GCSqGSIb3DQEBBQUAMIGeMQowCAYD -VQQFEwE1MQswCQYDVQQGEwJVUzELMAkGA1UECBMCQ0ExEjAQBgNVBAcTCVN1bm55 -dmFsZTESMBAGA1UEChMJT3BlblN0YWNrMREwDwYDVQQLEwhLZXlzdG9uZTElMCMG -CSqGSIb3DQEJARYWa2V5c3RvbmVAb3BlbnN0YWNrLm9yZzEUMBIGA1UEAxMLU2Vs -ZiBTaWduZWQwIBcNMTIxMTA1MTgxODI0WhgPMjA3MTA0MzAxODE4MjRaMIGeMQow -CAYDVQQFEwE1MQswCQYDVQQGEwJVUzELMAkGA1UECBMCQ0ExEjAQBgNVBAcTCVN1 -bm55dmFsZTESMBAGA1UEChMJT3BlblN0YWNrMREwDwYDVQQLEwhLZXlzdG9uZTEl -MCMGCSqGSIb3DQEJARYWa2V5c3RvbmVAb3BlbnN0YWNrLm9yZzEUMBIGA1UEAxML -U2VsZiBTaWduZWQwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBALzI17ExCaqd -r7xY2Q5CBZ1bW1lsrXxS8eNJRdQtskDuQVAluY03/OGZd8HQYiiY/ci2tYy7BNIC -bh5GaO95eqTDykJR3liOYE/tHbY6puQlj2ZivmhlSd2d5d7lF0/H28RQsLu9VktM -uw6q9DpDm35jfrr8LgSeA3MdVqcS/4OhAgMBAAGjEzARMA8GA1UdEwEB/wQFMAMB -Af8wDQYJKoZIhvcNAQEFBQADgYEAjSQND7i1dNZtLKpWgX+JqMr3BdVlM15mFeVr -C26ZspZjZVY5okdozO9gU3xcwRe4Cg30sKFOe6EBQKpkTZucFOXwBtD3h6dWJrdD -c+m/CL/rs0GatDavbaIT2vv405SQUQooCdVh72LYel+4/a6xmRd7fQx3iEXN9QYj -vmHJUcA= ------END CERTIFICATE----- - -PEM format is safe for transmission in 7-bit ASCII systems -(i.e. standard email). 
Since 7-bit ASCII is a proper subset of UTF-8 -and Latin-1 it is not affected by transcoding between those -charsets. Nor is PEM format affected by the choice of line -endings. This makes PEM format particularity attractive for transport -and storage of binary data. - -This module provides a number of utilities supporting the generation -and consumption of PEM formatted data including: - - * parse text and find all PEM blocks contained in the - text. Information on the location of the block in the text, the - type of PEM block, and it's base64 and binary data contents. - - * parse text assumed to contain PEM data and return the binary - data. - - * test if a block of text is a PEM block - - * convert base64 text into a formatted PEM block - - * convert binary data into a formatted PEM block - - * access to the valid PEM types and their headers - -""" - -import base64 -import re - -import six - -from keystone.common import base64utils -from keystone.i18n import _ - - -PEM_TYPE_TO_HEADER = { - u'cms': u'CMS', - u'dsa-private': u'DSA PRIVATE KEY', - u'dsa-public': u'DSA PUBLIC KEY', - u'ecdsa-public': u'ECDSA PUBLIC KEY', - u'ec-private': u'EC PRIVATE KEY', - u'pkcs7': u'PKCS7', - u'pkcs7-signed': u'PKCS', - u'pkcs8': u'ENCRYPTED PRIVATE KEY', - u'private-key': u'PRIVATE KEY', - u'public-key': u'PUBLIC KEY', - u'rsa-private': u'RSA PRIVATE KEY', - u'rsa-public': u'RSA PUBLIC KEY', - u'cert': u'CERTIFICATE', - u'crl': u'X509 CRL', - u'cert-pair': u'CERTIFICATE PAIR', - u'csr': u'CERTIFICATE REQUEST', -} - -# This is not a 1-to-1 reverse map of PEM_TYPE_TO_HEADER -# because it includes deprecated headers that map to 1 pem_type. 
-PEM_HEADER_TO_TYPE = { - u'CMS': u'cms', - u'DSA PRIVATE KEY': u'dsa-private', - u'DSA PUBLIC KEY': u'dsa-public', - u'ECDSA PUBLIC KEY': u'ecdsa-public', - u'EC PRIVATE KEY': u'ec-private', - u'PKCS7': u'pkcs7', - u'PKCS': u'pkcs7-signed', - u'ENCRYPTED PRIVATE KEY': u'pkcs8', - u'PRIVATE KEY': u'private-key', - u'PUBLIC KEY': u'public-key', - u'RSA PRIVATE KEY': u'rsa-private', - u'RSA PUBLIC KEY': u'rsa-public', - u'CERTIFICATE': u'cert', - u'X509 CERTIFICATE': u'cert', - u'CERTIFICATE PAIR': u'cert-pair', - u'X509 CRL': u'crl', - u'CERTIFICATE REQUEST': u'csr', - u'NEW CERTIFICATE REQUEST': u'csr', -} - -# List of valid pem_types -pem_types = sorted(PEM_TYPE_TO_HEADER.keys()) - -# List of valid pem_headers -pem_headers = sorted(PEM_TYPE_TO_HEADER.values()) - -_pem_begin_re = re.compile(r'^-{5}BEGIN\s+([^-]+)-{5}\s*$', re.MULTILINE) -_pem_end_re = re.compile(r'^-{5}END\s+([^-]+)-{5}\s*$', re.MULTILINE) - - -class PEMParseResult(object): - """Information returned when a PEM block is found in text. - - PEMParseResult contains information about a PEM block discovered - while parsing text. The following properties are defined: - - pem_type - A short hand name for the type of the PEM data, e.g. cert, - csr, crl, cms, key. Valid pem_types are listed in pem_types. - When the pem_type is set the pem_header is updated to match it. - - pem_header - The text following '-----BEGIN ' in the PEM header. - Common examples are: - - -----BEGIN CERTIFICATE----- - -----BEGIN CMS----- - - Thus the pem_header would be CERTIFICATE and CMS respectively. - When the pem_header is set the pem_type is updated to match it. - - pem_start, pem_end - The beginning and ending positions of the PEM block - including the PEM header and footer. - - base64_start, base64_end - The beginning and ending positions of the base64 data - contained inside the PEM header and footer. Includes trailing - new line - - binary_data - The decoded base64 data. None if not decoded. 
- - """ - - def __init__(self, pem_type=None, pem_header=None, - pem_start=None, pem_end=None, - base64_start=None, base64_end=None, - binary_data=None): - - self._pem_type = None - self._pem_header = None - - if pem_type is not None: - self.pem_type = pem_type - - if pem_header is not None: - self.pem_header = pem_header - - self.pem_start = pem_start - self.pem_end = pem_end - self.base64_start = base64_start - self.base64_end = base64_end - self.binary_data = binary_data - - @property - def pem_type(self): - return self._pem_type - - @pem_type.setter - def pem_type(self, pem_type): - if pem_type is None: - self._pem_type = None - self._pem_header = None - else: - pem_header = PEM_TYPE_TO_HEADER.get(pem_type) - if pem_header is None: - raise ValueError(_('unknown pem_type "%(pem_type)s", ' - 'valid types are: %(valid_pem_types)s') % - {'pem_type': pem_type, - 'valid_pem_types': ', '.join(pem_types)}) - self._pem_type = pem_type - self._pem_header = pem_header - - @property - def pem_header(self): - return self._pem_header - - @pem_header.setter - def pem_header(self, pem_header): - if pem_header is None: - self._pem_type = None - self._pem_header = None - else: - pem_type = PEM_HEADER_TO_TYPE.get(pem_header) - if pem_type is None: - raise ValueError(_('unknown pem header "%(pem_header)s", ' - 'valid headers are: ' - '%(valid_pem_headers)s') % - {'pem_header': pem_header, - 'valid_pem_headers': - ', '.join("'%s'" % - [x for x in pem_headers])}) - - self._pem_type = pem_type - self._pem_header = pem_header - - -def pem_search(text, start=0): - """Search for a block of PEM formatted data - - Search for a PEM block in a text string. The search begins at - start. If a PEM block is found a PEMParseResult object is - returned, otherwise if no PEM block is found None is returned. - - If the pem_type is not the same in both the header and footer - a ValueError is raised. - - The start and end positions are suitable for use as slices into - the text. 
To search for multiple PEM blocks pass pem_end as the - start position for the next iteration. Terminate the iteration - when None is returned. Example:: - - start = 0 - while True: - block = pem_search(text, start) - if block is None: - break - base64_data = text[block.base64_start : block.base64_end] - start = block.pem_end - - :param text: the text to search for PEM blocks - :type text: string - :param start: the position in text to start searching from (default: 0) - :type start: int - :returns: PEMParseResult or None if not found - :raises: ValueError - """ - - match = _pem_begin_re.search(text, pos=start) - if match: - pem_start = match.start() - begin_text = match.group(0) - base64_start = min(len(text), match.end() + 1) - begin_pem_header = match.group(1).strip() - - match = _pem_end_re.search(text, pos=base64_start) - if match: - pem_end = min(len(text), match.end() + 1) - base64_end = match.start() - end_pem_header = match.group(1).strip() - else: - raise ValueError(_('failed to find end matching "%s"') % - begin_text) - - if begin_pem_header != end_pem_header: - raise ValueError(_('beginning & end PEM headers do not match ' - '(%(begin_pem_header)s' - '!= ' - '%(end_pem_header)s)') % - {'begin_pem_header': begin_pem_header, - 'end_pem_header': end_pem_header}) - else: - return None - - result = PEMParseResult(pem_header=begin_pem_header, - pem_start=pem_start, pem_end=pem_end, - base64_start=base64_start, base64_end=base64_end) - - return result - - -def parse_pem(text, pem_type=None, max_items=None): - """Scan text for PEM data, return list of PEM items - - The input text is scanned for PEM blocks, for each one found a - PEMParseResult is constructed and added to the return list. - - pem_type operates as a filter on the type of PEM desired. If - pem_type is specified only those PEM blocks which match will be - included. The pem_type is a logical name, not the actual text in - the pem header (e.g. 'cert'). 
If the pem_type is None all PEM - blocks are returned. - - If max_items is specified the result is limited to that number of - items. - - The return value is a list of PEMParseResult objects. The - PEMParseResult provides complete information about the PEM block - including the decoded binary data for the PEM block. The list is - ordered in the same order as found in the text. - - Examples:: - - # Get all certs - certs = parse_pem(text, 'cert') - - # Get the first cert - try: - binary_cert = parse_pem(text, 'cert', 1)[0].binary_data - except IndexError: - raise ValueError('no cert found') - - :param text: The text to search for PEM blocks - :type text: string - :param pem_type: Only return data for this pem_type. - Valid types are: csr, cert, crl, cms, key. - If pem_type is None no filtering is performed. - (default: None) - :type pem_type: string or None - :param max_items: Limit the number of blocks returned. (default: None) - :type max_items: int or None - :return: List of PEMParseResult, one for each PEM block found - :raises: ValueError, InvalidBase64Error - """ - - pem_blocks = [] - start = 0 - - while True: - block = pem_search(text, start) - if block is None: - break - start = block.pem_end - if pem_type is None: - pem_blocks.append(block) - else: - try: - if block.pem_type == pem_type: - pem_blocks.append(block) - except KeyError: - raise ValueError(_('unknown pem_type: "%s"') % (pem_type)) - - if max_items is not None and len(pem_blocks) >= max_items: - break - - for block in pem_blocks: - base64_data = text[block.base64_start:block.base64_end] - try: - binary_data = base64.b64decode(base64_data) - except Exception as e: - block.binary_data = None - raise base64utils.InvalidBase64Error( - _('failed to base64 decode %(pem_type)s PEM at position' - '%(position)d: %(err_msg)s') % - {'pem_type': block.pem_type, - 'position': block.pem_start, - 'err_msg': six.text_type(e)}) - else: - block.binary_data = binary_data - - return pem_blocks - - -def 
get_pem_data(text, pem_type='cert'): - """Scan text for PEM data, return binary contents - - The input text is scanned for a PEM block which matches the pem_type. - If found the binary data contained in the PEM block is returned. - If no PEM block is found or it does not match the specified pem type - None is returned. - - :param text: The text to search for the PEM block - :type text: string - :param pem_type: Only return data for this pem_type. - Valid types are: csr, cert, crl, cms, key. - (default: 'cert') - :type pem_type: string - :return: binary data or None if not found. - """ - - blocks = parse_pem(text, pem_type, 1) - if not blocks: - return None - return blocks[0].binary_data - - -def is_pem(text, pem_type='cert'): - """Does this text contain a PEM block. - - Check for the existence of a PEM formatted block in the - text, if one is found verify it's contents can be base64 - decoded, if so return True. Return False otherwise. - - :param text: The text to search for PEM blocks - :type text: string - :param pem_type: Only return data for this pem_type. - Valid types are: csr, cert, crl, cms, key. - (default: 'cert') - :type pem_type: string - :returns: bool -- True if text contains PEM matching the pem_type, - False otherwise. - """ - - try: - pem_blocks = parse_pem(text, pem_type, max_items=1) - except base64utils.InvalidBase64Error: - return False - - if pem_blocks: - return True - else: - return False - - -def base64_to_pem(base64_text, pem_type='cert'): - """Format string of base64 text into PEM format - - Input is assumed to consist only of members of the base64 alphabet - (i.e no whitepace). Use one of the filter functions from - base64utils to assure the input is clean - (i.e. strip_whitespace()). - - :param base64_text: text containing ONLY base64 alphabet - characters to be inserted into PEM output. - :type base64_text: string - :param pem_type: Produce a PEM block for this type. - Valid types are: csr, cert, crl, cms, key. 
- (default: 'cert') - :type pem_type: string - :returns: string -- PEM formatted text - - - """ - pem_header = PEM_TYPE_TO_HEADER[pem_type] - buf = six.StringIO() - - buf.write(u'-----BEGIN %s-----' % pem_header) - buf.write(u'\n') - - for line in base64utils.base64_wrap_iter(base64_text, width=64): - buf.write(line) - buf.write(u'\n') - - buf.write(u'-----END %s-----' % pem_header) - buf.write(u'\n') - - text = buf.getvalue() - buf.close() - return text - - -def binary_to_pem(binary_data, pem_type='cert'): - """Format binary data into PEM format - - Example: - - # get the certificate binary data in DER format - der_data = certificate.der - # convert the DER binary data into a PEM - pem = binary_to_pem(der_data, 'cert') - - - :param binary_data: binary data to encapsulate into PEM - :type binary_data: buffer - :param pem_type: Produce a PEM block for this type. - Valid types are: csr, cert, crl, cms, key. - (default: 'cert') - :type pem_type: string - :returns: string -- PEM formatted text - - """ - base64_text = base64.b64encode(binary_data) - return base64_to_pem(base64_text, pem_type) diff --git a/keystone-moon/keystone/common/router.py b/keystone-moon/keystone/common/router.py deleted file mode 100644 index 74e03ad2..00000000 --- a/keystone-moon/keystone/common/router.py +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone.common import json_home -from keystone.common import wsgi - - -class Router(wsgi.ComposableRouter): - def __init__(self, controller, collection_key, key, - resource_descriptions=None, - is_entity_implemented=True, - method_template=None): - self.controller = controller - self.key = key - self.collection_key = collection_key - self._resource_descriptions = resource_descriptions - self._is_entity_implemented = is_entity_implemented - self.method_template = method_template or '%s' - - def add_routes(self, mapper): - collection_path = '/%(collection_key)s' % { - 'collection_key': self.collection_key} - entity_path = '/%(collection_key)s/{%(key)s_id}' % { - 'collection_key': self.collection_key, - 'key': self.key} - - mapper.connect( - collection_path, - controller=self.controller, - action=self.method_template % 'create_%s' % self.key, - conditions=dict(method=['POST'])) - mapper.connect( - collection_path, - controller=self.controller, - action=self.method_template % 'list_%s' % self.collection_key, - conditions=dict(method=['GET'])) - mapper.connect( - entity_path, - controller=self.controller, - action=self.method_template % 'get_%s' % self.key, - conditions=dict(method=['GET'])) - mapper.connect( - entity_path, - controller=self.controller, - action=self.method_template % 'update_%s' % self.key, - conditions=dict(method=['PATCH'])) - mapper.connect( - entity_path, - controller=self.controller, - action=self.method_template % 'delete_%s' % self.key, - conditions=dict(method=['DELETE'])) - - # Add the collection resource and entity resource to the resource - # descriptions. 
- - collection_rel = json_home.build_v3_resource_relation( - self.collection_key) - rel_data = {'href': collection_path, } - self._resource_descriptions.append((collection_rel, rel_data)) - - if self._is_entity_implemented: - entity_rel = json_home.build_v3_resource_relation(self.key) - id_str = '%s_id' % self.key - id_param_rel = json_home.build_v3_parameter_relation(id_str) - entity_rel_data = { - 'href-template': entity_path, - 'href-vars': { - id_str: id_param_rel, - }, - } - self._resource_descriptions.append((entity_rel, entity_rel_data)) diff --git a/keystone-moon/keystone/common/sql/__init__.py b/keystone-moon/keystone/common/sql/__init__.py deleted file mode 100644 index 84e0fb83..00000000 --- a/keystone-moon/keystone/common/sql/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.common.sql.core import * # noqa diff --git a/keystone-moon/keystone/common/sql/core.py b/keystone-moon/keystone/common/sql/core.py deleted file mode 100644 index cb026356..00000000 --- a/keystone-moon/keystone/common/sql/core.py +++ /dev/null @@ -1,434 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""SQL backends for the various services. - -Before using this module, call initialize(). This has to be done before -CONF() because it sets up configuration options. - -""" -import functools - -from oslo_config import cfg -from oslo_db import exception as db_exception -from oslo_db import options as db_options -from oslo_db.sqlalchemy import enginefacade -from oslo_db.sqlalchemy import models -from oslo_log import log -from oslo_serialization import jsonutils -import six -import sqlalchemy as sql -from sqlalchemy.ext import declarative -from sqlalchemy.orm.attributes import flag_modified, InstrumentedAttribute -from sqlalchemy import types as sql_types - -from keystone.common import driver_hints -from keystone.common import utils -from keystone import exception -from keystone.i18n import _ - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - -ModelBase = declarative.declarative_base() - - -# For exporting to other modules -Column = sql.Column -Index = sql.Index -String = sql.String -Integer = sql.Integer -Enum = sql.Enum -ForeignKey = sql.ForeignKey -DateTime = sql.DateTime -IntegrityError = sql.exc.IntegrityError -DBDuplicateEntry = db_exception.DBDuplicateEntry -OperationalError = sql.exc.OperationalError -NotFound = sql.orm.exc.NoResultFound -Boolean = sql.Boolean -Text = sql.Text -UniqueConstraint = sql.UniqueConstraint -PrimaryKeyConstraint = sql.PrimaryKeyConstraint -joinedload = sql.orm.joinedload -# Suppress flake8's unused import warning for flag_modified: -flag_modified = flag_modified - - -def initialize(): - """Initialize the module.""" - 
db_options.set_defaults( - CONF, - connection="sqlite:///keystone.db") - - -def initialize_decorator(init): - """Ensure that the length of string field do not exceed the limit. - - This decorator check the initialize arguments, to make sure the - length of string field do not exceed the length limit, or raise a - 'StringLengthExceeded' exception. - - Use decorator instead of inheritance, because the metaclass will - check the __tablename__, primary key columns, etc. at the class - definition. - - """ - def initialize(self, *args, **kwargs): - cls = type(self) - for k, v in kwargs.items(): - if hasattr(cls, k): - attr = getattr(cls, k) - if isinstance(attr, InstrumentedAttribute): - column = attr.property.columns[0] - if isinstance(column.type, String): - if not isinstance(v, six.text_type): - v = six.text_type(v) - if column.type.length and column.type.length < len(v): - raise exception.StringLengthExceeded( - string=v, type=k, length=column.type.length) - - init(self, *args, **kwargs) - return initialize - -ModelBase.__init__ = initialize_decorator(ModelBase.__init__) - - -# Special Fields -class JsonBlob(sql_types.TypeDecorator): - - impl = sql.Text - - def process_bind_param(self, value, dialect): - return jsonutils.dumps(value) - - def process_result_value(self, value, dialect): - return jsonutils.loads(value) - - -class DictBase(models.ModelBase): - attributes = [] - - @classmethod - def from_dict(cls, d): - new_d = d.copy() - - new_d['extra'] = {k: new_d.pop(k) for k in six.iterkeys(d) - if k not in cls.attributes and k != 'extra'} - - return cls(**new_d) - - def to_dict(self, include_extra_dict=False): - """Returns the model's attributes as a dictionary. - - If include_extra_dict is True, 'extra' attributes are literally - included in the resulting dictionary twice, for backwards-compatibility - with a broken implementation. 
- - """ - d = self.extra.copy() - for attr in self.__class__.attributes: - d[attr] = getattr(self, attr) - - if include_extra_dict: - d['extra'] = self.extra.copy() - - return d - - def __getitem__(self, key): - if key in self.extra: - return self.extra[key] - return getattr(self, key) - - -class ModelDictMixin(object): - - @classmethod - def from_dict(cls, d): - """Returns a model instance from a dictionary.""" - return cls(**d) - - def to_dict(self): - """Returns the model's attributes as a dictionary.""" - names = (column.name for column in self.__table__.columns) - return {name: getattr(self, name) for name in names} - - -_main_context_manager = None - - -def _get_main_context_manager(): - global _main_context_manager - - if not _main_context_manager: - _main_context_manager = enginefacade.transaction_context() - - return _main_context_manager - - -def cleanup(): - global _main_context_manager - - _main_context_manager = None - - -_CONTEXT = None - - -def _get_context(): - global _CONTEXT - if _CONTEXT is None: - # NOTE(dims): Delay the `threading.local` import to allow for - # eventlet/gevent monkeypatching to happen - import threading - _CONTEXT = threading.local() - return _CONTEXT - - -def session_for_read(): - return _get_main_context_manager().reader.using(_get_context()) - - -def session_for_write(): - return _get_main_context_manager().writer.using(_get_context()) - - -def truncated(f): - return driver_hints.truncated(f) - - -class _WontMatch(Exception): - """Raised to indicate that the filter won't match. - - This is raised to short-circuit the computation of the filter as soon as - it's discovered that the filter requested isn't going to match anything. - - A filter isn't going to match anything if the value is too long for the - field, for example. - - """ - - @classmethod - def check(cls, value, col_attr): - """Check if the value can match given the column attributes. 
- - Raises this class if the value provided can't match any value in the - column in the table given the column's attributes. For example, if the - column is a string and the value is longer than the column then it - won't match any value in the column in the table. - - """ - col = col_attr.property.columns[0] - if isinstance(col.type, sql.types.Boolean): - # The column is a Boolean, we should have already validated input. - return - if not col.type.length: - # The column doesn't have a length so can't validate anymore. - return - if len(value) > col.type.length: - raise cls() - # Otherwise the value could match a value in the column. - - -def _filter(model, query, hints): - """Applies filtering to a query. - - :param model: the table model in question - :param query: query to apply filters to - :param hints: contains the list of filters yet to be satisfied. - Any filters satisfied here will be removed so that - the caller will know if any filters remain. - - :returns query: query, updated with any filters satisfied - - """ - def inexact_filter(model, query, filter_, satisfied_filters): - """Applies an inexact filter to a query. - - :param model: the table model in question - :param query: query to apply filters to - :param dict filter_: describes this filter - :param list satisfied_filters: filter_ will be added if it is - satisfied. - - :returns query: query updated to add any inexact filters we could - satisfy - - """ - column_attr = getattr(model, filter_['name']) - - # TODO(henry-nash): Sqlalchemy 0.7 defaults to case insensitivity - # so once we find a way of changing that (maybe on a call-by-call - # basis), we can add support for the case sensitive versions of - # the filters below. For now, these case sensitive versions will - # be handled at the controller level. 
- - if filter_['case_sensitive']: - return query - - if filter_['comparator'] == 'contains': - _WontMatch.check(filter_['value'], column_attr) - query_term = column_attr.ilike('%%%s%%' % filter_['value']) - elif filter_['comparator'] == 'startswith': - _WontMatch.check(filter_['value'], column_attr) - query_term = column_attr.ilike('%s%%' % filter_['value']) - elif filter_['comparator'] == 'endswith': - _WontMatch.check(filter_['value'], column_attr) - query_term = column_attr.ilike('%%%s' % filter_['value']) - else: - # It's a filter we don't understand, so let the caller - # work out if they need to do something with it. - return query - - satisfied_filters.append(filter_) - return query.filter(query_term) - - def exact_filter(model, query, filter_, satisfied_filters): - """Applies an exact filter to a query. - - :param model: the table model in question - :param query: query to apply filters to - :param dict filter_: describes this filter - :param list satisfied_filters: filter_ will be added if it is - satisfied. 
- :returns query: query updated to add any exact filters we could - satisfy - """ - key = filter_['name'] - - col = getattr(model, key) - if isinstance(col.property.columns[0].type, sql.types.Boolean): - filter_val = utils.attr_as_boolean(filter_['value']) - else: - _WontMatch.check(filter_['value'], col) - filter_val = filter_['value'] - - satisfied_filters.append(filter_) - return query.filter(col == filter_val) - - try: - satisfied_filters = [] - for filter_ in hints.filters: - if filter_['name'] not in model.attributes: - continue - if filter_['comparator'] == 'equals': - query = exact_filter(model, query, filter_, - satisfied_filters) - else: - query = inexact_filter(model, query, filter_, - satisfied_filters) - - # Remove satisfied filters, then the caller will know remaining filters - for filter_ in satisfied_filters: - hints.filters.remove(filter_) - - return query - except _WontMatch: - hints.cannot_match = True - return - - -def _limit(query, hints): - """Applies a limit to a query. - - :param query: query to apply filters to - :param hints: contains the list of filters and limit details. - - :returns: updated query - - """ - # NOTE(henry-nash): If we were to implement pagination, then we - # we would expand this method to support pagination and limiting. - - # If we satisfied all the filters, set an upper limit if supplied - if hints.limit: - query = query.limit(hints.limit['limit']) - return query - - -def filter_limit_query(model, query, hints): - """Applies filtering and limit to a query. - - :param model: table model - :param query: query to apply filters to - :param hints: contains the list of filters and limit details. This may - be None, indicating that there are no filters or limits - to be applied. If it's not None, then any filters - satisfied here will be removed so that the caller will - know if any filters remain. 
- - :returns: updated query - - """ - if hints is None: - return query - - # First try and satisfy any filters - query = _filter(model, query, hints) - - if hints.cannot_match: - # Nothing's going to match, so don't bother with the query. - return [] - - # NOTE(henry-nash): Any unsatisfied filters will have been left in - # the hints list for the controller to handle. We can only try and - # limit here if all the filters are already satisfied since, if not, - # doing so might mess up the final results. If there are still - # unsatisfied filters, we have to leave any limiting to the controller - # as well. - - if not hints.filters: - return _limit(query, hints) - else: - return query - - -def handle_conflicts(conflict_type='object'): - """Converts select sqlalchemy exceptions into HTTP 409 Conflict.""" - _conflict_msg = 'Conflict %(conflict_type)s: %(details)s' - - def decorator(method): - @functools.wraps(method) - def wrapper(*args, **kwargs): - try: - return method(*args, **kwargs) - except db_exception.DBDuplicateEntry as e: - # LOG the exception for debug purposes, do not send the - # exception details out with the raised Conflict exception - # as it can contain raw SQL. - LOG.debug(_conflict_msg, {'conflict_type': conflict_type, - 'details': six.text_type(e)}) - raise exception.Conflict(type=conflict_type, - details=_('Duplicate Entry')) - except db_exception.DBError as e: - # TODO(blk-u): inspecting inner_exception breaks encapsulation; - # oslo_db should provide exception we need. - if isinstance(e.inner_exception, IntegrityError): - # LOG the exception for debug purposes, do not send the - # exception details out with the raised Conflict exception - # as it can contain raw SQL. - LOG.debug(_conflict_msg, {'conflict_type': conflict_type, - 'details': six.text_type(e)}) - # NOTE(morganfainberg): This is really a case where the SQL - # failed to store the data. This is not something that the - # user has done wrong. 
Example would be a ForeignKey is - # missing; the code that is executed before reaching the - # SQL writing to the DB should catch the issue. - raise exception.UnexpectedError( - _('An unexpected error occurred when trying to ' - 'store %s') % conflict_type) - raise - - return wrapper - return decorator diff --git a/keystone-moon/keystone/common/sql/migrate_repo/README b/keystone-moon/keystone/common/sql/migrate_repo/README deleted file mode 100644 index 4ea8dd4f..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/README +++ /dev/null @@ -1,4 +0,0 @@ -This is a database migration repository. - -More information at -https://git.openstack.org/cgit/openstack/sqlalchemy-migrate diff --git a/keystone-moon/keystone/common/sql/migrate_repo/__init__.py b/keystone-moon/keystone/common/sql/migrate_repo/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/common/sql/migrate_repo/manage.py b/keystone-moon/keystone/common/sql/migrate_repo/manage.py deleted file mode 100644 index 39fa3892..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/manage.py +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/env python -from migrate.versioning.shell import main - -if __name__ == '__main__': - main(debug='False') diff --git a/keystone-moon/keystone/common/sql/migrate_repo/migrate.cfg b/keystone-moon/keystone/common/sql/migrate_repo/migrate.cfg deleted file mode 100644 index db531bb4..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/migrate.cfg +++ /dev/null @@ -1,25 +0,0 @@ -[db_settings] -# Used to identify which repository this database is versioned under. -# You can use the name of your project. -repository_id=keystone - -# The name of the database table used to track the schema version. -# This name shouldn't already be used by your project. -# If this is changed once a database is under version control, you'll need to -# change the table name in each database too. 
-version_table=migrate_version - -# When committing a change script, Migrate will attempt to generate the -# sql for all supported databases; normally, if one of them fails - probably -# because you don't have that database installed - it is ignored and the -# commit continues, perhaps ending successfully. -# Databases in this list MUST compile successfully during a commit, or the -# entire commit will fail. List the databases your application will actually -# be using to ensure your updates to that database work properly. -# This must be a list; example: ['postgres','sqlite'] -required_dbs=[] - -# When creating new change scripts, Migrate will stamp the new script with -# a version number. By default this is latest_version + 1. You can set this -# to 'true' to tell Migrate to use the UTC timestamp instead. -use_timestamp_numbering=False diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/067_kilo.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/067_kilo.py deleted file mode 100644 index a6dbed67..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/067_kilo.py +++ /dev/null @@ -1,317 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- - -import migrate -from oslo_log import log -import sqlalchemy as sql - -from keystone.assignment.backends import sql as assignment_sql -from keystone.common import sql as ks_sql -from keystone.identity.mapping_backends import mapping as mapping_backend - - -LOG = log.getLogger(__name__) - - -def upgrade(migrate_engine): - meta = sql.MetaData() - meta.bind = migrate_engine - - if migrate_engine.name == 'mysql': - # In Folsom we explicitly converted migrate_version to UTF8. - migrate_engine.execute( - 'ALTER TABLE migrate_version CONVERT TO CHARACTER SET utf8') - # Set default DB charset to UTF8. - migrate_engine.execute( - 'ALTER DATABASE %s DEFAULT CHARACTER SET utf8' % - migrate_engine.url.database) - - credential = sql.Table( - 'credential', meta, - sql.Column('id', sql.String(length=64), primary_key=True), - sql.Column('user_id', sql.String(length=64), nullable=False), - sql.Column('project_id', sql.String(length=64)), - sql.Column('blob', ks_sql.JsonBlob, nullable=False), - sql.Column('type', sql.String(length=255), nullable=False), - sql.Column('extra', ks_sql.JsonBlob.impl), - mysql_engine='InnoDB', - mysql_charset='utf8') - - domain = sql.Table( - 'domain', meta, - sql.Column('id', sql.String(length=64), primary_key=True), - sql.Column('name', sql.String(length=64), nullable=False), - sql.Column('enabled', sql.Boolean, default=True, nullable=False), - sql.Column('extra', ks_sql.JsonBlob.impl), - mysql_engine='InnoDB', - mysql_charset='utf8') - - endpoint = sql.Table( - 'endpoint', meta, - sql.Column('id', sql.String(length=64), primary_key=True), - sql.Column('legacy_endpoint_id', sql.String(length=64)), - sql.Column('interface', sql.String(length=8), nullable=False), - sql.Column('service_id', sql.String(length=64), nullable=False), - sql.Column('url', sql.Text, nullable=False), - sql.Column('extra', ks_sql.JsonBlob.impl), - sql.Column('enabled', sql.Boolean, nullable=False, default=True, - server_default='1'), - sql.Column('region_id', 
sql.String(length=255), nullable=True), - mysql_engine='InnoDB', - mysql_charset='utf8') - - group = sql.Table( - 'group', meta, - sql.Column('id', sql.String(length=64), primary_key=True), - sql.Column('domain_id', sql.String(length=64), nullable=False), - sql.Column('name', sql.String(length=64), nullable=False), - sql.Column('description', sql.Text), - sql.Column('extra', ks_sql.JsonBlob.impl), - mysql_engine='InnoDB', - mysql_charset='utf8') - - policy = sql.Table( - 'policy', meta, - sql.Column('id', sql.String(length=64), primary_key=True), - sql.Column('type', sql.String(length=255), nullable=False), - sql.Column('blob', ks_sql.JsonBlob, nullable=False), - sql.Column('extra', ks_sql.JsonBlob.impl), - mysql_engine='InnoDB', - mysql_charset='utf8') - - project = sql.Table( - 'project', meta, - sql.Column('id', sql.String(length=64), primary_key=True), - sql.Column('name', sql.String(length=64), nullable=False), - sql.Column('extra', ks_sql.JsonBlob.impl), - sql.Column('description', sql.Text), - sql.Column('enabled', sql.Boolean), - sql.Column('domain_id', sql.String(length=64), nullable=False), - sql.Column('parent_id', sql.String(64), nullable=True), - mysql_engine='InnoDB', - mysql_charset='utf8') - - role = sql.Table( - 'role', meta, - sql.Column('id', sql.String(length=64), primary_key=True), - sql.Column('name', sql.String(length=255), nullable=False), - sql.Column('extra', ks_sql.JsonBlob.impl), - mysql_engine='InnoDB', - mysql_charset='utf8') - - service = sql.Table( - 'service', meta, - sql.Column('id', sql.String(length=64), primary_key=True), - sql.Column('type', sql.String(length=255)), - sql.Column('enabled', sql.Boolean, nullable=False, default=True, - server_default='1'), - sql.Column('extra', ks_sql.JsonBlob.impl), - mysql_engine='InnoDB', - mysql_charset='utf8') - - token = sql.Table( - 'token', meta, - sql.Column('id', sql.String(length=64), primary_key=True), - sql.Column('expires', sql.DateTime, default=None), - sql.Column('extra', 
ks_sql.JsonBlob.impl), - sql.Column('valid', sql.Boolean, default=True, nullable=False), - sql.Column('trust_id', sql.String(length=64)), - sql.Column('user_id', sql.String(length=64)), - mysql_engine='InnoDB', - mysql_charset='utf8') - - trust = sql.Table( - 'trust', meta, - sql.Column('id', sql.String(length=64), primary_key=True), - sql.Column('trustor_user_id', sql.String(length=64), nullable=False), - sql.Column('trustee_user_id', sql.String(length=64), nullable=False), - sql.Column('project_id', sql.String(length=64)), - sql.Column('impersonation', sql.Boolean, nullable=False), - sql.Column('deleted_at', sql.DateTime), - sql.Column('expires_at', sql.DateTime), - sql.Column('remaining_uses', sql.Integer, nullable=True), - sql.Column('extra', ks_sql.JsonBlob.impl), - mysql_engine='InnoDB', - mysql_charset='utf8') - - trust_role = sql.Table( - 'trust_role', meta, - sql.Column('trust_id', sql.String(length=64), primary_key=True, - nullable=False), - sql.Column('role_id', sql.String(length=64), primary_key=True, - nullable=False), - mysql_engine='InnoDB', - mysql_charset='utf8') - - user = sql.Table( - 'user', meta, - sql.Column('id', sql.String(length=64), primary_key=True), - sql.Column('name', sql.String(length=255), nullable=False), - sql.Column('extra', ks_sql.JsonBlob.impl), - sql.Column('password', sql.String(length=128)), - sql.Column('enabled', sql.Boolean), - sql.Column('domain_id', sql.String(length=64), nullable=False), - sql.Column('default_project_id', sql.String(length=64)), - mysql_engine='InnoDB', - mysql_charset='utf8') - - user_group_membership = sql.Table( - 'user_group_membership', meta, - sql.Column('user_id', sql.String(length=64), primary_key=True), - sql.Column('group_id', sql.String(length=64), primary_key=True), - mysql_engine='InnoDB', - mysql_charset='utf8') - - region = sql.Table( - 'region', - meta, - sql.Column('id', sql.String(255), primary_key=True), - sql.Column('description', sql.String(255), nullable=False), - 
sql.Column('parent_region_id', sql.String(255), nullable=True), - sql.Column('extra', sql.Text()), - mysql_engine='InnoDB', - mysql_charset='utf8') - - assignment = sql.Table( - 'assignment', - meta, - sql.Column('type', sql.Enum( - assignment_sql.AssignmentType.USER_PROJECT, - assignment_sql.AssignmentType.GROUP_PROJECT, - assignment_sql.AssignmentType.USER_DOMAIN, - assignment_sql.AssignmentType.GROUP_DOMAIN, - name='type'), - nullable=False), - sql.Column('actor_id', sql.String(64), nullable=False), - sql.Column('target_id', sql.String(64), nullable=False), - sql.Column('role_id', sql.String(64), nullable=False), - sql.Column('inherited', sql.Boolean, default=False, nullable=False), - sql.PrimaryKeyConstraint('type', 'actor_id', 'target_id', 'role_id'), - mysql_engine='InnoDB', - mysql_charset='utf8') - - mapping = sql.Table( - 'id_mapping', - meta, - sql.Column('public_id', sql.String(64), primary_key=True), - sql.Column('domain_id', sql.String(64), nullable=False), - sql.Column('local_id', sql.String(64), nullable=False), - sql.Column('entity_type', sql.Enum( - mapping_backend.EntityType.USER, - mapping_backend.EntityType.GROUP, - name='entity_type'), - nullable=False), - mysql_engine='InnoDB', - mysql_charset='utf8') - - domain_config_whitelist = sql.Table( - 'whitelisted_config', - meta, - sql.Column('domain_id', sql.String(64), primary_key=True), - sql.Column('group', sql.String(255), primary_key=True), - sql.Column('option', sql.String(255), primary_key=True), - sql.Column('value', ks_sql.JsonBlob.impl, nullable=False), - mysql_engine='InnoDB', - mysql_charset='utf8') - - domain_config_sensitive = sql.Table( - 'sensitive_config', - meta, - sql.Column('domain_id', sql.String(64), primary_key=True), - sql.Column('group', sql.String(255), primary_key=True), - sql.Column('option', sql.String(255), primary_key=True), - sql.Column('value', ks_sql.JsonBlob.impl, nullable=False), - mysql_engine='InnoDB', - mysql_charset='utf8') - - # create all tables - tables = 
[credential, domain, endpoint, group, policy, project, role, - service, token, trust, trust_role, user, user_group_membership, - region, assignment, mapping, domain_config_whitelist, - domain_config_sensitive] - - for table in tables: - try: - table.create() - except Exception: - LOG.exception('Exception while creating table: %r', table) - raise - - # Unique Constraints - migrate.UniqueConstraint(user.c.domain_id, - user.c.name, - name='ixu_user_name_domain_id').create() - migrate.UniqueConstraint(group.c.domain_id, - group.c.name, - name='ixu_group_name_domain_id').create() - migrate.UniqueConstraint(role.c.name, - name='ixu_role_name').create() - migrate.UniqueConstraint(project.c.domain_id, - project.c.name, - name='ixu_project_name_domain_id').create() - migrate.UniqueConstraint(domain.c.name, - name='ixu_domain_name').create() - migrate.UniqueConstraint(mapping.c.domain_id, - mapping.c.local_id, - mapping.c.entity_type, - name='domain_id').create() - - # Indexes - sql.Index('ix_token_expires', token.c.expires).create() - sql.Index('ix_token_expires_valid', token.c.expires, - token.c.valid).create() - sql.Index('ix_actor_id', assignment.c.actor_id).create() - sql.Index('ix_token_user_id', token.c.user_id).create() - sql.Index('ix_token_trust_id', token.c.trust_id).create() - # NOTE(stevemar): The two indexes below were named 'service_id' and - # 'group_id' in 050_fk_consistent_indexes.py, and need to be preserved - sql.Index('service_id', endpoint.c.service_id).create() - sql.Index('group_id', user_group_membership.c.group_id).create() - - fkeys = [ - {'columns': [endpoint.c.service_id], - 'references': [service.c.id]}, - - {'columns': [user_group_membership.c.group_id], - 'references': [group.c.id], - 'name': 'fk_user_group_membership_group_id'}, - - {'columns': [user_group_membership.c.user_id], - 'references':[user.c.id], - 'name': 'fk_user_group_membership_user_id'}, - - {'columns': [project.c.domain_id], - 'references': [domain.c.id], - 'name': 
'fk_project_domain_id'}, - - {'columns': [endpoint.c.region_id], - 'references': [region.c.id], - 'name': 'fk_endpoint_region_id'}, - - {'columns': [project.c.parent_id], - 'references': [project.c.id], - 'name': 'project_parent_id_fkey'}, - ] - - if migrate_engine.name == 'sqlite': - # NOTE(stevemar): We need to keep this FK constraint due to 073, but - # only for sqlite, once we collapse 073 we can remove this constraint - fkeys.append( - {'columns': [assignment.c.role_id], - 'references': [role.c.id], - 'name': 'fk_assignment_role_id'}) - - for fkey in fkeys: - migrate.ForeignKeyConstraint(columns=fkey['columns'], - refcolumns=fkey['references'], - name=fkey.get('name')).create() diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/068_placeholder.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/068_placeholder.py deleted file mode 100644 index 111df9d4..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/068_placeholder.py +++ /dev/null @@ -1,18 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# This is a placeholder for Kilo backports. Do not use this number for new -# Liberty work. New Liberty work starts after all the placeholders. 
- - -def upgrade(migrate_engine): - pass diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/069_placeholder.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/069_placeholder.py deleted file mode 100644 index 111df9d4..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/069_placeholder.py +++ /dev/null @@ -1,18 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# This is a placeholder for Kilo backports. Do not use this number for new -# Liberty work. New Liberty work starts after all the placeholders. - - -def upgrade(migrate_engine): - pass diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/070_placeholder.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/070_placeholder.py deleted file mode 100644 index 111df9d4..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/070_placeholder.py +++ /dev/null @@ -1,18 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -# This is a placeholder for Kilo backports. Do not use this number for new -# Liberty work. New Liberty work starts after all the placeholders. - - -def upgrade(migrate_engine): - pass diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/071_placeholder.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/071_placeholder.py deleted file mode 100644 index 111df9d4..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/071_placeholder.py +++ /dev/null @@ -1,18 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# This is a placeholder for Kilo backports. Do not use this number for new -# Liberty work. New Liberty work starts after all the placeholders. - - -def upgrade(migrate_engine): - pass diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/072_placeholder.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/072_placeholder.py deleted file mode 100644 index 111df9d4..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/072_placeholder.py +++ /dev/null @@ -1,18 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# This is a placeholder for Kilo backports. Do not use this number for new -# Liberty work. New Liberty work starts after all the placeholders. - - -def upgrade(migrate_engine): - pass diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/073_insert_assignment_inherited_pk.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/073_insert_assignment_inherited_pk.py deleted file mode 100644 index 205f809e..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/073_insert_assignment_inherited_pk.py +++ /dev/null @@ -1,113 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import migrate -import sqlalchemy as sql -from sqlalchemy.orm import sessionmaker - -from keystone.assignment.backends import sql as assignment_sql - - -def upgrade(migrate_engine): - """Inserts inherited column to assignment table PK constraints. - - For non-SQLite databases, it changes the constraint in the existing table. 
- - For SQLite, since changing constraints is not supported, it recreates the - assignment table with the new PK constraint and migrates the existing data. - - """ - ASSIGNMENT_TABLE_NAME = 'assignment' - - metadata = sql.MetaData() - metadata.bind = migrate_engine - - # Retrieve the existing assignment table - assignment_table = sql.Table(ASSIGNMENT_TABLE_NAME, metadata, - autoload=True) - - if migrate_engine.name == 'sqlite': - ACTOR_ID_INDEX_NAME = 'ix_actor_id' - TMP_ASSIGNMENT_TABLE_NAME = 'tmp_assignment' - - # Define the new assignment table with a temporary name - new_assignment_table = sql.Table( - TMP_ASSIGNMENT_TABLE_NAME, metadata, - sql.Column('type', sql.Enum( - assignment_sql.AssignmentType.USER_PROJECT, - assignment_sql.AssignmentType.GROUP_PROJECT, - assignment_sql.AssignmentType.USER_DOMAIN, - assignment_sql.AssignmentType.GROUP_DOMAIN, - name='type'), - nullable=False), - sql.Column('actor_id', sql.String(64), nullable=False), - sql.Column('target_id', sql.String(64), nullable=False), - sql.Column('role_id', sql.String(64), sql.ForeignKey('role.id'), - nullable=False), - sql.Column('inherited', sql.Boolean, default=False, - nullable=False), - sql.PrimaryKeyConstraint('type', 'actor_id', 'target_id', - 'role_id', 'inherited'), - mysql_engine='InnoDB', - mysql_charset='utf8') - - # Create the new assignment table - new_assignment_table.create(migrate_engine, checkfirst=True) - - # Change the index from the existing assignment table to the new one - sql.Index(ACTOR_ID_INDEX_NAME, assignment_table.c.actor_id).drop() - sql.Index(ACTOR_ID_INDEX_NAME, - new_assignment_table.c.actor_id).create() - - # Instantiate session - maker = sessionmaker(bind=migrate_engine) - session = maker() - - # Migrate existing data - insert = new_assignment_table.insert().from_select( - assignment_table.c, select=session.query(assignment_table)) - session.execute(insert) - session.commit() - - # Drop the existing assignment table, in favor of the new one - 
assignment_table.deregister() - assignment_table.drop() - - # Finally, rename the new table to the original assignment table name - new_assignment_table.rename(ASSIGNMENT_TABLE_NAME) - elif migrate_engine.name == 'ibm_db_sa': - # Recreate the existing constraint, marking the inherited column as PK - # for DB2. - - # This is a workaround to the general case in the else statement below. - # Due to a bug in the DB2 sqlalchemy dialect, Column.alter() actually - # creates a primary key over only the "inherited" column. This is wrong - # because the primary key for the table actually covers other columns - # too, not just the "inherited" column. Since the primary key already - # exists for the table after the Column.alter() call, it causes the - # next line to fail with an error that the primary key already exists. - - # The workaround here skips doing the Column.alter(). This causes a - # warning message since the metadata is out of sync. We can remove this - # workaround once the DB2 sqlalchemy dialect is fixed. 
- # DB2 Issue: https://code.google.com/p/ibm-db/issues/detail?id=173 - - migrate.PrimaryKeyConstraint(table=assignment_table).drop() - migrate.PrimaryKeyConstraint( - assignment_table.c.type, assignment_table.c.actor_id, - assignment_table.c.target_id, assignment_table.c.role_id, - assignment_table.c.inherited).create() - else: - # Recreate the existing constraint, marking the inherited column as PK - migrate.PrimaryKeyConstraint(table=assignment_table).drop() - assignment_table.c.inherited.alter(primary_key=True) - migrate.PrimaryKeyConstraint(table=assignment_table).create() diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/074_add_is_domain_project.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/074_add_is_domain_project.py deleted file mode 100644 index dcb89b07..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/074_add_is_domain_project.py +++ /dev/null @@ -1,27 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import sqlalchemy as sql - - -_PROJECT_TABLE_NAME = 'project' -_IS_DOMAIN_COLUMN_NAME = 'is_domain' - - -def upgrade(migrate_engine): - meta = sql.MetaData() - meta.bind = migrate_engine - - project_table = sql.Table(_PROJECT_TABLE_NAME, meta, autoload=True) - is_domain = sql.Column(_IS_DOMAIN_COLUMN_NAME, sql.Boolean, nullable=False, - server_default='0', default=False) - project_table.create_column(is_domain) diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/075_confirm_config_registration.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/075_confirm_config_registration.py deleted file mode 100644 index 576842c6..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/075_confirm_config_registration.py +++ /dev/null @@ -1,29 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import sqlalchemy as sql - -REGISTRATION_TABLE = 'config_register' - - -def upgrade(migrate_engine): - meta = sql.MetaData() - meta.bind = migrate_engine - - registration_table = sql.Table( - REGISTRATION_TABLE, - meta, - sql.Column('type', sql.String(64), primary_key=True), - sql.Column('domain_id', sql.String(64), nullable=False), - mysql_engine='InnoDB', - mysql_charset='utf8') - registration_table.create(migrate_engine, checkfirst=True) diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/076_placeholder.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/076_placeholder.py deleted file mode 100644 index 9f6e8415..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/076_placeholder.py +++ /dev/null @@ -1,18 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# This is a placeholder for Liberty backports. Do not use this number for new -# Mitaka work. New Mitaka work starts after all the placeholders. - - -def upgrade(migrate_engine): - pass diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/077_placeholder.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/077_placeholder.py deleted file mode 100644 index 9f6e8415..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/077_placeholder.py +++ /dev/null @@ -1,18 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# This is a placeholder for Liberty backports. Do not use this number for new -# Mitaka work. New Mitaka work starts after all the placeholders. - - -def upgrade(migrate_engine): - pass diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/078_placeholder.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/078_placeholder.py deleted file mode 100644 index 9f6e8415..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/078_placeholder.py +++ /dev/null @@ -1,18 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# This is a placeholder for Liberty backports. Do not use this number for new -# Mitaka work. New Mitaka work starts after all the placeholders. 
- - -def upgrade(migrate_engine): - pass diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/079_placeholder.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/079_placeholder.py deleted file mode 100644 index 9f6e8415..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/079_placeholder.py +++ /dev/null @@ -1,18 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# This is a placeholder for Liberty backports. Do not use this number for new -# Mitaka work. New Mitaka work starts after all the placeholders. - - -def upgrade(migrate_engine): - pass diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/080_placeholder.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/080_placeholder.py deleted file mode 100644 index 9f6e8415..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/080_placeholder.py +++ /dev/null @@ -1,18 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -# This is a placeholder for Liberty backports. Do not use this number for new -# Mitaka work. New Mitaka work starts after all the placeholders. - - -def upgrade(migrate_engine): - pass diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/081_add_endpoint_policy_table.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/081_add_endpoint_policy_table.py deleted file mode 100644 index a0c307d0..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/081_add_endpoint_policy_table.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright 2014 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import sqlalchemy as sql - -from keystone.common.sql import migration_helpers - - -def upgrade(migrate_engine): - try: - extension_version = migration_helpers.get_db_version( - extension='endpoint_policy', - engine=migrate_engine) - except Exception: - extension_version = 0 - - # This migration corresponds to endpoint_policy extension migration 1. Only - # update if it has not been run. - if extension_version >= 1: - return - - # Upgrade operations go here. 
Don't create your own engine; bind - # migrate_engine to your metadata - meta = sql.MetaData() - meta.bind = migrate_engine - - endpoint_policy_table = sql.Table( - 'policy_association', - meta, - sql.Column('id', sql.String(64), primary_key=True), - sql.Column('policy_id', sql.String(64), - nullable=False), - sql.Column('endpoint_id', sql.String(64), - nullable=True), - sql.Column('service_id', sql.String(64), - nullable=True), - sql.Column('region_id', sql.String(64), - nullable=True), - sql.UniqueConstraint('endpoint_id', 'service_id', 'region_id'), - mysql_engine='InnoDB', - mysql_charset='utf8') - - endpoint_policy_table.create(migrate_engine, checkfirst=True) diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/082_add_federation_tables.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/082_add_federation_tables.py deleted file mode 100644 index 7e426373..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/082_add_federation_tables.py +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright 2014 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_config import cfg -import sqlalchemy as sql - -from keystone.common.sql import migration_helpers - -CONF = cfg.CONF -_RELAY_STATE_PREFIX = 'relay_state_prefix' - - -def upgrade(migrate_engine): - try: - extension_version = migration_helpers.get_db_version( - extension='federation', - engine=migrate_engine) - except Exception: - extension_version = 0 - - # This migration corresponds to federation extension migration 8. Only - # update if it has not been run. - if extension_version >= 8: - return - - # Upgrade operations go here. Don't create your own engine; bind - # migrate_engine to your metadata - meta = sql.MetaData() - meta.bind = migrate_engine - - idp_table = sql.Table( - 'identity_provider', - meta, - sql.Column('id', sql.String(64), primary_key=True), - sql.Column('enabled', sql.Boolean, nullable=False), - sql.Column('description', sql.Text(), nullable=True), - mysql_engine='InnoDB', - mysql_charset='utf8') - idp_table.create(migrate_engine, checkfirst=True) - - federation_protocol_table = sql.Table( - 'federation_protocol', - meta, - sql.Column('id', sql.String(64), primary_key=True), - sql.Column('idp_id', sql.String(64), - sql.ForeignKey('identity_provider.id', ondelete='CASCADE'), - primary_key=True), - sql.Column('mapping_id', sql.String(64), nullable=False), - mysql_engine='InnoDB', - mysql_charset='utf8') - federation_protocol_table.create(migrate_engine, checkfirst=True) - - mapping_table = sql.Table( - 'mapping', - meta, - sql.Column('id', sql.String(64), primary_key=True), - sql.Column('rules', sql.Text(), nullable=False), - mysql_engine='InnoDB', - mysql_charset='utf8') - mapping_table.create(migrate_engine, checkfirst=True) - - relay_state_prefix_default = CONF.saml.relay_state_prefix - sp_table = sql.Table( - 'service_provider', - meta, - sql.Column('auth_url', sql.String(256), nullable=False), - sql.Column('id', sql.String(64), primary_key=True), - sql.Column('enabled', sql.Boolean, nullable=False), - sql.Column('description', 
sql.Text(), nullable=True), - sql.Column('sp_url', sql.String(256), nullable=False), - sql.Column(_RELAY_STATE_PREFIX, sql.String(256), nullable=False, - server_default=relay_state_prefix_default), - mysql_engine='InnoDB', - mysql_charset='utf8') - sp_table.create(migrate_engine, checkfirst=True) - - idp_table = sql.Table('identity_provider', meta, autoload=True) - remote_id_table = sql.Table( - 'idp_remote_ids', - meta, - sql.Column('idp_id', sql.String(64), - sql.ForeignKey('identity_provider.id', ondelete='CASCADE')), - sql.Column('remote_id', sql.String(255), primary_key=True), - mysql_engine='InnoDB', - mysql_charset='utf8') - remote_id_table.create(migrate_engine, checkfirst=True) diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/083_add_oauth1_tables.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/083_add_oauth1_tables.py deleted file mode 100644 index 5a859b4b..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/083_add_oauth1_tables.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright 2014 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import sqlalchemy as sql - -from keystone.common.sql import migration_helpers - - -def upgrade(migrate_engine): - try: - extension_version = migration_helpers.get_db_version( - extension='oauth1', - engine=migrate_engine) - except Exception: - extension_version = 0 - - # This migration corresponds to oauth extension migration 5. Only - # update if it has not been run. 
- if extension_version >= 5: - return - - # Upgrade operations go here. Don't create your own engine; bind - # migrate_engine to your metadata - meta = sql.MetaData() - meta.bind = migrate_engine - - consumer_table = sql.Table( - 'consumer', - meta, - sql.Column('id', sql.String(64), primary_key=True, nullable=False), - sql.Column('description', sql.String(64), nullable=True), - sql.Column('secret', sql.String(64), nullable=False), - sql.Column('extra', sql.Text(), nullable=False)) - consumer_table.create(migrate_engine, checkfirst=True) - - request_token_table = sql.Table( - 'request_token', - meta, - sql.Column('id', sql.String(64), primary_key=True, nullable=False), - sql.Column('request_secret', sql.String(64), nullable=False), - sql.Column('verifier', sql.String(64), nullable=True), - sql.Column('authorizing_user_id', sql.String(64), nullable=True), - sql.Column('requested_project_id', sql.String(64), nullable=False), - sql.Column('role_ids', sql.Text(), nullable=True), - sql.Column('consumer_id', sql.String(64), - sql.ForeignKey('consumer.id'), - nullable=False, index=True), - sql.Column('expires_at', sql.String(64), nullable=True)) - request_token_table.create(migrate_engine, checkfirst=True) - - access_token_table = sql.Table( - 'access_token', - meta, - sql.Column('id', sql.String(64), primary_key=True, nullable=False), - sql.Column('access_secret', sql.String(64), nullable=False), - sql.Column('authorizing_user_id', sql.String(64), - nullable=False, index=True), - sql.Column('project_id', sql.String(64), nullable=False), - sql.Column('role_ids', sql.Text(), nullable=False), - sql.Column('consumer_id', sql.String(64), - sql.ForeignKey('consumer.id'), - nullable=False, index=True), - sql.Column('expires_at', sql.String(64), nullable=True)) - access_token_table.create(migrate_engine, checkfirst=True) diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/084_add_revoke_tables.py 
b/keystone-moon/keystone/common/sql/migrate_repo/versions/084_add_revoke_tables.py deleted file mode 100644 index 1a28a53c..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/084_add_revoke_tables.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright 2014 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import sqlalchemy as sql - -from keystone.common.sql import migration_helpers - - -def upgrade(migrate_engine): - try: - extension_version = migration_helpers.get_db_version( - extension='revoke', - engine=migrate_engine) - except Exception: - extension_version = 0 - - # This migration corresponds to revoke extension migration 2. Only - # update if it has not been run. - if extension_version >= 2: - return - - # Upgrade operations go here. 
Don't create your own engine; bind - # migrate_engine to your metadata - meta = sql.MetaData() - meta.bind = migrate_engine - - service_table = sql.Table( - 'revocation_event', - meta, - sql.Column('id', sql.String(64), primary_key=True), - sql.Column('domain_id', sql.String(64)), - sql.Column('project_id', sql.String(64)), - sql.Column('user_id', sql.String(64)), - sql.Column('role_id', sql.String(64)), - sql.Column('trust_id', sql.String(64)), - sql.Column('consumer_id', sql.String(64)), - sql.Column('access_token_id', sql.String(64)), - sql.Column('issued_before', sql.DateTime(), nullable=False), - sql.Column('expires_at', sql.DateTime()), - sql.Column('revoked_at', sql.DateTime(), index=True, nullable=False), - sql.Column('audit_id', sql.String(32), nullable=True), - sql.Column('audit_chain_id', sql.String(32), nullable=True)) - - service_table.create(migrate_engine, checkfirst=True) diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/085_add_endpoint_filtering_table.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/085_add_endpoint_filtering_table.py deleted file mode 100644 index 5790bd98..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/085_add_endpoint_filtering_table.py +++ /dev/null @@ -1,70 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import sqlalchemy as sql - -from keystone.common.sql import migration_helpers - - -def upgrade(migrate_engine): - try: - extension_version = migration_helpers.get_db_version( - extension='endpoint_filter', - engine=migrate_engine) - except Exception: - extension_version = 0 - - # This migration corresponds to endpoint_filter extension migration 2. Only - # update if it has not been run. - if extension_version >= 2: - return - - # Upgrade operations go here. Don't create your own engine; bind - # migrate_engine to your metadata - meta = sql.MetaData() - meta.bind = migrate_engine - - EP_GROUP_ID = 'endpoint_group_id' - PROJECT_ID = 'project_id' - - endpoint_filtering_table = sql.Table( - 'project_endpoint', - meta, - sql.Column( - 'endpoint_id', - sql.String(64), - primary_key=True, - nullable=False), - sql.Column( - 'project_id', - sql.String(64), - primary_key=True, - nullable=False)) - endpoint_filtering_table.create(migrate_engine, checkfirst=True) - - endpoint_group_table = sql.Table( - 'endpoint_group', - meta, - sql.Column('id', sql.String(64), primary_key=True), - sql.Column('name', sql.String(255), nullable=False), - sql.Column('description', sql.Text, nullable=True), - sql.Column('filters', sql.Text(), nullable=False)) - endpoint_group_table.create(migrate_engine, checkfirst=True) - - project_endpoint_group_table = sql.Table( - 'project_endpoint_group', - meta, - sql.Column(EP_GROUP_ID, sql.String(64), - sql.ForeignKey('endpoint_group.id'), nullable=False), - sql.Column(PROJECT_ID, sql.String(64), nullable=False), - sql.PrimaryKeyConstraint(EP_GROUP_ID, PROJECT_ID)) - project_endpoint_group_table.create(migrate_engine, checkfirst=True) diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/086_add_duplicate_constraint_trusts.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/086_add_duplicate_constraint_trusts.py deleted file mode 100644 index 2b115ea4..00000000 --- 
a/keystone-moon/keystone/common/sql/migrate_repo/versions/086_add_duplicate_constraint_trusts.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright 2015 Intel Corporation -# All Rights Reserved -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from migrate import UniqueConstraint -from sqlalchemy import MetaData, Table - - -def upgrade(migrate_engine): - meta = MetaData(bind=migrate_engine) - trusts = Table('trust', meta, autoload=True) - - UniqueConstraint('trustor_user_id', 'trustee_user_id', 'project_id', - 'impersonation', 'expires_at', table=trusts, - name='duplicate_trust_constraint').create() diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/087_implied_roles.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/087_implied_roles.py deleted file mode 100644 index 7713ce8f..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/087_implied_roles.py +++ /dev/null @@ -1,43 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- - -import migrate -import sqlalchemy as sql - - -ROLE_TABLE = 'role' - - -def upgrade(migrate_engine): - meta = sql.MetaData() - meta.bind = migrate_engine - - implied_role = sql.Table( - 'implied_role', meta, - sql.Column('prior_role_id', sql.String(length=64), primary_key=True), - sql.Column( - 'implied_role_id', sql.String(length=64), primary_key=True), - mysql_engine='InnoDB', - mysql_charset='utf8') - implied_role.create() - role = sql.Table(ROLE_TABLE, meta, autoload=True) - fkeys = [ - {'columns': [implied_role.c.prior_role_id], - 'references': [role.c.id]}, - {'columns': [implied_role.c.implied_role_id], - 'references': [role.c.id]}, - ] - for fkey in fkeys: - migrate.ForeignKeyConstraint(columns=fkey['columns'], - refcolumns=fkey['references'], - name=fkey.get('name')).create() diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/088_domain_specific_roles.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/088_domain_specific_roles.py deleted file mode 100644 index 8b792dfa..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/088_domain_specific_roles.py +++ /dev/null @@ -1,60 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import migrate -import sqlalchemy as sql - - -_ROLE_NAME_NEW_CONSTRAINT = 'ixu_role_name_domain_id' -_ROLE_TABLE_NAME = 'role' -_ROLE_NAME_COLUMN_NAME = 'name' -_DOMAIN_ID_COLUMN_NAME = 'domain_id' -_NULL_DOMAIN_ID = '<>' - - -def upgrade(migrate_engine): - meta = sql.MetaData() - meta.bind = migrate_engine - - role_table = sql.Table(_ROLE_TABLE_NAME, meta, autoload=True) - domain_id = sql.Column(_DOMAIN_ID_COLUMN_NAME, sql.String(64), - nullable=False, server_default=_NULL_DOMAIN_ID) - - # NOTE(morganfainberg): the `role_name` unique constraint is not - # guaranteed to be a fixed name, such as 'ixu_role_name`, so we need to - # search for the correct constraint that only affects role_table.c.name - # and drop that constraint. - to_drop = None - if migrate_engine.name == 'mysql': - for c in role_table.indexes: - if (c.unique and len(c.columns) == 1 and - _ROLE_NAME_COLUMN_NAME in c.columns): - to_drop = c - break - else: - for c in role_table.constraints: - if len(c.columns) == 1 and _ROLE_NAME_COLUMN_NAME in c.columns: - to_drop = c - break - - if to_drop is not None: - migrate.UniqueConstraint(role_table.c.name, - name=to_drop.name).drop() - - # perform changes after constraint is dropped. - if 'domain_id' not in role_table.columns: - # Only create the column if it doesn't already exist. - role_table.create_column(domain_id) - - migrate.UniqueConstraint(role_table.c.name, - role_table.c.domain_id, - name=_ROLE_NAME_NEW_CONSTRAINT).create() diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/089_add_root_of_all_domains.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/089_add_root_of_all_domains.py deleted file mode 100644 index 477c719a..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/089_add_root_of_all_domains.py +++ /dev/null @@ -1,76 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import sqlalchemy as sql - - -_PROJECT_TABLE_NAME = 'project' -_DOMAIN_TABLE_NAME = 'domain' -NULL_DOMAIN_ID = '<>' - - -def upgrade(migrate_engine): - - def _generate_root_domain_project(): - # Generate a project that will act as a root for all domains, in order - # for use to be able to use a FK constraint on domain_id. Projects - # acting as a domain will not reference this as their parent_id, just - # as domain_id. - # - # This special project is filtered out by the driver, so is never - # visible to the manager or API. - - project_ref = { - 'id': NULL_DOMAIN_ID, - 'name': NULL_DOMAIN_ID, - 'enabled': False, - 'description': '', - 'domain_id': NULL_DOMAIN_ID, - 'is_domain': True, - 'parent_id': None, - 'extra': '{}' - } - return project_ref - - def _generate_root_domain(): - # Generate a similar root for the domain table, this is an interim - # step so as to allow continuation of current project domain_id FK. - # - # This special domain is filtered out by the driver, so is never - # visible to the manager or API. 
- - domain_ref = { - 'id': NULL_DOMAIN_ID, - 'name': NULL_DOMAIN_ID, - 'enabled': False, - 'extra': '{}' - } - return domain_ref - - meta = sql.MetaData() - meta.bind = migrate_engine - session = sql.orm.sessionmaker(bind=migrate_engine)() - - project_table = sql.Table(_PROJECT_TABLE_NAME, meta, autoload=True) - domain_table = sql.Table(_DOMAIN_TABLE_NAME, meta, autoload=True) - - root_domain = _generate_root_domain() - new_entry = domain_table.insert().values(**root_domain) - session.execute(new_entry) - session.commit() - - root_domain_project = _generate_root_domain_project() - new_entry = project_table.insert().values(**root_domain_project) - session.execute(new_entry) - session.commit() - - session.close() diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/090_add_local_user_and_password_tables.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/090_add_local_user_and_password_tables.py deleted file mode 100644 index 800ba47e..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/090_add_local_user_and_password_tables.py +++ /dev/null @@ -1,42 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import sqlalchemy as sql - - -def upgrade(migrate_engine): - meta = sql.MetaData() - meta.bind = migrate_engine - - user = sql.Table('user', meta, autoload=True) - - local_user = sql.Table( - 'local_user', - meta, - sql.Column('id', sql.Integer, primary_key=True, nullable=False), - sql.Column('user_id', sql.String(64), - sql.ForeignKey(user.c.id, ondelete='CASCADE'), - nullable=False, unique=True), - sql.Column('domain_id', sql.String(64), nullable=False), - sql.Column('name', sql.String(255), nullable=False), - sql.UniqueConstraint('domain_id', 'name')) - local_user.create(migrate_engine, checkfirst=True) - - password = sql.Table( - 'password', - meta, - sql.Column('id', sql.Integer, primary_key=True, nullable=False), - sql.Column('local_user_id', sql.Integer, - sql.ForeignKey(local_user.c.id, ondelete='CASCADE'), - nullable=False), - sql.Column('password', sql.String(128), nullable=False)) - password.create(migrate_engine, checkfirst=True) diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/091_migrate_data_to_local_user_and_password_tables.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/091_migrate_data_to_local_user_and_password_tables.py deleted file mode 100644 index 1f41fd89..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/091_migrate_data_to_local_user_and_password_tables.py +++ /dev/null @@ -1,66 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import migrate -import sqlalchemy as sql -from sqlalchemy import func - - -def upgrade(migrate_engine): - meta = sql.MetaData() - meta.bind = migrate_engine - - user_table = sql.Table('user', meta, autoload=True) - local_user_table = sql.Table('local_user', meta, autoload=True) - password_table = sql.Table('password', meta, autoload=True) - - # migrate data to local_user table - local_user_values = [] - for row in user_table.select().execute(): - # skip the row that already exists in `local_user`, this could - # happen if run into a partially-migrated table due to the - # bug #1549705. - filter_by = local_user_table.c.user_id == row['id'] - user_count = sql.select([func.count()]).select_from( - local_user_table).where(filter_by).execute().fetchone()[0] - if user_count == 0: - local_user_values.append({'user_id': row['id'], - 'domain_id': row['domain_id'], - 'name': row['name']}) - if local_user_values: - local_user_table.insert().values(local_user_values).execute() - - # migrate data to password table - sel = ( - sql.select([user_table, local_user_table], use_labels=True) - .select_from(user_table.join(local_user_table, user_table.c.id == - local_user_table.c.user_id)) - ) - user_rows = sel.execute() - password_values = [] - for row in user_rows: - if row['user_password']: - password_values.append({'local_user_id': row['local_user_id'], - 'password': row['user_password']}) - if password_values: - password_table.insert().values(password_values).execute() - - # remove domain_id and name unique constraint - if migrate_engine.name != 'sqlite': - migrate.UniqueConstraint(user_table.c.domain_id, - user_table.c.name, - name='ixu_user_name_domain_id').drop() - - # drop user columns - user_table.c.domain_id.drop() - user_table.c.name.drop() - user_table.c.password.drop() diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/092_make_implied_roles_fks_cascaded.py 
b/keystone-moon/keystone/common/sql/migrate_repo/versions/092_make_implied_roles_fks_cascaded.py deleted file mode 100644 index 5e841899..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/092_make_implied_roles_fks_cascaded.py +++ /dev/null @@ -1,46 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -import migrate -import sqlalchemy as sql - - -ROLE_TABLE = 'role' -IMPLIED_ROLE_TABLE = 'implied_role' - - -def upgrade(migrate_engine): - meta = sql.MetaData() - meta.bind = migrate_engine - - role = sql.Table(ROLE_TABLE, meta, autoload=True) - implied_role = sql.Table(IMPLIED_ROLE_TABLE, meta, autoload=True) - - fkeys = [ - {'columns': [implied_role.c.prior_role_id], - 'references': [role.c.id]}, - {'columns': [implied_role.c.implied_role_id], - 'references': [role.c.id]}, - ] - - # NOTE(stevemar): We need to divide these into two separate loops otherwise - # they may clobber each other and only end up with one foreign key. 
- for fkey in fkeys: - migrate.ForeignKeyConstraint(columns=fkey['columns'], - refcolumns=fkey['references'], - name=fkey.get('name')).drop() - for fkey in fkeys: - migrate.ForeignKeyConstraint(columns=fkey['columns'], - refcolumns=fkey['references'], - name=fkey.get('name'), - ondelete="CASCADE").create() diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/093_migrate_domains_to_projects.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/093_migrate_domains_to_projects.py deleted file mode 100644 index f6bba7d9..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/093_migrate_domains_to_projects.py +++ /dev/null @@ -1,125 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import json - -import sqlalchemy as sql - -from keystone.common.sql import migration_helpers - - -_PROJECT_TABLE_NAME = 'project' -_DOMAIN_TABLE_NAME = 'domain' -_PARENT_ID_COLUMN_NAME = 'parent_id' -_DOMAIN_ID_COLUMN_NAME = 'domain_id' - -# Above the driver level, the domain_id of a project acting as a domain is -# None. However, in order to enable sql integrity constraints to still operate -# on this column, we create a special "root of all domains" row, with an ID of -# NULL_DOMAIN_ID, which all projects acting as a domain reference in their -# domain_id attribute. This special row, as well as NULL_DOMAIN_ID, are never -# exposed outside of sql driver layer. 
-NULL_DOMAIN_ID = '<>' - - -def list_existing_project_constraints(project_table, domain_table): - constraints = [{'table': project_table, - 'fk_column': _PARENT_ID_COLUMN_NAME, - 'ref_column': project_table.c.id}, - {'table': project_table, - 'fk_column': _DOMAIN_ID_COLUMN_NAME, - 'ref_column': domain_table.c.id}] - - return constraints - - -def list_new_project_constraints(project_table): - constraints = [{'table': project_table, - 'fk_column': _PARENT_ID_COLUMN_NAME, - 'ref_column': project_table.c.id}, - {'table': project_table, - 'fk_column': _DOMAIN_ID_COLUMN_NAME, - 'ref_column': project_table.c.id}] - - return constraints - - -def upgrade(migrate_engine): - - def _project_from_domain(domain): - # Creates a project dict with is_domain=True from the provided - # domain. - - description = None - extra = {} - if domain.extra is not None: - # 'description' property is an extra attribute in domains but a - # first class attribute in projects - extra = json.loads(domain.extra) - description = extra.pop('description', None) - - return { - 'id': domain.id, - 'name': domain.name, - 'enabled': domain.enabled, - 'description': description, - 'domain_id': NULL_DOMAIN_ID, - 'is_domain': True, - 'parent_id': None, - 'extra': json.dumps(extra) - } - - meta = sql.MetaData() - meta.bind = migrate_engine - session = sql.orm.sessionmaker(bind=migrate_engine)() - - project_table = sql.Table(_PROJECT_TABLE_NAME, meta, autoload=True) - domain_table = sql.Table(_DOMAIN_TABLE_NAME, meta, autoload=True) - - # NOTE(htruta): Remove the parent_id constraint during the migration - # because for every root project inside this domain, we will set - # the project domain_id to be its parent_id. We re-enable the constraint - # in the end of this method. We also remove the domain_id constraint, - # while be recreated a FK to the project_id at the end. 
- migration_helpers.remove_constraints( - list_existing_project_constraints(project_table, domain_table)) - - # For each domain, create a project acting as a domain. We ignore the - # "root of all domains" row, since we already have one of these in the - # project table. - domains = list(domain_table.select().execute()) - for domain in domains: - if domain.id == NULL_DOMAIN_ID: - continue - is_domain_project = _project_from_domain(domain) - new_entry = project_table.insert().values(**is_domain_project) - session.execute(new_entry) - session.commit() - - # For each project, that has no parent (i.e. a top level project), update - # it's parent_id to point at the project acting as its domain. We ignore - # the "root of all domains" row, since its parent_id must always be None. - projects = list(project_table.select().execute()) - for project in projects: - if (project.parent_id is not None or project.is_domain or - project.id == NULL_DOMAIN_ID): - continue - values = {'parent_id': project.domain_id} - update = project_table.update().where( - project_table.c.id == project.id).values(values) - session.execute(update) - session.commit() - - migration_helpers.add_constraints( - list_new_project_constraints(project_table)) - - session.close() diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/094_add_federated_user_table.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/094_add_federated_user_table.py deleted file mode 100644 index 6fd3f051..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/094_add_federated_user_table.py +++ /dev/null @@ -1,43 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import migrate -import sqlalchemy as sql - - -def upgrade(migrate_engine): - meta = sql.MetaData() - meta.bind = migrate_engine - - user_table = sql.Table('user', meta, autoload=True) - idp_table = sql.Table('identity_provider', meta, autoload=True) - protocol_table = sql.Table('federation_protocol', meta, autoload=True) - - federated_table = sql.Table( - 'federated_user', - meta, - sql.Column('id', sql.Integer, primary_key=True, nullable=False), - sql.Column('user_id', sql.String(64), - sql.ForeignKey(user_table.c.id, ondelete='CASCADE'), - nullable=False), - sql.Column('idp_id', sql.String(64), - sql.ForeignKey(idp_table.c.id, ondelete='CASCADE'), - nullable=False), - sql.Column('protocol_id', sql.String(64), nullable=False), - sql.Column('unique_id', sql.String(255), nullable=False), - sql.Column('display_name', sql.String(255), nullable=True), - sql.UniqueConstraint('idp_id', 'protocol_id', 'unique_id')) - federated_table.create(migrate_engine, checkfirst=True) - - migrate.ForeignKeyConstraint( - columns=[federated_table.c.protocol_id, federated_table.c.idp_id], - refcolumns=[protocol_table.c.id, protocol_table.c.idp_id]).create() diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/095_add_integer_pkey_to_revocation_event_table.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/095_add_integer_pkey_to_revocation_event_table.py deleted file mode 100644 index 7a75f7b1..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/095_add_integer_pkey_to_revocation_event_table.py +++ /dev/null @@ -1,62 +0,0 @@ -# 
Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import sqlalchemy as sql - - -def upgrade(migrate_engine): - meta = sql.MetaData() - meta.bind = migrate_engine - - # You can specify primary keys when creating tables, however adding - # auto-increment integer primary keys for existing tables is not - # cross-engine compatibility supported. Thus, the approach is to: - # (1) create a new revocation_event table with an int pkey, - # (2) migrate data from the old table to the new table, - # (3) delete the old revocation_event table - # (4) rename the new revocation_event table - revocation_table = sql.Table('revocation_event', meta, autoload=True) - - revocation_table_new = sql.Table( - 'revocation_event_new', - meta, - sql.Column('id', sql.Integer, primary_key=True), - sql.Column('domain_id', sql.String(64)), - sql.Column('project_id', sql.String(64)), - sql.Column('user_id', sql.String(64)), - sql.Column('role_id', sql.String(64)), - sql.Column('trust_id', sql.String(64)), - sql.Column('consumer_id', sql.String(64)), - sql.Column('access_token_id', sql.String(64)), - sql.Column('issued_before', sql.DateTime(), nullable=False), - sql.Column('expires_at', sql.DateTime()), - sql.Column('revoked_at', sql.DateTime(), index=True, nullable=False), - sql.Column('audit_id', sql.String(32), nullable=True), - sql.Column('audit_chain_id', sql.String(32), nullable=True)) - revocation_table_new.create(migrate_engine, checkfirst=True) - - 
revocation_table_new.insert().from_select(['domain_id', - 'project_id', - 'user_id', - 'role_id', - 'trust_id', - 'consumer_id', - 'access_token_id', - 'issued_before', - 'expires_at', - 'revoked_at', - 'audit_id', - 'audit_chain_id'], - revocation_table.select()) - - revocation_table.drop() - revocation_table_new.rename('revocation_event') diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/096_drop_role_name_constraint.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/096_drop_role_name_constraint.py deleted file mode 100644 index 0156de21..00000000 --- a/keystone-moon/keystone/common/sql/migrate_repo/versions/096_drop_role_name_constraint.py +++ /dev/null @@ -1,50 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import migrate -import sqlalchemy as sql - -_ROLE_TABLE_NAME = 'role' -_ROLE_NAME_COLUMN_NAME = 'name' - - -def upgrade(migrate_engine): - meta = sql.MetaData() - meta.bind = migrate_engine - - role_table = sql.Table(_ROLE_TABLE_NAME, meta, autoload=True) - - # NOTE(morganfainberg): the `role_name` unique constraint is not - # guaranteed to be named 'ixu_role_name', so we need to search for the - # correct constraint that only affects role_table.c.name and drop - # that constraint. - # - # This is an idempotent change that reflects the fix to migration - # 88 if the role_name unique constraint was not named consistently and - # someone manually fixed the migrations / db without dropping the - # old constraint. 
- to_drop = None - if migrate_engine.name == 'mysql': - for c in role_table.indexes: - if (c.unique and len(c.columns) == 1 and - _ROLE_NAME_COLUMN_NAME in c.columns): - to_drop = c - break - else: - for c in role_table.constraints: - if len(c.columns) == 1 and _ROLE_NAME_COLUMN_NAME in c.columns: - to_drop = c - break - - if to_drop is not None: - migrate.UniqueConstraint(role_table.c.name, - name=to_drop.name).drop() diff --git a/keystone-moon/keystone/common/sql/migrate_repo/versions/__init__.py b/keystone-moon/keystone/common/sql/migrate_repo/versions/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/common/sql/migration_helpers.py b/keystone-moon/keystone/common/sql/migration_helpers.py deleted file mode 100644 index 40c1fbb5..00000000 --- a/keystone-moon/keystone/common/sql/migration_helpers.py +++ /dev/null @@ -1,245 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# Copyright 2013 Red Hat, Inc. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import os -import sys - -import migrate -from migrate import exceptions -from oslo_config import cfg -from oslo_db.sqlalchemy import migration -from oslo_utils import importutils -import six -import sqlalchemy - -from keystone.common import sql -from keystone import contrib -from keystone import exception -from keystone.i18n import _ - - -CONF = cfg.CONF -DEFAULT_EXTENSIONS = [] - -MIGRATED_EXTENSIONS = ['endpoint_policy', - 'federation', - 'oauth1', - 'revoke', - 'endpoint_filter' - ] - - -# Different RDBMSs use different schemes for naming the Foreign Key -# Constraints. SQLAlchemy does not yet attempt to determine the name -# for the constraint, and instead attempts to deduce it from the column. -# This fails on MySQL. -def get_constraints_names(table, column_name): - fkeys = [fk.name for fk in table.constraints - if (isinstance(fk, sqlalchemy.ForeignKeyConstraint) and - column_name in fk.columns)] - return fkeys - - -# remove_constraints and add_constraints both accept a list of dictionaries -# that contain: -# {'table': a sqlalchemy table. The constraint is added to dropped from -# this table. -# 'fk_column': the name of a column on the above table, The constraint -# is added to or dropped from this column -# 'ref_column':a sqlalchemy column object. This is the reference column -# for the constraint. -def remove_constraints(constraints): - for constraint_def in constraints: - constraint_names = get_constraints_names(constraint_def['table'], - constraint_def['fk_column']) - for constraint_name in constraint_names: - migrate.ForeignKeyConstraint( - columns=[getattr(constraint_def['table'].c, - constraint_def['fk_column'])], - refcolumns=[constraint_def['ref_column']], - name=constraint_name).drop() - - -def add_constraints(constraints): - for constraint_def in constraints: - - if constraint_def['table'].kwargs.get('mysql_engine') == 'MyISAM': - # Don't try to create constraint when using MyISAM because it's - # not supported. 
- continue - - ref_col = constraint_def['ref_column'] - ref_engine = ref_col.table.kwargs.get('mysql_engine') - if ref_engine == 'MyISAM': - # Don't try to create constraint when using MyISAM because it's - # not supported. - continue - - migrate.ForeignKeyConstraint( - columns=[getattr(constraint_def['table'].c, - constraint_def['fk_column'])], - refcolumns=[constraint_def['ref_column']]).create() - - -def rename_tables_with_constraints(renames, constraints, engine): - """Renames tables with foreign key constraints. - - Tables are renamed after first removing constraints. The constraints are - replaced after the rename is complete. - - This works on databases that don't support renaming tables that have - constraints on them (DB2). - - `renames` is a dict, mapping {'to_table_name': from_table, ...} - """ - if engine.name != 'sqlite': - # SQLite doesn't support constraints, so nothing to remove. - remove_constraints(constraints) - - for to_table_name in renames: - from_table = renames[to_table_name] - from_table.rename(to_table_name) - - if engine != 'sqlite': - add_constraints(constraints) - - -def find_migrate_repo(package=None, repo_name='migrate_repo'): - package = package or sql - path = os.path.abspath(os.path.join( - os.path.dirname(package.__file__), repo_name)) - if os.path.isdir(path): - return path - raise exception.MigrationNotProvided(package.__name__, path) - - -def _sync_common_repo(version): - abs_path = find_migrate_repo() - init_version = get_init_version() - with sql.session_for_write() as session: - engine = session.get_bind() - _assert_not_schema_downgrade(version=version) - migration.db_sync(engine, abs_path, version=version, - init_version=init_version, sanity_check=False) - - -def get_init_version(abs_path=None): - """Get the initial version of a migrate repository - - :param abs_path: Absolute path to migrate repository. - :return: initial version number or None, if DB is empty. 
- """ - if abs_path is None: - abs_path = find_migrate_repo() - - repo = migrate.versioning.repository.Repository(abs_path) - - # Sadly, Repository has a `latest` but not an `oldest`. - # The value is a VerNum object which needs to be converted into an int. - oldest = int(min(repo.versions.versions)) - - if oldest < 1: - return None - - # The initial version is one less - return oldest - 1 - - -def _assert_not_schema_downgrade(extension=None, version=None): - if version is not None: - try: - current_ver = int(six.text_type(get_db_version(extension))) - if int(version) < current_ver: - raise migration.exception.DbMigrationError( - _("Unable to downgrade schema")) - except exceptions.DatabaseNotControlledError: # nosec - # NOTE(morganfainberg): The database is not controlled, this action - # cannot be a downgrade. - pass - - -def _sync_extension_repo(extension, version): - if extension in MIGRATED_EXTENSIONS: - raise exception.MigrationMovedFailure(extension=extension) - - with sql.session_for_write() as session: - engine = session.get_bind() - - try: - package_name = '.'.join((contrib.__name__, extension)) - package = importutils.import_module(package_name) - except ImportError: - raise ImportError(_("%s extension does not exist.") - % package_name) - try: - abs_path = find_migrate_repo(package) - try: - migration.db_version_control(engine, abs_path) - # Register the repo with the version control API - # If it already knows about the repo, it will throw - # an exception that we can safely ignore - except exceptions.DatabaseAlreadyControlledError: # nosec - pass - except exception.MigrationNotProvided as e: - print(e) - sys.exit(1) - - _assert_not_schema_downgrade(extension=extension, version=version) - - init_version = get_init_version(abs_path=abs_path) - - migration.db_sync(engine, abs_path, version=version, - init_version=init_version, sanity_check=False) - - -def sync_database_to_version(extension=None, version=None): - if not extension: - 
_sync_common_repo(version) - # If version is greater than 0, it is for the common - # repository only, and only that will be synchronized. - if version is None: - for default_extension in DEFAULT_EXTENSIONS: - _sync_extension_repo(default_extension, version) - else: - _sync_extension_repo(extension, version) - - -def get_db_version(extension=None): - if not extension: - with sql.session_for_write() as session: - return migration.db_version(session.get_bind(), - find_migrate_repo(), - get_init_version()) - - try: - package_name = '.'.join((contrib.__name__, extension)) - package = importutils.import_module(package_name) - except ImportError: - raise ImportError(_("%s extension does not exist.") - % package_name) - - with sql.session_for_write() as session: - return migration.db_version( - session.get_bind(), find_migrate_repo(package), 0) - - -def print_db_version(extension=None): - try: - db_version = get_db_version(extension=extension) - print(db_version) - except exception.MigrationNotProvided as e: - print(e) - sys.exit(1) diff --git a/keystone-moon/keystone/common/tokenless_auth.py b/keystone-moon/keystone/common/tokenless_auth.py deleted file mode 100644 index fd9c1592..00000000 --- a/keystone-moon/keystone/common/tokenless_auth.py +++ /dev/null @@ -1,192 +0,0 @@ -# Copyright 2015 Hewlett-Packard -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import hashlib - -from oslo_config import cfg -from oslo_log import log - -from keystone.auth import controllers -from keystone.common import dependency -from keystone import exception -from keystone.federation import constants as federation_constants -from keystone.federation import utils -from keystone.i18n import _ - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -@dependency.requires('assignment_api', 'federation_api', - 'identity_api', 'resource_api') -class TokenlessAuthHelper(object): - def __init__(self, env): - """A init class for TokenlessAuthHelper. - - :param env: The HTTP request environment that should contain - client certificate attributes. These attributes should match - with what the mapping defines. Or a user cannot be mapped and - results un-authenticated. The following examples are for the - attributes that reference to the client certificate's Subject's - Common Name and Organization: - SSL_CLIENT_S_DN_CN, SSL_CLIENT_S_DN_O - :type env: dict - """ - self.env = env - - def _build_scope_info(self): - """Build the token request scope based on the headers. 
- - :returns: scope data - :rtype: dict - """ - project_id = self.env.get('HTTP_X_PROJECT_ID') - project_name = self.env.get('HTTP_X_PROJECT_NAME') - project_domain_id = self.env.get('HTTP_X_PROJECT_DOMAIN_ID') - project_domain_name = self.env.get('HTTP_X_PROJECT_DOMAIN_NAME') - domain_id = self.env.get('HTTP_X_DOMAIN_ID') - domain_name = self.env.get('HTTP_X_DOMAIN_NAME') - - scope = {} - if project_id: - scope['project'] = {'id': project_id} - elif project_name: - scope['project'] = {'name': project_name} - if project_domain_id: - scope['project']['domain'] = {'id': project_domain_id} - elif project_domain_name: - scope['project']['domain'] = {'name': project_domain_name} - else: - msg = _('Neither Project Domain ID nor Project Domain Name ' - 'was provided.') - raise exception.ValidationError(msg) - elif domain_id: - scope['domain'] = {'id': domain_id} - elif domain_name: - scope['domain'] = {'name': domain_name} - else: - raise exception.ValidationError( - attribute='project or domain', - target='scope') - return scope - - def get_scope(self): - auth = {} - # NOTE(chioleong): Auth methods here are insignificant because - # we only care about using auth.controllers.AuthInfo - # to validate the scope information. Therefore, - # we don't provide any identity. - auth['scope'] = self._build_scope_info() - - # NOTE(chioleong): We'll let AuthInfo validate the scope for us - auth_info = controllers.AuthInfo.create({}, auth, scope_only=True) - return auth_info.get_scope() - - def get_mapped_user(self, project_id=None, domain_id=None): - """Map client certificate to an existing user. - - If user is ephemeral, there is no validation on the user himself; - however it will be mapped to a corresponding group(s) and the scope - of this ephemeral user is the same as what is assigned to the group. - - :param project_id: Project scope of the mapped user. - :param domain_id: Domain scope of the mapped user. 
- :returns: A dictionary that contains the keys, such as - user_id, user_name, domain_id, domain_name - :rtype: dict - """ - idp_id = self._build_idp_id() - LOG.debug('The IdP Id %s and protocol Id %s are used to look up ' - 'the mapping.', idp_id, CONF.tokenless_auth.protocol) - - mapped_properties, mapping_id = self.federation_api.evaluate( - idp_id, CONF.tokenless_auth.protocol, self.env) - - user = mapped_properties.get('user', {}) - user_id = user.get('id') - user_name = user.get('name') - user_type = user.get('type') - if user.get('domain') is not None: - user_domain_id = user.get('domain').get('id') - user_domain_name = user.get('domain').get('name') - else: - user_domain_id = None - user_domain_name = None - - # if user is ephemeral type, we don't care if the user exists - # or not, but just care if the mapped group(s) is valid. - if user_type == utils.UserType.EPHEMERAL: - user_ref = {'type': utils.UserType.EPHEMERAL} - group_ids = mapped_properties['group_ids'] - utils.validate_groups_in_backend(group_ids, - mapping_id, - self.identity_api) - group_ids.extend( - utils.transform_to_group_ids( - mapped_properties['group_names'], mapping_id, - self.identity_api, self.assignment_api)) - roles = self.assignment_api.get_roles_for_groups(group_ids, - project_id, - domain_id) - if roles is not None: - role_names = [role['name'] for role in roles] - user_ref['roles'] = role_names - user_ref['group_ids'] = list(group_ids) - user_ref[federation_constants.IDENTITY_PROVIDER] = idp_id - user_ref[federation_constants.PROTOCOL] = ( - CONF.tokenless_auth.protocol) - return user_ref - - if user_id: - user_ref = self.identity_api.get_user(user_id) - elif user_name and (user_domain_name or user_domain_id): - if user_domain_name: - user_domain = self.resource_api.get_domain_by_name( - user_domain_name) - self.resource_api.assert_domain_enabled(user_domain['id'], - user_domain) - user_domain_id = user_domain['id'] - user_ref = self.identity_api.get_user_by_name(user_name, - 
user_domain_id) - else: - msg = _('User auth cannot be built due to missing either ' - 'user id, or user name with domain id, or user name ' - 'with domain name.') - raise exception.ValidationError(msg) - self.identity_api.assert_user_enabled( - user_id=user_ref['id'], - user=user_ref) - user_ref['type'] = utils.UserType.LOCAL - return user_ref - - def _build_idp_id(self): - """Build the IdP name from the given config option issuer_attribute. - - The default issuer attribute SSL_CLIENT_I_DN in the environment is - built with the following formula - - - base64_idp = sha1(env['SSL_CLIENT_I_DN']) - - :returns: base64_idp like the above example - :rtype: str - """ - idp = self.env.get(CONF.tokenless_auth.issuer_attribute) - if idp is None: - raise exception.TokenlessAuthConfigError( - issuer_attribute=CONF.tokenless_auth.issuer_attribute) - - hashed_idp = hashlib.sha256(idp.encode('utf-8')) - return hashed_idp.hexdigest() diff --git a/keystone-moon/keystone/common/utils.py b/keystone-moon/keystone/common/utils.py deleted file mode 100644 index 5438ad43..00000000 --- a/keystone-moon/keystone/common/utils.py +++ /dev/null @@ -1,598 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2011 - 2012 Justin Santa Barbara -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import calendar -import collections -import grp -import hashlib -import os -import pwd -import uuid - -from oslo_config import cfg -from oslo_log import log -from oslo_serialization import jsonutils -from oslo_utils import reflection -from oslo_utils import strutils -from oslo_utils import timeutils -import passlib.hash -import six -from six import moves - -from keystone.common import authorization -from keystone import exception -from keystone.i18n import _, _LE, _LW - - -CONF = cfg.CONF - -LOG = log.getLogger(__name__) - - -# NOTE(stevermar): This UUID must stay the same, forever, across -# all of keystone to preserve its value as a URN namespace, which is -# used for ID transformation. -RESOURCE_ID_NAMESPACE = uuid.UUID('4332ecab-770b-4288-a680-b9aca3b1b153') - - -def resource_uuid(value): - """Converts input to valid UUID hex digits.""" - try: - uuid.UUID(value) - return value - except ValueError: - if len(value) <= 64: - if six.PY2 and isinstance(value, six.text_type): - value = value.encode('utf-8') - return uuid.uuid5(RESOURCE_ID_NAMESPACE, value).hex - raise ValueError(_('Length of transformable resource id > 64, ' - 'which is max allowed characters')) - - -def flatten_dict(d, parent_key=''): - """Flatten a nested dictionary - - Converts a dictionary with nested values to a single level flat - dictionary, with dotted notation for each key. - - """ - items = [] - for k, v in d.items(): - new_key = parent_key + '.' + k if parent_key else k - if isinstance(v, collections.MutableMapping): - items.extend(list(flatten_dict(v, new_key).items())) - else: - items.append((new_key, v)) - return dict(items) - - -def read_cached_file(filename, cache_info, reload_func=None): - """Read from a file if it has been modified. - - :param cache_info: dictionary to hold opaque cache. - :param reload_func: optional function to be called with data when - file is reloaded due to a modification. - - :returns: data from file. 
- - """ - mtime = os.path.getmtime(filename) - if not cache_info or mtime != cache_info.get('mtime'): - with open(filename) as fap: - cache_info['data'] = fap.read() - cache_info['mtime'] = mtime - if reload_func: - reload_func(cache_info['data']) - return cache_info['data'] - - -class SmarterEncoder(jsonutils.json.JSONEncoder): - """Help for JSON encoding dict-like objects.""" - - def default(self, obj): - if not isinstance(obj, dict) and hasattr(obj, 'iteritems'): - return dict(obj.iteritems()) - return super(SmarterEncoder, self).default(obj) - - -class PKIEncoder(SmarterEncoder): - """Special encoder to make token JSON a bit shorter.""" - - item_separator = ',' - key_separator = ':' - - -def verify_length_and_trunc_password(password): - """Verify and truncate the provided password to the max_password_length.""" - max_length = CONF.identity.max_password_length - try: - if len(password) > max_length: - if CONF.strict_password_check: - raise exception.PasswordVerificationError(size=max_length) - else: - LOG.warning( - _LW('Truncating user password to ' - '%d characters.'), max_length) - return password[:max_length] - else: - return password - except TypeError: - raise exception.ValidationError(attribute='string', target='password') - - -def hash_access_key(access): - hash_ = hashlib.sha256() - if not isinstance(access, six.binary_type): - access = access.encode('utf-8') - hash_.update(access) - return hash_.hexdigest() - - -def hash_user_password(user): - """Hash a user dict's password without modifying the passed-in dict.""" - password = user.get('password') - if password is None: - return user - - return dict(user, password=hash_password(password)) - - -def hash_password(password): - """Hash a password. 
Hard.""" - password_utf8 = verify_length_and_trunc_password(password).encode('utf-8') - return passlib.hash.sha512_crypt.encrypt( - password_utf8, rounds=CONF.crypt_strength) - - -def check_password(password, hashed): - """Check that a plaintext password matches hashed. - - hashpw returns the salt value concatenated with the actual hash value. - It extracts the actual salt if this value is then passed as the salt. - - """ - if password is None or hashed is None: - return False - password_utf8 = verify_length_and_trunc_password(password).encode('utf-8') - return passlib.hash.sha512_crypt.verify(password_utf8, hashed) - - -def attr_as_boolean(val_attr): - """Returns the boolean value, decoded from a string. - - We test explicitly for a value meaning False, which can be one of - several formats as specified in oslo strutils.FALSE_STRINGS. - All other string values (including an empty string) are treated as - meaning True. - - """ - return strutils.bool_from_string(val_attr, default=True) - - -def get_blob_from_credential(credential): - try: - blob = jsonutils.loads(credential.blob) - except (ValueError, TypeError): - raise exception.ValidationError( - message=_('Invalid blob in credential')) - if not blob or not isinstance(blob, dict): - raise exception.ValidationError(attribute='blob', - target='credential') - return blob - - -def convert_ec2_to_v3_credential(ec2credential): - blob = {'access': ec2credential.access, - 'secret': ec2credential.secret} - return {'id': hash_access_key(ec2credential.access), - 'user_id': ec2credential.user_id, - 'project_id': ec2credential.tenant_id, - 'blob': jsonutils.dumps(blob), - 'type': 'ec2', - 'extra': jsonutils.dumps({})} - - -def convert_v3_to_ec2_credential(credential): - blob = get_blob_from_credential(credential) - return {'access': blob.get('access'), - 'secret': blob.get('secret'), - 'user_id': credential.user_id, - 'tenant_id': credential.project_id, - } - - -def unixtime(dt_obj): - """Format datetime object as unix 
timestamp - - :param dt_obj: datetime.datetime object - :returns: float - - """ - return calendar.timegm(dt_obj.utctimetuple()) - - -def auth_str_equal(provided, known): - """Constant-time string comparison. - - :params provided: the first string - :params known: the second string - - :returns: True if the strings are equal. - - This function takes two strings and compares them. It is intended to be - used when doing a comparison for authentication purposes to help guard - against timing attacks. When using the function for this purpose, always - provide the user-provided password as the first argument. The time this - function will take is always a factor of the length of this string. - """ - result = 0 - p_len = len(provided) - k_len = len(known) - for i in moves.range(p_len): - a = ord(provided[i]) if i < p_len else 0 - b = ord(known[i]) if i < k_len else 0 - result |= a ^ b - return (p_len == k_len) & (result == 0) - - -def setup_remote_pydev_debug(): - if CONF.pydev_debug_host and CONF.pydev_debug_port: - try: - try: - from pydev import pydevd - except ImportError: - import pydevd - - pydevd.settrace(CONF.pydev_debug_host, - port=CONF.pydev_debug_port, - stdoutToServer=True, - stderrToServer=True) - return True - except Exception: - LOG.exception(_LE( - 'Error setting up the debug environment. Verify that the ' - 'option --debug-url has the format : and that a ' - 'debugger processes is listening on that port.')) - raise - - -def get_unix_user(user=None): - """Get the uid and user name. - - This is a convenience utility which accepts a variety of input - which might represent a unix user. If successful it returns the uid - and name. Valid input is: - - string - A string is first considered to be a user name and a lookup is - attempted under that name. If no name is found then an attempt - is made to convert the string to an integer and perform a - lookup as a uid. - - int - An integer is interpreted as a uid. 
- - None - None is interpreted to mean use the current process's - effective user. - - If the input is a valid type but no user is found a KeyError is - raised. If the input is not a valid type a TypeError is raised. - - :param object user: string, int or None specifying the user to - lookup. - - :returns: tuple of (uid, name) - - """ - if isinstance(user, six.string_types): - try: - user_info = pwd.getpwnam(user) - except KeyError: - try: - i = int(user) - except ValueError: - raise KeyError("user name '%s' not found" % user) - try: - user_info = pwd.getpwuid(i) - except KeyError: - raise KeyError("user id %d not found" % i) - elif isinstance(user, int): - try: - user_info = pwd.getpwuid(user) - except KeyError: - raise KeyError("user id %d not found" % user) - elif user is None: - user_info = pwd.getpwuid(os.geteuid()) - else: - user_cls_name = reflection.get_class_name(user, - fully_qualified=False) - raise TypeError('user must be string, int or None; not %s (%r)' % - (user_cls_name, user)) - - return user_info.pw_uid, user_info.pw_name - - -def get_unix_group(group=None): - """Get the gid and group name. - - This is a convenience utility which accepts a variety of input - which might represent a unix group. If successful it returns the gid - and name. Valid input is: - - string - A string is first considered to be a group name and a lookup is - attempted under that name. If no name is found then an attempt - is made to convert the string to an integer and perform a - lookup as a gid. - - int - An integer is interpreted as a gid. - - None - None is interpreted to mean use the current process's - effective group. - - If the input is a valid type but no group is found a KeyError is - raised. If the input is not a valid type a TypeError is raised. - - - :param object group: string, int or None specifying the group to - lookup. 
- - :returns: tuple of (gid, name) - - """ - if isinstance(group, six.string_types): - try: - group_info = grp.getgrnam(group) - except KeyError: - # Was an int passed as a string? - # Try converting to int and lookup by id instead. - try: - i = int(group) - except ValueError: - raise KeyError("group name '%s' not found" % group) - try: - group_info = grp.getgrgid(i) - except KeyError: - raise KeyError("group id %d not found" % i) - elif isinstance(group, int): - try: - group_info = grp.getgrgid(group) - except KeyError: - raise KeyError("group id %d not found" % group) - elif group is None: - group_info = grp.getgrgid(os.getegid()) - else: - group_cls_name = reflection.get_class_name(group, - fully_qualified=False) - raise TypeError('group must be string, int or None; not %s (%r)' % - (group_cls_name, group)) - - return group_info.gr_gid, group_info.gr_name - - -def set_permissions(path, mode=None, user=None, group=None, log=None): - """Set the ownership and permissions on the pathname. - - Each of the mode, user and group are optional, if None then - that aspect is not modified. - - Owner and group may be specified either with a symbolic name - or numeric id. - - :param string path: Pathname of directory whose existence is assured. - :param object mode: ownership permissions flags (int) i.e. chmod, - if None do not set. - :param object user: set user, name (string) or uid (integer), - if None do not set. - :param object group: set group, name (string) or gid (integer) - if None do not set. - :param logger log: logging.logger object, used to emit log messages, - if None no logging is performed. 
- - """ - if user is None: - user_uid, user_name = None, None - else: - user_uid, user_name = get_unix_user(user) - - if group is None: - group_gid, group_name = None, None - else: - group_gid, group_name = get_unix_group(group) - - if log: - if mode is None: - mode_string = str(mode) - else: - mode_string = oct(mode) - log.debug("set_permissions: " - "path='%s' mode=%s user=%s(%s) group=%s(%s)", - path, mode_string, - user_name, user_uid, group_name, group_gid) - - # Change user and group if specified - if user_uid is not None or group_gid is not None: - if user_uid is None: - user_uid = -1 - if group_gid is None: - group_gid = -1 - try: - os.chown(path, user_uid, group_gid) - except OSError as exc: - raise EnvironmentError("chown('%s', %s, %s): %s" % - (path, - user_name, group_name, - exc.strerror)) - - # Change permission flags - if mode is not None: - try: - os.chmod(path, mode) - except OSError as exc: - raise EnvironmentError("chmod('%s', %#o): %s" % - (path, mode, exc.strerror)) - - -def make_dirs(path, mode=None, user=None, group=None, log=None): - """Assure directory exists, set ownership and permissions. - - Assure the directory exists and optionally set its ownership - and permissions. - - Each of the mode, user and group are optional, if None then - that aspect is not modified. - - Owner and group may be specified either with a symbolic name - or numeric id. - - :param string path: Pathname of directory whose existence is assured. - :param object mode: ownership permissions flags (int) i.e. chmod, - if None do not set. - :param object user: set user, name (string) or uid (integer), - if None do not set. - :param object group: set group, name (string) or gid (integer) - if None do not set. - :param logger log: logging.logger object, used to emit log messages, - if None no logging is performed. 
- - """ - if log: - if mode is None: - mode_string = str(mode) - else: - mode_string = oct(mode) - log.debug("make_dirs path='%s' mode=%s user=%s group=%s", - path, mode_string, user, group) - - if not os.path.exists(path): - try: - os.makedirs(path) - except OSError as exc: - raise EnvironmentError("makedirs('%s'): %s" % (path, exc.strerror)) - - set_permissions(path, mode, user, group, log) - - -class WhiteListedItemFilter(object): - - def __init__(self, whitelist, data): - self._whitelist = set(whitelist or []) - self._data = data - - def __getitem__(self, name): - if name not in self._whitelist: - raise KeyError - return self._data[name] - - -_ISO8601_TIME_FORMAT_SUBSECOND = '%Y-%m-%dT%H:%M:%S.%f' -_ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' - - -def isotime(at=None, subsecond=False): - """Stringify time in ISO 8601 format.""" - # Python provides a similar instance method for datetime.datetime objects - # called isoformat(). The format of the strings generated by isoformat() - # have a couple of problems: - # 1) The strings generated by isotime are used in tokens and other public - # APIs that we can't change without a deprecation period. The strings - # generated by isoformat are not the same format, so we can't just - # change to it. - # 2) The strings generated by isoformat do not include the microseconds if - # the value happens to be 0. This will likely show up as random failures - # as parsers may be written to always expect microseconds, and it will - # parse correctly most of the time. - - if not at: - at = timeutils.utcnow() - st = at.strftime(_ISO8601_TIME_FORMAT - if not subsecond - else _ISO8601_TIME_FORMAT_SUBSECOND) - tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC' - st += ('Z' if tz == 'UTC' else tz) - return st - - -def strtime(): - at = timeutils.utcnow() - return at.strftime(timeutils.PERFECT_TIME_FORMAT) - - -def get_token_ref(context): - """Retrieves KeystoneToken object from the auth context and returns it. 
- - :param dict context: The request context. - :raises keystone.exception.Unauthorized: If auth context cannot be found. - :returns: The KeystoneToken object. - """ - try: - # Retrieve the auth context that was prepared by AuthContextMiddleware. - auth_context = (context['environment'] - [authorization.AUTH_CONTEXT_ENV]) - return auth_context['token'] - except KeyError: - LOG.warning(_LW("Couldn't find the auth context.")) - raise exception.Unauthorized() - - -URL_RESERVED_CHARS = ":/?#[]@!$&'()*+,;=" - - -def is_not_url_safe(name): - """Check if a string contains any url reserved characters.""" - return len(list_url_unsafe_chars(name)) > 0 - - -def list_url_unsafe_chars(name): - """Return a list of the reserved characters.""" - reserved_chars = '' - for i in name: - if i in URL_RESERVED_CHARS: - reserved_chars += i - return reserved_chars - - -def lower_case_hostname(url): - """Change the URL's hostname to lowercase""" - # NOTE(gyee): according to - # https://www.w3.org/TR/WD-html40-970708/htmlweb.html, the netloc portion - # of the URL is case-insensitive - parsed = moves.urllib.parse.urlparse(url) - # Note: _replace method for named tuples is public and defined in docs - replaced = parsed._replace(netloc=parsed.netloc.lower()) - return moves.urllib.parse.urlunparse(replaced) - - -def remove_standard_port(url): - # remove the default ports specified in RFC2616 and 2818 - o = moves.urllib.parse.urlparse(url) - separator = ':' - (host, separator, port) = o.netloc.partition(':') - if o.scheme.lower() == 'http' and port == '80': - # NOTE(gyee): _replace() is not a private method. It has an - # an underscore prefix to prevent conflict with field names. 
- # See https://docs.python.org/2/library/collections.html# - # collections.namedtuple - o = o._replace(netloc=host) - if o.scheme.lower() == 'https' and port == '443': - o = o._replace(netloc=host) - - return moves.urllib.parse.urlunparse(o) diff --git a/keystone-moon/keystone/common/validation/__init__.py b/keystone-moon/keystone/common/validation/__init__.py deleted file mode 100644 index 9d812f40..00000000 --- a/keystone-moon/keystone/common/validation/__init__.py +++ /dev/null @@ -1,96 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -"""Request body validating middleware for OpenStack Identity resources.""" - -import functools -import inspect - -from keystone.common.validation import validators -from keystone import exception -from keystone.i18n import _ - - -def validated(request_body_schema, resource_to_validate): - """Register a schema to validate a resource reference. - - Registered schema will be used for validating a request body just before - API method execution. - - :param request_body_schema: a schema to validate the resource reference - :param resource_to_validate: the reference to validate - :raises keystone.exception.ValidationError: if `resource_to_validate` is - None. (see wrapper method below). 
- :raises TypeError: at decoration time when the expected resource to - validate isn't found in the decorated method's - signature - - """ - schema_validator = validators.SchemaValidator(request_body_schema) - - def add_validator(func): - argspec = inspect.getargspec(func) - try: - arg_index = argspec.args.index(resource_to_validate) - except ValueError: - raise TypeError(_('validated expected to find %(param_name)r in ' - 'function signature for %(func_name)r.') % - {'param_name': resource_to_validate, - 'func_name': func.__name__}) - - @functools.wraps(func) - def wrapper(*args, **kwargs): - if (resource_to_validate in kwargs and - kwargs[resource_to_validate] is not None): - schema_validator.validate(kwargs[resource_to_validate]) - else: - try: - resource = args[arg_index] - # If the resource to be validated is not None but - # empty, it is possible to be validated by jsonschema. - if resource is not None: - schema_validator.validate(resource) - else: - raise exception.ValidationError( - attribute=resource_to_validate, - target='request body') - # We cannot find the resource neither from kwargs nor args. - except IndexError: - raise exception.ValidationError( - attribute=resource_to_validate, - target='request body') - return func(*args, **kwargs) - return wrapper - return add_validator - - -def nullable(property_schema): - """Clone a property schema into one that is nullable. - - :param dict property_schema: schema to clone into a nullable schema - :returns: a new dict schema - """ - # TODO(dstanek): deal with the case where type is already a list; we don't - # do that yet so I'm not wasting time on it - new_schema = property_schema.copy() - new_schema['type'] = [property_schema['type'], 'null'] - return new_schema - - -def add_array_type(property_schema): - """Convert the parameter schema to be of type list. 
- - :param dict property_schema: schema to add array type to - :returns: a new dict schema - """ - new_schema = property_schema.copy() - new_schema['type'] = [property_schema['type'], 'array'] - return new_schema diff --git a/keystone-moon/keystone/common/validation/parameter_types.py b/keystone-moon/keystone/common/validation/parameter_types.py deleted file mode 100644 index c0753827..00000000 --- a/keystone-moon/keystone/common/validation/parameter_types.py +++ /dev/null @@ -1,70 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -"""Common parameter types for validating a request reference.""" - -boolean = { - 'type': 'boolean', - 'enum': [True, False] -} - -# NOTE(lbragstad): Be mindful of this pattern as it might require changes -# once this is used on user names, LDAP-based user names specifically since -# commas aren't allowed in the following pattern. Here we are only going to -# check the length of the name and ensure that it's a string. Right now we are -# not going to validate on a naming pattern for issues with -# internationalization. 
-name = { - 'type': 'string', - 'minLength': 1, - 'maxLength': 255 -} - -external_id_string = { - 'type': 'string', - 'minLength': 1, - 'maxLength': 64 -} - -id_string = { - 'type': 'string', - 'minLength': 1, - 'maxLength': 64, - # TODO(lbragstad): Find a way to make this configurable such that the end - # user chooses how much control they want over id_strings with a regex - 'pattern': '^[a-zA-Z0-9-]+$' -} - -mapping_id_string = { - 'type': 'string', - 'minLength': 1, - 'maxLength': 64, - 'pattern': '^[a-zA-Z0-9-_]+$' -} - -description = { - 'type': 'string' -} - -url = { - 'type': 'string', - 'minLength': 0, - 'maxLength': 225, - # NOTE(edmondsw): we could do more to validate per various RFCs, but - # decision was made to err on the side of leniency. The following is based - # on rfc1738 section 2.1 - 'pattern': '^[a-zA-Z0-9+.-]+:.+' -} - -email = { - 'type': 'string', - 'format': 'email' -} diff --git a/keystone-moon/keystone/common/validation/validators.py b/keystone-moon/keystone/common/validation/validators.py deleted file mode 100644 index c6d52e9a..00000000 --- a/keystone-moon/keystone/common/validation/validators.py +++ /dev/null @@ -1,58 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-"""Internal implementation of request body validating middleware.""" - -import jsonschema - -from keystone import exception -from keystone.i18n import _ - - -class SchemaValidator(object): - """Resource reference validator class.""" - - validator_org = jsonschema.Draft4Validator - - def __init__(self, schema): - # NOTE(lbragstad): If at some point in the future we want to extend - # our validators to include something specific we need to check for, - # we can do it here. Nova's V3 API validators extend the validator to - # include `self._validate_minimum` and `self._validate_maximum`. This - # would be handy if we needed to check for something the jsonschema - # didn't by default. See the Nova V3 validator for details on how this - # is done. - validators = {} - validator_cls = jsonschema.validators.extend(self.validator_org, - validators) - fc = jsonschema.FormatChecker() - self.validator = validator_cls(schema, format_checker=fc) - - def validate(self, *args, **kwargs): - try: - self.validator.validate(*args, **kwargs) - except jsonschema.ValidationError as ex: - # NOTE: For whole OpenStack message consistency, this error - # message has been written in a format consistent with WSME. - if ex.path: - # NOTE(lbragstad): Here we could think about using iter_errors - # as a method of providing invalid parameters back to the - # user. - # TODO(lbragstad): If the value of a field is confidential or - # too long, then we should build the masking in here so that - # we don't expose sensitive user information in the event it - # fails validation. - detail = _("Invalid input for field '%(path)s'. 
The value is " - "'%(value)s'.") % {'path': ex.path.pop(), - 'value': ex.instance} - else: - detail = ex.message - raise exception.SchemaValidationError(detail=detail) diff --git a/keystone-moon/keystone/common/wsgi.py b/keystone-moon/keystone/common/wsgi.py deleted file mode 100644 index 04528a0c..00000000 --- a/keystone-moon/keystone/common/wsgi.py +++ /dev/null @@ -1,834 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2010 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Utility methods for working with WSGI servers.""" - -import copy -import itertools -import re -import wsgiref.util - -from oslo_config import cfg -import oslo_i18n -from oslo_log import log -from oslo_serialization import jsonutils -from oslo_utils import importutils -from oslo_utils import strutils -import routes.middleware -import six -import webob.dec -import webob.exc - -from keystone.common import dependency -from keystone.common import json_home -from keystone.common import utils -from keystone import exception -from keystone.i18n import _ -from keystone.i18n import _LI -from keystone.i18n import _LW -from keystone.models import token_model - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - -# Environment variable used to pass the request context -CONTEXT_ENV = 'openstack.context' - -# Environment variable used to pass the request params -PARAMS_ENV = 'openstack.params' - -JSON_ENCODE_CONTENT_TYPES = set(['application/json', - 'application/json-home']) - - -def validate_token_bind(context, token_ref): - bind_mode = CONF.token.enforce_token_bind - - if bind_mode == 'disabled': - return - - if not isinstance(token_ref, token_model.KeystoneToken): - raise exception.UnexpectedError(_('token reference must be a ' - 'KeystoneToken type, got: %s') % - type(token_ref)) - bind = token_ref.bind - - # permissive and strict modes don't require there to be a bind - permissive = bind_mode in ('permissive', 'strict') - - if not bind: - if permissive: - # no bind provided and none required - return - else: - LOG.info(_LI("No bind information present in token")) - raise exception.Unauthorized() - - # get the named mode if bind_mode is not one of the known - name = None if permissive or bind_mode == 'required' else bind_mode - - if name and name not in bind: - LOG.info(_LI("Named bind mode %s not in bind information"), name) - raise exception.Unauthorized() - - for bind_type, identifier in bind.items(): - if bind_type == 'kerberos': - if not 
(context['environment'].get('AUTH_TYPE', '').lower() - == 'negotiate'): - LOG.info(_LI("Kerberos credentials required and not present")) - raise exception.Unauthorized() - - if not context['environment'].get('REMOTE_USER') == identifier: - LOG.info(_LI("Kerberos credentials do not match " - "those in bind")) - raise exception.Unauthorized() - - LOG.info(_LI("Kerberos bind authentication successful")) - - elif bind_mode == 'permissive': - LOG.debug(("Ignoring unknown bind for permissive mode: " - "{%(bind_type)s: %(identifier)s}"), - {'bind_type': bind_type, 'identifier': identifier}) - else: - LOG.info(_LI("Couldn't verify unknown bind: " - "{%(bind_type)s: %(identifier)s}"), - {'bind_type': bind_type, 'identifier': identifier}) - raise exception.Unauthorized() - - -def best_match_language(req): - """Determines the best available locale. - - This returns best available locale based on the Accept-Language HTTP - header passed in the request. - """ - if not req.accept_language: - return None - return req.accept_language.best_match( - oslo_i18n.get_available_languages('keystone')) - - -class BaseApplication(object): - """Base WSGI application wrapper. Subclasses need to implement __call__.""" - - @classmethod - def factory(cls, global_config, **local_config): - """Used for paste app factories in paste.deploy config files. - - Any local configuration (that is, values under the [app:APPNAME] - section of the paste config) will be passed into the `__init__` method - as kwargs. - - A hypothetical configuration would look like: - - [app:wadl] - latest_version = 1.3 - paste.app_factory = keystone.fancy_api:Wadl.factory - - which would result in a call to the `Wadl` class as - - import keystone.fancy_api - keystone.fancy_api.Wadl(latest_version='1.3') - - You could of course re-implement the `factory` method in subclasses, - but using the kwarg passing it shouldn't be necessary. 
- - """ - return cls(**local_config) - - def __call__(self, environ, start_response): - r"""Subclasses will probably want to implement __call__ like this: - - @webob.dec.wsgify() - def __call__(self, req): - # Any of the following objects work as responses: - - # Option 1: simple string - res = 'message\n' - - # Option 2: a nicely formatted HTTP exception page - res = exc.HTTPForbidden(explanation='Nice try') - - # Option 3: a webob Response object (in case you need to play with - # headers, or you want to be treated like an iterable, or or or) - res = Response(); - res.app_iter = open('somefile') - - # Option 4: any wsgi app to be run next - res = self.application - - # Option 5: you can get a Response object for a wsgi app, too, to - # play with headers etc - res = req.get_response(self.application) - - # You can then just return your response... - return res - # ... or set req.response and return None. - req.response = res - - See the end of http://pythonpaste.org/webob/modules/dec.html - for more info. - - """ - raise NotImplementedError('You must implement __call__') - - -@dependency.requires('assignment_api', 'policy_api', 'token_provider_api') -class Application(BaseApplication): - @webob.dec.wsgify() - def __call__(self, req): - arg_dict = req.environ['wsgiorg.routing_args'][1] - action = arg_dict.pop('action') - del arg_dict['controller'] - - # allow middleware up the stack to provide context, params and headers. - context = req.environ.get(CONTEXT_ENV, {}) - - try: - context['query_string'] = dict(req.params.items()) - except UnicodeDecodeError as e: - # The webob package throws UnicodeError when a request cannot be - # decoded. Raise ValidationError instead to avoid an UnknownError. 
- msg = _('Query string is not UTF-8 encoded') - raise exception.ValidationError(msg) - - context['headers'] = dict(req.headers.items()) - context['path'] = req.environ['PATH_INFO'] - scheme = req.environ.get(CONF.secure_proxy_ssl_header) - if scheme: - # NOTE(andrey-mp): "wsgi.url_scheme" contains the protocol used - # before the proxy removed it ('https' usually). So if - # the webob.Request instance is modified in order to use this - # scheme instead of the one defined by API, the call to - # webob.Request.relative_url() will return a URL with the correct - # scheme. - req.environ['wsgi.url_scheme'] = scheme - context['host_url'] = req.host_url - params = req.environ.get(PARAMS_ENV, {}) - # authentication and authorization attributes are set as environment - # values by the container and processed by the pipeline. The complete - # set is not yet known. - context['environment'] = req.environ - context['accept_header'] = req.accept - req.environ = None - - params.update(arg_dict) - - context.setdefault('is_admin', False) - - # TODO(termie): do some basic normalization on methods - method = getattr(self, action) - - # NOTE(morganfainberg): use the request method to normalize the - # response code between GET and HEAD requests. The HTTP status should - # be the same. - LOG.info('%(req_method)s %(uri)s', { - 'req_method': req.environ['REQUEST_METHOD'].upper(), - 'uri': wsgiref.util.request_uri(req.environ), - }) - - params = self._normalize_dict(params) - - try: - result = method(context, **params) - except exception.Unauthorized as e: - LOG.warning( - _LW("Authorization failed. 
%(exception)s from " - "%(remote_addr)s"), - {'exception': e, 'remote_addr': req.environ['REMOTE_ADDR']}) - return render_exception(e, context=context, - user_locale=best_match_language(req)) - except exception.Error as e: - LOG.warning(six.text_type(e)) - return render_exception(e, context=context, - user_locale=best_match_language(req)) - except TypeError as e: - LOG.exception(six.text_type(e)) - return render_exception(exception.ValidationError(e), - context=context, - user_locale=best_match_language(req)) - except Exception as e: - LOG.exception(six.text_type(e)) - return render_exception(exception.UnexpectedError(exception=e), - context=context, - user_locale=best_match_language(req)) - - if result is None: - return render_response(status=(204, 'No Content')) - elif isinstance(result, six.string_types): - return result - elif isinstance(result, webob.Response): - return result - elif isinstance(result, webob.exc.WSGIHTTPException): - return result - - response_code = self._get_response_code(req) - return render_response(body=result, status=response_code, - method=req.environ['REQUEST_METHOD']) - - def _get_response_code(self, req): - req_method = req.environ['REQUEST_METHOD'] - controller = importutils.import_class('keystone.common.controller') - code = None - if isinstance(self, controller.V3Controller) and req_method == 'POST': - code = (201, 'Created') - return code - - def _normalize_arg(self, arg): - return arg.replace(':', '_').replace('-', '_') - - def _normalize_dict(self, d): - return {self._normalize_arg(k): v for (k, v) in d.items()} - - def assert_admin(self, context): - """Ensure the user is an admin. - - :raises keystone.exception.Unauthorized: if a token could not be - found/authorized, a user is invalid, or a tenant is - invalid/not scoped. 
- :raises keystone.exception.Forbidden: if the user is not an admin and - does not have the admin role - - """ - if not context['is_admin']: - user_token_ref = utils.get_token_ref(context) - - validate_token_bind(context, user_token_ref) - creds = copy.deepcopy(user_token_ref.metadata) - - try: - creds['user_id'] = user_token_ref.user_id - except exception.UnexpectedError: - LOG.debug('Invalid user') - raise exception.Unauthorized() - - if user_token_ref.project_scoped: - creds['tenant_id'] = user_token_ref.project_id - else: - LOG.debug('Invalid tenant') - raise exception.Unauthorized() - - creds['roles'] = user_token_ref.role_names - # Accept either is_admin or the admin role - self.policy_api.enforce(creds, 'admin_required', {}) - - def _attribute_is_empty(self, ref, attribute): - """Determine if the attribute in ref is empty or None.""" - return ref.get(attribute) is None or ref.get(attribute) == '' - - def _require_attribute(self, ref, attribute): - """Ensures the reference contains the specified attribute. - - Raise a ValidationError if the given attribute is not present - """ - if self._attribute_is_empty(ref, attribute): - msg = _('%s field is required and cannot be empty') % attribute - raise exception.ValidationError(message=msg) - - def _require_attributes(self, ref, attrs): - """Ensures the reference contains the specified attributes. - - Raise a ValidationError if any of the given attributes is not present - """ - missing_attrs = [attribute for attribute in attrs - if self._attribute_is_empty(ref, attribute)] - - if missing_attrs: - msg = _('%s field(s) cannot be empty') % ', '.join(missing_attrs) - raise exception.ValidationError(message=msg) - - def _get_trust_id_for_request(self, context): - """Get the trust_id for a call. 
- - Retrieve the trust_id from the token - Returns None if token is not trust scoped - """ - if ('token_id' not in context or - context.get('token_id') == CONF.admin_token): - LOG.debug(('will not lookup trust as the request auth token is ' - 'either absent or it is the system admin token')) - return None - token_ref = utils.get_token_ref(context) - return token_ref.trust_id - - @classmethod - def base_url(cls, context, endpoint_type): - url = CONF['%s_endpoint' % endpoint_type] - - if url: - substitutions = dict( - itertools.chain(CONF.items(), CONF.eventlet_server.items())) - - url = url % substitutions - elif 'environment' in context: - url = wsgiref.util.application_uri(context['environment']) - # remove version from the URL as it may be part of SCRIPT_NAME but - # it should not be part of base URL - url = re.sub(r'/v(3|(2\.0))/*$', '', url) - - # now remove the standard port - url = utils.remove_standard_port(url) - else: - # if we don't have enough information to come up with a base URL, - # then fall back to localhost. This should never happen in - # production environment. - url = 'http://localhost:%d' % CONF.eventlet_server.public_port - - return url.rstrip('/') - - -class Middleware(Application): - """Base WSGI middleware. - - These classes require an application to be - initialized that will be called next. By default the middleware will - simply call its wrapped app, or you can override __call__ to customize its - behavior. - - """ - - @classmethod - def factory(cls, global_config): - """Used for paste app factories in paste.deploy config files.""" - def _factory(app): - return cls(app) - return _factory - - def __init__(self, application): - super(Middleware, self).__init__() - self.application = application - - def process_request(self, request): - """Called on each request. - - If this returns None, the next application down the stack will be - executed. If it returns a response then that response will be returned - and execution will stop here. 
- - """ - return None - - def process_response(self, request, response): - """Do whatever you'd like to the response, based on the request.""" - return response - - @webob.dec.wsgify() - def __call__(self, request): - try: - response = self.process_request(request) - if response: - return response - response = request.get_response(self.application) - return self.process_response(request, response) - except exception.Error as e: - LOG.warning(six.text_type(e)) - return render_exception(e, request=request, - user_locale=best_match_language(request)) - except TypeError as e: - LOG.exception(six.text_type(e)) - return render_exception(exception.ValidationError(e), - request=request, - user_locale=best_match_language(request)) - except Exception as e: - LOG.exception(six.text_type(e)) - return render_exception(exception.UnexpectedError(exception=e), - request=request, - user_locale=best_match_language(request)) - - -class Debug(Middleware): - """Helper class for debugging a WSGI application. - - Can be inserted into any WSGI application chain to get information - about the request and response. 
- - """ - - @webob.dec.wsgify() - def __call__(self, req): - if not hasattr(LOG, 'isEnabledFor') or LOG.isEnabledFor(LOG.debug): - LOG.debug('%s %s %s', ('*' * 20), 'REQUEST ENVIRON', ('*' * 20)) - for key, value in req.environ.items(): - LOG.debug('%s = %s', key, - strutils.mask_password(value)) - LOG.debug('') - LOG.debug('%s %s %s', ('*' * 20), 'REQUEST BODY', ('*' * 20)) - for line in req.body_file: - LOG.debug('%s', strutils.mask_password(line)) - LOG.debug('') - - resp = req.get_response(self.application) - if not hasattr(LOG, 'isEnabledFor') or LOG.isEnabledFor(LOG.debug): - LOG.debug('%s %s %s', ('*' * 20), 'RESPONSE HEADERS', ('*' * 20)) - for (key, value) in resp.headers.items(): - LOG.debug('%s = %s', key, value) - LOG.debug('') - - resp.app_iter = self.print_generator(resp.app_iter) - - return resp - - @staticmethod - def print_generator(app_iter): - """Iterator that prints the contents of a wrapper string.""" - LOG.debug('%s %s %s', ('*' * 20), 'RESPONSE BODY', ('*' * 20)) - for part in app_iter: - LOG.debug(part) - yield part - - -class Router(object): - """WSGI middleware that maps incoming requests to WSGI apps.""" - - def __init__(self, mapper): - """Create a router for the given routes.Mapper. - - Each route in `mapper` must specify a 'controller', which is a - WSGI app to call. You'll probably want to specify an 'action' as - well and have your controller be an object that can route - the request to the action-specific method. - - Examples: - mapper = routes.Mapper() - sc = ServerController() - - # Explicit mapping of one route to a controller+action - mapper.connect(None, '/svrlist', controller=sc, action='list') - - # Actions are all implicitly defined - mapper.resource('server', 'servers', controller=sc) - - # Pointing to an arbitrary WSGI app. You can specify the - # {path_info:.*} parameter so the target app can be handed just that - # section of the URL. 
- mapper.connect(None, '/v1.0/{path_info:.*}', controller=BlogApp()) - - """ - self.map = mapper - self._router = routes.middleware.RoutesMiddleware(self._dispatch, - self.map) - - @webob.dec.wsgify() - def __call__(self, req): - """Route the incoming request to a controller based on self.map. - - If no match, return a 404. - - """ - return self._router - - @staticmethod - @webob.dec.wsgify() - def _dispatch(req): - """Dispatch the request to the appropriate controller. - - Called by self._router after matching the incoming request to a route - and putting the information into req.environ. Either returns 404 - or the routed WSGI app's response. - - """ - match = req.environ['wsgiorg.routing_args'][1] - if not match: - msg = _('The resource could not be found.') - return render_exception(exception.NotFound(msg), - request=req, - user_locale=best_match_language(req)) - app = match['controller'] - return app - - -class ComposingRouter(Router): - def __init__(self, mapper=None, routers=None): - if mapper is None: - mapper = routes.Mapper() - if routers is None: - routers = [] - for router in routers: - router.add_routes(mapper) - super(ComposingRouter, self).__init__(mapper) - - -class ComposableRouter(Router): - """Router that supports use by ComposingRouter.""" - - def __init__(self, mapper=None): - if mapper is None: - mapper = routes.Mapper() - self.add_routes(mapper) - super(ComposableRouter, self).__init__(mapper) - - def add_routes(self, mapper): - """Add routes to given mapper.""" - pass - - -class ExtensionRouter(Router): - """A router that allows extensions to supplement or overwrite routes. - - Expects to be subclassed. 
- """ - - def __init__(self, application, mapper=None): - if mapper is None: - mapper = routes.Mapper() - self.application = application - self.add_routes(mapper) - mapper.connect('/{path_info:.*}', controller=self.application) - super(ExtensionRouter, self).__init__(mapper) - - def add_routes(self, mapper): - pass - - @classmethod - def factory(cls, global_config, **local_config): - """Used for paste app factories in paste.deploy config files. - - Any local configuration (that is, values under the [filter:APPNAME] - section of the paste config) will be passed into the `__init__` method - as kwargs. - - A hypothetical configuration would look like: - - [filter:analytics] - redis_host = 127.0.0.1 - paste.filter_factory = keystone.analytics:Analytics.factory - - which would result in a call to the `Analytics` class as - - import keystone.analytics - keystone.analytics.Analytics(app, redis_host='127.0.0.1') - - You could of course re-implement the `factory` method in subclasses, - but using the kwarg passing it shouldn't be necessary. - - """ - def _factory(app): - conf = global_config.copy() - conf.update(local_config) - return cls(app, **local_config) - return _factory - - -class RoutersBase(object): - """Base class for Routers.""" - - def __init__(self): - self.v3_resources = [] - - def append_v3_routers(self, mapper, routers): - """Append v3 routers. - - Subclasses should override this method to map its routes. - - Use self._add_resource() to map routes for a resource. 
- """ - - def _add_resource(self, mapper, controller, path, rel, - get_action=None, head_action=None, get_head_action=None, - put_action=None, post_action=None, patch_action=None, - delete_action=None, get_post_action=None, - path_vars=None, status=json_home.Status.STABLE, - new_path=None): - if get_head_action: - getattr(controller, get_head_action) # ensure the attribute exists - mapper.connect(path, controller=controller, action=get_head_action, - conditions=dict(method=['GET', 'HEAD'])) - if get_action: - getattr(controller, get_action) # ensure the attribute exists - mapper.connect(path, controller=controller, action=get_action, - conditions=dict(method=['GET'])) - if head_action: - getattr(controller, head_action) # ensure the attribute exists - mapper.connect(path, controller=controller, action=head_action, - conditions=dict(method=['HEAD'])) - if put_action: - getattr(controller, put_action) # ensure the attribute exists - mapper.connect(path, controller=controller, action=put_action, - conditions=dict(method=['PUT'])) - if post_action: - getattr(controller, post_action) # ensure the attribute exists - mapper.connect(path, controller=controller, action=post_action, - conditions=dict(method=['POST'])) - if patch_action: - getattr(controller, patch_action) # ensure the attribute exists - mapper.connect(path, controller=controller, action=patch_action, - conditions=dict(method=['PATCH'])) - if delete_action: - getattr(controller, delete_action) # ensure the attribute exists - mapper.connect(path, controller=controller, action=delete_action, - conditions=dict(method=['DELETE'])) - if get_post_action: - getattr(controller, get_post_action) # ensure the attribute exists - mapper.connect(path, controller=controller, action=get_post_action, - conditions=dict(method=['GET', 'POST'])) - - resource_data = dict() - - if path_vars: - resource_data['href-template'] = new_path or path - resource_data['href-vars'] = path_vars - else: - resource_data['href'] = new_path or 
path - - json_home.Status.update_resource_data(resource_data, status) - - self.v3_resources.append((rel, resource_data)) - - -class V3ExtensionRouter(ExtensionRouter, RoutersBase): - """Base class for V3 extension router.""" - - def __init__(self, application, mapper=None): - self.v3_resources = list() - super(V3ExtensionRouter, self).__init__(application, mapper) - - def _update_version_response(self, response_data): - response_data['resources'].update(self.v3_resources) - - @webob.dec.wsgify() - def __call__(self, request): - if request.path_info != '/': - # Not a request for version info so forward to super. - return super(V3ExtensionRouter, self).__call__(request) - - response = request.get_response(self.application) - - if response.status_code != 200: - # The request failed, so don't update the response. - return response - - if response.headers['Content-Type'] != 'application/json-home': - # Not a request for JSON Home document, so don't update the - # response. - return response - - response_data = jsonutils.loads(response.body) - self._update_version_response(response_data) - response.body = jsonutils.dump_as_bytes(response_data, - cls=utils.SmarterEncoder) - return response - - -def render_response(body=None, status=None, headers=None, method=None): - """Forms a WSGI response.""" - if headers is None: - headers = [] - else: - headers = list(headers) - headers.append(('Vary', 'X-Auth-Token')) - - if body is None: - body = b'' - status = status or (204, 'No Content') - else: - content_types = [v for h, v in headers if h == 'Content-Type'] - if content_types: - content_type = content_types[0] - else: - content_type = None - - if content_type is None or content_type in JSON_ENCODE_CONTENT_TYPES: - body = jsonutils.dump_as_bytes(body, cls=utils.SmarterEncoder) - if content_type is None: - headers.append(('Content-Type', 'application/json')) - status = status or (200, 'OK') - - # NOTE(davechen): `mod_wsgi` follows the standards from pep-3333 and - # requires the 
value in response header to be binary type(str) on python2, - # unicode based string(str) on python3, or else keystone will not work - # under apache with `mod_wsgi`. - # keystone needs to check the data type of each header and convert the - # type if needed. - # see bug: - # https://bugs.launchpad.net/keystone/+bug/1528981 - # see pep-3333: - # https://www.python.org/dev/peps/pep-3333/#a-note-on-string-types - # see source from mod_wsgi: - # https://github.com/GrahamDumpleton/mod_wsgi(methods: - # wsgi_convert_headers_to_bytes(...), wsgi_convert_string_to_bytes(...) - # and wsgi_validate_header_value(...)). - def _convert_to_str(headers): - str_headers = [] - for header in headers: - str_header = [] - for value in header: - if not isinstance(value, str): - str_header.append(str(value)) - else: - str_header.append(value) - # convert the list to the immutable tuple to build the headers. - # header's key/value will be guaranteed to be str type. - str_headers.append(tuple(str_header)) - return str_headers - - headers = _convert_to_str(headers) - - resp = webob.Response(body=body, - status='%s %s' % status, - headerlist=headers) - - if method and method.upper() == 'HEAD': - # NOTE(morganfainberg): HEAD requests should return the same status - # as a GET request and same headers (including content-type and - # content-length). The webob.Response object automatically changes - # content-length (and other headers) if the body is set to b''. Capture - # all headers and reset them on the response object after clearing the - # body. The body can only be set to a binary-type (not TextType or - # NoneType), so b'' is used here and should be compatible with - # both py2x and py3x. 
- stored_headers = resp.headers.copy() - resp.body = b'' - for header, value in stored_headers.items(): - resp.headers[header] = value - - return resp - - -def render_exception(error, context=None, request=None, user_locale=None): - """Forms a WSGI response based on the current error.""" - error_message = error.args[0] - message = oslo_i18n.translate(error_message, desired_locale=user_locale) - if message is error_message: - # translate() didn't do anything because it wasn't a Message, - # convert to a string. - message = six.text_type(message) - - body = {'error': { - 'code': error.code, - 'title': error.title, - 'message': message, - }} - headers = [] - if isinstance(error, exception.AuthPluginException): - body['error']['identity'] = error.authentication - elif isinstance(error, exception.Unauthorized): - # NOTE(gyee): we only care about the request environment in the - # context. Also, its OK to pass the environemt as it is read-only in - # Application.base_url() - local_context = {} - if request: - local_context = {'environment': request.environ} - elif context and 'environment' in context: - local_context = {'environment': context['environment']} - url = Application.base_url(local_context, 'public') - - headers.append(('WWW-Authenticate', 'Keystone uri="%s"' % url)) - return render_response(status=(error.code, error.title), - body=body, - headers=headers) diff --git a/keystone-moon/keystone/config.py b/keystone-moon/keystone/config.py deleted file mode 100644 index 3967cee0..00000000 --- a/keystone-moon/keystone/config.py +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -"""Wrapper for keystone.common.config that configures itself on import.""" - -import logging -import os - -from oslo_config import cfg -from oslo_log import log - -from keystone.common import config -from keystone import exception - - -CONF = cfg.CONF - -setup_authentication = config.setup_authentication -configure = config.configure - - -def set_default_for_default_log_levels(): - """Set the default for the default_log_levels option for keystone. - - Keystone uses some packages that other OpenStack services don't use that do - logging. This will set the default_log_levels default level for those - packages. - - This function needs to be called before CONF(). - - """ - - extra_log_level_defaults = [ - 'dogpile=INFO', - 'routes=INFO', - 'keystone.common._memcache_pool=INFO', - ] - - log.register_options(CONF) - CONF.set_default('default_log_levels', - CONF.default_log_levels + extra_log_level_defaults) - - -def setup_logging(): - """Sets up logging for the keystone package.""" - log.setup(CONF, 'keystone') - logging.captureWarnings(True) - - -def find_paste_config(): - """Find Keystone's paste.deploy configuration file. - - Keystone's paste.deploy configuration file is specified in the - ``[paste_deploy]`` section of the main Keystone configuration file, - ``keystone.conf``. 
- - For example:: - - [paste_deploy] - config_file = keystone-paste.ini - - :returns: The selected configuration filename - :raises: exception.ConfigFileNotFound - - """ - if CONF.paste_deploy.config_file: - paste_config = CONF.paste_deploy.config_file - paste_config_value = paste_config - if not os.path.isabs(paste_config): - paste_config = CONF.find_file(paste_config) - elif CONF.config_file: - paste_config = CONF.config_file[0] - paste_config_value = paste_config - else: - # this provides backwards compatibility for keystone.conf files that - # still have the entire paste configuration included, rather than just - # a [paste_deploy] configuration section referring to an external file - paste_config = CONF.find_file('keystone.conf') - paste_config_value = 'keystone.conf' - if not paste_config or not os.path.exists(paste_config): - raise exception.ConfigFileNotFound(config_file=paste_config_value) - return paste_config diff --git a/keystone-moon/keystone/contrib/__init__.py b/keystone-moon/keystone/contrib/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/contrib/admin_crud/__init__.py b/keystone-moon/keystone/contrib/admin_crud/__init__.py deleted file mode 100644 index d6020920..00000000 --- a/keystone-moon/keystone/contrib/admin_crud/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone.contrib.admin_crud.core import * # noqa diff --git a/keystone-moon/keystone/contrib/admin_crud/core.py b/keystone-moon/keystone/contrib/admin_crud/core.py deleted file mode 100644 index 739cc0ff..00000000 --- a/keystone-moon/keystone/contrib/admin_crud/core.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_log import log -from oslo_log import versionutils - -from keystone.common import wsgi -from keystone.i18n import _ - - -LOG = log.getLogger(__name__) - - -class CrudExtension(wsgi.Middleware): - def __init__(self, application): - super(CrudExtension, self).__init__(application) - msg = _("Remove admin_crud_extension from the paste pipeline, the " - "admin_crud extension is now always available. Update" - "the [pipeline:admin_api] section in keystone-paste.ini " - "accordingly, as it will be removed in the O release.") - versionutils.report_deprecated_feature(LOG, msg) diff --git a/keystone-moon/keystone/contrib/ec2/__init__.py b/keystone-moon/keystone/contrib/ec2/__init__.py deleted file mode 100644 index 88622e53..00000000 --- a/keystone-moon/keystone/contrib/ec2/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.contrib.ec2 import controllers # noqa -from keystone.contrib.ec2.core import * # noqa -from keystone.contrib.ec2.routers import Ec2Extension # noqa -from keystone.contrib.ec2.routers import Ec2ExtensionV3 # noqa diff --git a/keystone-moon/keystone/contrib/ec2/controllers.py b/keystone-moon/keystone/contrib/ec2/controllers.py deleted file mode 100644 index c0f6067e..00000000 --- a/keystone-moon/keystone/contrib/ec2/controllers.py +++ /dev/null @@ -1,435 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Main entry point into the EC2 Credentials service. - -This service allows the creation of access/secret credentials used for -the ec2 interop layer of OpenStack. - -A user can create as many access/secret pairs, each of which is mapped to a -specific project. This is required because OpenStack supports a user -belonging to multiple projects, whereas the signatures created on ec2-style -requests don't allow specification of which project the user wishes to act -upon. 
- -To complete the cycle, we provide a method that OpenStack services can -use to validate a signature and get a corresponding OpenStack token. This -token allows method calls to other services within the context the -access/secret was created. As an example, Nova requests Keystone to validate -the signature of a request, receives a token, and then makes a request to -Glance to list images needed to perform the requested task. - -""" - -import abc -import sys -import uuid - -from keystoneclient.contrib.ec2 import utils as ec2_utils -from oslo_serialization import jsonutils -import six - -from keystone.common import controller -from keystone.common import dependency -from keystone.common import utils -from keystone.common import wsgi -from keystone import exception -from keystone.i18n import _ - -CRED_TYPE_EC2 = 'ec2' - - -@dependency.requires('assignment_api', 'catalog_api', 'credential_api', - 'identity_api', 'resource_api', 'role_api', - 'token_provider_api') -@six.add_metaclass(abc.ABCMeta) -class Ec2ControllerCommon(object): - def check_signature(self, creds_ref, credentials): - signer = ec2_utils.Ec2Signer(creds_ref['secret']) - signature = signer.generate(credentials) - # NOTE(davechen): credentials.get('signature') is not guaranteed to - # exist, we need check it explicitly. - if credentials.get('signature'): - if utils.auth_str_equal(credentials['signature'], signature): - return True - # NOTE(vish): Some client libraries don't use the port when signing - # requests, so try again without port. - elif ':' in credentials['host']: - hostname, _port = credentials['host'].split(':') - credentials['host'] = hostname - # NOTE(davechen): we need reinitialize 'signer' to avoid - # contaminated status of signature, this is similar with - # other programming language libraries, JAVA for example. 
- signer = ec2_utils.Ec2Signer(creds_ref['secret']) - signature = signer.generate(credentials) - if utils.auth_str_equal(credentials['signature'], - signature): - return True - raise exception.Unauthorized( - message=_('Invalid EC2 signature.')) - else: - raise exception.Unauthorized( - message=_('EC2 signature not supplied.')) - # Raise the exception when credentials.get('signature') is None - else: - raise exception.Unauthorized( - message=_('EC2 signature not supplied.')) - - @abc.abstractmethod - def authenticate(self, context, credentials=None, ec2Credentials=None): - """Validate a signed EC2 request and provide a token. - - Other services (such as Nova) use this **admin** call to determine - if a request they signed received is from a valid user. - - If it is a valid signature, an OpenStack token that maps - to the user/tenant is returned to the caller, along with - all the other details returned from a normal token validation - call. - - The returned token is useful for making calls to other - OpenStack services within the context of the request. - - :param context: standard context - :param credentials: dict of ec2 signature - :param ec2Credentials: DEPRECATED dict of ec2 signature - :returns: token: OpenStack token equivalent to access key along - with the corresponding service catalog and roles - """ - raise exception.NotImplemented() - - def _authenticate(self, credentials=None, ec2credentials=None): - """Common code shared between the V2 and V3 authenticate methods. - - :returns: user_ref, tenant_ref, metadata_ref, roles_ref, catalog_ref - """ - # FIXME(ja): validate that a service token was used! 
- - # NOTE(termie): backwards compat hack - if not credentials and ec2credentials: - credentials = ec2credentials - - if 'access' not in credentials: - raise exception.Unauthorized( - message=_('EC2 signature not supplied.')) - - creds_ref = self._get_credentials(credentials['access']) - self.check_signature(creds_ref, credentials) - - # TODO(termie): don't create new tokens every time - # TODO(termie): this is copied from TokenController.authenticate - tenant_ref = self.resource_api.get_project(creds_ref['tenant_id']) - user_ref = self.identity_api.get_user(creds_ref['user_id']) - metadata_ref = {} - metadata_ref['roles'] = ( - self.assignment_api.get_roles_for_user_and_project( - user_ref['id'], tenant_ref['id'])) - - trust_id = creds_ref.get('trust_id') - if trust_id: - metadata_ref['trust_id'] = trust_id - metadata_ref['trustee_user_id'] = user_ref['id'] - - # Validate that the auth info is valid and nothing is disabled - try: - self.identity_api.assert_user_enabled( - user_id=user_ref['id'], user=user_ref) - self.resource_api.assert_domain_enabled( - domain_id=user_ref['domain_id']) - self.resource_api.assert_project_enabled( - project_id=tenant_ref['id'], project=tenant_ref) - except AssertionError as e: - six.reraise(exception.Unauthorized, exception.Unauthorized(e), - sys.exc_info()[2]) - - roles = metadata_ref.get('roles', []) - if not roles: - raise exception.Unauthorized( - message=_('User not valid for tenant.')) - roles_ref = [self.role_api.get_role(role_id) for role_id in roles] - - catalog_ref = self.catalog_api.get_catalog( - user_ref['id'], tenant_ref['id']) - - return user_ref, tenant_ref, metadata_ref, roles_ref, catalog_ref - - def create_credential(self, context, user_id, tenant_id): - """Create a secret/access pair for use with ec2 style auth. - - Generates a new set of credentials that map the user/tenant - pair. 
- - :param context: standard context - :param user_id: id of user - :param tenant_id: id of tenant - :returns: credential: dict of ec2 credential - """ - self.identity_api.get_user(user_id) - self.resource_api.get_project(tenant_id) - trust_id = self._get_trust_id_for_request(context) - blob = {'access': uuid.uuid4().hex, - 'secret': uuid.uuid4().hex, - 'trust_id': trust_id} - credential_id = utils.hash_access_key(blob['access']) - cred_ref = {'user_id': user_id, - 'project_id': tenant_id, - 'blob': jsonutils.dumps(blob), - 'id': credential_id, - 'type': CRED_TYPE_EC2} - self.credential_api.create_credential(credential_id, cred_ref) - return {'credential': self._convert_v3_to_ec2_credential(cred_ref)} - - def get_credentials(self, user_id): - """List all credentials for a user. - - :param user_id: id of user - :returns: credentials: list of ec2 credential dicts - """ - self.identity_api.get_user(user_id) - credential_refs = self.credential_api.list_credentials_for_user( - user_id, type=CRED_TYPE_EC2) - return {'credentials': - [self._convert_v3_to_ec2_credential(credential) - for credential in credential_refs]} - - def get_credential(self, user_id, credential_id): - """Retrieve a user's access/secret pair by the access key. - - Grab the full access/secret pair for a given access key. - - :param user_id: id of user - :param credential_id: access key for credentials - :returns: credential: dict of ec2 credential - """ - self.identity_api.get_user(user_id) - return {'credential': self._get_credentials(credential_id)} - - def delete_credential(self, user_id, credential_id): - """Delete a user's access/secret pair. 
- - Used to revoke a user's access/secret pair - - :param user_id: id of user - :param credential_id: access key for credentials - :returns: bool: success - """ - self.identity_api.get_user(user_id) - self._get_credentials(credential_id) - ec2_credential_id = utils.hash_access_key(credential_id) - return self.credential_api.delete_credential(ec2_credential_id) - - @staticmethod - def _convert_v3_to_ec2_credential(credential): - # Prior to bug #1259584 fix, blob was stored unserialized - # but it should be stored as a json string for compatibility - # with the v3 credentials API. Fall back to the old behavior - # for backwards compatibility with existing DB contents - try: - blob = jsonutils.loads(credential['blob']) - except TypeError: - blob = credential['blob'] - return {'user_id': credential.get('user_id'), - 'tenant_id': credential.get('project_id'), - 'access': blob.get('access'), - 'secret': blob.get('secret'), - 'trust_id': blob.get('trust_id')} - - def _get_credentials(self, credential_id): - """Return credentials from an ID. - - :param credential_id: id of credential - :raises keystone.exception.Unauthorized: when credential id is invalid - or when the credential type is not ec2 - :returns: credential: dict of ec2 credential. 
- """ - ec2_credential_id = utils.hash_access_key(credential_id) - cred = self.credential_api.get_credential(ec2_credential_id) - if not cred or cred['type'] != CRED_TYPE_EC2: - raise exception.Unauthorized( - message=_('EC2 access key not found.')) - return self._convert_v3_to_ec2_credential(cred) - - -@dependency.requires('policy_api', 'token_provider_api') -class Ec2Controller(Ec2ControllerCommon, controller.V2Controller): - - @controller.v2_ec2_deprecated - def authenticate(self, context, credentials=None, ec2Credentials=None): - (user_ref, tenant_ref, metadata_ref, roles_ref, - catalog_ref) = self._authenticate(credentials=credentials, - ec2credentials=ec2Credentials) - - # NOTE(morganfainberg): Make sure the data is in correct form since it - # might be consumed external to Keystone and this is a v2.0 controller. - # The token provider does not explicitly care about user_ref version - # in this case, but the data is stored in the token itself and should - # match the version - user_ref = self.v3_to_v2_user(user_ref) - auth_token_data = dict(user=user_ref, - tenant=tenant_ref, - metadata=metadata_ref, - id='placeholder') - (token_id, token_data) = self.token_provider_api.issue_v2_token( - auth_token_data, roles_ref, catalog_ref) - return token_data - - @controller.v2_ec2_deprecated - def get_credential(self, context, user_id, credential_id): - if not self._is_admin(context): - self._assert_identity(context, user_id) - return super(Ec2Controller, self).get_credential(user_id, - credential_id) - - @controller.v2_ec2_deprecated - def get_credentials(self, context, user_id): - if not self._is_admin(context): - self._assert_identity(context, user_id) - return super(Ec2Controller, self).get_credentials(user_id) - - @controller.v2_ec2_deprecated - def create_credential(self, context, user_id, tenant_id): - if not self._is_admin(context): - self._assert_identity(context, user_id) - return super(Ec2Controller, self).create_credential(context, user_id, - tenant_id) - - 
@controller.v2_ec2_deprecated - def delete_credential(self, context, user_id, credential_id): - if not self._is_admin(context): - self._assert_identity(context, user_id) - self._assert_owner(user_id, credential_id) - return super(Ec2Controller, self).delete_credential(user_id, - credential_id) - - def _assert_identity(self, context, user_id): - """Check that the provided token belongs to the user. - - :param context: standard context - :param user_id: id of user - :raises keystone.exception.Forbidden: when token is invalid - - """ - token_ref = utils.get_token_ref(context) - - if token_ref.user_id != user_id: - raise exception.Forbidden(_('Token belongs to another user')) - - def _is_admin(self, context): - """Wrap admin assertion error return statement. - - :param context: standard context - :returns: bool: success - - """ - try: - # NOTE(morganfainberg): policy_api is required for assert_admin - # to properly perform policy enforcement. - self.assert_admin(context) - return True - except (exception.Forbidden, exception.Unauthorized): - return False - - def _assert_owner(self, user_id, credential_id): - """Ensure the provided user owns the credential. 
- - :param user_id: expected credential owner - :param credential_id: id of credential object - :raises keystone.exception.Forbidden: on failure - - """ - ec2_credential_id = utils.hash_access_key(credential_id) - cred_ref = self.credential_api.get_credential(ec2_credential_id) - if user_id != cred_ref['user_id']: - raise exception.Forbidden(_('Credential belongs to another user')) - - -@dependency.requires('policy_api', 'token_provider_api') -class Ec2ControllerV3(Ec2ControllerCommon, controller.V3Controller): - - collection_name = 'credentials' - member_name = 'credential' - - def __init__(self): - super(Ec2ControllerV3, self).__init__() - - def _check_credential_owner_and_user_id_match(self, context, prep_info, - user_id, credential_id): - # NOTE(morganfainberg): this method needs to capture the arguments of - # the method that is decorated with @controller.protected() (with - # exception of the first argument ('context') since the protected - # method passes in *args, **kwargs. In this case, it is easier to see - # the expected input if the argspec is `user_id` and `credential_id` - # explicitly (matching the :class:`.ec2_delete_credential()` method - # below). - ref = {} - credential_id = utils.hash_access_key(credential_id) - ref['credential'] = self.credential_api.get_credential(credential_id) - # NOTE(morganfainberg): policy_api is required for this - # check_protection to properly be able to perform policy enforcement. 
- self.check_protection(context, prep_info, ref) - - def authenticate(self, context, credentials=None, ec2Credentials=None): - (user_ref, project_ref, metadata_ref, roles_ref, - catalog_ref) = self._authenticate(credentials=credentials, - ec2credentials=ec2Credentials) - - method_names = ['ec2credential'] - - token_id, token_data = self.token_provider_api.issue_v3_token( - user_ref['id'], method_names, project_id=project_ref['id'], - metadata_ref=metadata_ref) - return render_token_data_response(token_id, token_data) - - @controller.protected(callback=_check_credential_owner_and_user_id_match) - def ec2_get_credential(self, context, user_id, credential_id): - ref = super(Ec2ControllerV3, self).get_credential(user_id, - credential_id) - return Ec2ControllerV3.wrap_member(context, ref['credential']) - - @controller.protected() - def ec2_list_credentials(self, context, user_id): - refs = super(Ec2ControllerV3, self).get_credentials(user_id) - return Ec2ControllerV3.wrap_collection(context, refs['credentials']) - - @controller.protected() - def ec2_create_credential(self, context, user_id, tenant_id): - ref = super(Ec2ControllerV3, self).create_credential(context, user_id, - tenant_id) - return Ec2ControllerV3.wrap_member(context, ref['credential']) - - @controller.protected(callback=_check_credential_owner_and_user_id_match) - def ec2_delete_credential(self, context, user_id, credential_id): - return super(Ec2ControllerV3, self).delete_credential(user_id, - credential_id) - - @classmethod - def _add_self_referential_link(cls, context, ref): - path = '/users/%(user_id)s/credentials/OS-EC2/%(credential_id)s' - url = cls.base_url(context, path) % { - 'user_id': ref['user_id'], - 'credential_id': ref['access']} - ref.setdefault('links', {}) - ref['links']['self'] = url - - -def render_token_data_response(token_id, token_data): - """Render token data HTTP response. - - Stash token ID into the X-Subject-Token header. 
- - """ - headers = [('X-Subject-Token', token_id)] - - return wsgi.render_response(body=token_data, - status=(200, 'OK'), headers=headers) diff --git a/keystone-moon/keystone/contrib/ec2/core.py b/keystone-moon/keystone/contrib/ec2/core.py deleted file mode 100644 index 7bba8cab..00000000 --- a/keystone-moon/keystone/contrib/ec2/core.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.common import extension - - -EXTENSION_DATA = { - 'name': 'OpenStack EC2 API', - 'namespace': 'http://docs.openstack.org/identity/api/ext/' - 'OS-EC2/v1.0', - 'alias': 'OS-EC2', - 'updated': '2013-07-07T12:00:0-00:00', - 'description': 'OpenStack EC2 Credentials backend.', - 'links': [ - { - 'rel': 'describedby', - 'type': 'text/html', - 'href': 'http://developer.openstack.org/' - 'api-ref-identity-v2-ext.html', - } - ]} -extension.register_admin_extension(EXTENSION_DATA['alias'], EXTENSION_DATA) -extension.register_public_extension(EXTENSION_DATA['alias'], EXTENSION_DATA) diff --git a/keystone-moon/keystone/contrib/ec2/routers.py b/keystone-moon/keystone/contrib/ec2/routers.py deleted file mode 100644 index 97c68cf7..00000000 --- a/keystone-moon/keystone/contrib/ec2/routers.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import functools - -from keystone.common import json_home -from keystone.common import wsgi -from keystone.contrib.ec2 import controllers - - -build_resource_relation = functools.partial( - json_home.build_v3_extension_resource_relation, extension_name='OS-EC2', - extension_version='1.0') - - -class Ec2Extension(wsgi.ExtensionRouter): - def add_routes(self, mapper): - ec2_controller = controllers.Ec2Controller() - # validation - mapper.connect( - '/ec2tokens', - controller=ec2_controller, - action='authenticate', - conditions=dict(method=['POST'])) - - # crud - mapper.connect( - '/users/{user_id}/credentials/OS-EC2', - controller=ec2_controller, - action='create_credential', - conditions=dict(method=['POST'])) - mapper.connect( - '/users/{user_id}/credentials/OS-EC2', - controller=ec2_controller, - action='get_credentials', - conditions=dict(method=['GET'])) - mapper.connect( - '/users/{user_id}/credentials/OS-EC2/{credential_id}', - controller=ec2_controller, - action='get_credential', - conditions=dict(method=['GET'])) - mapper.connect( - '/users/{user_id}/credentials/OS-EC2/{credential_id}', - controller=ec2_controller, - action='delete_credential', - conditions=dict(method=['DELETE'])) - - -class Ec2ExtensionV3(wsgi.V3ExtensionRouter): - - def add_routes(self, mapper): - ec2_controller = controllers.Ec2ControllerV3() - # validation - self._add_resource( - mapper, ec2_controller, - path='/ec2tokens', - post_action='authenticate', - rel=build_resource_relation(resource_name='ec2tokens')) - - # crud - self._add_resource( - mapper, ec2_controller, - 
path='/users/{user_id}/credentials/OS-EC2', - get_action='ec2_list_credentials', - post_action='ec2_create_credential', - rel=build_resource_relation(resource_name='user_credentials'), - path_vars={ - 'user_id': json_home.Parameters.USER_ID, - }) - self._add_resource( - mapper, ec2_controller, - path='/users/{user_id}/credentials/OS-EC2/{credential_id}', - get_action='ec2_get_credential', - delete_action='ec2_delete_credential', - rel=build_resource_relation(resource_name='user_credential'), - path_vars={ - 'credential_id': - json_home.build_v3_parameter_relation('credential_id'), - 'user_id': json_home.Parameters.USER_ID, - }) diff --git a/keystone-moon/keystone/contrib/endpoint_filter/__init__.py b/keystone-moon/keystone/contrib/endpoint_filter/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/contrib/endpoint_filter/backends/__init__.py b/keystone-moon/keystone/contrib/endpoint_filter/backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/contrib/endpoint_filter/backends/catalog_sql.py b/keystone-moon/keystone/contrib/endpoint_filter/backends/catalog_sql.py deleted file mode 100644 index ad39d045..00000000 --- a/keystone-moon/keystone/contrib/endpoint_filter/backends/catalog_sql.py +++ /dev/null @@ -1,77 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_config import cfg - -from keystone.catalog.backends import sql -from keystone.catalog import core as catalog_core -from keystone.common import dependency - - -CONF = cfg.CONF - - -@dependency.requires('catalog_api') -class EndpointFilterCatalog(sql.Catalog): - def get_v3_catalog(self, user_id, project_id): - substitutions = dict(CONF.items()) - substitutions.update({ - 'tenant_id': project_id, - 'project_id': project_id, - 'user_id': user_id, - }) - - services = {} - - dict_of_endpoint_refs = (self.catalog_api. - list_endpoints_for_project(project_id)) - - if (not dict_of_endpoint_refs and - CONF.endpoint_filter.return_all_endpoints_if_no_filter): - return super(EndpointFilterCatalog, self).get_v3_catalog( - user_id, project_id) - - for endpoint_id, endpoint in dict_of_endpoint_refs.items(): - if not endpoint['enabled']: - # Skip disabled endpoints. - continue - service_id = endpoint['service_id'] - services.setdefault( - service_id, - self.get_service(service_id)) - service = services[service_id] - del endpoint['service_id'] - del endpoint['enabled'] - del endpoint['legacy_endpoint_id'] - # Include deprecated region for backwards compatibility - endpoint['region'] = endpoint['region_id'] - endpoint['url'] = catalog_core.format_url( - endpoint['url'], substitutions) - # populate filtered endpoints - if 'endpoints' in services[service_id]: - service['endpoints'].append(endpoint) - else: - service['endpoints'] = [endpoint] - - # format catalog - catalog = [] - for service_id, service in services.items(): - formatted_service = {} - formatted_service['id'] = service['id'] - formatted_service['type'] = service['type'] - formatted_service['name'] = service['name'] - formatted_service['endpoints'] = service['endpoints'] - catalog.append(formatted_service) - - return catalog diff --git a/keystone-moon/keystone/contrib/endpoint_filter/backends/sql.py b/keystone-moon/keystone/contrib/endpoint_filter/backends/sql.py deleted file mode 100644 index 
484934bb..00000000 --- a/keystone-moon/keystone/contrib/endpoint_filter/backends/sql.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_log import versionutils - -from keystone.catalog.backends import sql - -_OLD = 'keystone.contrib.endpoint_filter.backends.sql.EndpointFilter' -_NEW = 'sql' - - -class EndpointFilter(sql.Catalog): - @versionutils.deprecated( - as_of=versionutils.deprecated.MITAKA, - in_favor_of=_NEW, - what=_OLD, - remove_in=2) - def __init__(self, *args, **kwargs): - super(EndpointFilter, self).__init__(*args, **kwargs) diff --git a/keystone-moon/keystone/contrib/endpoint_filter/controllers.py b/keystone-moon/keystone/contrib/endpoint_filter/controllers.py deleted file mode 100644 index eb627c6b..00000000 --- a/keystone-moon/keystone/contrib/endpoint_filter/controllers.py +++ /dev/null @@ -1,300 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -import six - -from keystone.catalog import controllers as catalog_controllers -from keystone.common import controller -from keystone.common import dependency -from keystone.common import validation -from keystone.contrib.endpoint_filter import schema -from keystone import exception -from keystone import notifications -from keystone import resource - - -@dependency.requires('catalog_api', 'endpoint_filter_api', 'resource_api') -class _ControllerBase(controller.V3Controller): - """Base behaviors for endpoint filter controllers.""" - - def _get_endpoint_groups_for_project(self, project_id): - # recover the project endpoint group memberships and for each - # membership recover the endpoint group - self.resource_api.get_project(project_id) - try: - refs = self.endpoint_filter_api.list_endpoint_groups_for_project( - project_id) - endpoint_groups = [self.endpoint_filter_api.get_endpoint_group( - ref['endpoint_group_id']) for ref in refs] - return endpoint_groups - except exception.EndpointGroupNotFound: - return [] - - def _get_endpoints_filtered_by_endpoint_group(self, endpoint_group_id): - endpoints = self.catalog_api.list_endpoints() - filters = self.endpoint_filter_api.get_endpoint_group( - endpoint_group_id)['filters'] - filtered_endpoints = [] - - for endpoint in endpoints: - is_candidate = True - for key, value in filters.items(): - if endpoint[key] != value: - is_candidate = False - break - if is_candidate: - filtered_endpoints.append(endpoint) - return filtered_endpoints - - -class EndpointFilterV3Controller(_ControllerBase): - - def __init__(self): - super(EndpointFilterV3Controller, self).__init__() - notifications.register_event_callback( - notifications.ACTIONS.deleted, 'project', - self._on_project_or_endpoint_delete) - notifications.register_event_callback( - notifications.ACTIONS.deleted, 'endpoint', - self._on_project_or_endpoint_delete) - - def 
_on_project_or_endpoint_delete(self, service, resource_type, operation, - payload): - project_or_endpoint_id = payload['resource_info'] - if resource_type == 'project': - self.endpoint_filter_api.delete_association_by_project( - project_or_endpoint_id) - else: - self.endpoint_filter_api.delete_association_by_endpoint( - project_or_endpoint_id) - - @controller.protected() - def add_endpoint_to_project(self, context, project_id, endpoint_id): - """Establishes an association between an endpoint and a project.""" - # NOTE(gyee): we just need to make sure endpoint and project exist - # first. We don't really care whether if project is disabled. - # The relationship can still be established even with a disabled - # project as there are no security implications. - self.catalog_api.get_endpoint(endpoint_id) - self.resource_api.get_project(project_id) - self.endpoint_filter_api.add_endpoint_to_project(endpoint_id, - project_id) - - @controller.protected() - def check_endpoint_in_project(self, context, project_id, endpoint_id): - """Verifies endpoint is currently associated with given project.""" - self.catalog_api.get_endpoint(endpoint_id) - self.resource_api.get_project(project_id) - self.endpoint_filter_api.check_endpoint_in_project(endpoint_id, - project_id) - - @controller.protected() - def list_endpoints_for_project(self, context, project_id): - """List all endpoints currently associated with a given project.""" - self.resource_api.get_project(project_id) - refs = self.endpoint_filter_api.list_endpoints_for_project(project_id) - filtered_endpoints = {ref['endpoint_id']: - self.catalog_api.get_endpoint(ref['endpoint_id']) - for ref in refs} - - # need to recover endpoint_groups associated with project - # then for each endpoint group return the endpoints. 
- endpoint_groups = self._get_endpoint_groups_for_project(project_id) - for endpoint_group in endpoint_groups: - endpoint_refs = self._get_endpoints_filtered_by_endpoint_group( - endpoint_group['id']) - # now check if any endpoints for current endpoint group are not - # contained in the list of filtered endpoints - for endpoint_ref in endpoint_refs: - if endpoint_ref['id'] not in filtered_endpoints: - filtered_endpoints[endpoint_ref['id']] = endpoint_ref - - return catalog_controllers.EndpointV3.wrap_collection( - context, [v for v in six.itervalues(filtered_endpoints)]) - - @controller.protected() - def remove_endpoint_from_project(self, context, project_id, endpoint_id): - """Remove the endpoint from the association with given project.""" - self.endpoint_filter_api.remove_endpoint_from_project(endpoint_id, - project_id) - - @controller.protected() - def list_projects_for_endpoint(self, context, endpoint_id): - """Return a list of projects associated with the endpoint.""" - self.catalog_api.get_endpoint(endpoint_id) - refs = self.endpoint_filter_api.list_projects_for_endpoint(endpoint_id) - - projects = [self.resource_api.get_project( - ref['project_id']) for ref in refs] - return resource.controllers.ProjectV3.wrap_collection(context, - projects) - - -class EndpointGroupV3Controller(_ControllerBase): - collection_name = 'endpoint_groups' - member_name = 'endpoint_group' - - VALID_FILTER_KEYS = ['service_id', 'region_id', 'interface'] - - def __init__(self): - super(EndpointGroupV3Controller, self).__init__() - - @classmethod - def base_url(cls, context, path=None): - """Construct a path and pass it to V3Controller.base_url method.""" - - path = '/OS-EP-FILTER/' + cls.collection_name - return super(EndpointGroupV3Controller, cls).base_url(context, - path=path) - - @controller.protected() - @validation.validated(schema.endpoint_group_create, 'endpoint_group') - def create_endpoint_group(self, context, endpoint_group): - """Creates an Endpoint Group with the 
associated filters.""" - ref = self._assign_unique_id(self._normalize_dict(endpoint_group)) - self._require_attribute(ref, 'filters') - self._require_valid_filter(ref) - ref = self.endpoint_filter_api.create_endpoint_group(ref['id'], ref) - return EndpointGroupV3Controller.wrap_member(context, ref) - - def _require_valid_filter(self, endpoint_group): - filters = endpoint_group.get('filters') - for key in six.iterkeys(filters): - if key not in self.VALID_FILTER_KEYS: - raise exception.ValidationError( - attribute=self._valid_filter_keys(), - target='endpoint_group') - - def _valid_filter_keys(self): - return ' or '.join(self.VALID_FILTER_KEYS) - - @controller.protected() - def get_endpoint_group(self, context, endpoint_group_id): - """Retrieve the endpoint group associated with the id if exists.""" - ref = self.endpoint_filter_api.get_endpoint_group(endpoint_group_id) - return EndpointGroupV3Controller.wrap_member( - context, ref) - - @controller.protected() - @validation.validated(schema.endpoint_group_update, 'endpoint_group') - def update_endpoint_group(self, context, endpoint_group_id, - endpoint_group): - """Update fixed values and/or extend the filters.""" - if 'filters' in endpoint_group: - self._require_valid_filter(endpoint_group) - ref = self.endpoint_filter_api.update_endpoint_group(endpoint_group_id, - endpoint_group) - return EndpointGroupV3Controller.wrap_member( - context, ref) - - @controller.protected() - def delete_endpoint_group(self, context, endpoint_group_id): - """Delete endpoint_group.""" - self.endpoint_filter_api.delete_endpoint_group(endpoint_group_id) - - @controller.protected() - def list_endpoint_groups(self, context): - """List all endpoint groups.""" - refs = self.endpoint_filter_api.list_endpoint_groups() - return EndpointGroupV3Controller.wrap_collection( - context, refs) - - @controller.protected() - def list_endpoint_groups_for_project(self, context, project_id): - """List all endpoint groups associated with a given project.""" - 
return EndpointGroupV3Controller.wrap_collection( - context, self._get_endpoint_groups_for_project(project_id)) - - @controller.protected() - def list_projects_associated_with_endpoint_group(self, - context, - endpoint_group_id): - """List all projects associated with endpoint group.""" - endpoint_group_refs = (self.endpoint_filter_api. - list_projects_associated_with_endpoint_group( - endpoint_group_id)) - projects = [] - for endpoint_group_ref in endpoint_group_refs: - project = self.resource_api.get_project( - endpoint_group_ref['project_id']) - if project: - projects.append(project) - return resource.controllers.ProjectV3.wrap_collection(context, - projects) - - @controller.protected() - def list_endpoints_associated_with_endpoint_group(self, - context, - endpoint_group_id): - """List all the endpoints filtered by a specific endpoint group.""" - filtered_endpoints = self._get_endpoints_filtered_by_endpoint_group( - endpoint_group_id) - return catalog_controllers.EndpointV3.wrap_collection( - context, filtered_endpoints) - - -class ProjectEndpointGroupV3Controller(_ControllerBase): - collection_name = 'project_endpoint_groups' - member_name = 'project_endpoint_group' - - def __init__(self): - super(ProjectEndpointGroupV3Controller, self).__init__() - notifications.register_event_callback( - notifications.ACTIONS.deleted, 'project', - self._on_project_delete) - - def _on_project_delete(self, service, resource_type, - operation, payload): - project_id = payload['resource_info'] - (self.endpoint_filter_api. 
- delete_endpoint_group_association_by_project( - project_id)) - - @controller.protected() - def get_endpoint_group_in_project(self, context, endpoint_group_id, - project_id): - """Retrieve the endpoint group associated with the id if exists.""" - self.resource_api.get_project(project_id) - self.endpoint_filter_api.get_endpoint_group(endpoint_group_id) - ref = self.endpoint_filter_api.get_endpoint_group_in_project( - endpoint_group_id, project_id) - return ProjectEndpointGroupV3Controller.wrap_member( - context, ref) - - @controller.protected() - def add_endpoint_group_to_project(self, context, endpoint_group_id, - project_id): - """Creates an association between an endpoint group and project.""" - self.resource_api.get_project(project_id) - self.endpoint_filter_api.get_endpoint_group(endpoint_group_id) - self.endpoint_filter_api.add_endpoint_group_to_project( - endpoint_group_id, project_id) - - @controller.protected() - def remove_endpoint_group_from_project(self, context, endpoint_group_id, - project_id): - """Remove the endpoint group from associated project.""" - self.resource_api.get_project(project_id) - self.endpoint_filter_api.get_endpoint_group(endpoint_group_id) - self.endpoint_filter_api.remove_endpoint_group_from_project( - endpoint_group_id, project_id) - - @classmethod - def _add_self_referential_link(cls, context, ref): - url = ('/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' - '/projects/%(project_id)s' % { - 'endpoint_group_id': ref['endpoint_group_id'], - 'project_id': ref['project_id']}) - ref.setdefault('links', {}) - ref['links']['self'] = url diff --git a/keystone-moon/keystone/contrib/endpoint_filter/core.py b/keystone-moon/keystone/contrib/endpoint_filter/core.py deleted file mode 100644 index b66465ea..00000000 --- a/keystone-moon/keystone/contrib/endpoint_filter/core.py +++ /dev/null @@ -1,296 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this 
file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Main entry point into the Endpoint Filter service.""" - -import abc - -from oslo_config import cfg -from oslo_log import log -import six - -from keystone.common import dependency -from keystone.common import extension -from keystone.common import manager -from keystone import exception - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - -extension_data = { - 'name': 'OpenStack Keystone Endpoint Filter API', - 'namespace': 'http://docs.openstack.org/identity/api/ext/' - 'OS-EP-FILTER/v1.0', - 'alias': 'OS-EP-FILTER', - 'updated': '2013-07-23T12:00:0-00:00', - 'description': 'OpenStack Keystone Endpoint Filter API.', - 'links': [ - { - 'rel': 'describedby', - # TODO(ayoung): needs a description - 'type': 'text/html', - 'href': 'https://github.com/openstack/identity-api/blob/master' - '/openstack-identity-api/v3/src/markdown/' - 'identity-api-v3-os-ep-filter-ext.md', - } - ]} -extension.register_admin_extension(extension_data['alias'], extension_data) - - -@dependency.provider('endpoint_filter_api') -class Manager(manager.Manager): - """Default pivot point for the Endpoint Filter backend. - - See :mod:`keystone.common.manager.Manager` for more details on how this - dynamically calls the backend. 
- - """ - - driver_namespace = 'keystone.endpoint_filter' - - def __init__(self): - super(Manager, self).__init__(CONF.endpoint_filter.driver) - - -@six.add_metaclass(abc.ABCMeta) -class EndpointFilterDriverV8(object): - """Interface description for an Endpoint Filter driver.""" - - @abc.abstractmethod - def add_endpoint_to_project(self, endpoint_id, project_id): - """Create an endpoint to project association. - - :param endpoint_id: identity of endpoint to associate - :type endpoint_id: string - :param project_id: identity of the project to be associated with - :type project_id: string - :raises: keystone.exception.Conflict, - :returns: None. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def remove_endpoint_from_project(self, endpoint_id, project_id): - """Removes an endpoint to project association. - - :param endpoint_id: identity of endpoint to remove - :type endpoint_id: string - :param project_id: identity of the project associated with - :type project_id: string - :raises: exception.NotFound - :returns: None. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def check_endpoint_in_project(self, endpoint_id, project_id): - """Checks if an endpoint is associated with a project. - - :param endpoint_id: identity of endpoint to check - :type endpoint_id: string - :param project_id: identity of the project associated with - :type project_id: string - :raises: exception.NotFound - :returns: None. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_endpoints_for_project(self, project_id): - """List all endpoints associated with a project. - - :param project_id: identity of the project to check - :type project_id: string - :returns: a list of identity endpoint ids or an empty list. 
- - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_projects_for_endpoint(self, endpoint_id): - """List all projects associated with an endpoint. - - :param endpoint_id: identity of endpoint to check - :type endpoint_id: string - :returns: a list of projects or an empty list. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_association_by_endpoint(self, endpoint_id): - """Removes all the endpoints to project association with endpoint. - - :param endpoint_id: identity of endpoint to check - :type endpoint_id: string - :returns: None - - """ - raise exception.NotImplemented() - - @abc.abstractmethod - def delete_association_by_project(self, project_id): - """Removes all the endpoints to project association with project. - - :param project_id: identity of the project to check - :type project_id: string - :returns: None - - """ - raise exception.NotImplemented() - - @abc.abstractmethod - def create_endpoint_group(self, endpoint_group): - """Create an endpoint group. - - :param endpoint_group: endpoint group to create - :type endpoint_group: dictionary - :raises: keystone.exception.Conflict, - :returns: an endpoint group representation. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_endpoint_group(self, endpoint_group_id): - """Get an endpoint group. - - :param endpoint_group_id: identity of endpoint group to retrieve - :type endpoint_group_id: string - :raises: exception.NotFound - :returns: an endpoint group representation. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def update_endpoint_group(self, endpoint_group_id, endpoint_group): - """Update an endpoint group. 
- - :param endpoint_group_id: identity of endpoint group to retrieve - :type endpoint_group_id: string - :param endpoint_group: A full or partial endpoint_group - :type endpoint_group: dictionary - :raises: exception.NotFound - :returns: an endpoint group representation. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_endpoint_group(self, endpoint_group_id): - """Delete an endpoint group. - - :param endpoint_group_id: identity of endpoint group to delete - :type endpoint_group_id: string - :raises: exception.NotFound - :returns: None. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def add_endpoint_group_to_project(self, endpoint_group_id, project_id): - """Adds an endpoint group to project association. - - :param endpoint_group_id: identity of endpoint to associate - :type endpoint_group_id: string - :param project_id: identity of project to associate - :type project_id: string - :raises: keystone.exception.Conflict, - :returns: None. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_endpoint_group_in_project(self, endpoint_group_id, project_id): - """Get endpoint group to project association. - - :param endpoint_group_id: identity of endpoint group to retrieve - :type endpoint_group_id: string - :param project_id: identity of project to associate - :type project_id: string - :raises: exception.NotFound - :returns: a project endpoint group representation. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_endpoint_groups(self): - """List all endpoint groups. - - :raises: exception.NotFound - :returns: None. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_endpoint_groups_for_project(self, project_id): - """List all endpoint group to project associations for a project. 
- - :param project_id: identity of project to associate - :type project_id: string - :raises: exception.NotFound - :returns: None. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_projects_associated_with_endpoint_group(self, endpoint_group_id): - """List all projects associated with endpoint group. - - :param endpoint_group_id: identity of endpoint to associate - :type endpoint_group_id: string - :raises: exception.NotFound - :returns: None. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def remove_endpoint_group_from_project(self, endpoint_group_id, - project_id): - """Remove an endpoint to project association. - - :param endpoint_group_id: identity of endpoint to associate - :type endpoint_group_id: string - :param project_id: identity of project to associate - :type project_id: string - :raises: exception.NotFound - :returns: None. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_endpoint_group_association_by_project(self, project_id): - """Remove endpoint group to project associations. - - :param project_id: identity of the project to check - :type project_id: string - :returns: None - - """ - raise exception.NotImplemented() # pragma: no cover - - -Driver = manager.create_legacy_driver(EndpointFilterDriverV8) diff --git a/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/__init__.py b/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/migrate.cfg b/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/migrate.cfg deleted file mode 100644 index c7d34785..00000000 --- a/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/migrate.cfg +++ /dev/null @@ -1,25 +0,0 @@ -[db_settings] -# Used to identify which repository this database is versioned under. 
-# You can use the name of your project. -repository_id=endpoint_filter - -# The name of the database table used to track the schema version. -# This name shouldn't already be used by your project. -# If this is changed once a database is under version control, you'll need to -# change the table name in each database too. -version_table=migrate_version - -# When committing a change script, Migrate will attempt to generate the -# sql for all supported databases; normally, if one of them fails - probably -# because you don't have that database installed - it is ignored and the -# commit continues, perhaps ending successfully. -# Databases in this list MUST compile successfully during a commit, or the -# entire commit will fail. List the databases your application will actually -# be using to ensure your updates to that database work properly. -# This must be a list; example: ['postgres','sqlite'] -required_dbs=[] - -# When creating new change scripts, Migrate will stamp the new script with -# a version number. By default this is latest_version + 1. You can set this -# to 'true' to tell Migrate to use the UTC timestamp instead. -use_timestamp_numbering=False diff --git a/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/versions/001_add_endpoint_filtering_table.py b/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/versions/001_add_endpoint_filtering_table.py deleted file mode 100644 index ac0a30cc..00000000 --- a/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/versions/001_add_endpoint_filtering_table.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone import exception - - -def upgrade(migrate_engine): - raise exception.MigrationMovedFailure(extension='endpoint_filter') diff --git a/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/versions/002_add_endpoint_groups.py b/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/versions/002_add_endpoint_groups.py deleted file mode 100644 index ac5aa5b3..00000000 --- a/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/versions/002_add_endpoint_groups.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright 2014 Hewlett-Packard Company -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone import exception - - -def upgrade(migrate_engine): - raise exception.MigrationMovedFailure(extension='endpoint_filter') diff --git a/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/versions/__init__.py b/keystone-moon/keystone/contrib/endpoint_filter/migrate_repo/versions/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/contrib/endpoint_filter/routers.py b/keystone-moon/keystone/contrib/endpoint_filter/routers.py deleted file mode 100644 index f75110f9..00000000 --- a/keystone-moon/keystone/contrib/endpoint_filter/routers.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_log import log -from oslo_log import versionutils - -from keystone.common import wsgi -from keystone.i18n import _ - - -LOG = log.getLogger(__name__) - - -class EndpointFilterExtension(wsgi.Middleware): - - def __init__(self, *args, **kwargs): - super(EndpointFilterExtension, self).__init__(*args, **kwargs) - msg = _("Remove endpoint_filter_extension from the paste pipeline, " - "the endpoint filter extension is now always available. 
" - "Update the [pipeline:api_v3] section in keystone-paste.ini " - "accordingly as it will be removed in the O release.") - versionutils.report_deprecated_feature(LOG, msg) diff --git a/keystone-moon/keystone/contrib/endpoint_filter/schema.py b/keystone-moon/keystone/contrib/endpoint_filter/schema.py deleted file mode 100644 index cbe54e36..00000000 --- a/keystone-moon/keystone/contrib/endpoint_filter/schema.py +++ /dev/null @@ -1,35 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.common import validation -from keystone.common.validation import parameter_types - - -_endpoint_group_properties = { - 'description': validation.nullable(parameter_types.description), - 'filters': { - 'type': 'object' - }, - 'name': parameter_types.name -} - -endpoint_group_create = { - 'type': 'object', - 'properties': _endpoint_group_properties, - 'required': ['name', 'filters'] -} - -endpoint_group_update = { - 'type': 'object', - 'properties': _endpoint_group_properties, - 'minProperties': 1 -} diff --git a/keystone-moon/keystone/contrib/endpoint_policy/__init__.py b/keystone-moon/keystone/contrib/endpoint_policy/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/contrib/endpoint_policy/backends/__init__.py b/keystone-moon/keystone/contrib/endpoint_policy/backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/contrib/endpoint_policy/backends/sql.py 
b/keystone-moon/keystone/contrib/endpoint_policy/backends/sql.py deleted file mode 100644 index 93331779..00000000 --- a/keystone-moon/keystone/contrib/endpoint_policy/backends/sql.py +++ /dev/null @@ -1,28 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_log import versionutils - -from keystone.endpoint_policy.backends import sql - -_OLD = 'keystone.contrib.endpoint_policy.backends.sql.EndpointPolicy' -_NEW = 'keystone.endpoint_policy.backends.sql.EndpointPolicy' - - -class EndpointPolicy(sql.EndpointPolicy): - - @versionutils.deprecated(versionutils.deprecated.LIBERTY, - in_favor_of=_NEW, - remove_in=1, - what=_OLD) - def __init__(self, *args, **kwargs): - super(EndpointPolicy, self).__init__(*args, **kwargs) diff --git a/keystone-moon/keystone/contrib/endpoint_policy/controllers.py b/keystone-moon/keystone/contrib/endpoint_policy/controllers.py deleted file mode 100644 index b96834dc..00000000 --- a/keystone-moon/keystone/contrib/endpoint_policy/controllers.py +++ /dev/null @@ -1,166 +0,0 @@ -# Copyright 2014 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.common import controller -from keystone.common import dependency -from keystone import notifications - - -@dependency.requires('policy_api', 'catalog_api', 'endpoint_policy_api') -class EndpointPolicyV3Controller(controller.V3Controller): - collection_name = 'endpoints' - member_name = 'endpoint' - - def __init__(self): - super(EndpointPolicyV3Controller, self).__init__() - notifications.register_event_callback( - 'deleted', 'endpoint', self._on_endpoint_delete) - notifications.register_event_callback( - 'deleted', 'service', self._on_service_delete) - notifications.register_event_callback( - 'deleted', 'region', self._on_region_delete) - notifications.register_event_callback( - 'deleted', 'policy', self._on_policy_delete) - - def _on_endpoint_delete(self, service, resource_type, operation, payload): - self.endpoint_policy_api.delete_association_by_endpoint( - payload['resource_info']) - - def _on_service_delete(self, service, resource_type, operation, payload): - self.endpoint_policy_api.delete_association_by_service( - payload['resource_info']) - - def _on_region_delete(self, service, resource_type, operation, payload): - self.endpoint_policy_api.delete_association_by_region( - payload['resource_info']) - - def _on_policy_delete(self, service, resource_type, operation, payload): - self.endpoint_policy_api.delete_association_by_policy( - payload['resource_info']) - - @controller.protected() - def create_policy_association_for_endpoint(self, context, - policy_id, endpoint_id): - """Create an association between a policy and an endpoint.""" 
- self.policy_api.get_policy(policy_id) - self.catalog_api.get_endpoint(endpoint_id) - self.endpoint_policy_api.create_policy_association( - policy_id, endpoint_id=endpoint_id) - - @controller.protected() - def check_policy_association_for_endpoint(self, context, - policy_id, endpoint_id): - """Check an association between a policy and an endpoint.""" - self.policy_api.get_policy(policy_id) - self.catalog_api.get_endpoint(endpoint_id) - self.endpoint_policy_api.check_policy_association( - policy_id, endpoint_id=endpoint_id) - - @controller.protected() - def delete_policy_association_for_endpoint(self, context, - policy_id, endpoint_id): - """Delete an association between a policy and an endpoint.""" - self.policy_api.get_policy(policy_id) - self.catalog_api.get_endpoint(endpoint_id) - self.endpoint_policy_api.delete_policy_association( - policy_id, endpoint_id=endpoint_id) - - @controller.protected() - def create_policy_association_for_service(self, context, - policy_id, service_id): - """Create an association between a policy and a service.""" - self.policy_api.get_policy(policy_id) - self.catalog_api.get_service(service_id) - self.endpoint_policy_api.create_policy_association( - policy_id, service_id=service_id) - - @controller.protected() - def check_policy_association_for_service(self, context, - policy_id, service_id): - """Check an association between a policy and a service.""" - self.policy_api.get_policy(policy_id) - self.catalog_api.get_service(service_id) - self.endpoint_policy_api.check_policy_association( - policy_id, service_id=service_id) - - @controller.protected() - def delete_policy_association_for_service(self, context, - policy_id, service_id): - """Delete an association between a policy and a service.""" - self.policy_api.get_policy(policy_id) - self.catalog_api.get_service(service_id) - self.endpoint_policy_api.delete_policy_association( - policy_id, service_id=service_id) - - @controller.protected() - def 
create_policy_association_for_region_and_service( - self, context, policy_id, service_id, region_id): - """Create an association between a policy and region+service.""" - self.policy_api.get_policy(policy_id) - self.catalog_api.get_service(service_id) - self.catalog_api.get_region(region_id) - self.endpoint_policy_api.create_policy_association( - policy_id, service_id=service_id, region_id=region_id) - - @controller.protected() - def check_policy_association_for_region_and_service( - self, context, policy_id, service_id, region_id): - """Check an association between a policy and region+service.""" - self.policy_api.get_policy(policy_id) - self.catalog_api.get_service(service_id) - self.catalog_api.get_region(region_id) - self.endpoint_policy_api.check_policy_association( - policy_id, service_id=service_id, region_id=region_id) - - @controller.protected() - def delete_policy_association_for_region_and_service( - self, context, policy_id, service_id, region_id): - """Delete an association between a policy and region+service.""" - self.policy_api.get_policy(policy_id) - self.catalog_api.get_service(service_id) - self.catalog_api.get_region(region_id) - self.endpoint_policy_api.delete_policy_association( - policy_id, service_id=service_id, region_id=region_id) - - @controller.protected() - def get_policy_for_endpoint(self, context, endpoint_id): - """Get the effective policy for an endpoint.""" - self.catalog_api.get_endpoint(endpoint_id) - ref = self.endpoint_policy_api.get_policy_for_endpoint(endpoint_id) - # NOTE(henry-nash): since the collection and member for this class is - # set to endpoints, we have to handle wrapping this policy entity - # ourselves. - self._add_self_referential_link(context, ref) - return {'policy': ref} - - # NOTE(henry-nash): As in the catalog controller, we must ensure that the - # legacy_endpoint_id does not escape. 
- - @classmethod - def filter_endpoint(cls, ref): - if 'legacy_endpoint_id' in ref: - ref.pop('legacy_endpoint_id') - return ref - - @classmethod - def wrap_member(cls, context, ref): - ref = cls.filter_endpoint(ref) - return super(EndpointPolicyV3Controller, cls).wrap_member(context, ref) - - @controller.protected() - def list_endpoints_for_policy(self, context, policy_id): - """List endpoints with the effective association to a policy.""" - self.policy_api.get_policy(policy_id) - refs = self.endpoint_policy_api.list_endpoints_for_policy(policy_id) - return EndpointPolicyV3Controller.wrap_collection(context, refs) diff --git a/keystone-moon/keystone/contrib/endpoint_policy/core.py b/keystone-moon/keystone/contrib/endpoint_policy/core.py deleted file mode 100644 index 1aa03267..00000000 --- a/keystone-moon/keystone/contrib/endpoint_policy/core.py +++ /dev/null @@ -1,430 +0,0 @@ -# Copyright 2014 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import abc - -from oslo_config import cfg -from oslo_log import log -import six - -from keystone.common import dependency -from keystone.common import manager -from keystone import exception -from keystone.i18n import _, _LE, _LW - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -@dependency.provider('endpoint_policy_api') -@dependency.requires('catalog_api', 'policy_api') -class Manager(manager.Manager): - """Default pivot point for the Endpoint Policy backend. 
- - See :mod:`keystone.common.manager.Manager` for more details on how this - dynamically calls the backend. - - """ - - def __init__(self): - super(Manager, self).__init__(CONF.endpoint_policy.driver) - - def _assert_valid_association(self, endpoint_id, service_id, region_id): - """Assert that the association is supported. - - There are three types of association supported: - - - Endpoint (in which case service and region must be None) - - Service and region (in which endpoint must be None) - - Service (in which case endpoint and region must be None) - - """ - if (endpoint_id is not None and - service_id is None and region_id is None): - return - if (service_id is not None and region_id is not None and - endpoint_id is None): - return - if (service_id is not None and - endpoint_id is None and region_id is None): - return - - raise exception.InvalidPolicyAssociation(endpoint_id=endpoint_id, - service_id=service_id, - region_id=region_id) - - def create_policy_association(self, policy_id, endpoint_id=None, - service_id=None, region_id=None): - self._assert_valid_association(endpoint_id, service_id, region_id) - self.driver.create_policy_association(policy_id, endpoint_id, - service_id, region_id) - - def check_policy_association(self, policy_id, endpoint_id=None, - service_id=None, region_id=None): - self._assert_valid_association(endpoint_id, service_id, region_id) - self.driver.check_policy_association(policy_id, endpoint_id, - service_id, region_id) - - def delete_policy_association(self, policy_id, endpoint_id=None, - service_id=None, region_id=None): - self._assert_valid_association(endpoint_id, service_id, region_id) - self.driver.delete_policy_association(policy_id, endpoint_id, - service_id, region_id) - - def list_endpoints_for_policy(self, policy_id): - - def _get_endpoint(endpoint_id, policy_id): - try: - return self.catalog_api.get_endpoint(endpoint_id) - except exception.EndpointNotFound: - msg = _LW('Endpoint %(endpoint_id)s referenced in ' - 
'association for policy %(policy_id)s not found.') - LOG.warning(msg, {'policy_id': policy_id, - 'endpoint_id': endpoint_id}) - raise - - def _get_endpoints_for_service(service_id, endpoints): - # TODO(henry-nash): Consider optimizing this in the future by - # adding an explicit list_endpoints_for_service to the catalog API. - return [ep for ep in endpoints if ep['service_id'] == service_id] - - def _get_endpoints_for_service_and_region( - service_id, region_id, endpoints, regions): - # TODO(henry-nash): Consider optimizing this in the future. - # The lack of a two-way pointer in the region tree structure - # makes this somewhat inefficient. - - def _recursively_get_endpoints_for_region( - region_id, service_id, endpoint_list, region_list, - endpoints_found, regions_examined): - """Recursively search down a region tree for endpoints. - - :param region_id: the point in the tree to examine - :param service_id: the service we are interested in - :param endpoint_list: list of all endpoints - :param region_list: list of all regions - :param endpoints_found: list of matching endpoints found so - far - which will be updated if more are - found in this iteration - :param regions_examined: list of regions we have already looked - at - used to spot illegal circular - references in the tree to avoid never - completing search - :returns: list of endpoints that match - - """ - - if region_id in regions_examined: - msg = _LE('Circular reference or a repeated entry found ' - 'in region tree - %(region_id)s.') - LOG.error(msg, {'region_id': ref.region_id}) - return - - regions_examined.append(region_id) - endpoints_found += ( - [ep for ep in endpoint_list if - ep['service_id'] == service_id and - ep['region_id'] == region_id]) - - for region in region_list: - if region['parent_region_id'] == region_id: - _recursively_get_endpoints_for_region( - region['id'], service_id, endpoints, regions, - endpoints_found, regions_examined) - - endpoints_found = [] - regions_examined = [] - - # 
Now walk down the region tree - _recursively_get_endpoints_for_region( - region_id, service_id, endpoints, regions, - endpoints_found, regions_examined) - - return endpoints_found - - matching_endpoints = [] - endpoints = self.catalog_api.list_endpoints() - regions = self.catalog_api.list_regions() - for ref in self.driver.list_associations_for_policy(policy_id): - if ref.get('endpoint_id') is not None: - matching_endpoints.append( - _get_endpoint(ref['endpoint_id'], policy_id)) - continue - - if (ref.get('service_id') is not None and - ref.get('region_id') is None): - matching_endpoints += _get_endpoints_for_service( - ref['service_id'], endpoints) - continue - - if (ref.get('service_id') is not None and - ref.get('region_id') is not None): - matching_endpoints += ( - _get_endpoints_for_service_and_region( - ref['service_id'], ref['region_id'], - endpoints, regions)) - continue - - msg = _LW('Unsupported policy association found - ' - 'Policy %(policy_id)s, Endpoint %(endpoint_id)s, ' - 'Service %(service_id)s, Region %(region_id)s, ') - LOG.warning(msg, {'policy_id': policy_id, - 'endpoint_id': ref['endpoint_id'], - 'service_id': ref['service_id'], - 'region_id': ref['region_id']}) - - return matching_endpoints - - def get_policy_for_endpoint(self, endpoint_id): - - def _get_policy(policy_id, endpoint_id): - try: - return self.policy_api.get_policy(policy_id) - except exception.PolicyNotFound: - msg = _LW('Policy %(policy_id)s referenced in association ' - 'for endpoint %(endpoint_id)s not found.') - LOG.warning(msg, {'policy_id': policy_id, - 'endpoint_id': endpoint_id}) - raise - - def _look_for_policy_for_region_and_service(endpoint): - """Look in the region and its parents for a policy. - - Examine the region of the endpoint for a policy appropriate for - the service of the endpoint. If there isn't a match, then chase up - the region tree to find one. 
- - """ - region_id = endpoint['region_id'] - regions_examined = [] - while region_id is not None: - try: - ref = self.driver.get_policy_association( - service_id=endpoint['service_id'], - region_id=region_id) - return ref['policy_id'] - except exception.PolicyAssociationNotFound: - pass - - # There wasn't one for that region & service, let's - # chase up the region tree - regions_examined.append(region_id) - region = self.catalog_api.get_region(region_id) - region_id = None - if region.get('parent_region_id') is not None: - region_id = region['parent_region_id'] - if region_id in regions_examined: - msg = _LE('Circular reference or a repeated entry ' - 'found in region tree - %(region_id)s.') - LOG.error(msg, {'region_id': region_id}) - break - - # First let's see if there is a policy explicitly defined for - # this endpoint. - - try: - ref = self.driver.get_policy_association(endpoint_id=endpoint_id) - return _get_policy(ref['policy_id'], endpoint_id) - except exception.PolicyAssociationNotFound: - pass - - # There wasn't a policy explicitly defined for this endpoint, so - # now let's see if there is one for the Region & Service. - - endpoint = self.catalog_api.get_endpoint(endpoint_id) - policy_id = _look_for_policy_for_region_and_service(endpoint) - if policy_id is not None: - return _get_policy(policy_id, endpoint_id) - - # Finally, just check if there is one for the service. 
- try: - ref = self.driver.get_policy_association( - service_id=endpoint['service_id']) - return _get_policy(ref['policy_id'], endpoint_id) - except exception.PolicyAssociationNotFound: - pass - - msg = _('No policy is associated with endpoint ' - '%(endpoint_id)s.') % {'endpoint_id': endpoint_id} - raise exception.NotFound(msg) - - -@six.add_metaclass(abc.ABCMeta) -class Driver(object): - """Interface description for an Endpoint Policy driver.""" - - @abc.abstractmethod - def create_policy_association(self, policy_id, endpoint_id=None, - service_id=None, region_id=None): - """Creates a policy association. - - :param policy_id: identity of policy that is being associated - :type policy_id: string - :param endpoint_id: identity of endpoint to associate - :type endpoint_id: string - :param service_id: identity of the service to associate - :type service_id: string - :param region_id: identity of the region to associate - :type region_id: string - :returns: None - - There are three types of association permitted: - - - Endpoint (in which case service and region must be None) - - Service and region (in which endpoint must be None) - - Service (in which case endpoint and region must be None) - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def check_policy_association(self, policy_id, endpoint_id=None, - service_id=None, region_id=None): - """Checks existence a policy association. 
- - :param policy_id: identity of policy that is being associated - :type policy_id: string - :param endpoint_id: identity of endpoint to associate - :type endpoint_id: string - :param service_id: identity of the service to associate - :type service_id: string - :param region_id: identity of the region to associate - :type region_id: string - :raises: keystone.exception.PolicyAssociationNotFound if there is no - match for the specified association - :returns: None - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_policy_association(self, policy_id, endpoint_id=None, - service_id=None, region_id=None): - """Deletes a policy association. - - :param policy_id: identity of policy that is being associated - :type policy_id: string - :param endpoint_id: identity of endpoint to associate - :type endpoint_id: string - :param service_id: identity of the service to associate - :type service_id: string - :param region_id: identity of the region to associate - :type region_id: string - :returns: None - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_policy_association(self, endpoint_id=None, - service_id=None, region_id=None): - """Gets the policy for an explicit association. - - This method is not exposed as a public API, but is used by - get_policy_for_endpoint(). - - :param endpoint_id: identity of endpoint - :type endpoint_id: string - :param service_id: identity of the service - :type service_id: string - :param region_id: identity of the region - :type region_id: string - :raises: keystone.exception.PolicyAssociationNotFound if there is no - match for the specified association - :returns: dict containing policy_id - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_associations_for_policy(self, policy_id): - """List the associations for a policy. 
- - This method is not exposed as a public API, but is used by - list_endpoints_for_policy(). - - :param policy_id: identity of policy - :type policy_id: string - :returns: List of association dicts - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_endpoints_for_policy(self, policy_id): - """List all the endpoints using a given policy. - - :param policy_id: identity of policy that is being associated - :type policy_id: string - :returns: list of endpoints that have an effective association with - that policy - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_policy_for_endpoint(self, endpoint_id): - """Get the appropriate policy for a given endpoint. - - :param endpoint_id: identity of endpoint - :type endpoint_id: string - :returns: Policy entity for the endpoint - - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_association_by_endpoint(self, endpoint_id): - """Removes all the policy associations with the specific endpoint. - - :param endpoint_id: identity of endpoint to check - :type endpoint_id: string - :returns: None - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_association_by_service(self, service_id): - """Removes all the policy associations with the specific service. - - :param service_id: identity of endpoint to check - :type service_id: string - :returns: None - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_association_by_region(self, region_id): - """Removes all the policy associations with the specific region. - - :param region_id: identity of endpoint to check - :type region_id: string - :returns: None - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_association_by_policy(self, policy_id): - """Removes all the policy associations with the specific policy. 
- - :param policy_id: identity of endpoint to check - :type policy_id: string - :returns: None - - """ - raise exception.NotImplemented() # pragma: no cover diff --git a/keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/__init__.py b/keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/migrate.cfg b/keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/migrate.cfg deleted file mode 100644 index 62895d6f..00000000 --- a/keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/migrate.cfg +++ /dev/null @@ -1,25 +0,0 @@ -[db_settings] -# Used to identify which repository this database is versioned under. -# You can use the name of your project. -repository_id=endpoint_policy - -# The name of the database table used to track the schema version. -# This name shouldn't already be used by your project. -# If this is changed once a database is under version control, you'll need to -# change the table name in each database too. -version_table=migrate_version - -# When committing a change script, Migrate will attempt to generate the -# sql for all supported databases; normally, if one of them fails - probably -# because you don't have that database installed - it is ignored and the -# commit continues, perhaps ending successfully. -# Databases in this list MUST compile successfully during a commit, or the -# entire commit will fail. List the databases your application will actually -# be using to ensure your updates to that database work properly. -# This must be a list; example: ['postgres','sqlite'] -required_dbs=[] - -# When creating new change scripts, Migrate will stamp the new script with -# a version number. By default this is latest_version + 1. You can set this -# to 'true' to tell Migrate to use the UTC timestamp instead. 
-use_timestamp_numbering=False diff --git a/keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/versions/001_add_endpoint_policy_table.py b/keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/versions/001_add_endpoint_policy_table.py deleted file mode 100644 index 32bdabdd..00000000 --- a/keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/versions/001_add_endpoint_policy_table.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright 2014 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone import exception - - -def upgrade(migrate_engine): - raise exception.MigrationMovedFailure(extension='endpoint_policy') diff --git a/keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/versions/__init__.py b/keystone-moon/keystone/contrib/endpoint_policy/migrate_repo/versions/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/contrib/endpoint_policy/routers.py b/keystone-moon/keystone/contrib/endpoint_policy/routers.py deleted file mode 100644 index c8f7f154..00000000 --- a/keystone-moon/keystone/contrib/endpoint_policy/routers.py +++ /dev/null @@ -1,28 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_log import versionutils - -from keystone.common import wsgi - -_OLD = 'keystone.contrib.endpoint_policy.routers.EndpointPolicyExtension' -_NEW = 'keystone.endpoint_policy.routers.Routers' - - -class EndpointPolicyExtension(wsgi.Middleware): - - @versionutils.deprecated(versionutils.deprecated.LIBERTY, - in_favor_of=_NEW, - remove_in=1, - what=_OLD) - def __init__(self, *args, **kwargs): - super(EndpointPolicyExtension, self).__init__(*args, **kwargs) diff --git a/keystone-moon/keystone/contrib/example/__init__.py b/keystone-moon/keystone/contrib/example/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/contrib/example/configuration.rst b/keystone-moon/keystone/contrib/example/configuration.rst deleted file mode 100644 index 979d3457..00000000 --- a/keystone-moon/keystone/contrib/example/configuration.rst +++ /dev/null @@ -1,31 +0,0 @@ -.. - Copyright 2013 OpenStack, Foundation - All Rights Reserved. - - Licensed under the Apache License, Version 2.0 (the "License"); you may - not use this file except in compliance with the License. You may obtain - a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - License for the specific language governing permissions and limitations - under the License. 
- -================= -Extension Example -================= - -Please describe here in details how to enable your extension: - -1. Add the required fields and values in the ``[example]`` section - in ``keystone.conf``. - -2. Optional: add the required ``filter`` to the ``pipeline`` in ``keystone-paste.ini`` - -3. Optional: create the extension tables if using the provided sql backend. Example:: - - - ./bin/keystone-manage db_sync --extension example \ No newline at end of file diff --git a/keystone-moon/keystone/contrib/example/controllers.py b/keystone-moon/keystone/contrib/example/controllers.py deleted file mode 100644 index 95b3e82f..00000000 --- a/keystone-moon/keystone/contrib/example/controllers.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- - -from keystone.common import controller -from keystone.common import dependency - - -@dependency.requires('example_api') -class ExampleV3Controller(controller.V3Controller): - - @controller.protected() - def example_get(self, context): - """Description of the controller logic.""" - self.example_api.do_something(context) diff --git a/keystone-moon/keystone/contrib/example/core.py b/keystone-moon/keystone/contrib/example/core.py deleted file mode 100644 index e369dc4d..00000000 --- a/keystone-moon/keystone/contrib/example/core.py +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Main entry point into this Example service.""" - -from oslo_log import log - -from keystone.common import dependency -from keystone.common import manager -from keystone import exception -from keystone.i18n import _LI -from keystone import notifications - - -LOG = log.getLogger(__name__) - - -@notifications.listener # NOTE(dstanek): only needed if using event_callbacks -@dependency.provider('example_api') -class ExampleManager(manager.Manager): - """Default pivot point for this Example backend. - - See :mod:`keystone.common.manager.Manager` for more details on - how this dynamically calls the backend. - - """ - - driver_namespace = 'keystone.example' - - def __init__(self): - # The following is an example of event callbacks. In this setup, - # ExampleManager's data model is depended on project's data model. 
- # It must create additional aggregates when a new project is created, - # and it must cleanup data related to the project whenever a project - # has been deleted. - # - # In this example, the project_deleted_callback will be invoked - # whenever a project has been deleted. Similarly, the - # project_created_callback will be invoked whenever a new project is - # created. - - # This information is used when the @notifications.listener decorator - # acts on the class. - self.event_callbacks = { - notifications.ACTIONS.deleted: { - 'project': [self.project_deleted_callback], - }, - notifications.ACTIONS.created: { - 'project': [self.project_created_callback], - }, - } - super(ExampleManager, self).__init__( - 'keystone.contrib.example.core.ExampleDriver') - - def project_deleted_callback(self, service, resource_type, operation, - payload): - # The code below is merely an example. - msg = _LI('Received the following notification: service %(service)s, ' - 'resource_type: %(resource_type)s, operation %(operation)s ' - 'payload %(payload)s') - LOG.info(msg, {'service': service, 'resource_type': resource_type, - 'operation': operation, 'payload': payload}) - - def project_created_callback(self, service, resource_type, operation, - payload): - # The code below is merely an example. - msg = _LI('Received the following notification: service %(service)s, ' - 'resource_type: %(resource_type)s, operation %(operation)s ' - 'payload %(payload)s') - LOG.info(msg, {'service': service, 'resource_type': resource_type, - 'operation': operation, 'payload': payload}) - - -class ExampleDriver(object): - """Interface description for Example driver.""" - - def do_something(self, data): - """Do something - - :param data: example data - :type data: string - :raises: keystone.exception, - :returns: None. 
- - """ - raise exception.NotImplemented() diff --git a/keystone-moon/keystone/contrib/example/migrate_repo/__init__.py b/keystone-moon/keystone/contrib/example/migrate_repo/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/contrib/example/migrate_repo/migrate.cfg b/keystone-moon/keystone/contrib/example/migrate_repo/migrate.cfg deleted file mode 100644 index 5b1b1c0a..00000000 --- a/keystone-moon/keystone/contrib/example/migrate_repo/migrate.cfg +++ /dev/null @@ -1,25 +0,0 @@ -[db_settings] -# Used to identify which repository this database is versioned under. -# You can use the name of your project. -repository_id=example - -# The name of the database table used to track the schema version. -# This name shouldn't already be used by your project. -# If this is changed once a database is under version control, you'll need to -# change the table name in each database too. -version_table=migrate_version - -# When committing a change script, Migrate will attempt to generate the -# sql for all supported databases; normally, if one of them fails - probably -# because you don't have that database installed - it is ignored and the -# commit continues, perhaps ending successfully. -# Databases in this list MUST compile successfully during a commit, or the -# entire commit will fail. List the databases your application will actually -# be using to ensure your updates to that database work properly. -# This must be a list; example: ['postgres','sqlite'] -required_dbs=[] - -# When creating new change scripts, Migrate will stamp the new script with -# a version number. By default this is latest_version + 1. You can set this -# to 'true' to tell Migrate to use the UTC timestamp instead. 
-use_timestamp_numbering=False diff --git a/keystone-moon/keystone/contrib/example/migrate_repo/versions/001_example_table.py b/keystone-moon/keystone/contrib/example/migrate_repo/versions/001_example_table.py deleted file mode 100644 index 35061780..00000000 --- a/keystone-moon/keystone/contrib/example/migrate_repo/versions/001_example_table.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import sqlalchemy as sql - - -def upgrade(migrate_engine): - # Upgrade operations go here. 
Don't create your own engine; bind - # migrate_engine to your metadata - meta = sql.MetaData() - meta.bind = migrate_engine - - # catalog - - service_table = sql.Table( - 'example', - meta, - sql.Column('id', sql.String(64), primary_key=True), - sql.Column('type', sql.String(255)), - sql.Column('extra', sql.Text())) - service_table.create(migrate_engine, checkfirst=True) diff --git a/keystone-moon/keystone/contrib/example/migrate_repo/versions/__init__.py b/keystone-moon/keystone/contrib/example/migrate_repo/versions/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/contrib/example/routers.py b/keystone-moon/keystone/contrib/example/routers.py deleted file mode 100644 index 30cffe1b..00000000 --- a/keystone-moon/keystone/contrib/example/routers.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import functools - -from keystone.common import json_home -from keystone.common import wsgi -from keystone.contrib.example import controllers - - -build_resource_relation = functools.partial( - json_home.build_v3_extension_resource_relation, - extension_name='OS-EXAMPLE', extension_version='1.0') - - -class ExampleRouter(wsgi.V3ExtensionRouter): - - PATH_PREFIX = '/OS-EXAMPLE' - - def add_routes(self, mapper): - example_controller = controllers.ExampleV3Controller() - - self._add_resource( - mapper, example_controller, - path=self.PATH_PREFIX + '/example', - get_action='do_something', - rel=build_resource_relation(resource_name='example')) diff --git a/keystone-moon/keystone/contrib/federation/__init__.py b/keystone-moon/keystone/contrib/federation/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/contrib/federation/backends/__init__.py b/keystone-moon/keystone/contrib/federation/backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/contrib/federation/backends/sql.py b/keystone-moon/keystone/contrib/federation/backends/sql.py deleted file mode 100644 index 3c24d9c0..00000000 --- a/keystone-moon/keystone/contrib/federation/backends/sql.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright 2014 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_log import versionutils - -from keystone.federation.backends import sql - -_OLD = "keystone.contrib.federation.backends.sql.Federation" -_NEW = "sql" - - -class Federation(sql.Federation): - - @versionutils.deprecated(versionutils.deprecated.MITAKA, - in_favor_of=_NEW, - what=_OLD) - def __init__(self, *args, **kwargs): - super(Federation, self).__init__(*args, **kwargs) diff --git a/keystone-moon/keystone/contrib/federation/constants.py b/keystone-moon/keystone/contrib/federation/constants.py deleted file mode 100644 index afb38494..00000000 --- a/keystone-moon/keystone/contrib/federation/constants.py +++ /dev/null @@ -1,15 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -FEDERATION = 'OS-FEDERATION' -IDENTITY_PROVIDER = 'OS-FEDERATION:identity_provider' -PROTOCOL = 'OS-FEDERATION:protocol' diff --git a/keystone-moon/keystone/contrib/federation/controllers.py b/keystone-moon/keystone/contrib/federation/controllers.py deleted file mode 100644 index d0bd2bce..00000000 --- a/keystone-moon/keystone/contrib/federation/controllers.py +++ /dev/null @@ -1,520 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Workflow logic for the Federation service.""" - -import string - -from oslo_config import cfg -from oslo_log import log -import six -from six.moves import urllib -import webob - -from keystone.auth import controllers as auth_controllers -from keystone.common import authorization -from keystone.common import controller -from keystone.common import dependency -from keystone.common import validation -from keystone.common import wsgi -from keystone.contrib.federation import idp as keystone_idp -from keystone.contrib.federation import schema -from keystone.contrib.federation import utils -from keystone import exception -from keystone.i18n import _ -from keystone.models import token_model - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -class _ControllerBase(controller.V3Controller): - """Base behaviors for federation controllers.""" - - @classmethod - def base_url(cls, context, path=None): - """Construct a path and pass it to V3Controller.base_url method.""" - - path = '/OS-FEDERATION/' + cls.collection_name - return super(_ControllerBase, cls).base_url(context, path=path) - - -@dependency.requires('federation_api') -class IdentityProvider(_ControllerBase): - """Identity Provider representation.""" - collection_name = 'identity_providers' - member_name = 'identity_provider' - - _mutable_parameters = frozenset(['description', 'enabled', 'remote_ids']) - _public_parameters = frozenset(['id', 'enabled', 'description', - 'remote_ids', 'links' - ]) - - @classmethod - def _add_related_links(cls, context, ref): - """Add URLs for entities related with 
Identity Provider. - - Add URLs pointing to: - - protocols tied to the Identity Provider - - """ - ref.setdefault('links', {}) - base_path = ref['links'].get('self') - if base_path is None: - base_path = '/'.join([IdentityProvider.base_url(context), - ref['id']]) - for name in ['protocols']: - ref['links'][name] = '/'.join([base_path, name]) - - @classmethod - def _add_self_referential_link(cls, context, ref): - id = ref.get('id') - self_path = '/'.join([cls.base_url(context), id]) - ref.setdefault('links', {}) - ref['links']['self'] = self_path - - @classmethod - def wrap_member(cls, context, ref): - cls._add_self_referential_link(context, ref) - cls._add_related_links(context, ref) - ref = cls.filter_params(ref) - return {cls.member_name: ref} - - @controller.protected() - def create_identity_provider(self, context, idp_id, identity_provider): - identity_provider = self._normalize_dict(identity_provider) - identity_provider.setdefault('enabled', False) - IdentityProvider.check_immutable_params(identity_provider) - idp_ref = self.federation_api.create_idp(idp_id, identity_provider) - response = IdentityProvider.wrap_member(context, idp_ref) - return wsgi.render_response(body=response, status=('201', 'Created')) - - @controller.protected() - def list_identity_providers(self, context): - ref = self.federation_api.list_idps() - ref = [self.filter_params(x) for x in ref] - return IdentityProvider.wrap_collection(context, ref) - - @controller.protected() - def get_identity_provider(self, context, idp_id): - ref = self.federation_api.get_idp(idp_id) - return IdentityProvider.wrap_member(context, ref) - - @controller.protected() - def delete_identity_provider(self, context, idp_id): - self.federation_api.delete_idp(idp_id) - - @controller.protected() - def update_identity_provider(self, context, idp_id, identity_provider): - identity_provider = self._normalize_dict(identity_provider) - IdentityProvider.check_immutable_params(identity_provider) - idp_ref = 
self.federation_api.update_idp(idp_id, identity_provider) - return IdentityProvider.wrap_member(context, idp_ref) - - -@dependency.requires('federation_api') -class FederationProtocol(_ControllerBase): - """A federation protocol representation. - - See IdentityProvider docstring for explanation on _mutable_parameters - and _public_parameters class attributes. - - """ - collection_name = 'protocols' - member_name = 'protocol' - - _public_parameters = frozenset(['id', 'mapping_id', 'links']) - _mutable_parameters = frozenset(['mapping_id']) - - @classmethod - def _add_self_referential_link(cls, context, ref): - """Add 'links' entry to the response dictionary. - - Calls IdentityProvider.base_url() class method, as it constructs - proper URL along with the 'identity providers' part included. - - :param ref: response dictionary - - """ - ref.setdefault('links', {}) - base_path = ref['links'].get('identity_provider') - if base_path is None: - base_path = [IdentityProvider.base_url(context), ref['idp_id']] - base_path = '/'.join(base_path) - self_path = [base_path, 'protocols', ref['id']] - self_path = '/'.join(self_path) - ref['links']['self'] = self_path - - @classmethod - def _add_related_links(cls, context, ref): - """Add new entries to the 'links' subdictionary in the response. - - Adds 'identity_provider' key with URL pointing to related identity - provider as a value. 
- - :param ref: response dictionary - - """ - ref.setdefault('links', {}) - base_path = '/'.join([IdentityProvider.base_url(context), - ref['idp_id']]) - ref['links']['identity_provider'] = base_path - - @classmethod - def wrap_member(cls, context, ref): - cls._add_related_links(context, ref) - cls._add_self_referential_link(context, ref) - ref = cls.filter_params(ref) - return {cls.member_name: ref} - - @controller.protected() - def create_protocol(self, context, idp_id, protocol_id, protocol): - ref = self._normalize_dict(protocol) - FederationProtocol.check_immutable_params(ref) - ref = self.federation_api.create_protocol(idp_id, protocol_id, ref) - response = FederationProtocol.wrap_member(context, ref) - return wsgi.render_response(body=response, status=('201', 'Created')) - - @controller.protected() - def update_protocol(self, context, idp_id, protocol_id, protocol): - ref = self._normalize_dict(protocol) - FederationProtocol.check_immutable_params(ref) - ref = self.federation_api.update_protocol(idp_id, protocol_id, - protocol) - return FederationProtocol.wrap_member(context, ref) - - @controller.protected() - def get_protocol(self, context, idp_id, protocol_id): - ref = self.federation_api.get_protocol(idp_id, protocol_id) - return FederationProtocol.wrap_member(context, ref) - - @controller.protected() - def list_protocols(self, context, idp_id): - protocols_ref = self.federation_api.list_protocols(idp_id) - protocols = list(protocols_ref) - return FederationProtocol.wrap_collection(context, protocols) - - @controller.protected() - def delete_protocol(self, context, idp_id, protocol_id): - self.federation_api.delete_protocol(idp_id, protocol_id) - - -@dependency.requires('federation_api') -class MappingController(_ControllerBase): - collection_name = 'mappings' - member_name = 'mapping' - - @controller.protected() - def create_mapping(self, context, mapping_id, mapping): - ref = self._normalize_dict(mapping) - utils.validate_mapping_structure(ref) - 
mapping_ref = self.federation_api.create_mapping(mapping_id, ref) - response = MappingController.wrap_member(context, mapping_ref) - return wsgi.render_response(body=response, status=('201', 'Created')) - - @controller.protected() - def list_mappings(self, context): - ref = self.federation_api.list_mappings() - return MappingController.wrap_collection(context, ref) - - @controller.protected() - def get_mapping(self, context, mapping_id): - ref = self.federation_api.get_mapping(mapping_id) - return MappingController.wrap_member(context, ref) - - @controller.protected() - def delete_mapping(self, context, mapping_id): - self.federation_api.delete_mapping(mapping_id) - - @controller.protected() - def update_mapping(self, context, mapping_id, mapping): - mapping = self._normalize_dict(mapping) - utils.validate_mapping_structure(mapping) - mapping_ref = self.federation_api.update_mapping(mapping_id, mapping) - return MappingController.wrap_member(context, mapping_ref) - - -@dependency.requires('federation_api') -class Auth(auth_controllers.Auth): - - def _get_sso_origin_host(self, context): - """Validate and return originating dashboard URL. - - Make sure the parameter is specified in the request's URL as well its - value belongs to a list of trusted dashboards. - - :param context: request's context - :raises: exception.ValidationError: ``origin`` query parameter was not - specified. The URL is deemed invalid. - :raises: exception.Unauthorized: URL specified in origin query - parameter does not exist in list of websso trusted dashboards. 
- :returns: URL with the originating dashboard - - """ - if 'origin' in context['query_string']: - origin = context['query_string'].get('origin') - host = urllib.parse.unquote_plus(origin) - else: - msg = _('Request must have an origin query parameter') - LOG.error(msg) - raise exception.ValidationError(msg) - - if host not in CONF.federation.trusted_dashboard: - msg = _('%(host)s is not a trusted dashboard host') - msg = msg % {'host': host} - LOG.error(msg) - raise exception.Unauthorized(msg) - - return host - - def federated_authentication(self, context, identity_provider, protocol): - """Authenticate from dedicated url endpoint. - - Build HTTP request body for federated authentication and inject - it into the ``authenticate_for_token`` function. - - """ - auth = { - 'identity': { - 'methods': [protocol], - protocol: { - 'identity_provider': identity_provider, - 'protocol': protocol - } - } - } - - return self.authenticate_for_token(context, auth=auth) - - def federated_sso_auth(self, context, protocol_id): - try: - remote_id_name = utils.get_remote_id_parameter(protocol_id) - remote_id = context['environment'][remote_id_name] - except KeyError: - msg = _('Missing entity ID from environment') - LOG.error(msg) - raise exception.Unauthorized(msg) - - host = self._get_sso_origin_host(context) - - ref = self.federation_api.get_idp_from_remote_id(remote_id) - # NOTE(stevemar): the returned object is a simple dict that - # contains the idp_id and remote_id. - identity_provider = ref['idp_id'] - res = self.federated_authentication(context, identity_provider, - protocol_id) - token_id = res.headers['X-Subject-Token'] - return self.render_html_response(host, token_id) - - def federated_idp_specific_sso_auth(self, context, idp_id, protocol_id): - host = self._get_sso_origin_host(context) - - # NOTE(lbragstad): We validate that the Identity Provider actually - # exists in the Mapped authentication plugin. 
- res = self.federated_authentication(context, idp_id, protocol_id) - token_id = res.headers['X-Subject-Token'] - return self.render_html_response(host, token_id) - - def render_html_response(self, host, token_id): - """Forms an HTML Form from a template with autosubmit.""" - - headers = [('Content-Type', 'text/html')] - - with open(CONF.federation.sso_callback_template) as template: - src = string.Template(template.read()) - - subs = {'host': host, 'token': token_id} - body = src.substitute(subs) - return webob.Response(body=body, status='200', - headerlist=headers) - - def _create_base_saml_assertion(self, context, auth): - issuer = CONF.saml.idp_entity_id - sp_id = auth['scope']['service_provider']['id'] - service_provider = self.federation_api.get_sp(sp_id) - utils.assert_enabled_service_provider_object(service_provider) - sp_url = service_provider.get('sp_url') - - token_id = auth['identity']['token']['id'] - token_data = self.token_provider_api.validate_token(token_id) - token_ref = token_model.KeystoneToken(token_id, token_data) - - if not token_ref.project_scoped: - action = _('Use a project scoped token when attempting to create ' - 'a SAML assertion') - raise exception.ForbiddenAction(action=action) - - subject = token_ref.user_name - roles = token_ref.role_names - project = token_ref.project_name - # NOTE(rodrigods): the domain name is necessary in order to distinguish - # between projects and users with the same name in different domains. 
- project_domain_name = token_ref.project_domain_name - subject_domain_name = token_ref.user_domain_name - - generator = keystone_idp.SAMLGenerator() - response = generator.samlize_token( - issuer, sp_url, subject, subject_domain_name, - roles, project, project_domain_name) - return (response, service_provider) - - def _build_response_headers(self, service_provider): - return [('Content-Type', 'text/xml'), - ('X-sp-url', six.binary_type(service_provider['sp_url'])), - ('X-auth-url', six.binary_type(service_provider['auth_url']))] - - @validation.validated(schema.saml_create, 'auth') - def create_saml_assertion(self, context, auth): - """Exchange a scoped token for a SAML assertion. - - :param auth: Dictionary that contains a token and service provider ID - :returns: SAML Assertion based on properties from the token - """ - - t = self._create_base_saml_assertion(context, auth) - (response, service_provider) = t - - headers = self._build_response_headers(service_provider) - return wsgi.render_response(body=response.to_string(), - status=('200', 'OK'), - headers=headers) - - @validation.validated(schema.saml_create, 'auth') - def create_ecp_assertion(self, context, auth): - """Exchange a scoped token for an ECP assertion. 
- - :param auth: Dictionary that contains a token and service provider ID - :returns: ECP Assertion based on properties from the token - """ - - t = self._create_base_saml_assertion(context, auth) - (saml_assertion, service_provider) = t - relay_state_prefix = service_provider.get('relay_state_prefix') - - generator = keystone_idp.ECPGenerator() - ecp_assertion = generator.generate_ecp(saml_assertion, - relay_state_prefix) - - headers = self._build_response_headers(service_provider) - return wsgi.render_response(body=ecp_assertion.to_string(), - status=('200', 'OK'), - headers=headers) - - -@dependency.requires('assignment_api', 'resource_api') -class DomainV3(controller.V3Controller): - collection_name = 'domains' - member_name = 'domain' - - def __init__(self): - super(DomainV3, self).__init__() - self.get_member_from_driver = self.resource_api.get_domain - - @controller.protected() - def list_domains_for_groups(self, context): - """List all domains available to an authenticated user's groups. - - :param context: request context - :returns: list of accessible domains - - """ - auth_context = context['environment'][authorization.AUTH_CONTEXT_ENV] - domains = self.assignment_api.list_domains_for_groups( - auth_context['group_ids']) - return DomainV3.wrap_collection(context, domains) - - -@dependency.requires('assignment_api', 'resource_api') -class ProjectAssignmentV3(controller.V3Controller): - collection_name = 'projects' - member_name = 'project' - - def __init__(self): - super(ProjectAssignmentV3, self).__init__() - self.get_member_from_driver = self.resource_api.get_project - - @controller.protected() - def list_projects_for_groups(self, context): - """List all projects available to an authenticated user's groups. 
- - :param context: request context - :returns: list of accessible projects - - """ - auth_context = context['environment'][authorization.AUTH_CONTEXT_ENV] - projects = self.assignment_api.list_projects_for_groups( - auth_context['group_ids']) - return ProjectAssignmentV3.wrap_collection(context, projects) - - -@dependency.requires('federation_api') -class ServiceProvider(_ControllerBase): - """Service Provider representation.""" - - collection_name = 'service_providers' - member_name = 'service_provider' - - _mutable_parameters = frozenset(['auth_url', 'description', 'enabled', - 'relay_state_prefix', 'sp_url']) - _public_parameters = frozenset(['auth_url', 'id', 'enabled', 'description', - 'links', 'relay_state_prefix', 'sp_url']) - - @controller.protected() - @validation.validated(schema.service_provider_create, 'service_provider') - def create_service_provider(self, context, sp_id, service_provider): - service_provider = self._normalize_dict(service_provider) - service_provider.setdefault('enabled', False) - service_provider.setdefault('relay_state_prefix', - CONF.saml.relay_state_prefix) - ServiceProvider.check_immutable_params(service_provider) - sp_ref = self.federation_api.create_sp(sp_id, service_provider) - response = ServiceProvider.wrap_member(context, sp_ref) - return wsgi.render_response(body=response, status=('201', 'Created')) - - @controller.protected() - def list_service_providers(self, context): - ref = self.federation_api.list_sps() - ref = [self.filter_params(x) for x in ref] - return ServiceProvider.wrap_collection(context, ref) - - @controller.protected() - def get_service_provider(self, context, sp_id): - ref = self.federation_api.get_sp(sp_id) - return ServiceProvider.wrap_member(context, ref) - - @controller.protected() - def delete_service_provider(self, context, sp_id): - self.federation_api.delete_sp(sp_id) - - @controller.protected() - @validation.validated(schema.service_provider_update, 'service_provider') - def 
update_service_provider(self, context, sp_id, service_provider): - service_provider = self._normalize_dict(service_provider) - ServiceProvider.check_immutable_params(service_provider) - sp_ref = self.federation_api.update_sp(sp_id, service_provider) - return ServiceProvider.wrap_member(context, sp_ref) - - -class SAMLMetadataV3(_ControllerBase): - member_name = 'metadata' - - def get_metadata(self, context): - metadata_path = CONF.saml.idp_metadata_path - try: - with open(metadata_path, 'r') as metadata_handler: - metadata = metadata_handler.read() - except IOError as e: - # Raise HTTP 500 in case Metadata file cannot be read. - raise exception.MetadataFileError(reason=e) - return wsgi.render_response(body=metadata, status=('200', 'OK'), - headers=[('Content-Type', 'text/xml')]) diff --git a/keystone-moon/keystone/contrib/federation/core.py b/keystone-moon/keystone/contrib/federation/core.py deleted file mode 100644 index 1595be1d..00000000 --- a/keystone-moon/keystone/contrib/federation/core.py +++ /dev/null @@ -1,355 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Main entry point into the Federation service.""" - -import abc - -from oslo_config import cfg -from oslo_log import log as logging -import six - -from keystone.common import dependency -from keystone.common import extension -from keystone.common import manager -from keystone.contrib.federation import utils -from keystone import exception - - -CONF = cfg.CONF -LOG = logging.getLogger(__name__) -EXTENSION_DATA = { - 'name': 'OpenStack Federation APIs', - 'namespace': 'http://docs.openstack.org/identity/api/ext/' - 'OS-FEDERATION/v1.0', - 'alias': 'OS-FEDERATION', - 'updated': '2013-12-17T12:00:0-00:00', - 'description': 'OpenStack Identity Providers Mechanism.', - 'links': [{ - 'rel': 'describedby', - 'type': 'text/html', - 'href': 'https://github.com/openstack/identity-api' - }]} -extension.register_admin_extension(EXTENSION_DATA['alias'], EXTENSION_DATA) -extension.register_public_extension(EXTENSION_DATA['alias'], EXTENSION_DATA) - - -@dependency.provider('federation_api') -class Manager(manager.Manager): - """Default pivot point for the Federation backend. - - See :mod:`keystone.common.manager.Manager` for more details on how this - dynamically calls the backend. - - """ - - driver_namespace = 'keystone.federation' - - def __init__(self): - super(Manager, self).__init__(CONF.federation.driver) - - def get_enabled_service_providers(self): - """List enabled service providers for Service Catalog - - Service Provider in a catalog contains three attributes: ``id``, - ``auth_url``, ``sp_url``, where: - - - id is an unique, user defined identifier for service provider object - - auth_url is a authentication URL of remote Keystone - - sp_url a URL accessible at the remote service provider where SAML - assertion is transmitted. 
- - :returns: list of dictionaries with enabled service providers - :rtype: list of dicts - - """ - def normalize(sp): - ref = { - 'auth_url': sp.auth_url, - 'id': sp.id, - 'sp_url': sp.sp_url - } - return ref - - service_providers = self.driver.get_enabled_service_providers() - return [normalize(sp) for sp in service_providers] - - def evaluate(self, idp_id, protocol_id, assertion_data): - mapping = self.get_mapping_from_idp_and_protocol(idp_id, protocol_id) - rules = mapping['rules'] - rule_processor = utils.RuleProcessor(rules) - mapped_properties = rule_processor.process(assertion_data) - return mapped_properties, mapping['id'] - - -@six.add_metaclass(abc.ABCMeta) -class FederationDriverV8(object): - - @abc.abstractmethod - def create_idp(self, idp_id, idp): - """Create an identity provider. - - :returns: idp_ref - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_idp(self, idp_id): - """Delete an identity provider. - - :raises: keystone.exception.IdentityProviderNotFound - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_idps(self): - """List all identity providers. - - :raises: keystone.exception.IdentityProviderNotFound - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_idp(self, idp_id): - """Get an identity provider by ID. - - :raises: keystone.exception.IdentityProviderNotFound - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_idp_from_remote_id(self, remote_id): - """Get an identity provider by remote ID. - - :raises: keystone.exception.IdentityProviderNotFound - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def update_idp(self, idp_id, idp): - """Update an identity provider by ID. 
- - :raises: keystone.exception.IdentityProviderNotFound - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def create_protocol(self, idp_id, protocol_id, protocol): - """Add an IdP-Protocol configuration. - - :raises: keystone.exception.IdentityProviderNotFound - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def update_protocol(self, idp_id, protocol_id, protocol): - """Change an IdP-Protocol configuration. - - :raises: keystone.exception.IdentityProviderNotFound, - keystone.exception.FederatedProtocolNotFound - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_protocol(self, idp_id, protocol_id): - """Get an IdP-Protocol configuration. - - :raises: keystone.exception.IdentityProviderNotFound, - keystone.exception.FederatedProtocolNotFound - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_protocols(self, idp_id): - """List an IdP's supported protocols. - - :raises: keystone.exception.IdentityProviderNotFound, - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_protocol(self, idp_id, protocol_id): - """Delete an IdP-Protocol configuration. - - :raises: keystone.exception.IdentityProviderNotFound, - keystone.exception.FederatedProtocolNotFound, - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def create_mapping(self, mapping_ref): - """Create a mapping. - - :param mapping_ref: mapping ref with mapping name - :type mapping_ref: dict - :returns: mapping_ref - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_mapping(self, mapping_id): - """Delete a mapping. 
- - :param mapping_id: id of mapping to delete - :type mapping_ref: string - :returns: None - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def update_mapping(self, mapping_id, mapping_ref): - """Update a mapping. - - :param mapping_id: id of mapping to update - :type mapping_id: string - :param mapping_ref: new mapping ref - :type mapping_ref: dict - :returns: mapping_ref - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_mappings(self): - """List all mappings. - - returns: list of mappings - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_mapping(self, mapping_id): - """Get a mapping, returns the mapping based - on mapping_id. - - :param mapping_id: id of mapping to get - :type mapping_ref: string - :returns: mapping_ref - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_mapping_from_idp_and_protocol(self, idp_id, protocol_id): - """Get mapping based on idp_id and protocol_id. - - :param idp_id: id of the identity provider - :type idp_id: string - :param protocol_id: id of the protocol - :type protocol_id: string - :raises: keystone.exception.IdentityProviderNotFound, - keystone.exception.FederatedProtocolNotFound, - :returns: mapping_ref - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def create_sp(self, sp_id, sp): - """Create a service provider. - - :param sp_id: id of the service provider - :type sp_id: string - :param sp: service prvider object - :type sp: dict - - :returns: sp_ref - :rtype: dict - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_sp(self, sp_id): - """Delete a service provider. 
- - :param sp_id: id of the service provider - :type sp_id: string - - :raises: keystone.exception.ServiceProviderNotFound - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_sps(self): - """List all service providers. - - :returns List of sp_ref objects - :rtype: list of dicts - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_sp(self, sp_id): - """Get a service provider. - - :param sp_id: id of the service provider - :type sp_id: string - - :returns: sp_ref - :raises: keystone.exception.ServiceProviderNotFound - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def update_sp(self, sp_id, sp): - """Update a service provider. - - :param sp_id: id of the service provider - :type sp_id: string - :param sp: service prvider object - :type sp: dict - - :returns: sp_ref - :rtype: dict - - :raises: keystone.exception.ServiceProviderNotFound - - """ - raise exception.NotImplemented() # pragma: no cover - - def get_enabled_service_providers(self): - """List enabled service providers for Service Catalog - - Service Provider in a catalog contains three attributes: ``id``, - ``auth_url``, ``sp_url``, where: - - - id is an unique, user defined identifier for service provider object - - auth_url is a authentication URL of remote Keystone - - sp_url a URL accessible at the remote service provider where SAML - assertion is transmitted. 
- - :returns: list of dictionaries with enabled service providers - :rtype: list of dicts - - """ - raise exception.NotImplemented() # pragma: no cover - - -Driver = manager.create_legacy_driver(FederationDriverV8) diff --git a/keystone-moon/keystone/contrib/federation/idp.py b/keystone-moon/keystone/contrib/federation/idp.py deleted file mode 100644 index 51689989..00000000 --- a/keystone-moon/keystone/contrib/federation/idp.py +++ /dev/null @@ -1,609 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import datetime -import os -import uuid - -from oslo_config import cfg -from oslo_log import log -from oslo_utils import fileutils -from oslo_utils import importutils -from oslo_utils import timeutils -import saml2 -from saml2 import client_base -from saml2 import md -from saml2.profile import ecp -from saml2 import saml -from saml2 import samlp -from saml2.schema import soapenv -from saml2 import sigver -xmldsig = importutils.try_import("saml2.xmldsig") -if not xmldsig: - xmldsig = importutils.try_import("xmldsig") - -from keystone.common import environment -from keystone.common import utils -from keystone import exception -from keystone.i18n import _, _LE - - -subprocess = environment.subprocess - -LOG = log.getLogger(__name__) -CONF = cfg.CONF - - -class SAMLGenerator(object): - """A class to generate SAML assertions.""" - - def __init__(self): - self.assertion_id = uuid.uuid4().hex - - def samlize_token(self, issuer, recipient, user, user_domain_name, roles, - project, project_domain_name, expires_in=None): - """Convert Keystone attributes to a SAML assertion. 
- - :param issuer: URL of the issuing party - :type issuer: string - :param recipient: URL of the recipient - :type recipient: string - :param user: User name - :type user: string - :param user_domain_name: User Domain name - :type user_domain_name: string - :param roles: List of role names - :type roles: list - :param project: Project name - :type project: string - :param project_domain_name: Project Domain name - :type project_domain_name: string - :param expires_in: Sets how long the assertion is valid for, in seconds - :type expires_in: int - - :return: XML object - - """ - expiration_time = self._determine_expiration_time(expires_in) - status = self._create_status() - saml_issuer = self._create_issuer(issuer) - subject = self._create_subject(user, expiration_time, recipient) - attribute_statement = self._create_attribute_statement( - user, user_domain_name, roles, project, project_domain_name) - authn_statement = self._create_authn_statement(issuer, expiration_time) - signature = self._create_signature() - - assertion = self._create_assertion(saml_issuer, signature, - subject, authn_statement, - attribute_statement) - - assertion = _sign_assertion(assertion) - - response = self._create_response(saml_issuer, status, assertion, - recipient) - return response - - def _determine_expiration_time(self, expires_in): - if expires_in is None: - expires_in = CONF.saml.assertion_expiration_time - now = timeutils.utcnow() - future = now + datetime.timedelta(seconds=expires_in) - return utils.isotime(future, subsecond=True) - - def _create_status(self): - """Create an object that represents a SAML Status. - - - - - - :return: XML object - - """ - status = samlp.Status() - status_code = samlp.StatusCode() - status_code.value = samlp.STATUS_SUCCESS - status_code.set_text('') - status.status_code = status_code - return status - - def _create_issuer(self, issuer_url): - """Create an object that represents a SAML Issuer. 
- - - https://acme.com/FIM/sps/openstack/saml20 - - :return: XML object - - """ - issuer = saml.Issuer() - issuer.format = saml.NAMEID_FORMAT_ENTITY - issuer.set_text(issuer_url) - return issuer - - def _create_subject(self, user, expiration_time, recipient): - """Create an object that represents a SAML Subject. - - - - john@smith.com - - - - - - :return: XML object - - """ - name_id = saml.NameID() - name_id.set_text(user) - subject_conf_data = saml.SubjectConfirmationData() - subject_conf_data.recipient = recipient - subject_conf_data.not_on_or_after = expiration_time - subject_conf = saml.SubjectConfirmation() - subject_conf.method = saml.SCM_BEARER - subject_conf.subject_confirmation_data = subject_conf_data - subject = saml.Subject() - subject.subject_confirmation = subject_conf - subject.name_id = name_id - return subject - - def _create_attribute_statement(self, user, user_domain_name, roles, - project, project_domain_name): - """Create an object that represents a SAML AttributeStatement. 
- - - - test_user - - - Default - - - admin - member - - - development - - - Default - - - - :return: XML object - - """ - - def _build_attribute(attribute_name, attribute_values): - attribute = saml.Attribute() - attribute.name = attribute_name - - for value in attribute_values: - attribute_value = saml.AttributeValue() - attribute_value.set_text(value) - attribute.attribute_value.append(attribute_value) - - return attribute - - user_attribute = _build_attribute('openstack_user', [user]) - roles_attribute = _build_attribute('openstack_roles', roles) - project_attribute = _build_attribute('openstack_project', [project]) - project_domain_attribute = _build_attribute( - 'openstack_project_domain', [project_domain_name]) - user_domain_attribute = _build_attribute( - 'openstack_user_domain', [user_domain_name]) - - attribute_statement = saml.AttributeStatement() - attribute_statement.attribute.append(user_attribute) - attribute_statement.attribute.append(roles_attribute) - attribute_statement.attribute.append(project_attribute) - attribute_statement.attribute.append(project_domain_attribute) - attribute_statement.attribute.append(user_domain_attribute) - return attribute_statement - - def _create_authn_statement(self, issuer, expiration_time): - """Create an object that represents a SAML AuthnStatement. 
- - - - - urn:oasis:names:tc:SAML:2.0:ac:classes:Password - - - https://acme.com/FIM/sps/openstack/saml20 - - - - - :return: XML object - - """ - authn_statement = saml.AuthnStatement() - authn_statement.authn_instant = utils.isotime() - authn_statement.session_index = uuid.uuid4().hex - authn_statement.session_not_on_or_after = expiration_time - - authn_context = saml.AuthnContext() - authn_context_class = saml.AuthnContextClassRef() - authn_context_class.set_text(saml.AUTHN_PASSWORD) - - authn_authority = saml.AuthenticatingAuthority() - authn_authority.set_text(issuer) - authn_context.authn_context_class_ref = authn_context_class - authn_context.authenticating_authority = authn_authority - - authn_statement.authn_context = authn_context - - return authn_statement - - def _create_assertion(self, issuer, signature, subject, authn_statement, - attribute_statement): - """Create an object that represents a SAML Assertion. - - - ... - ... - ... - ... - ... - - - :return: XML object - - """ - assertion = saml.Assertion() - assertion.id = self.assertion_id - assertion.issue_instant = utils.isotime() - assertion.version = '2.0' - assertion.issuer = issuer - assertion.signature = signature - assertion.subject = subject - assertion.authn_statement = authn_statement - assertion.attribute_statement = attribute_statement - return assertion - - def _create_response(self, issuer, status, assertion, recipient): - """Create an object that represents a SAML Response. - - - ... - ... - ... - - - :return: XML object - - """ - response = samlp.Response() - response.id = uuid.uuid4().hex - response.destination = recipient - response.issue_instant = utils.isotime() - response.version = '2.0' - response.issuer = issuer - response.status = status - response.assertion = assertion - return response - - def _create_signature(self): - """Create an object that represents a SAML . - - This must be filled with algorithms that the signing binary will apply - in order to sign the whole message. 
- Currently we enforce X509 signing. - Example of the template:: - - - - - - - - - - - - - - - - - - - - - :return: XML object - - """ - canonicalization_method = xmldsig.CanonicalizationMethod() - canonicalization_method.algorithm = xmldsig.ALG_EXC_C14N - signature_method = xmldsig.SignatureMethod( - algorithm=xmldsig.SIG_RSA_SHA1) - - transforms = xmldsig.Transforms() - envelope_transform = xmldsig.Transform( - algorithm=xmldsig.TRANSFORM_ENVELOPED) - - c14_transform = xmldsig.Transform(algorithm=xmldsig.ALG_EXC_C14N) - transforms.transform = [envelope_transform, c14_transform] - - digest_method = xmldsig.DigestMethod(algorithm=xmldsig.DIGEST_SHA1) - digest_value = xmldsig.DigestValue() - - reference = xmldsig.Reference() - reference.uri = '#' + self.assertion_id - reference.digest_method = digest_method - reference.digest_value = digest_value - reference.transforms = transforms - - signed_info = xmldsig.SignedInfo() - signed_info.canonicalization_method = canonicalization_method - signed_info.signature_method = signature_method - signed_info.reference = reference - - key_info = xmldsig.KeyInfo() - key_info.x509_data = xmldsig.X509Data() - - signature = xmldsig.Signature() - signature.signed_info = signed_info - signature.signature_value = xmldsig.SignatureValue() - signature.key_info = key_info - - return signature - - -def _sign_assertion(assertion): - """Sign a SAML assertion. - - This method utilizes ``xmlsec1`` binary and signs SAML assertions in a - separate process. ``xmlsec1`` cannot read input data from stdin so the - prepared assertion needs to be serialized and stored in a temporary - file. This file will be deleted immediately after ``xmlsec1`` returns. - The signed assertion is redirected to a standard output and read using - subprocess.PIPE redirection. A ``saml.Assertion`` class is created - from the signed string again and returned. 
- - Parameters that are required in the CONF:: - * xmlsec_binary - * private key file path - * public key file path - :return: XML object - - """ - xmlsec_binary = CONF.saml.xmlsec1_binary - idp_private_key = CONF.saml.keyfile - idp_public_key = CONF.saml.certfile - - # xmlsec1 --sign --privkey-pem privkey,cert --id-attr:ID - certificates = '%(idp_private_key)s,%(idp_public_key)s' % { - 'idp_public_key': idp_public_key, - 'idp_private_key': idp_private_key - } - - command_list = [xmlsec_binary, '--sign', '--privkey-pem', certificates, - '--id-attr:ID', 'Assertion'] - - file_path = None - try: - # NOTE(gyee): need to make the namespace prefixes explicit so - # they won't get reassigned when we wrap the assertion into - # SAML2 response - file_path = fileutils.write_to_tempfile(assertion.to_string( - nspair={'saml': saml2.NAMESPACE, - 'xmldsig': xmldsig.NAMESPACE})) - command_list.append(file_path) - stdout = subprocess.check_output(command_list, - stderr=subprocess.STDOUT) - except Exception as e: - msg = _LE('Error when signing assertion, reason: %(reason)s%(output)s') - LOG.error(msg, - {'reason': e, - 'output': ' ' + e.output if hasattr(e, 'output') else ''}) - raise exception.SAMLSigningError(reason=e) - finally: - try: - if file_path: - os.remove(file_path) - except OSError: - pass - - return saml2.create_class_from_xml_string(saml.Assertion, stdout) - - -class MetadataGenerator(object): - """A class for generating SAML IdP Metadata.""" - - def generate_metadata(self): - """Generate Identity Provider Metadata. - - Generate and format metadata into XML that can be exposed and - consumed by a federated Service Provider. - - :return: XML object. - :raises: keystone.exception.ValidationError: Raises if the required - config options aren't set. 
- - """ - self._ensure_required_values_present() - entity_descriptor = self._create_entity_descriptor() - entity_descriptor.idpsso_descriptor = ( - self._create_idp_sso_descriptor()) - return entity_descriptor - - def _create_entity_descriptor(self): - ed = md.EntityDescriptor() - ed.entity_id = CONF.saml.idp_entity_id - return ed - - def _create_idp_sso_descriptor(self): - - def get_cert(): - try: - return sigver.read_cert_from_file(CONF.saml.certfile, 'pem') - except (IOError, sigver.CertificateError) as e: - msg = _('Cannot open certificate %(cert_file)s. ' - 'Reason: %(reason)s') - msg = msg % {'cert_file': CONF.saml.certfile, 'reason': e} - LOG.error(msg) - raise IOError(msg) - - def key_descriptor(): - cert = get_cert() - return md.KeyDescriptor( - key_info=xmldsig.KeyInfo( - x509_data=xmldsig.X509Data( - x509_certificate=xmldsig.X509Certificate(text=cert) - ) - ), use='signing' - ) - - def single_sign_on_service(): - idp_sso_endpoint = CONF.saml.idp_sso_endpoint - return md.SingleSignOnService( - binding=saml2.BINDING_URI, - location=idp_sso_endpoint) - - def organization(): - name = md.OrganizationName(lang=CONF.saml.idp_lang, - text=CONF.saml.idp_organization_name) - display_name = md.OrganizationDisplayName( - lang=CONF.saml.idp_lang, - text=CONF.saml.idp_organization_display_name) - url = md.OrganizationURL(lang=CONF.saml.idp_lang, - text=CONF.saml.idp_organization_url) - - return md.Organization( - organization_display_name=display_name, - organization_url=url, organization_name=name) - - def contact_person(): - company = md.Company(text=CONF.saml.idp_contact_company) - given_name = md.GivenName(text=CONF.saml.idp_contact_name) - surname = md.SurName(text=CONF.saml.idp_contact_surname) - email = md.EmailAddress(text=CONF.saml.idp_contact_email) - telephone = md.TelephoneNumber( - text=CONF.saml.idp_contact_telephone) - contact_type = CONF.saml.idp_contact_type - - return md.ContactPerson( - company=company, given_name=given_name, sur_name=surname, - 
email_address=email, telephone_number=telephone, - contact_type=contact_type) - - def name_id_format(): - return md.NameIDFormat(text=saml.NAMEID_FORMAT_TRANSIENT) - - idpsso = md.IDPSSODescriptor() - idpsso.protocol_support_enumeration = samlp.NAMESPACE - idpsso.key_descriptor = key_descriptor() - idpsso.single_sign_on_service = single_sign_on_service() - idpsso.name_id_format = name_id_format() - if self._check_organization_values(): - idpsso.organization = organization() - if self._check_contact_person_values(): - idpsso.contact_person = contact_person() - return idpsso - - def _ensure_required_values_present(self): - """Ensure idp_sso_endpoint and idp_entity_id have values.""" - - if CONF.saml.idp_entity_id is None: - msg = _('Ensure configuration option idp_entity_id is set.') - raise exception.ValidationError(msg) - if CONF.saml.idp_sso_endpoint is None: - msg = _('Ensure configuration option idp_sso_endpoint is set.') - raise exception.ValidationError(msg) - - def _check_contact_person_values(self): - """Determine if contact information is included in metadata.""" - - # Check if we should include contact information - params = [CONF.saml.idp_contact_company, - CONF.saml.idp_contact_name, - CONF.saml.idp_contact_surname, - CONF.saml.idp_contact_email, - CONF.saml.idp_contact_telephone] - for value in params: - if value is None: - return False - - # Check if contact type is an invalid value - valid_type_values = ['technical', 'other', 'support', 'administrative', - 'billing'] - if CONF.saml.idp_contact_type not in valid_type_values: - msg = _('idp_contact_type must be one of: [technical, other, ' - 'support, administrative or billing.') - raise exception.ValidationError(msg) - return True - - def _check_organization_values(self): - """Determine if organization information is included in metadata.""" - - params = [CONF.saml.idp_organization_name, - CONF.saml.idp_organization_display_name, - CONF.saml.idp_organization_url] - for value in params: - if value is 
None: - return False - return True - - -class ECPGenerator(object): - """A class for generating an ECP assertion.""" - - @staticmethod - def generate_ecp(saml_assertion, relay_state_prefix): - ecp_generator = ECPGenerator() - header = ecp_generator._create_header(relay_state_prefix) - body = ecp_generator._create_body(saml_assertion) - envelope = soapenv.Envelope(header=header, body=body) - return envelope - - def _create_header(self, relay_state_prefix): - relay_state_text = relay_state_prefix + uuid.uuid4().hex - relay_state = ecp.RelayState(actor=client_base.ACTOR, - must_understand='1', - text=relay_state_text) - header = soapenv.Header() - header.extension_elements = ( - [saml2.element_to_extension_element(relay_state)]) - return header - - def _create_body(self, saml_assertion): - body = soapenv.Body() - body.extension_elements = ( - [saml2.element_to_extension_element(saml_assertion)]) - return body diff --git a/keystone-moon/keystone/contrib/federation/migrate_repo/__init__.py b/keystone-moon/keystone/contrib/federation/migrate_repo/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/contrib/federation/migrate_repo/migrate.cfg b/keystone-moon/keystone/contrib/federation/migrate_repo/migrate.cfg deleted file mode 100644 index 464ab62b..00000000 --- a/keystone-moon/keystone/contrib/federation/migrate_repo/migrate.cfg +++ /dev/null @@ -1,25 +0,0 @@ -[db_settings] -# Used to identify which repository this database is versioned under. -# You can use the name of your project. -repository_id=federation - -# The name of the database table used to track the schema version. -# This name shouldn't already be used by your project. -# If this is changed once a database is under version control, you'll need to -# change the table name in each database too. 
-version_table=migrate_version - -# When committing a change script, Migrate will attempt to generate the -# sql for all supported databases; normally, if one of them fails - probably -# because you don't have that database installed - it is ignored and the -# commit continues, perhaps ending successfully. -# Databases in this list MUST compile successfully during a commit, or the -# entire commit will fail. List the databases your application will actually -# be using to ensure your updates to that database work properly. -# This must be a list; example: ['postgres','sqlite'] -required_dbs=[] - -# When creating new change scripts, Migrate will stamp the new script with -# a version number. By default this is latest_version + 1. You can set this -# to 'true' to tell Migrate to use the UTC timestamp instead. -use_timestamp_numbering=False diff --git a/keystone-moon/keystone/contrib/federation/migrate_repo/versions/001_add_identity_provider_table.py b/keystone-moon/keystone/contrib/federation/migrate_repo/versions/001_add_identity_provider_table.py deleted file mode 100644 index d9b24a00..00000000 --- a/keystone-moon/keystone/contrib/federation/migrate_repo/versions/001_add_identity_provider_table.py +++ /dev/null @@ -1,17 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone import exception - - -def upgrade(migrate_engine): - raise exception.MigrationMovedFailure(extension='federation') diff --git a/keystone-moon/keystone/contrib/federation/migrate_repo/versions/002_add_mapping_tables.py b/keystone-moon/keystone/contrib/federation/migrate_repo/versions/002_add_mapping_tables.py deleted file mode 100644 index d9b24a00..00000000 --- a/keystone-moon/keystone/contrib/federation/migrate_repo/versions/002_add_mapping_tables.py +++ /dev/null @@ -1,17 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone import exception - - -def upgrade(migrate_engine): - raise exception.MigrationMovedFailure(extension='federation') diff --git a/keystone-moon/keystone/contrib/federation/migrate_repo/versions/003_mapping_id_nullable_false.py b/keystone-moon/keystone/contrib/federation/migrate_repo/versions/003_mapping_id_nullable_false.py deleted file mode 100644 index 8ce8c6fa..00000000 --- a/keystone-moon/keystone/contrib/federation/migrate_repo/versions/003_mapping_id_nullable_false.py +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright 2014 Mirantis.inc -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone import exception - - -def upgrade(migrate_engine): - raise exception.MigrationMovedFailure(extension='federation') diff --git a/keystone-moon/keystone/contrib/federation/migrate_repo/versions/004_add_remote_id_column.py b/keystone-moon/keystone/contrib/federation/migrate_repo/versions/004_add_remote_id_column.py deleted file mode 100644 index d9b24a00..00000000 --- a/keystone-moon/keystone/contrib/federation/migrate_repo/versions/004_add_remote_id_column.py +++ /dev/null @@ -1,17 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone import exception - - -def upgrade(migrate_engine): - raise exception.MigrationMovedFailure(extension='federation') diff --git a/keystone-moon/keystone/contrib/federation/migrate_repo/versions/005_add_service_provider_table.py b/keystone-moon/keystone/contrib/federation/migrate_repo/versions/005_add_service_provider_table.py deleted file mode 100644 index d9b24a00..00000000 --- a/keystone-moon/keystone/contrib/federation/migrate_repo/versions/005_add_service_provider_table.py +++ /dev/null @@ -1,17 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone import exception - - -def upgrade(migrate_engine): - raise exception.MigrationMovedFailure(extension='federation') diff --git a/keystone-moon/keystone/contrib/federation/migrate_repo/versions/006_fixup_service_provider_attributes.py b/keystone-moon/keystone/contrib/federation/migrate_repo/versions/006_fixup_service_provider_attributes.py deleted file mode 100644 index d9b24a00..00000000 --- a/keystone-moon/keystone/contrib/federation/migrate_repo/versions/006_fixup_service_provider_attributes.py +++ /dev/null @@ -1,17 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone import exception - - -def upgrade(migrate_engine): - raise exception.MigrationMovedFailure(extension='federation') diff --git a/keystone-moon/keystone/contrib/federation/migrate_repo/versions/007_add_remote_id_table.py b/keystone-moon/keystone/contrib/federation/migrate_repo/versions/007_add_remote_id_table.py deleted file mode 100644 index d9b24a00..00000000 --- a/keystone-moon/keystone/contrib/federation/migrate_repo/versions/007_add_remote_id_table.py +++ /dev/null @@ -1,17 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone import exception - - -def upgrade(migrate_engine): - raise exception.MigrationMovedFailure(extension='federation') diff --git a/keystone-moon/keystone/contrib/federation/migrate_repo/versions/008_add_relay_state_to_sp.py b/keystone-moon/keystone/contrib/federation/migrate_repo/versions/008_add_relay_state_to_sp.py deleted file mode 100644 index d9b24a00..00000000 --- a/keystone-moon/keystone/contrib/federation/migrate_repo/versions/008_add_relay_state_to_sp.py +++ /dev/null @@ -1,17 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone import exception - - -def upgrade(migrate_engine): - raise exception.MigrationMovedFailure(extension='federation') diff --git a/keystone-moon/keystone/contrib/federation/migrate_repo/versions/__init__.py b/keystone-moon/keystone/contrib/federation/migrate_repo/versions/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/contrib/federation/routers.py b/keystone-moon/keystone/contrib/federation/routers.py deleted file mode 100644 index d5857ca6..00000000 --- a/keystone-moon/keystone/contrib/federation/routers.py +++ /dev/null @@ -1,31 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_log import log -from oslo_log import versionutils - -from keystone.common import wsgi -from keystone.i18n import _ - - -LOG = log.getLogger(__name__) - - -class FederationExtension(wsgi.Middleware): - - def __init__(self, *args, **kwargs): - super(FederationExtension, self).__init__(*args, **kwargs) - msg = _("Remove federation_extension from the paste pipeline, the " - "federation extension is now always available. Update the " - "[pipeline:api_v3] section in keystone-paste.ini accordingly, " - "as it will be removed in the O release.") - versionutils.report_deprecated_feature(LOG, msg) diff --git a/keystone-moon/keystone/contrib/federation/schema.py b/keystone-moon/keystone/contrib/federation/schema.py deleted file mode 100644 index 17818a98..00000000 --- a/keystone-moon/keystone/contrib/federation/schema.py +++ /dev/null @@ -1,79 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone.common import validation -from keystone.common.validation import parameter_types - - -basic_property_id = { - 'type': 'object', - 'properties': { - 'id': { - 'type': 'string' - } - }, - 'required': ['id'], - 'additionalProperties': False -} - -saml_create = { - 'type': 'object', - 'properties': { - 'identity': { - 'type': 'object', - 'properties': { - 'token': basic_property_id, - 'methods': { - 'type': 'array' - } - }, - 'required': ['token'], - 'additionalProperties': False - }, - 'scope': { - 'type': 'object', - 'properties': { - 'service_provider': basic_property_id - }, - 'required': ['service_provider'], - 'additionalProperties': False - }, - }, - 'required': ['identity', 'scope'], - 'additionalProperties': False -} - -_service_provider_properties = { - # NOTE(rodrigods): The database accepts URLs with 256 as max length, - # but parameter_types.url uses 225 as max length. - 'auth_url': parameter_types.url, - 'sp_url': parameter_types.url, - 'description': validation.nullable(parameter_types.description), - 'enabled': parameter_types.boolean, - 'relay_state_prefix': validation.nullable(parameter_types.description) -} - -service_provider_create = { - 'type': 'object', - 'properties': _service_provider_properties, - # NOTE(rodrigods): 'id' is not required since it is passed in the URL - 'required': ['auth_url', 'sp_url'], - 'additionalProperties': False -} - -service_provider_update = { - 'type': 'object', - 'properties': _service_provider_properties, - # Make sure at least one property is being updated - 'minProperties': 1, - 'additionalProperties': False -} diff --git a/keystone-moon/keystone/contrib/federation/utils.py b/keystone-moon/keystone/contrib/federation/utils.py deleted file mode 100644 index bde19cfd..00000000 --- a/keystone-moon/keystone/contrib/federation/utils.py +++ /dev/null @@ -1,776 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Utilities for Federation Extension.""" - -import ast -import re - -import jsonschema -from oslo_config import cfg -from oslo_log import log -from oslo_utils import timeutils -import six - -from keystone import exception -from keystone.i18n import _, _LW - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -MAPPING_SCHEMA = { - "type": "object", - "required": ['rules'], - "properties": { - "rules": { - "minItems": 1, - "type": "array", - "items": { - "type": "object", - "required": ['local', 'remote'], - "additionalProperties": False, - "properties": { - "local": { - "type": "array" - }, - "remote": { - "minItems": 1, - "type": "array", - "items": { - "type": "object", - "oneOf": [ - {"$ref": "#/definitions/empty"}, - {"$ref": "#/definitions/any_one_of"}, - {"$ref": "#/definitions/not_any_of"}, - {"$ref": "#/definitions/blacklist"}, - {"$ref": "#/definitions/whitelist"} - ], - } - } - } - } - } - }, - "definitions": { - "empty": { - "type": "object", - "required": ['type'], - "properties": { - "type": { - "type": "string" - }, - }, - "additionalProperties": False, - }, - "any_one_of": { - "type": "object", - "additionalProperties": False, - "required": ['type', 'any_one_of'], - "properties": { - "type": { - "type": "string" - }, - "any_one_of": { - "type": "array" - }, - "regex": { - "type": "boolean" - } - } - }, - "not_any_of": { - "type": "object", - "additionalProperties": False, - "required": ['type', 'not_any_of'], - "properties": { - "type": { - "type": "string" - }, - "not_any_of": { - "type": "array" - }, - "regex": { - "type": "boolean" - 
} - } - }, - "blacklist": { - "type": "object", - "additionalProperties": False, - "required": ['type', 'blacklist'], - "properties": { - "type": { - "type": "string" - }, - "blacklist": { - "type": "array" - } - } - }, - "whitelist": { - "type": "object", - "additionalProperties": False, - "required": ['type', 'whitelist'], - "properties": { - "type": { - "type": "string" - }, - "whitelist": { - "type": "array" - } - } - } - } -} - - -class DirectMaps(object): - """An abstraction around the remote matches. - - Each match is treated internally as a list. - """ - - def __init__(self): - self._matches = [] - - def add(self, values): - """Adds a matched value to the list of matches. - - :param list value: the match to save - - """ - self._matches.append(values) - - def __getitem__(self, idx): - """Used by Python when executing ``''.format(*DirectMaps())``.""" - value = self._matches[idx] - if isinstance(value, list) and len(value) == 1: - return value[0] - else: - return value - - -def validate_mapping_structure(ref): - v = jsonschema.Draft4Validator(MAPPING_SCHEMA) - - messages = '' - for error in sorted(v.iter_errors(ref), key=str): - messages = messages + error.message + "\n" - - if messages: - raise exception.ValidationError(messages) - - -def validate_expiration(token_ref): - if timeutils.utcnow() > token_ref.expires: - raise exception.Unauthorized(_('Federation token is expired')) - - -def validate_groups_cardinality(group_ids, mapping_id): - """Check if groups list is non-empty. - - :param group_ids: list of group ids - :type group_ids: list of str - - :raises exception.MissingGroups: if ``group_ids`` cardinality is 0 - - """ - if not group_ids: - raise exception.MissingGroups(mapping_id=mapping_id) - - -def get_remote_id_parameter(protocol): - # NOTE(marco-fargetta): Since we support any protocol ID, we attempt to - # retrieve the remote_id_attribute of the protocol ID. If it's not - # registered in the config, then register the option and try again. 
- # This allows the user to register protocols other than oidc and saml2. - remote_id_parameter = None - try: - remote_id_parameter = CONF[protocol]['remote_id_attribute'] - except AttributeError: - CONF.register_opt(cfg.StrOpt('remote_id_attribute'), - group=protocol) - try: - remote_id_parameter = CONF[protocol]['remote_id_attribute'] - except AttributeError: - pass - if not remote_id_parameter: - LOG.debug('Cannot find "remote_id_attribute" in configuration ' - 'group %s. Trying default location in ' - 'group federation.', protocol) - remote_id_parameter = CONF.federation.remote_id_attribute - - return remote_id_parameter - - -def validate_idp(idp, protocol, assertion): - """Validate the IdP providing the assertion is registered for the mapping. - """ - - remote_id_parameter = get_remote_id_parameter(protocol) - if not remote_id_parameter or not idp['remote_ids']: - LOG.debug('Impossible to identify the IdP %s ', idp['id']) - # If nothing is defined, the administrator may want to - # allow the mapping of every IdP - return - try: - idp_remote_identifier = assertion[remote_id_parameter] - except KeyError: - msg = _('Could not find Identity Provider identifier in ' - 'environment') - raise exception.ValidationError(msg) - if idp_remote_identifier not in idp['remote_ids']: - msg = _('Incoming identity provider identifier not included ' - 'among the accepted identifiers.') - raise exception.Forbidden(msg) - - -def validate_groups_in_backend(group_ids, mapping_id, identity_api): - """Iterate over group ids and make sure they are present in the backend/ - - This call is not transactional. 
- :param group_ids: IDs of the groups to be checked - :type group_ids: list of str - - :param mapping_id: id of the mapping used for this operation - :type mapping_id: str - - :param identity_api: Identity Manager object used for communication with - backend - :type identity_api: identity.Manager - - :raises: exception.MappedGroupNotFound - - """ - for group_id in group_ids: - try: - identity_api.get_group(group_id) - except exception.GroupNotFound: - raise exception.MappedGroupNotFound( - group_id=group_id, mapping_id=mapping_id) - - -def validate_groups(group_ids, mapping_id, identity_api): - """Check group ids cardinality and check their existence in the backend. - - This call is not transactional. - :param group_ids: IDs of the groups to be checked - :type group_ids: list of str - - :param mapping_id: id of the mapping used for this operation - :type mapping_id: str - - :param identity_api: Identity Manager object used for communication with - backend - :type identity_api: identity.Manager - - :raises: exception.MappedGroupNotFound - :raises: exception.MissingGroups - - """ - validate_groups_cardinality(group_ids, mapping_id) - validate_groups_in_backend(group_ids, mapping_id, identity_api) - - -# TODO(marek-denis): Optimize this function, so the number of calls to the -# backend are minimized. -def transform_to_group_ids(group_names, mapping_id, - identity_api, resource_api): - """Transform groups identitified by name/domain to their ids - - Function accepts list of groups identified by a name and domain giving - a list of group ids in return. - - Example of group_names parameter:: - - [ - { - "name": "group_name", - "domain": { - "id": "domain_id" - }, - }, - { - "name": "group_name_2", - "domain": { - "name": "domain_name" - } - } - ] - - :param group_names: list of group identified by name and its domain. 
- :type group_names: list - - :param mapping_id: id of the mapping used for mapping assertion into - local credentials - :type mapping_id: str - - :param identity_api: identity_api object - :param resource_api: resource manager object - - :returns: generator object with group ids - - :raises: excepton.MappedGroupNotFound: in case asked group doesn't - exist in the backend. - - """ - - def resolve_domain(domain): - """Return domain id. - - Input is a dictionary with a domain identified either by a ``id`` or a - ``name``. In the latter case system will attempt to fetch domain object - from the backend. - - :returns: domain's id - :rtype: str - - """ - domain_id = (domain.get('id') or - resource_api.get_domain_by_name( - domain.get('name')).get('id')) - return domain_id - - for group in group_names: - try: - group_dict = identity_api.get_group_by_name( - group['name'], resolve_domain(group['domain'])) - yield group_dict['id'] - except exception.GroupNotFound: - LOG.debug('Skip mapping group %s; has no entry in the backend', - group['name']) - - -def get_assertion_params_from_env(context): - LOG.debug('Environment variables: %s', context['environment']) - prefix = CONF.federation.assertion_prefix - for k, v in list(context['environment'].items()): - if k.startswith(prefix): - yield (k, v) - - -class UserType(object): - """User mapping type.""" - EPHEMERAL = 'ephemeral' - LOCAL = 'local' - - -class RuleProcessor(object): - """A class to process assertions and mapping rules.""" - - class _EvalType(object): - """Mapping rule evaluation types.""" - ANY_ONE_OF = 'any_one_of' - NOT_ANY_OF = 'not_any_of' - BLACKLIST = 'blacklist' - WHITELIST = 'whitelist' - - def __init__(self, rules): - """Initialize RuleProcessor. 
- - Example rules can be found at: - :class:`keystone.tests.mapping_fixtures` - - :param rules: rules from a mapping - :type rules: dict - - """ - - self.rules = rules - - def process(self, assertion_data): - """Transform assertion to a dictionary of user name and group ids - based on mapping rules. - - This function will iterate through the mapping rules to find - assertions that are valid. - - :param assertion_data: an assertion containing values from an IdP - :type assertion_data: dict - - Example assertion_data:: - - { - 'Email': 'testacct@example.com', - 'UserName': 'testacct', - 'FirstName': 'Test', - 'LastName': 'Account', - 'orgPersonType': 'Tester' - } - - :returns: dictionary with user and group_ids - - The expected return structure is:: - - { - 'name': 'foobar', - 'group_ids': ['abc123', 'def456'], - 'group_names': [ - { - 'name': 'group_name_1', - 'domain': { - 'name': 'domain1' - } - }, - { - 'name': 'group_name_1_1', - 'domain': { - 'name': 'domain1' - } - }, - { - 'name': 'group_name_2', - 'domain': { - 'id': 'xyz132' - } - } - ] - } - - """ - - # Assertions will come in as string key-value pairs, and will use a - # semi-colon to indicate multiple values, i.e. groups. - # This will create a new dictionary where the values are arrays, and - # any multiple values are stored in the arrays. - LOG.debug('assertion data: %s', assertion_data) - assertion = {n: v.split(';') for n, v in assertion_data.items() - if isinstance(v, six.string_types)} - LOG.debug('assertion: %s', assertion) - identity_values = [] - - LOG.debug('rules: %s', self.rules) - for rule in self.rules: - direct_maps = self._verify_all_requirements(rule['remote'], - assertion) - - # If the compare comes back as None, then the rule did not apply - # to the assertion data, go on to the next rule - if direct_maps is None: - continue - - # If there are no direct mappings, then add the local mapping - # directly to the array of saved values. 
However, if there is - # a direct mapping, then perform variable replacement. - if not direct_maps: - identity_values += rule['local'] - else: - for local in rule['local']: - new_local = self._update_local_mapping(local, direct_maps) - identity_values.append(new_local) - - LOG.debug('identity_values: %s', identity_values) - mapped_properties = self._transform(identity_values) - LOG.debug('mapped_properties: %s', mapped_properties) - return mapped_properties - - def _transform(self, identity_values): - """Transform local mappings, to an easier to understand format. - - Transform the incoming array to generate the return value for - the process function. Generating content for Keystone tokens will - be easier if some pre-processing is done at this level. - - :param identity_values: local mapping from valid evaluations - :type identity_values: array of dict - - Example identity_values:: - - [ - { - 'group': {'id': '0cd5e9'}, - 'user': { - 'email': 'bob@example.com' - }, - }, - { - 'groups': ['member', 'admin', tester'], - 'domain': { - 'name': 'default_domain' - } - } - ] - - :returns: dictionary with user name, group_ids and group_names. 
- :rtype: dict - - """ - - def extract_groups(groups_by_domain): - for groups in list(groups_by_domain.values()): - for group in list({g['name']: g for g in groups}.values()): - yield group - - def normalize_user(user): - """Parse and validate user mapping.""" - - user_type = user.get('type') - - if user_type and user_type not in (UserType.EPHEMERAL, - UserType.LOCAL): - msg = _("User type %s not supported") % user_type - raise exception.ValidationError(msg) - - if user_type is None: - user_type = user['type'] = UserType.EPHEMERAL - - if user_type == UserType.EPHEMERAL: - user['domain'] = { - 'id': CONF.federation.federated_domain_name - } - - # initialize the group_ids as a set to eliminate duplicates - user = {} - group_ids = set() - group_names = list() - groups_by_domain = dict() - - for identity_value in identity_values: - if 'user' in identity_value: - # if a mapping outputs more than one user name, log it - if user: - LOG.warning(_LW('Ignoring user name')) - else: - user = identity_value.get('user') - if 'group' in identity_value: - group = identity_value['group'] - if 'id' in group: - group_ids.add(group['id']) - elif 'name' in group: - domain = (group['domain'].get('name') or - group['domain'].get('id')) - groups_by_domain.setdefault(domain, list()).append(group) - group_names.extend(extract_groups(groups_by_domain)) - if 'groups' in identity_value: - if 'domain' not in identity_value: - msg = _("Invalid rule: %(identity_value)s. Both 'groups' " - "and 'domain' keywords must be specified.") - msg = msg % {'identity_value': identity_value} - raise exception.ValidationError(msg) - # In this case, identity_value['groups'] is a string - # representation of a list, and we want a real list. 
This is - # due to the way we do direct mapping substitutions today (see - # function _update_local_mapping() ) - try: - group_names_list = ast.literal_eval( - identity_value['groups']) - except ValueError: - group_names_list = [identity_value['groups']] - domain = identity_value['domain'] - group_dicts = [{'name': name, 'domain': domain} for name in - group_names_list] - - group_names.extend(group_dicts) - - normalize_user(user) - - return {'user': user, - 'group_ids': list(group_ids), - 'group_names': group_names} - - def _update_local_mapping(self, local, direct_maps): - """Replace any {0}, {1} ... values with data from the assertion. - - :param local: local mapping reference that needs to be updated - :type local: dict - :param direct_maps: identity values used to update local - :type direct_maps: keystone.contrib.federation.utils.DirectMaps - - Example local:: - - {'user': {'name': '{0} {1}', 'email': '{2}'}} - - Example direct_maps:: - - ['Bob', 'Thompson', 'bob@example.com'] - - :returns: new local mapping reference with replaced values. - - The expected return structure is:: - - {'user': {'name': 'Bob Thompson', 'email': 'bob@example.org'}} - - """ - - LOG.debug('direct_maps: %s', direct_maps) - LOG.debug('local: %s', local) - new = {} - for k, v in local.items(): - if isinstance(v, dict): - new_value = self._update_local_mapping(v, direct_maps) - else: - new_value = v.format(*direct_maps) - new[k] = new_value - return new - - def _verify_all_requirements(self, requirements, assertion): - """Go through the remote requirements of a rule, and compare against - the assertion. - - If a value of ``None`` is returned, the rule with this assertion - doesn't apply. - If an array of zero length is returned, then there are no direct - mappings to be performed, but the rule is valid. - Otherwise, then it will first attempt to filter the values according - to blacklist or whitelist rules and finally return the values in - order, to be directly mapped. 
- - :param requirements: list of remote requirements from rules - :type requirements: list - - Example requirements:: - - [ - { - "type": "UserName" - }, - { - "type": "orgPersonType", - "any_one_of": [ - "Customer" - ] - }, - { - "type": "ADFS_GROUPS", - "whitelist": [ - "g1", "g2", "g3", "g4" - ] - } - ] - - :param assertion: dict of attributes from an IdP - :type assertion: dict - - Example assertion:: - - { - 'UserName': ['testacct'], - 'LastName': ['Account'], - 'orgPersonType': ['Tester'], - 'Email': ['testacct@example.com'], - 'FirstName': ['Test'], - 'ADFS_GROUPS': ['g1', 'g2'] - } - - :returns: identity values used to update local - :rtype: keystone.contrib.federation.utils.DirectMaps or None - - """ - - direct_maps = DirectMaps() - - for requirement in requirements: - requirement_type = requirement['type'] - direct_map_values = assertion.get(requirement_type) - regex = requirement.get('regex', False) - - if not direct_map_values: - return None - - any_one_values = requirement.get(self._EvalType.ANY_ONE_OF) - if any_one_values is not None: - if self._evaluate_requirement(any_one_values, - direct_map_values, - self._EvalType.ANY_ONE_OF, - regex): - continue - else: - return None - - not_any_values = requirement.get(self._EvalType.NOT_ANY_OF) - if not_any_values is not None: - if self._evaluate_requirement(not_any_values, - direct_map_values, - self._EvalType.NOT_ANY_OF, - regex): - continue - else: - return None - - # If 'any_one_of' or 'not_any_of' are not found, then values are - # within 'type'. Attempt to find that 'type' within the assertion, - # and filter these values if 'whitelist' or 'blacklist' is set. - blacklisted_values = requirement.get(self._EvalType.BLACKLIST) - whitelisted_values = requirement.get(self._EvalType.WHITELIST) - - # If a blacklist or whitelist is used, we want to map to the - # whole list instead of just its values separately. 
- if blacklisted_values is not None: - direct_map_values = [v for v in direct_map_values - if v not in blacklisted_values] - elif whitelisted_values is not None: - direct_map_values = [v for v in direct_map_values - if v in whitelisted_values] - - direct_maps.add(direct_map_values) - - LOG.debug('updating a direct mapping: %s', direct_map_values) - - return direct_maps - - def _evaluate_values_by_regex(self, values, assertion_values): - for value in values: - for assertion_value in assertion_values: - if re.search(value, assertion_value): - return True - return False - - def _evaluate_requirement(self, values, assertion_values, - eval_type, regex): - """Evaluate the incoming requirement and assertion. - - If the requirement type does not exist in the assertion data, then - return False. If regex is specified, then compare the values and - assertion values. Otherwise, grab the intersection of the values - and use that to compare against the evaluation type. - - :param values: list of allowed values, defined in the requirement - :type values: list - :param assertion_values: The values from the assertion to evaluate - :type assertion_values: list/string - :param eval_type: determine how to evaluate requirements - :type eval_type: string - :param regex: perform evaluation with regex - :type regex: boolean - - :returns: boolean, whether requirement is valid or not. 
- - """ - if regex: - any_match = self._evaluate_values_by_regex(values, - assertion_values) - else: - any_match = bool(set(values).intersection(set(assertion_values))) - if any_match and eval_type == self._EvalType.ANY_ONE_OF: - return True - if not any_match and eval_type == self._EvalType.NOT_ANY_OF: - return True - - return False - - -def assert_enabled_identity_provider(federation_api, idp_id): - identity_provider = federation_api.get_idp(idp_id) - if identity_provider.get('enabled') is not True: - msg = _('Identity Provider %(idp)s is disabled') % {'idp': idp_id} - LOG.debug(msg) - raise exception.Forbidden(msg) - - -def assert_enabled_service_provider_object(service_provider): - if service_provider.get('enabled') is not True: - sp_id = service_provider['id'] - msg = _('Service Provider %(sp)s is disabled') % {'sp': sp_id} - LOG.debug(msg) - raise exception.Forbidden(msg) diff --git a/keystone-moon/keystone/contrib/moon/__init__.py b/keystone-moon/keystone/contrib/moon/__init__.py deleted file mode 100644 index 6a96782e..00000000 --- a/keystone-moon/keystone/contrib/moon/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors -# This software is distributed under the terms and conditions of the 'Apache-2.0' -# license which can be found in the file 'LICENSE' in this package distribution -# or at 'http://www.apache.org/licenses/LICENSE-2.0'. 
- -from keystone.contrib.moon.core import * # noqa -from keystone.contrib.moon import controllers # noqa -from keystone.contrib.moon import routers # noqa \ No newline at end of file diff --git a/keystone-moon/keystone/contrib/moon/algorithms.py b/keystone-moon/keystone/contrib/moon/algorithms.py deleted file mode 100644 index 2f997efc..00000000 --- a/keystone-moon/keystone/contrib/moon/algorithms.py +++ /dev/null @@ -1,78 +0,0 @@ -import itertools -from oslo_log import log -LOG = log.getLogger(__name__) - - -""" an example of authz_buffer, sub_meta_rule_dict, rule_dict -authz_buffer = { - 'subject_uuid': xxx, - 'object_uuid': yyy, - 'action_uuid': zzz, - 'subject_attributes': { - 'subject_category1': [], - 'subject_category2': [], - ... - 'subject_categoryn': [] - }, - 'object_attributes': {}, - 'action_attributes': {}, -} - -sub_meta_rule_dict = { - "subject_categories": ["subject_security_level", "aaa"], - "action_categories": ["computing_action"], - "object_categories": ["object_security_level"], -} - -rule_dict = [ - ["high", "vm_admin", "medium", True], - ["high", "vm_admin", "low", True], - ["medium", "vm_admin", "low", True], - ["high", "vm_access", "high", True], - ["high", "vm_access", "medium", True], - ["high", "vm_access", "low", True], - ["medium", "vm_access", "medium", True], - ["medium", "vm_access", "low", True], - ["low", "vm_access", "low", True] -] -""" - - -def inclusion(authz_buffer, sub_meta_rule_dict, rule_list): - _cat = [] - for subject_cat in sub_meta_rule_dict['subject_categories']: - if subject_cat in authz_buffer['subject_assignments']: - _cat.append(authz_buffer['subject_assignments'][subject_cat]) - for action_cat in sub_meta_rule_dict['action_categories']: - if action_cat in authz_buffer['action_assignments']: - _cat.append(authz_buffer['action_assignments'][action_cat]) - for object_cat in sub_meta_rule_dict['object_categories']: - if object_cat in authz_buffer['object_assignments']: - 
_cat.append(authz_buffer['object_assignments'][object_cat]) - - for _element in itertools.product(*_cat): - # Add the boolean at the end - _element = list(_element) - _element.append(True) - if _element in rule_list: - return True - - return False - - -def comparison(authz_buffer, sub_meta_rule_dict, rule_list): - return - - -def all_true(decision_buffer): - for _rule in decision_buffer: - if decision_buffer[_rule] == False: - return False - return True - - -def one_true(decision_buffer): - for _rule in decision_buffer: - if decision_buffer[_rule] == True: - return True - return False diff --git a/keystone-moon/keystone/contrib/moon/backends/__init__.py b/keystone-moon/keystone/contrib/moon/backends/__init__.py deleted file mode 100644 index 237bdc3e..00000000 --- a/keystone-moon/keystone/contrib/moon/backends/__init__.py +++ /dev/null @@ -1,97 +0,0 @@ - -""" -intra_extensions = { - intra_extension_id1: { - name: xxx, - model: yyy, - description: zzz}, - intra_extension_id2: {...}, - ... -} - -tenants = { - tenant_id1: { - name: xxx, - description: yyy, - intra_authz_extension_id: zzz, - intra_admin_extension_id: zzz, - }, - tenant_id2: {...}, - ... -} - ---------------- for each intra-extension ----------------- - -subject_categories = { - subject_category_id1: { - name: xxx, - description: yyy}, - subject_category_id2: {...}, - ... -} - -subjects = { - subject_id1: { - name: xxx, - description: yyy, - ...}, - subject_id2: {...}, - ... -} - -subject_scopes = { - subject_category_id1: { - subject_scope_id1: { - name: xxx, - description: aaa}, - subject_scope_id2: { - name: yyy, - description: bbb}, - ...}, - subject_scope_id3: { - ...} - subject_category_id2: {...}, - ... -} - -subject_assignments = { - subject_id1: { - subject_category_id1: [subject_scope_id1, subject_scope_id2, ...], - subject_category_id2: [subject_scope_id3, subject_scope_id4, ...], - ... 
- }, - subject_id2: { - subject_category_id1: [subject_scope_id1, subject_scope_id2, ...], - subject_category_id2: [subject_scope_id3, subject_scope_id4, ...], - ... - }, - ... -} - -aggregation_algorithm = { - aggregation_algorithm_id: { - name: xxx, - description: yyy - } - } - -sub_meta_rules = { - sub_meta_rule_id_1: { - "name": xxx, - "algorithm": yyy, - "subject_categories": [subject_category_id1, subject_category_id2,...], - "object_categories": [object_category_id1, object_category_id2,...], - "action_categories": [action_category_id1, action_category_id2,...] - sub_meta_rule_id_2: {...}, - ... -} - -rules = { - sub_meta_rule_id1: { - rule_id1: [subject_scope1, subject_scope2, ..., action_scope1, ..., object_scope1, ... ], - rule_id2: [subject_scope3, subject_scope4, ..., action_scope3, ..., object_scope3, ... ], - rule_id3: [thomas, write, admin.subjects] - ...}, - sub_meta_rule_id2: { }, - ...} -""" \ No newline at end of file diff --git a/keystone-moon/keystone/contrib/moon/backends/flat.py b/keystone-moon/keystone/contrib/moon/backends/flat.py deleted file mode 100644 index 05c1850b..00000000 --- a/keystone-moon/keystone/contrib/moon/backends/flat.py +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors -# This software is distributed under the terms and conditions of the 'Apache-2.0' -# license which can be found in the file 'LICENSE' in this package distribution -# or at 'http://www.apache.org/licenses/LICENSE-2.0'. 
- -from uuid import uuid4 -import os -import logging -import re -import time -from keystone import config -from oslo_log import log -# from keystone.contrib.moon.core import SuperExtensionDriver -from keystone.contrib.moon.core import LogDriver - - -CONF = config.CONF - - -class LogConnector(LogDriver): - - AUTHZ_FILE = '/var/log/moon/authz.log' - SYS_FILE = '/var/log/moon/system.log' - TIME_FORMAT = '%Y-%m-%d-%H:%M:%S' - - def __init__(self): - # Fixme (dthom): when logging from an other class, the %appname% in the event - # is always keystone.contrib.moon.backends.flat - super(LogConnector, self).__init__() - - self.SYS_LOG = logging.getLogger(__name__) - if not len(self.SYS_LOG.handlers): - fh = logging.FileHandler(self.SYS_FILE) - fh.setLevel(logging.DEBUG) - formatter = logging.Formatter('%(asctime)s ------ %(message)s', self.TIME_FORMAT) - fh.setFormatter(formatter) - self.SYS_LOG.addHandler(fh) - - self.AUTHZ_LOG = logging.getLogger("authz") - if not len(self.AUTHZ_LOG.handlers): - fh = logging.FileHandler(self.AUTHZ_FILE) - fh.setLevel(logging.WARNING) - formatter = logging.Formatter('%(asctime)s ------ %(message)s', self.TIME_FORMAT) - fh.setFormatter(formatter) - self.AUTHZ_LOG.addHandler(fh) - - def authz(self, message): - self.AUTHZ_LOG.warn(message) - - def debug(self, message): - self.SYS_LOG.debug(message) - - def info(self, message): - self.SYS_LOG.info(message) - - def warning(self, message): - self.SYS_LOG.warning(message) - - def error(self, message): - self.SYS_LOG.error(message) - - def critical(self, message): - self.SYS_LOG.critical(message) - - def get_logs(self, logger="authz", event_number=None, time_from=None, time_to=None, filter_str=None): - if logger == "authz": - _logs = open(self.AUTHZ_FILE).readlines() - else: - _logs = open(self.SYS_FILE).readlines() - if filter_str: - _logs = filter(lambda x: filter_str in x, _logs) - if time_from: - if isinstance(time_from, basestring): - time_from = time.strptime(time_from.split(" ")[0], 
self.TIME_FORMAT) - try: - __logs = [] - for log in _logs: - _log = time.strptime(log.split(" ")[0], self.TIME_FORMAT) - if time_from <= _log: - __logs.append(log) - _logs = __logs - except ValueError: - self.error("Time format error") - if time_to: - try: - if isinstance(time_to, basestring): - time_to = time.strptime(time_to.split(" ")[0], self.TIME_FORMAT) - __logs = [] - for log in _logs: - _log = time.strptime(log.split(" ")[0], self.TIME_FORMAT) - if time_to >= _log: - __logs.append(log) - _logs = __logs - except ValueError: - self.error("Time format error") - if event_number: - _logs = _logs[-event_number:] - return list(_logs) - - -# class SuperExtensionConnector(SuperExtensionDriver): -# -# def __init__(self): -# super(SuperExtensionConnector, self).__init__() -# # Super_Extension is loaded every time the server is started -# self.__uuid = uuid4().hex -# # self.__super_extension = Extension() -# _policy_abs_dir = os.path.join(CONF.moon.super_extension_directory, 'policy') -# # self.__super_extension.load_from_json(_policy_abs_dir) -# -# def get_super_extensions(self): -# return None -# -# def admin(self, sub, obj, act): -# # return self.__super_extension.authz(sub, obj, act) -# return True diff --git a/keystone-moon/keystone/contrib/moon/backends/memory.py b/keystone-moon/keystone/contrib/moon/backends/memory.py deleted file mode 100644 index b9fbb622..00000000 --- a/keystone-moon/keystone/contrib/moon/backends/memory.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors -# This software is distributed under the terms and conditions of the 'Apache-2.0' -# license which can be found in the file 'LICENSE' in this package distribution -# or at 'http://www.apache.org/licenses/LICENSE-2.0'. 
- -from uuid import uuid4 -from glob import glob -import os -import json -from keystone import config -from keystone.contrib.moon.core import ConfigurationDriver -from oslo_log import log -import hashlib - -CONF = config.CONF -LOG = log.getLogger(__name__) - - -class ConfigurationConnector(ConfigurationDriver): - - def __init__(self): - super(ConfigurationConnector, self).__init__() - self.aggregation_algorithms_dict = dict() - self.aggregation_algorithms_dict[hashlib.sha224("all_true").hexdigest()[:32]] = \ - {'name': 'all_true', 'description': 'all rules must match'} - self.aggregation_algorithms_dict[hashlib.sha224("one_true").hexdigest()[:32]] = \ - {'name': 'one_true', 'description': 'only one rule has to match'} - self.sub_meta_rule_algorithms_dict = dict() - self.sub_meta_rule_algorithms_dict[hashlib.sha224("inclusion").hexdigest()[:32]] = \ - {'name': 'inclusion', 'description': 'inclusion'} - self.sub_meta_rule_algorithms_dict[hashlib.sha224("comparison").hexdigest()[:32]] = \ - {'name': 'comparison', 'description': 'comparison'} - - def get_policy_templates_dict(self): - """ - :return: { - template_id1: {name: template_name, description: template_description}, - template_id2: {name: template_name, description: template_description}, - ... 
- } - """ - nodes = glob(os.path.join(CONF.moon.policy_directory, "*")) - templates = dict() - for node in nodes: - try: - metadata = json.load(open(os.path.join(node, "metadata.json"))) - except IOError: - # Note (asteroide): it's not a true policy directory, so we forgive it - continue - templates[os.path.basename(node)] = dict() - templates[os.path.basename(node)]["name"] = metadata["name"] - templates[os.path.basename(node)]["description"] = metadata["description"] - return templates - - def get_aggregation_algorithms_dict(self): - return self.aggregation_algorithms_dict - - def get_sub_meta_rule_algorithms_dict(self): - return self.sub_meta_rule_algorithms_dict diff --git a/keystone-moon/keystone/contrib/moon/backends/sql.py b/keystone-moon/keystone/contrib/moon/backends/sql.py deleted file mode 100644 index 1ddb474e..00000000 --- a/keystone-moon/keystone/contrib/moon/backends/sql.py +++ /dev/null @@ -1,1105 +0,0 @@ -# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors -# This software is distributed under the terms and conditions of the 'Apache-2.0' -# license which can be found in the file 'LICENSE' in this package distribution -# or at 'http://www.apache.org/licenses/LICENSE-2.0'. 
- -import six -from uuid import uuid4 -import copy - -from keystone import config -from oslo_log import log -from keystone.common import sql -from keystone import exception -from keystone.contrib.moon.exception import * -from oslo_serialization import jsonutils -from keystone.contrib.moon import IntraExtensionDriver -from keystone.contrib.moon import TenantDriver - -from sqlalchemy.orm.exc import UnmappedInstanceError -# from keystone.contrib.moon import InterExtensionDriver - -CONF = config.CONF -LOG = log.getLogger(__name__) - - -class IntraExtension(sql.ModelBase, sql.DictBase): - __tablename__ = 'intra_extensions' - attributes = ['id', 'intra_extension'] - id = sql.Column(sql.String(64), primary_key=True) - intra_extension = sql.Column(sql.JsonBlob(), nullable=True) - - @classmethod - def from_dict(cls, d): - new_d = d.copy() - return cls(**new_d) - - def to_dict(self): - return dict(six.iteritems(self)) - - -class Tenant(sql.ModelBase, sql.DictBase): - __tablename__ = 'tenants' - attributes = ['id', 'tenant'] - id = sql.Column(sql.String(64), primary_key=True, nullable=False) - tenant = sql.Column(sql.JsonBlob(), nullable=True) - - @classmethod - def from_dict(cls, d): - """Override parent from_dict() method with a different implementation. 
- """ - new_d = d.copy() - return cls(**new_d) - - def to_dict(self): - """ - """ - return dict(six.iteritems(self)) - - -class SubjectCategory(sql.ModelBase, sql.DictBase): - __tablename__ = 'subject_categories' - attributes = ['id', 'subject_category', 'intra_extension_id'] - id = sql.Column(sql.String(64), primary_key=True) - subject_category = sql.Column(sql.JsonBlob(), nullable=True) - intra_extension_id = sql.Column(sql.ForeignKey("intra_extensions.id"), nullable=False) - - @classmethod - def from_dict(cls, d): - new_d = d.copy() - return cls(**new_d) - - def to_dict(self): - return dict(six.iteritems(self)) - - -class ObjectCategory(sql.ModelBase, sql.DictBase): - __tablename__ = 'object_categories' - attributes = ['id', 'object_category', 'intra_extension_id'] - id = sql.Column(sql.String(64), primary_key=True) - object_category = sql.Column(sql.JsonBlob(), nullable=True) - intra_extension_id = sql.Column(sql.ForeignKey("intra_extensions.id"), nullable=False) - - @classmethod - def from_dict(cls, d): - new_d = d.copy() - return cls(**new_d) - - def to_dict(self): - return dict(six.iteritems(self)) - - -class ActionCategory(sql.ModelBase, sql.DictBase): - __tablename__ = 'action_categories' - attributes = ['id', 'action_category', 'intra_extension_id'] - id = sql.Column(sql.String(64), primary_key=True) - action_category = sql.Column(sql.JsonBlob(), nullable=True) - intra_extension_id = sql.Column(sql.ForeignKey("intra_extensions.id"), nullable=False) - - @classmethod - def from_dict(cls, d): - new_d = d.copy() - return cls(**new_d) - - def to_dict(self): - return dict(six.iteritems(self)) - - -class Subject(sql.ModelBase, sql.DictBase): - __tablename__ = 'subjects' - attributes = ['id', 'subject', 'intra_extension_id'] - id = sql.Column(sql.String(64), primary_key=True) - subject = sql.Column(sql.JsonBlob(), nullable=True) - intra_extension_id = sql.Column(sql.ForeignKey("intra_extensions.id"), nullable=False) - - @classmethod - def from_dict(cls, d): - 
new_d = d.copy() - return cls(**new_d) - - def to_dict(self): - return dict(six.iteritems(self)) - - -class Object(sql.ModelBase, sql.DictBase): - __tablename__ = 'objects' - attributes = ['id', 'object', 'intra_extension_id'] - id = sql.Column(sql.String(64), primary_key=True) - object = sql.Column(sql.JsonBlob(), nullable=True) - intra_extension_id = sql.Column(sql.ForeignKey("intra_extensions.id"), nullable=False) - - @classmethod - def from_dict(cls, d): - new_d = d.copy() - return cls(**new_d) - - def to_dict(self): - return dict(six.iteritems(self)) - - -class Action(sql.ModelBase, sql.DictBase): - __tablename__ = 'actions' - attributes = ['id', 'action', 'intra_extension_id'] - id = sql.Column(sql.String(64), primary_key=True) - action = sql.Column(sql.JsonBlob(), nullable=True) - intra_extension_id = sql.Column(sql.ForeignKey("intra_extensions.id"), nullable=False) - - @classmethod - def from_dict(cls, d): - new_d = d.copy() - return cls(**new_d) - - def to_dict(self): - return dict(six.iteritems(self)) - - -class SubjectScope(sql.ModelBase, sql.DictBase): - __tablename__ = 'subject_scopes' - attributes = ['id', 'subject_scope', 'intra_extension_id', 'subject_category_id'] - id = sql.Column(sql.String(64), primary_key=True) - subject_scope = sql.Column(sql.JsonBlob(), nullable=True) - intra_extension_id = sql.Column(sql.ForeignKey("intra_extensions.id"), nullable=False) - subject_category_id = sql.Column(sql.ForeignKey("subject_categories.id"), nullable=False) - - @classmethod - def from_dict(cls, d): - new_d = d.copy() - return cls(**new_d) - - def to_dict(self): - return dict(six.iteritems(self)) - - -class ObjectScope(sql.ModelBase, sql.DictBase): - __tablename__ = 'object_scopes' - attributes = ['id', 'object_scope', 'intra_extension_id', 'object_category_id'] - id = sql.Column(sql.String(64), primary_key=True) - object_scope = sql.Column(sql.JsonBlob(), nullable=True) - intra_extension_id = sql.Column(sql.ForeignKey("intra_extensions.id"), 
nullable=False) - object_category_id = sql.Column(sql.ForeignKey("object_categories.id"), nullable=False) - - @classmethod - def from_dict(cls, d): - new_d = d.copy() - return cls(**new_d) - - def to_dict(self): - return dict(six.iteritems(self)) - - -class ActionScope(sql.ModelBase, sql.DictBase): - __tablename__ = 'action_scopes' - attributes = ['id', 'action_scope', 'intra_extension_id', 'action_category'] - id = sql.Column(sql.String(64), primary_key=True) - action_scope = sql.Column(sql.JsonBlob(), nullable=True) - intra_extension_id = sql.Column(sql.ForeignKey("intra_extensions.id"), nullable=False) - action_category_id = sql.Column(sql.ForeignKey("action_categories.id"), nullable=False) - - @classmethod - def from_dict(cls, d): - new_d = d.copy() - return cls(**new_d) - - def to_dict(self): - return dict(six.iteritems(self)) - - -class SubjectAssignment(sql.ModelBase, sql.DictBase): - __tablename__ = 'subject_assignments' - attributes = ['id', 'subject_assignment', 'intra_extension_id', 'subject_id', 'subject_category_id'] - id = sql.Column(sql.String(64), primary_key=True) - subject_assignment = sql.Column(sql.JsonBlob(), nullable=True) - intra_extension_id = sql.Column(sql.ForeignKey("intra_extensions.id"), nullable=False) - subject_id = sql.Column(sql.ForeignKey("subjects.id"), nullable=False) - subject_category_id = sql.Column(sql.ForeignKey("subject_categories.id"), nullable=False) - - @classmethod - def from_dict(cls, d): - new_d = d.copy() - return cls(**new_d) - - def to_dict(self): - return dict(six.iteritems(self)) - - -class ObjectAssignment(sql.ModelBase, sql.DictBase): - __tablename__ = 'object_assignments' - attributes = ['id', 'object_assignment', 'intra_extension_id', 'object_id', 'object_category_id'] - id = sql.Column(sql.String(64), primary_key=True) - object_assignment = sql.Column(sql.JsonBlob(), nullable=True) - intra_extension_id = sql.Column(sql.ForeignKey("intra_extensions.id"), nullable=False) - object_id = 
sql.Column(sql.ForeignKey("objects.id"), nullable=False) - object_category_id = sql.Column(sql.ForeignKey("object_categories.id"), nullable=False) - - @classmethod - def from_dict(cls, d): - new_d = d.copy() - return cls(**new_d) - - def to_dict(self): - return dict(six.iteritems(self)) - - -class ActionAssignment(sql.ModelBase, sql.DictBase): - __tablename__ = 'action_assignments' - attributes = ['id', 'action_assignment', 'intra_extension_id', 'action_id', 'action_category_id'] - id = sql.Column(sql.String(64), primary_key=True) - action_assignment = sql.Column(sql.JsonBlob(), nullable=True) - intra_extension_id = sql.Column(sql.ForeignKey("intra_extensions.id"), nullable=False) - action_id = sql.Column(sql.ForeignKey("actions.id"), nullable=False) - action_category_id = sql.Column(sql.ForeignKey("action_categories.id"), nullable=False) - - @classmethod - def from_dict(cls, d): - new_d = d.copy() - return cls(**new_d) - - def to_dict(self): - return dict(six.iteritems(self)) - - -class SubMetaRule(sql.ModelBase, sql.DictBase): - __tablename__ = 'sub_meta_rules' - attributes = ['id', 'sub_meta_rule', 'intra_extension_id'] - id = sql.Column(sql.String(64), primary_key=True) - sub_meta_rule = sql.Column(sql.JsonBlob(), nullable=True) - intra_extension_id = sql.Column(sql.ForeignKey("intra_extensions.id"), nullable=False) - - @classmethod - def from_dict(cls, d): - new_d = d.copy() - return cls(**new_d) - - def to_dict(self): - return dict(six.iteritems(self)) - - -class Rule(sql.ModelBase, sql.DictBase): - __tablename__ = 'rules' - attributes = ['id', 'rule', 'intra_extension_id', 'sub_meta_rule_id'] - id = sql.Column(sql.String(64), primary_key=True) - rule = sql.Column(sql.JsonBlob(), nullable=True) - intra_extension_id = sql.Column(sql.ForeignKey("intra_extensions.id"), nullable=False) - sub_meta_rule_id = sql.Column(sql.ForeignKey("sub_meta_rules.id"), nullable=False) - - @classmethod - def from_dict(cls, d): - new_d = d.copy() - return cls(**new_d) - - def 
to_dict(self): - return dict(six.iteritems(self)) - - -__all_objects__ = ( - SubjectScope, - ObjectScope, - ActionScope, - SubjectAssignment, - ObjectAssignment, - ActionAssignment, - SubMetaRule, - SubjectCategory, - ObjectCategory, - ActionCategory, - Subject, - Object, - Action, - Rule, -) - - -class TenantConnector(TenantDriver): - - @staticmethod - def __update_dict(base, update): - """Update a dict only if values are not None - - :param base: dict to update - :param update: updates for the base dict - :return: None - """ - for key in update: - if type(update[key]) is not None: - base[key] = update[key] - - def get_tenants_dict(self): - with sql.session_for_read() as session: - query = session.query(Tenant) - tenants = query.all() - return {tenant.id: tenant.tenant for tenant in tenants} - - def add_tenant_dict(self, tenant_id, tenant_dict): - with sql.session_for_write() as session: - new_ref = Tenant.from_dict( - { - "id": tenant_id, - 'tenant': tenant_dict - } - ) - session.add(new_ref) - return {new_ref.id: new_ref.tenant} - - def del_tenant(self, tenant_id): - with sql.session_for_write() as session: - query = session.query(Tenant) - query = query.filter_by(id=tenant_id) - tenant = query.first() - session.delete(tenant) - - def set_tenant_dict(self, tenant_id, tenant_dict): - with sql.session_for_write() as session: - query = session.query(Tenant) - query = query.filter_by(id=tenant_id) - ref = query.first() - tenant_dict_orig = dict(ref.tenant) - self.__update_dict(tenant_dict_orig, tenant_dict) - setattr(ref, "tenant", tenant_dict_orig) - return {ref.id: tenant_dict_orig} - - -class IntraExtensionConnector(IntraExtensionDriver): - - # IntraExtension functions - - def get_intra_extensions_dict(self): - with sql.session_for_read() as session: - query = session.query(IntraExtension) - ref_list = query.all() - return {_ref.id: _ref.intra_extension for _ref in ref_list} - - def del_intra_extension(self, intra_extension_id): - with sql.session_for_write() as 
session: - ref = session.query(IntraExtension).get(intra_extension_id) - # Must delete all references to that IntraExtension - for _object in __all_objects__: - query = session.query(_object) - query = query.filter_by(intra_extension_id=intra_extension_id) - _refs = query.all() - for _ref in _refs: - session.delete(_ref) - # session.flush() - session.delete(ref) - - def set_intra_extension_dict(self, intra_extension_id, intra_extension_dict): - with sql.session_for_write() as session: - query = session.query(IntraExtension) - query = query.filter_by(id=intra_extension_id) - ref = query.first() - new_intra_extension = IntraExtension.from_dict( - { - "id": intra_extension_id, - 'intra_extension': intra_extension_dict, - } - ) - if not ref: - session.add(new_intra_extension) - ref = new_intra_extension - else: - for attr in IntraExtension.attributes: - if attr != 'id': - setattr(ref, attr, getattr(new_intra_extension, attr)) - # session.flush() - return IntraExtension.to_dict(ref) - - # Getter and Setter for subject_category - - def get_subject_categories_dict(self, intra_extension_id): - with sql.session_for_read() as session: - query = session.query(SubjectCategory) - query = query.filter_by(intra_extension_id=intra_extension_id) - ref_list = query.all() - return {_ref.id: _ref.subject_category for _ref in ref_list} - - def set_subject_category_dict(self, intra_extension_id, subject_category_id, subject_category_dict): - with sql.session_for_write() as session: - query = session.query(SubjectCategory) - query = query.filter_by(intra_extension_id=intra_extension_id, id=subject_category_id) - ref = query.first() - new_ref = SubjectCategory.from_dict( - { - "id": subject_category_id, - 'subject_category': subject_category_dict, - 'intra_extension_id': intra_extension_id - } - ) - if not ref: - session.add(new_ref) - ref = new_ref - else: - for attr in SubjectCategory.attributes: - if attr != 'id': - setattr(ref, attr, getattr(new_ref, attr)) - # # session.flush() - 
return {subject_category_id: SubjectCategory.to_dict(ref)['subject_category']} - - def del_subject_category(self, intra_extension_id, subject_category_id): - with sql.session_for_write() as session: - query = session.query(SubjectCategory) - query = query.filter_by(intra_extension_id=intra_extension_id, id=subject_category_id) - ref = query.first() - self.del_subject_assignment(intra_extension_id, None, None, None) - session.delete(ref) - - # Getter and Setter for object_category - - def get_object_categories_dict(self, intra_extension_id): - with sql.session_for_read() as session: - query = session.query(ObjectCategory) - query = query.filter_by(intra_extension_id=intra_extension_id) - ref_list = query.all() - return {_ref.id: _ref.object_category for _ref in ref_list} - - def set_object_category_dict(self, intra_extension_id, object_category_id, object_category_dict): - with sql.session_for_write() as session: - query = session.query(ObjectCategory) - query = query.filter_by(intra_extension_id=intra_extension_id, id=object_category_id) - ref = query.first() - new_ref = ObjectCategory.from_dict( - { - "id": object_category_id, - 'object_category': object_category_dict, - 'intra_extension_id': intra_extension_id - } - ) - if not ref: - session.add(new_ref) - ref = new_ref - else: - for attr in ObjectCategory.attributes: - if attr != 'id': - setattr(ref, attr, getattr(new_ref, attr)) - # session.flush() - return {object_category_id: ObjectCategory.to_dict(ref)['object_category']} - - def del_object_category(self, intra_extension_id, object_category_id): - with sql.session_for_write() as session: - query = session.query(ObjectCategory) - query = query.filter_by(intra_extension_id=intra_extension_id, id=object_category_id) - ref = query.first() - self.del_object_assignment(intra_extension_id, None, None, None) - session.delete(ref) - - # Getter and Setter for action_category - - def get_action_categories_dict(self, intra_extension_id): - with sql.session_for_read() as 
session: - query = session.query(ActionCategory) - query = query.filter_by(intra_extension_id=intra_extension_id) - ref_list = query.all() - return {_ref.id: _ref.action_category for _ref in ref_list} - - def set_action_category_dict(self, intra_extension_id, action_category_id, action_category_dict): - with sql.session_for_write() as session: - query = session.query(ActionCategory) - query = query.filter_by(intra_extension_id=intra_extension_id, id=action_category_id) - ref = query.first() - new_ref = ActionCategory.from_dict( - { - "id": action_category_id, - 'action_category': action_category_dict, - 'intra_extension_id': intra_extension_id - } - ) - if not ref: - session.add(new_ref) - ref = new_ref - else: - for attr in ActionCategory.attributes: - if attr != 'id': - setattr(ref, attr, getattr(new_ref, attr)) - # session.flush() - return {action_category_id: ActionCategory.to_dict(ref)['action_category']} - - def del_action_category(self, intra_extension_id, action_category_id): - with sql.session_for_write() as session: - query = session.query(ActionCategory) - query = query.filter_by(intra_extension_id=intra_extension_id, id=action_category_id) - ref = query.first() - self.del_action_assignment(intra_extension_id, None, None, None) - session.delete(ref) - - # Perimeter - - def get_subjects_dict(self, intra_extension_id): - with sql.session_for_read() as session: - query = session.query(Subject) - query = query.filter_by(intra_extension_id=intra_extension_id) - ref_list = query.all() - return {_ref.id: _ref.subject for _ref in ref_list} - - def set_subject_dict(self, intra_extension_id, subject_id, subject_dict): - with sql.session_for_write() as session: - query = session.query(Subject) - query = query.filter_by(intra_extension_id=intra_extension_id, id=subject_id) - ref = query.first() - # if 'id' in subject_dict: - # subject_dict['id'] = subject_id - new_ref = Subject.from_dict( - { - "id": subject_id, - 'subject': subject_dict, - 'intra_extension_id': 
intra_extension_id - } - ) - if not ref: - session.add(new_ref) - ref = new_ref - else: - for attr in Subject.attributes: - if attr != 'id': - setattr(ref, attr, getattr(new_ref, attr)) - # session.flush() - return {subject_id: Subject.to_dict(ref)['subject']} - - def del_subject(self, intra_extension_id, subject_id): - with sql.session_for_write() as session: - query = session.query(Subject) - query = query.filter_by(intra_extension_id=intra_extension_id, id=subject_id) - ref = query.first() - session.delete(ref) - - def get_objects_dict(self, intra_extension_id): - with sql.session_for_read() as session: - query = session.query(Object) - query = query.filter_by(intra_extension_id=intra_extension_id) - ref_list = query.all() - return {_ref.id: _ref.object for _ref in ref_list} - - def set_object_dict(self, intra_extension_id, object_id, object_dict): - with sql.session_for_write() as session: - query = session.query(Object) - query = query.filter_by(intra_extension_id=intra_extension_id, id=object_id) - ref = query.first() - new_ref = Object.from_dict( - { - "id": object_id, - 'object': object_dict, - 'intra_extension_id': intra_extension_id - } - ) - if not ref: - session.add(new_ref) - ref = new_ref - else: - for attr in Object.attributes: - if attr != 'id': - setattr(ref, attr, getattr(new_ref, attr)) - # session.flush() - return {object_id: Object.to_dict(ref)['object']} - - def del_object(self, intra_extension_id, object_id): - with sql.session_for_write() as session: - query = session.query(Object) - query = query.filter_by(intra_extension_id=intra_extension_id, id=object_id) - ref = query.first() - session.delete(ref) - - def get_actions_dict(self, intra_extension_id): - with sql.session_for_read() as session: - query = session.query(Action) - query = query.filter_by(intra_extension_id=intra_extension_id) - ref_list = query.all() - return {_ref.id: _ref.action for _ref in ref_list} - - def set_action_dict(self, intra_extension_id, action_id, action_dict): - 
with sql.session_for_write() as session: - query = session.query(Action) - query = query.filter_by(intra_extension_id=intra_extension_id, id=action_id) - ref = query.first() - new_ref = Action.from_dict( - { - "id": action_id, - 'action': action_dict, - 'intra_extension_id': intra_extension_id - } - ) - if not ref: - session.add(new_ref) - ref = new_ref - else: - for attr in Action.attributes: - if attr != 'id': - setattr(ref, attr, getattr(new_ref, attr)) - # session.flush() - return {action_id: Action.to_dict(ref)['action']} - - def del_action(self, intra_extension_id, action_id): - with sql.session_for_write() as session: - query = session.query(Action) - query = query.filter_by(intra_extension_id=intra_extension_id, id=action_id) - ref = query.first() - session.delete(ref) - - # Getter and Setter for subject_scope - - def get_subject_scopes_dict(self, intra_extension_id, subject_category_id): - with sql.session_for_read() as session: - query = session.query(SubjectScope) - query = query.filter_by(intra_extension_id=intra_extension_id, subject_category_id=subject_category_id) - ref_list = query.all() - return {_ref.id: _ref.subject_scope for _ref in ref_list} - - def set_subject_scope_dict(self, intra_extension_id, subject_category_id, subject_scope_id, subject_scope_dict): - with sql.session_for_write() as session: - query = session.query(SubjectScope) - query = query.filter_by(intra_extension_id=intra_extension_id, subject_category_id=subject_category_id, id=subject_scope_id) - ref = query.first() - new_ref = SubjectScope.from_dict( - { - "id": subject_scope_id, - 'subject_scope': subject_scope_dict, - 'intra_extension_id': intra_extension_id, - 'subject_category_id': subject_category_id - } - ) - if not ref: - session.add(new_ref) - ref = new_ref - else: - for attr in Subject.attributes: - if attr != 'id': - setattr(ref, attr, getattr(new_ref, attr)) - # session.flush() - return {subject_scope_id: SubjectScope.to_dict(ref)['subject_scope']} - - def 
del_subject_scope(self, intra_extension_id, subject_category_id, subject_scope_id): - with sql.session_for_write() as session: - query = session.query(SubjectScope) - if not subject_category_id or not subject_scope_id: - query = query.filter_by(intra_extension_id=intra_extension_id) - for ref in query.all(): - session.delete(ref) - else: - query = query.filter_by(intra_extension_id=intra_extension_id, subject_category_id=subject_category_id, id=subject_scope_id) - ref = query.first() - session.delete(ref) - - # Getter and Setter for object_category_scope - - def get_object_scopes_dict(self, intra_extension_id, object_category_id): - with sql.session_for_read() as session: - query = session.query(ObjectScope) - query = query.filter_by(intra_extension_id=intra_extension_id, object_category_id=object_category_id) - ref_list = query.all() - return {_ref.id: _ref.object_scope for _ref in ref_list} - - def set_object_scope_dict(self, intra_extension_id, object_category_id, object_scope_id, object_scope_dict): - with sql.session_for_write() as session: - query = session.query(ObjectScope) - query = query.filter_by(intra_extension_id=intra_extension_id, object_category_id=object_category_id, id=object_scope_id) - ref = query.first() - new_ref = ObjectScope.from_dict( - { - "id": object_scope_id, - 'object_scope': object_scope_dict, - 'intra_extension_id': intra_extension_id, - 'object_category_id': object_category_id - } - ) - if not ref: - session.add(new_ref) - ref = new_ref - else: - for attr in Object.attributes: - if attr != 'id': - setattr(ref, attr, getattr(new_ref, attr)) - # session.flush() - return {object_scope_id: ObjectScope.to_dict(ref)['object_scope']} - - def del_object_scope(self, intra_extension_id, object_category_id, object_scope_id): - with sql.session_for_write() as session: - query = session.query(ObjectScope) - if not object_category_id or not object_scope_id: - query = query.filter_by(intra_extension_id=intra_extension_id) - for ref in query.all(): 
- session.delete(ref) - else: - query = query.filter_by(intra_extension_id=intra_extension_id, object_category_id=object_category_id, id=object_scope_id) - ref = query.first() - session.delete(ref) - - # Getter and Setter for action_scope - - def get_action_scopes_dict(self, intra_extension_id, action_category_id): - with sql.session_for_read() as session: - query = session.query(ActionScope) - query = query.filter_by(intra_extension_id=intra_extension_id, action_category_id=action_category_id) - ref_list = query.all() - return {_ref.id: _ref.action_scope for _ref in ref_list} - - def set_action_scope_dict(self, intra_extension_id, action_category_id, action_scope_id, action_scope_dict): - with sql.session_for_write() as session: - query = session.query(ActionScope) - query = query.filter_by(intra_extension_id=intra_extension_id, action_category_id=action_category_id, id=action_scope_id) - ref = query.first() - new_ref = ActionScope.from_dict( - { - "id": action_scope_id, - 'action_scope': action_scope_dict, - 'intra_extension_id': intra_extension_id, - 'action_category_id': action_category_id - } - ) - if not ref: - session.add(new_ref) - ref = new_ref - else: - for attr in Action.attributes: - if attr != 'id': - setattr(ref, attr, getattr(new_ref, attr)) - # session.flush() - return {action_scope_id: ActionScope.to_dict(ref)['action_scope']} - - def del_action_scope(self, intra_extension_id, action_category_id, action_scope_id): - with sql.session_for_write() as session: - query = session.query(ActionScope) - if not action_category_id or not action_scope_id: - query = query.filter_by(intra_extension_id=intra_extension_id) - for ref in query.all(): - session.delete(ref) - else: - query = query.filter_by(intra_extension_id=intra_extension_id, action_category_id=action_category_id, id=action_scope_id) - ref = query.first() - session.delete(ref) - - # Getter and Setter for subject_category_assignment - - def get_subject_assignment_list(self, intra_extension_id, 
subject_id, subject_category_id): - with sql.session_for_read() as session: - query = session.query(SubjectAssignment) - if not subject_id or not subject_category_id or not subject_category_id: - query = query.filter_by(intra_extension_id=intra_extension_id) - ref = query.all() - return ref - else: - query = query.filter_by(intra_extension_id=intra_extension_id, subject_id=subject_id, subject_category_id=subject_category_id) - ref = query.first() - if not ref: - return list() - return list(ref.subject_assignment) - - def set_subject_assignment_list(self, intra_extension_id, subject_id, subject_category_id, subject_assignment_list=[]): - with sql.session_for_write() as session: - query = session.query(SubjectAssignment) - query = query.filter_by(intra_extension_id=intra_extension_id, subject_id=subject_id, subject_category_id=subject_category_id) - ref = query.first() - new_ref = SubjectAssignment.from_dict( - { - "id": uuid4().hex, - 'subject_assignment': subject_assignment_list, - 'intra_extension_id': intra_extension_id, - 'subject_id': subject_id, - 'subject_category_id': subject_category_id - } - ) - if not ref: - session.add(new_ref) - ref = new_ref - else: - for attr in SubjectAssignment.attributes: - if attr != 'id': - setattr(ref, attr, getattr(new_ref, attr)) - # session.flush() - return subject_assignment_list - - def add_subject_assignment_list(self, intra_extension_id, subject_id, subject_category_id, subject_scope_id): - new_subject_assignment_list = self.get_subject_assignment_list(intra_extension_id, subject_id, subject_category_id) - if subject_scope_id not in new_subject_assignment_list: - new_subject_assignment_list.append(subject_scope_id) - return self.set_subject_assignment_list(intra_extension_id, subject_id, subject_category_id, new_subject_assignment_list) - - def del_subject_assignment(self, intra_extension_id, subject_id, subject_category_id, subject_scope_id): - if not subject_id or not subject_category_id or not subject_category_id: - 
with sql.session_for_write() as session: - for ref in self.get_subject_assignment_list(intra_extension_id, None, None): - session.delete(ref) - session.flush() - return - new_subject_assignment_list = self.get_subject_assignment_list(intra_extension_id, subject_id, subject_category_id) - new_subject_assignment_list.remove(subject_scope_id) - return self.set_subject_assignment_list(intra_extension_id, subject_id, subject_category_id, new_subject_assignment_list) - - # Getter and Setter for object_category_assignment - - def get_object_assignment_list(self, intra_extension_id, object_id, object_category_id): - with sql.session_for_read() as session: - query = session.query(ObjectAssignment) - if not object_id or not object_category_id or not object_category_id: - query = query.filter_by(intra_extension_id=intra_extension_id) - ref = query.all() - return ref - else: - query = query.filter_by(intra_extension_id=intra_extension_id, object_id=object_id, object_category_id=object_category_id) - ref = query.first() - if not ref: - return list() - return list(ref.object_assignment) - - def set_object_assignment_list(self, intra_extension_id, object_id, object_category_id, object_assignment_list=[]): - with sql.session_for_write() as session: - query = session.query(ObjectAssignment) - query = query.filter_by(intra_extension_id=intra_extension_id, object_id=object_id, object_category_id=object_category_id) - ref = query.first() - new_ref = ObjectAssignment.from_dict( - { - "id": uuid4().hex, - 'object_assignment': object_assignment_list, - 'intra_extension_id': intra_extension_id, - 'object_id': object_id, - 'object_category_id': object_category_id - } - ) - if not ref: - session.add(new_ref) - else: - for attr in ObjectAssignment.attributes: - if attr != 'id': - setattr(ref, attr, getattr(new_ref, attr)) - # session.flush() - return self.get_object_assignment_list(intra_extension_id, object_id, object_category_id) - - def add_object_assignment_list(self, intra_extension_id, 
object_id, object_category_id, object_scope_id): - new_object_assignment_list = self.get_object_assignment_list(intra_extension_id, object_id, object_category_id) - if object_scope_id not in new_object_assignment_list: - new_object_assignment_list.append(object_scope_id) - return self.set_object_assignment_list(intra_extension_id, object_id, object_category_id, new_object_assignment_list) - - def del_object_assignment(self, intra_extension_id, object_id, object_category_id, object_scope_id): - if not object_id or not object_category_id or not object_category_id: - with sql.session_for_write() as session: - for ref in self.get_object_assignment_list(intra_extension_id, None, None): - session.delete(ref) - session.flush() - return - new_object_assignment_list = self.get_object_assignment_list(intra_extension_id, object_id, object_category_id) - new_object_assignment_list.remove(object_scope_id) - return self.set_object_assignment_list(intra_extension_id, object_id, object_category_id, new_object_assignment_list) - - # Getter and Setter for action_category_assignment - - def get_action_assignment_list(self, intra_extension_id, action_id, action_category_id): - with sql.session_for_read() as session: - query = session.query(ActionAssignment) - if not action_id or not action_category_id or not action_category_id: - query = query.filter_by(intra_extension_id=intra_extension_id) - ref = query.all() - return ref - else: - query = query.filter_by(intra_extension_id=intra_extension_id, action_id=action_id, action_category_id=action_category_id) - ref = query.first() - if not ref: - return list() - return list(ref.action_assignment) - - def set_action_assignment_list(self, intra_extension_id, action_id, action_category_id, action_assignment_list=[]): - with sql.session_for_write() as session: - query = session.query(ActionAssignment) - query = query.filter_by(intra_extension_id=intra_extension_id, action_id=action_id, action_category_id=action_category_id) - ref = 
query.first() - new_ref = ActionAssignment.from_dict( - { - "id": uuid4().hex, - 'action_assignment': action_assignment_list, - 'intra_extension_id': intra_extension_id, - 'action_id': action_id, - 'action_category_id': action_category_id - } - ) - if not ref: - session.add(new_ref) - else: - for attr in ActionAssignment.attributes: - if attr != 'id': - setattr(ref, attr, getattr(new_ref, attr)) - # session.flush() - return self.get_action_assignment_list(intra_extension_id, action_id, action_category_id) - - def add_action_assignment_list(self, intra_extension_id, action_id, action_category_id, action_scope_id): - new_action_assignment_list = self.get_action_assignment_list(intra_extension_id, action_id, action_category_id) - if action_scope_id not in new_action_assignment_list: - new_action_assignment_list.append(action_scope_id) - return self.set_action_assignment_list(intra_extension_id, action_id, action_category_id, new_action_assignment_list) - - def del_action_assignment(self, intra_extension_id, action_id, action_category_id, action_scope_id): - if not action_id or not action_category_id or not action_category_id: - with sql.session_for_write() as session: - for ref in self.get_action_assignment_list(intra_extension_id, None, None): - session.delete(ref) - session.flush() - return - new_action_assignment_list = self.get_action_assignment_list(intra_extension_id, action_id, action_category_id) - new_action_assignment_list.remove(action_scope_id) - return self.set_action_assignment_list(intra_extension_id, action_id, action_category_id, new_action_assignment_list) - - # Getter and Setter for sub_meta_rule - - def get_aggregation_algorithm_id(self, intra_extension_id): - with sql.session_for_read() as session: - query = session.query(IntraExtension) - query = query.filter_by(id=intra_extension_id) - ref = query.first() - try: - return {"aggregation_algorithm": ref.intra_extension["aggregation_algorithm"]} - except KeyError: - return "" - - def 
set_aggregation_algorithm_id(self, intra_extension_id, aggregation_algorithm_id): - with sql.session_for_write() as session: - query = session.query(IntraExtension) - query = query.filter_by(id=intra_extension_id) - ref = query.first() - intra_extension_dict = dict(ref.intra_extension) - intra_extension_dict["aggregation_algorithm"] = aggregation_algorithm_id - setattr(ref, "intra_extension", intra_extension_dict) - # session.flush() - return {"aggregation_algorithm": ref.intra_extension["aggregation_algorithm"]} - - def del_aggregation_algorithm(self, intra_extension_id): - with sql.session_for_write() as session: - query = session.query(IntraExtension) - query = query.filter_by(id=intra_extension_id) - ref = query.first() - intra_extension_dict = dict(ref.intra_extension) - intra_extension_dict["aggregation_algorithm"] = "" - setattr(ref, "intra_extension", intra_extension_dict) - return self.get_aggregation_algorithm_id(intra_extension_id) - - # Getter and Setter for sub_meta_rule - - def get_sub_meta_rules_dict(self, intra_extension_id): - with sql.session_for_read() as session: - query = session.query(SubMetaRule) - query = query.filter_by(intra_extension_id=intra_extension_id) - ref_list = query.all() - return {_ref.id: _ref.sub_meta_rule for _ref in ref_list} - - def set_sub_meta_rule_dict(self, intra_extension_id, sub_meta_rule_id, sub_meta_rule_dict): - with sql.session_for_write() as session: - query = session.query(SubMetaRule) - query = query.filter_by(intra_extension_id=intra_extension_id, id=sub_meta_rule_id) - ref = query.first() - new_ref = SubMetaRule.from_dict( - { - "id": sub_meta_rule_id, - 'sub_meta_rule': sub_meta_rule_dict, - 'intra_extension_id': intra_extension_id - } - ) - if not ref: - session.add(new_ref) - else: - _sub_meta_rule_dict = dict(ref.sub_meta_rule) - _sub_meta_rule_dict.update(sub_meta_rule_dict) - setattr(new_ref, "sub_meta_rule", _sub_meta_rule_dict) - for attr in SubMetaRule.attributes: - if attr != 'id': - setattr(ref, 
attr, getattr(new_ref, attr)) - # session.flush() - return self.get_sub_meta_rules_dict(intra_extension_id) - - def del_sub_meta_rule(self, intra_extension_id, sub_meta_rule_id): - with sql.session_for_write() as session: - query = session.query(SubMetaRule) - query = query.filter_by(intra_extension_id=intra_extension_id, id=sub_meta_rule_id) - ref = query.first() - session.delete(ref) - - # Getter and Setter for rules - - def get_rules_dict(self, intra_extension_id, sub_meta_rule_id): - with sql.session_for_read() as session: - query = session.query(Rule) - query = query.filter_by(intra_extension_id=intra_extension_id, sub_meta_rule_id=sub_meta_rule_id) - ref_list = query.all() - return {_ref.id: _ref.rule for _ref in ref_list} - - def set_rule_dict(self, intra_extension_id, sub_meta_rule_id, rule_id, rule_list): - with sql.session_for_write() as session: - query = session.query(Rule) - query = query.filter_by(intra_extension_id=intra_extension_id, sub_meta_rule_id=sub_meta_rule_id, id=rule_id) - ref = query.first() - new_ref = Rule.from_dict( - { - "id": rule_id, - 'rule': rule_list, - 'intra_extension_id': intra_extension_id, - 'sub_meta_rule_id': sub_meta_rule_id - } - ) - if not ref: - session.add(new_ref) - ref = new_ref - else: - for attr in Rule.attributes: - if attr != 'id': - setattr(ref, attr, getattr(new_ref, attr)) - # session.flush() - return {rule_id: ref.rule} - - def del_rule(self, intra_extension_id, sub_meta_rule_id, rule_id): - with sql.session_for_write() as session: - query = session.query(Rule) - query = query.filter_by(intra_extension_id=intra_extension_id, sub_meta_rule_id=sub_meta_rule_id, id=rule_id) - ref = query.first() - session.delete(ref) - - -# class InterExtension(sql.ModelBase, sql.DictBase): -# __tablename__ = 'inter_extension' -# attributes = [ -# 'id', -# 'requesting_intra_extension_id', -# 'requested_intra_extension_id', -# 'virtual_entity_uuid', -# 'genre', -# 'description', -# ] -# id = sql.Column(sql.String(64), 
primary_key=True) -# requesting_intra_extension_id = sql.Column(sql.String(64)) -# requested_intra_extension_id = sql.Column(sql.String(64)) -# virtual_entity_uuid = sql.Column(sql.String(64)) -# genre = sql.Column(sql.String(64)) -# description = sql.Column(sql.Text()) -# -# @classmethod -# def from_dict(cls, d): -# """Override parent from_dict() method with a simpler implementation. -# """ -# new_d = d.copy() -# return cls(**new_d) -# -# def to_dict(self): -# """Override parent to_dict() method with a simpler implementation. -# """ -# return dict(six.iteritems(self)) -# -# -# class InterExtensionConnector(InterExtensionDriver): -# -# def get_inter_extensions(self): -# with sql.session_for_read() as session: -# query = session.query(InterExtension.id) -# interextensions = query.all() -# return [interextension.id for interextension in interextensions] -# -# def create_inter_extensions(self, inter_id, inter_extension): -# with sql.session_for_read() as session: -# ie_ref = InterExtension.from_dict(inter_extension) -# session.add(ie_ref) -# return InterExtension.to_dict(ie_ref) -# -# def get_inter_extension(self, uuid): -# with sql.session_for_read() as session: -# query = session.query(InterExtension) -# query = query.filter_by(id=uuid) -# ref = query.first() -# if not ref: -# raise exception.NotFound -# return ref.to_dict() -# -# def delete_inter_extensions(self, inter_extension_id): -# with sql.session_for_read() as session: -# ref = session.query(InterExtension).get(inter_extension_id) -# session.delete(ref) - diff --git a/keystone-moon/keystone/contrib/moon/controllers.py b/keystone-moon/keystone/contrib/moon/controllers.py deleted file mode 100644 index d3f1bfad..00000000 --- a/keystone-moon/keystone/contrib/moon/controllers.py +++ /dev/null @@ -1,917 +0,0 @@ -# Copyright 2015 Open Platform for NFV Project, Inc. 
and its contributors -# This software is distributed under the terms and conditions of the 'Apache-2.0' -# license which can be found in the file 'LICENSE' in this package distribution -# or at 'http://www.apache.org/licenses/LICENSE-2.0'. - -from keystone.common import controller -from keystone import config -from keystone import exception -from keystone.models import token_model -from keystone.contrib.moon.exception import * -from oslo_log import log -from uuid import uuid4 -import requests - - -CONF = config.CONF -LOG = log.getLogger(__name__) - - -@dependency.requires('configuration_api') -class Configuration(controller.V3Controller): - collection_name = 'configurations' - member_name = 'configuration' - - def __init__(self): - super(Configuration, self).__init__() - - def _get_user_id_from_token(self, token_id): - response = self.token_provider_api.validate_token(token_id) - token_ref = token_model.KeystoneToken(token_id=token_id, token_data=response) - return token_ref.get('user') - - @controller.protected() - def get_policy_templates(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - return self.configuration_api.get_policy_templates_dict(user_id) - - @controller.protected() - def get_aggregation_algorithms(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - return self.configuration_api.get_aggregation_algorithms_dict(user_id) - - @controller.protected() - def get_sub_meta_rule_algorithms(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - return self.configuration_api.get_sub_meta_rule_algorithms_dict(user_id) - - -@dependency.requires('tenant_api', 'resource_api') -class Tenants(controller.V3Controller): - - def __init__(self): - super(Tenants, self).__init__() - - def _get_user_id_from_token(self, token_id): - response = self.token_provider_api.validate_token(token_id) - token_ref = token_model.KeystoneToken(token_id=token_id, 
token_data=response) - return token_ref.get('user') - - @controller.protected() - def get_tenants(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - return self.tenant_api.get_tenants_dict(user_id) - - def __get_keystone_tenant_dict(self, tenant_id="", tenant_name="", tenant_description="", domain="default"): - tenants = self.resource_api.list_projects() - for tenant in tenants: - if tenant_id and tenant_id == tenant['id']: - return tenant - if tenant_name and tenant_name == tenant['name']: - return tenant - if not tenant_id: - tenant_id = uuid4().hex - if not tenant_name: - tenant_name = tenant_id - tenant = { - "id": tenant_id, - "name": tenant_name, - "description": tenant_description, - "enabled": True, - "domain_id": domain - } - keystone_tenant = self.resource_api.create_project(tenant["id"], tenant) - return keystone_tenant - - @controller.protected() - def add_tenant(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - k_tenant_dict = self.__get_keystone_tenant_dict( - tenant_name=kw.get('tenant_name'), - tenant_description=kw.get('tenant_description', kw.get('tenant_name')), - domain=kw.get('tenant_domain', "default"), - - ) - tenant_dict = dict() - tenant_dict['id'] = k_tenant_dict['id'] - tenant_dict['name'] = kw.get('tenant_name', None) - tenant_dict['description'] = kw.get('tenant_description', None) - tenant_dict['intra_authz_extension_id'] = kw.get('tenant_intra_authz_extension_id', None) - tenant_dict['intra_admin_extension_id'] = kw.get('tenant_intra_admin_extension_id', None) - return self.tenant_api.add_tenant_dict(user_id, tenant_dict['id'], tenant_dict) - - @controller.protected() - def get_tenant(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - tenant_id = kw.get('tenant_id', None) - return self.tenant_api.get_tenant_dict(user_id, tenant_id) - - @controller.protected() - def del_tenant(self, context, **kw): - user_id = 
self._get_user_id_from_token(context.get('token_id')) - tenant_id = kw.get('tenant_id', None) - return self.tenant_api.del_tenant(user_id, tenant_id) - - @controller.protected() - def set_tenant(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - # Next line will raise an error if tenant doesn't exist - k_tenant_dict = self.resource_api.get_project(kw.get('tenant_id', None)) - tenant_id = kw.get('tenant_id', None) - tenant_dict = dict() - tenant_dict['name'] = k_tenant_dict.get('name', None) - if 'tenant_description' in kw: - tenant_dict['description'] = kw.get('tenant_description', None) - if 'tenant_intra_authz_extension_id' in kw: - tenant_dict['intra_authz_extension_id'] = kw.get('tenant_intra_authz_extension_id', None) - if 'tenant_intra_admin_extension_id' in kw: - tenant_dict['intra_admin_extension_id'] = kw.get('tenant_intra_admin_extension_id', None) - self.tenant_api.set_tenant_dict(user_id, tenant_id, tenant_dict) - - -def callback(self, context, prep_info, *args, **kwargs): - token_ref = "" - if context.get('token_id') is not None: - token_ref = token_model.KeystoneToken( - token_id=context['token_id'], - token_data=self.token_provider_api.validate_token( - context['token_id'])) - if not token_ref: - raise exception.Unauthorized - - -@dependency.requires('authz_api') -class Authz_v3(controller.V3Controller): - - def __init__(self): - super(Authz_v3, self).__init__() - - @controller.protected(callback) - def get_authz(self, context, tenant_id, subject_k_id, object_name, action_name): - try: - return self.authz_api.authz(tenant_id, subject_k_id, object_name, action_name) - except Exception as e: - return {'authz': False, 'comment': unicode(e)} - - -@dependency.requires('admin_api', 'root_api') -class IntraExtensions(controller.V3Controller): - collection_name = 'intra_extensions' - member_name = 'intra_extension' - - def __init__(self): - super(IntraExtensions, self).__init__() - - def _get_user_id_from_token(self, 
token_id): - response = self.token_provider_api.validate_token(token_id) - token_ref = token_model.KeystoneToken(token_id=token_id, token_data=response) - return token_ref.get('user')['id'] - - # IntraExtension functions - @controller.protected() - def get_intra_extensions(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - return self.admin_api.get_intra_extensions_dict(user_id) - - @controller.protected() - def add_intra_extension(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_dict = dict() - intra_extension_dict['name'] = kw.get('intra_extension_name', None) - intra_extension_dict['model'] = kw.get('intra_extension_model', None) - intra_extension_dict['genre'] = kw.get('intra_extension_genre', None) - intra_extension_dict['description'] = kw.get('intra_extension_description', None) - intra_extension_dict['subject_categories'] = kw.get('intra_extension_subject_categories', dict()) - intra_extension_dict['object_categories'] = kw.get('intra_extension_object_categories', dict()) - intra_extension_dict['action_categories'] = kw.get('intra_extension_action_categories', dict()) - intra_extension_dict['subjects'] = kw.get('intra_extension_subjects', dict()) - intra_extension_dict['objects'] = kw.get('intra_extension_objects', dict()) - intra_extension_dict['actions'] = kw.get('intra_extension_actions', dict()) - intra_extension_dict['subject_scopes'] = kw.get('intra_extension_subject_scopes', dict()) - intra_extension_dict['object_scopes'] = kw.get('intra_extension_object_scopes', dict()) - intra_extension_dict['action_scopes'] = kw.get('intra_extension_action_scopes', dict()) - intra_extension_dict['subject_assignments'] = kw.get('intra_extension_subject_assignments', dict()) - intra_extension_dict['object_assignments'] = kw.get('intra_extension_object_assignments', dict()) - intra_extension_dict['action_assignments'] = kw.get('intra_extension_action_assignments', dict()) 
- intra_extension_dict['aggregation_algorithm'] = kw.get('intra_extension_aggregation_algorithm', dict()) - intra_extension_dict['sub_meta_rules'] = kw.get('intra_extension_sub_meta_rules', dict()) - intra_extension_dict['rules'] = kw.get('intra_extension_rules', dict()) - ref = self.admin_api.load_intra_extension_dict(user_id, intra_extension_dict=intra_extension_dict) - return self.admin_api.populate_default_data(ref) - - @controller.protected() - def get_intra_extension(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - return self.admin_api.get_intra_extension_dict(user_id, intra_extension_id) - - @controller.protected() - def del_intra_extension(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - self.admin_api.del_intra_extension(user_id, intra_extension_id) - - @controller.protected() - def set_intra_extension(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - intra_extension_dict = dict() - intra_extension_dict['name'] = kw.get('intra_extension_name', None) - intra_extension_dict['model'] = kw.get('intra_extension_model', None) - intra_extension_dict['genre'] = kw.get('intra_extension_genre', None) - intra_extension_dict['description'] = kw.get('intra_extension_description', None) - return self.admin_api.set_intra_extension_dict(user_id, intra_extension_id, intra_extension_dict) - - @controller.protected() - def load_root_intra_extension(self, context, **kw): - self.root_api.load_root_intra_extension_dict() - - # Metadata functions - @controller.protected() - def get_subject_categories(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - return 
self.admin_api.get_subject_categories_dict(user_id, intra_extension_id) - - @controller.protected() - def add_subject_category(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - subject_category_dict = dict() - subject_category_dict['name'] = kw.get('subject_category_name', None) - subject_category_dict['description'] = kw.get('subject_category_description', None) - return self.admin_api.add_subject_category_dict(user_id, intra_extension_id, subject_category_dict) - - @controller.protected() - def get_subject_category(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - subject_category_id = kw.get('subject_category_id', None) - return self.admin_api.get_subject_category_dict(user_id, intra_extension_id, subject_category_id) - - @controller.protected() - def del_subject_category(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - subject_category_id = kw.get('subject_category_id', None) - self.admin_api.del_subject_category(user_id, intra_extension_id, subject_category_id) - - @controller.protected() - def set_subject_category(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - subject_category_id = kw.get('subject_category_id', None) - subject_category_dict = dict() - subject_category_dict['name'] = kw.get('subject_category_name', None) - subject_category_dict['description'] = kw.get('subject_category_description', None) - return self.admin_api.set_subject_category_dict(user_id, intra_extension_id, subject_category_id, subject_category_dict) - - @controller.protected() - def get_object_categories(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - 
intra_extension_id = kw.get('intra_extension_id', None) - return self.admin_api.get_object_categories_dict(user_id, intra_extension_id) - - @controller.protected() - def add_object_category(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - object_category_dict = dict() - object_category_dict['name'] = kw.get('object_category_name', None) - object_category_dict['description'] = kw.get('object_category_description', None) - return self.admin_api.add_object_category_dict(user_id, intra_extension_id, object_category_dict) - - @controller.protected() - def get_object_category(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - object_category_id = kw.get('object_category_id', None) - return self.admin_api.get_object_categories_dict(user_id, intra_extension_id, object_category_id) - - @controller.protected() - def del_object_category(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - object_category_id = kw.get('object_category_id', None) - self.admin_api.del_object_category(user_id, intra_extension_id, object_category_id) - - @controller.protected() - def set_object_category(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - object_category_id = kw.get('object_category_id', None) - object_category_dict = dict() - object_category_dict['name'] = kw.get('object_category_name', None) - object_category_dict['description'] = kw.get('object_category_description', None) - return self.admin_api.set_object_category_dict(user_id, intra_extension_id, object_category_id, object_category_dict) - - @controller.protected() - def get_action_categories(self, context, **kw): - user_id = 
self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - return self.admin_api.get_action_categories_dict(user_id, intra_extension_id) - - @controller.protected() - def add_action_category(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - action_category_dict = dict() - action_category_dict['name'] = kw.get('action_category_name', None) - action_category_dict['description'] = kw.get('action_category_description', None) - return self.admin_api.add_action_category_dict(user_id, intra_extension_id, action_category_dict) - - @controller.protected() - def get_action_category(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - action_category_id = kw.get('action_category_id', None) - return self.admin_api.get_action_categories_dict(user_id, intra_extension_id, action_category_id) - - @controller.protected() - def del_action_category(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - action_category_id = kw.get('action_category_id', None) - self.admin_api.del_action_category(user_id, intra_extension_id, action_category_id) - - @controller.protected() - def set_action_category(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - action_category_id = kw.get('action_category_id', None) - action_category_dict = dict() - action_category_dict['name'] = kw.get('action_category_name', None) - action_category_dict['description'] = kw.get('action_category_description', None) - return self.admin_api.set_action_category_dict(user_id, intra_extension_id, action_category_id, action_category_dict) - - # Perimeter functions - @controller.protected() - 
def get_subjects(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - return self.admin_api.get_subjects_dict(user_id, intra_extension_id) - - @controller.protected() - def add_subject(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - subject_dict = dict() - subject_dict['name'] = kw.get('subject_name', None) - subject_dict['description'] = kw.get('subject_description', None) - subject_dict['password'] = kw.get('subject_password', None) - subject_dict['email'] = kw.get('subject_email', None) - return self.admin_api.add_subject_dict(user_id, intra_extension_id, subject_dict) - - @controller.protected() - def get_subject(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - subject_id = kw.get('subject_id', None) - return self.admin_api.get_subject_dict(user_id, intra_extension_id, subject_id) - - @controller.protected() - def del_subject(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - subject_id = kw.get('subject_id', None) - self.admin_api.del_subject(user_id, intra_extension_id, subject_id) - - @controller.protected() - def set_subject(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - subject_id = kw.get('subject_id', None) - subject_dict = dict() - subject_dict['name'] = kw.get('subject_name', None) - subject_dict['description'] = kw.get('subject_description', None) - return self.admin_api.set_subject_dict(user_id, intra_extension_id, subject_id, subject_dict) - - @controller.protected() - def get_objects(self, context, **kw): - user_id = 
self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - return self.admin_api.get_objects_dict(user_id, intra_extension_id) - - @controller.protected() - def add_object(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - object_dict = dict() - object_dict['name'] = kw.get('object_name', None) - object_dict['description'] = kw.get('object_description', None) - return self.admin_api.add_object_dict(user_id, intra_extension_id, object_dict) - - @controller.protected() - def get_object(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - object_id = kw.get('object_id', None) - return self.admin_api.get_object_dict(user_id, intra_extension_id, object_id) - - @controller.protected() - def del_object(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - object_id = kw.get('object_id', None) - self.admin_api.del_object(user_id, intra_extension_id, object_id) - - @controller.protected() - def set_object(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - object_id = kw.get('object_id', None) - object_dict = dict() - object_dict['name'] = kw.get('object_name', None) - object_dict['description'] = kw.get('object_description', None) - return self.admin_api.set_object_dict(user_id, intra_extension_id, object_id, object_dict) - - @controller.protected() - def get_actions(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - return self.admin_api.get_actions_dict(user_id, intra_extension_id) - - @controller.protected() - def add_action(self, context, **kw): 
- user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - action_dict = dict() - action_dict['name'] = kw.get('action_name', None) - action_dict['description'] = kw.get('action_description', None) - return self.admin_api.add_action_dict(user_id, intra_extension_id, action_dict) - - @controller.protected() - def get_action(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - action_id = kw.get('action_id', None) - return self.admin_api.get_action_dict(user_id, intra_extension_id, action_id) - - @controller.protected() - def del_action(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - action_id = kw.get('action_id', None) - self.admin_api.del_action(user_id, intra_extension_id, action_id) - - @controller.protected() - def set_action(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - action_id = kw.get('action_id', None) - action_dict = dict() - action_dict['name'] = kw.get('action_name', None) - action_dict['description'] = kw.get('action_description', None) - return self.admin_api.set_action_dict(user_id, intra_extension_id, action_id, action_dict) - - # Scope functions - @controller.protected() - def get_subject_scopes(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - subject_category_id = kw.get('subject_category_id', None) - return self.admin_api.get_subject_scopes_dict(user_id, intra_extension_id, subject_category_id) - - @controller.protected() - def add_subject_scope(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - 
subject_category_id = kw.get('subject_category_id', None) - subject_scope_dict = dict() - subject_scope_dict['name'] = kw.get('subject_scope_name', None) - subject_scope_dict['description'] = kw.get('subject_scope_description', None) - return self.admin_api.add_subject_scope_dict(user_id, intra_extension_id, subject_category_id, subject_scope_dict) - - @controller.protected() - def get_subject_scope(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - subject_category_id = kw.get('subject_category_id', None) - subject_scope_id = kw.get('subject_scope_id', None) - return self.admin_api.get_subject_scope_dict(user_id, intra_extension_id, subject_category_id, subject_scope_id) - - @controller.protected() - def del_subject_scope(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - subject_category_id = kw.get('subject_category_id', None) - subject_scope_id = kw.get('subject_scope_id', None) - self.admin_api.del_subject_scope(user_id, intra_extension_id, subject_category_id, subject_scope_id) - - @controller.protected() - def set_subject_scope(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - subject_category_id = kw.get('subject_category_id', None) - subject_scope_id = kw.get('subject_scope_id', None) - subject_scope_dict = dict() - subject_scope_dict['name'] = kw.get('subject_scope_name', None) - subject_scope_dict['description'] = kw.get('subject_scope_description', None) - return self.admin_api.set_subject_scope_dict(user_id, intra_extension_id, subject_category_id, subject_scope_id, subject_scope_dict) - - @controller.protected() - def get_object_scopes(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = 
kw.get('intra_extension_id', None) - object_category_id = kw.get('object_category_id', None) - return self.admin_api.get_object_scopes_dict(user_id, intra_extension_id, object_category_id) - - @controller.protected() - def add_object_scope(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - object_category_id = kw.get('object_category_id', None) - object_scope_dict = dict() - object_scope_dict['name'] = kw.get('object_scope_name', None) - object_scope_dict['description'] = kw.get('object_scope_description', None) - return self.admin_api.add_object_scope_dict(user_id, intra_extension_id, object_category_id, object_scope_dict) - - @controller.protected() - def get_object_scope(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - object_category_id = kw.get('object_category_id', None) - object_scope_id = kw.get('object_scope_id', None) - return self.admin_api.get_object_scope_dict(user_id, intra_extension_id, object_category_id, object_scope_id) - - @controller.protected() - def del_object_scope(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - object_category_id = kw.get('object_category_id', None) - object_scope_id = kw.get('object_scope_id', None) - self.admin_api.del_object_scope(user_id, intra_extension_id, object_category_id, object_scope_id) - - @controller.protected() - def set_object_scope(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - object_category_id = kw.get('object_category_id', None) - object_scope_id = kw.get('object_scope_id', None) - object_scope_dict = dict() - object_scope_dict['name'] = kw.get('object_scope_name', None) - object_scope_dict['description'] = 
kw.get('object_scope_description', None) - return self.admin_api.set_object_scope_dict(user_id, intra_extension_id, object_category_id, object_scope_id, object_scope_dict) - - @controller.protected() - def get_action_scopes(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - action_category_id = kw.get('action_category_id', None) - return self.admin_api.get_action_scopes_dict(user_id, intra_extension_id, action_category_id) - - @controller.protected() - def add_action_scope(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - action_category_id = kw.get('action_category_id', None) - action_scope_dict = dict() - action_scope_dict['name'] = kw.get('action_scope_name', None) - action_scope_dict['description'] = kw.get('action_scope_description', None) - return self.admin_api.add_action_scope_dict(user_id, intra_extension_id, action_category_id, action_scope_dict) - - @controller.protected() - def get_action_scope(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - action_category_id = kw.get('action_category_id', None) - action_scope_id = kw.get('action_scope_id', None) - return self.admin_api.get_action_scope_dict(user_id, intra_extension_id, action_category_id, action_scope_id) - - @controller.protected() - def del_action_scope(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - action_category_id = kw.get('action_category_id', None) - action_scope_id = kw.get('action_scope_id', None) - self.admin_api.del_action_scope(user_id, intra_extension_id, action_category_id, action_scope_id) - - @controller.protected() - def set_action_scope(self, context, **kw): - user_id = 
self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - action_category_id = kw.get('action_category_id', None) - action_scope_id = kw.get('action_scope_id', None) - action_scope_dict = dict() - action_scope_dict['name'] = kw.get('action_scope_name', None) - action_scope_dict['description'] = kw.get('action_scope_description', None) - return self.admin_api.set_action_scope_dict(user_id, intra_extension_id, action_category_id, action_scope_id, action_scope_dict) - - # Assignment functions - - @controller.protected() - def add_subject_assignment(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - subject_id = kw.get('subject_id', None) - subject_category_id = kw.get('subject_category_id', None) - subject_scope_id = kw.get('subject_scope_id', None) - return self.admin_api.add_subject_assignment_list(user_id, intra_extension_id, subject_id, subject_category_id, subject_scope_id) - - @controller.protected() - def get_subject_assignment(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - subject_id = kw.get('subject_id', None) - subject_category_id = kw.get('subject_category_id', None) - return self.admin_api.get_subject_assignment_list(user_id, intra_extension_id, subject_id, subject_category_id) - - @controller.protected() - def del_subject_assignment(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - subject_id = kw.get('subject_id', None) - subject_category_id = kw.get('subject_category_id', None) - subject_scope_id = kw.get('subject_scope_id', None) - self.admin_api.del_subject_assignment(user_id, intra_extension_id, subject_id, subject_category_id, subject_scope_id) - - @controller.protected() - def 
add_object_assignment(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - object_id = kw.get('object_id', None) - object_category_id = kw.get('object_category_id', None) - object_scope_id = kw.get('object_scope_id', None) - return self.admin_api.add_object_assignment_list(user_id, intra_extension_id, object_id, object_category_id, object_scope_id) - - @controller.protected() - def get_object_assignment(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - object_id = kw.get('object_id', None) - object_category_id = kw.get('object_category_id', None) - return self.admin_api.get_object_assignment_list(user_id, intra_extension_id, object_id, object_category_id) - - @controller.protected() - def del_object_assignment(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - object_id = kw.get('object_id', None) - object_category_id = kw.get('object_category_id', None) - object_scope_id = kw.get('object_scope_id', None) - self.admin_api.del_object_assignment(user_id, intra_extension_id, object_id, object_category_id, object_scope_id) - - @controller.protected() - def add_action_assignment(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - action_id = kw.get('action_id', None) - action_category_id = kw.get('action_category_id', None) - action_scope_id = kw.get('action_scope_id', None) - return self.admin_api.add_action_assignment_list(user_id, intra_extension_id, action_id, action_category_id, action_scope_id) - - @controller.protected() - def get_action_assignment(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = 
kw.get('intra_extension_id', None) - action_id = kw.get('action_id', None) - action_category_id = kw.get('action_category_id', None) - return self.admin_api.get_action_assignment_list(user_id, intra_extension_id, action_id, action_category_id) - - @controller.protected() - def del_action_assignment(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - action_id = kw.get('action_id', None) - action_category_id = kw.get('action_category_id', None) - action_scope_id = kw.get('action_scope_id', None) - self.admin_api.del_action_assignment(user_id, intra_extension_id, action_id, action_category_id, action_scope_id) - - # Metarule functions - - @controller.protected() - def get_aggregation_algorithm(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - return self.admin_api.get_aggregation_algorithm_id(user_id, intra_extension_id) - - @controller.protected() - def set_aggregation_algorithm(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - aggregation_algorithm_id = kw.get('aggregation_algorithm_id', None) - return self.admin_api.set_aggregation_algorithm_id(user_id, intra_extension_id, aggregation_algorithm_id) - - @controller.protected() - def get_sub_meta_rules(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - return self.admin_api.get_sub_meta_rules_dict(user_id, intra_extension_id) - - @controller.protected() - def add_sub_meta_rule(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - sub_meta_rule_dict = dict() - sub_meta_rule_dict['name'] = kw.get('sub_meta_rule_name', None) - 
sub_meta_rule_dict['algorithm'] = kw.get('sub_meta_rule_algorithm', None) - sub_meta_rule_dict['subject_categories'] = kw.get('sub_meta_rule_subject_categories', None) - sub_meta_rule_dict['object_categories'] = kw.get('sub_meta_rule_object_categories', None) - sub_meta_rule_dict['action_categories'] = kw.get('sub_meta_rule_action_categories', None) - return self.admin_api.add_sub_meta_rule_dict(user_id, intra_extension_id, sub_meta_rule_dict) - - @controller.protected() - def get_sub_meta_rule(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - sub_meta_rule_id = kw.get('sub_meta_rule_id', None) - return self.admin_api.get_sub_meta_rule_dict(user_id, intra_extension_id, sub_meta_rule_id) - - @controller.protected() - def del_sub_meta_rule(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - sub_meta_rule_id = kw.get('sub_meta_rule_id', None) - self.admin_api.del_sub_meta_rule(user_id, intra_extension_id, sub_meta_rule_id) - - @controller.protected() - def set_sub_meta_rule(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - sub_meta_rule_id = kw.get('sub_meta_rule_id', None) - sub_meta_rule_dict = dict() - sub_meta_rule_dict['name'] = kw.get('sub_meta_rule_name', None) - sub_meta_rule_dict['algorithm'] = kw.get('sub_meta_rule_algorithm', None) - sub_meta_rule_dict['subject_categories'] = kw.get('sub_meta_rule_subject_categories', None) - sub_meta_rule_dict['object_categories'] = kw.get('sub_meta_rule_object_categories', None) - sub_meta_rule_dict['action_categories'] = kw.get('sub_meta_rule_action_categories', None) - return self.admin_api.set_sub_meta_rule_dict(user_id, intra_extension_id, sub_meta_rule_id, sub_meta_rule_dict) - - # Rules functions - @controller.protected() - def 
get_rules(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - sub_meta_rule_id = kw.get('sub_meta_rule_id', None) - return self.admin_api.get_rules_dict(user_id, intra_extension_id, sub_meta_rule_id) - - @controller.protected() - def add_rule(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - sub_meta_rule_id = kw.get('sub_meta_rule_id', None) - subject_category_list = kw.get('subject_categories', []) - object_category_list = kw.get('object_categories', []) - action_category_list = kw.get('action_categories', []) - enabled_bool = kw.get('enabled', True) - rule_list = subject_category_list + action_category_list + object_category_list + [enabled_bool, ] - return self.admin_api.add_rule_dict(user_id, intra_extension_id, sub_meta_rule_id, rule_list) - - @controller.protected() - def get_rule(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - sub_meta_rule_id = kw.get('sub_meta_rule_id', None) - rule_id = kw.get('rule_id', None) - return self.admin_api.get_rule_dict(user_id, intra_extension_id, sub_meta_rule_id, rule_id) - - @controller.protected() - def del_rule(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - sub_meta_rule_id = kw.get('sub_meta_rule_id', None) - rule_id = kw.get('rule_id', None) - self.admin_api.del_rule(user_id, intra_extension_id, sub_meta_rule_id, rule_id) - - @controller.protected() - def set_rule(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - intra_extension_id = kw.get('intra_extension_id', None) - sub_meta_rule_id = kw.get('sub_meta_rule_id', None) - rule_id = kw.get('rule_id', None) - rule_list = list() - 
subject_category_list = kw.get('subject_categories', []) - object_category_list = kw.get('object_categories', []) - action_category_list = kw.get('action_categories', []) - rule_list = subject_category_list + action_category_list + object_category_list - return self.admin_api.set_rule_dict(user_id, intra_extension_id, sub_meta_rule_id, rule_id, rule_list) - - -@dependency.requires('authz_api') -class InterExtensions(controller.V3Controller): - - def __init__(self): - super(InterExtensions, self).__init__() - - def _get_user_from_token(self, token_id): - response = self.token_provider_api.validate_token(token_id) - token_ref = token_model.KeystoneToken(token_id=token_id, token_data=response) - return token_ref['user'] - - # @controller.protected() - # def get_inter_extensions(self, context, **kw): - # user = self._get_user_from_token(context.get('token_id')) - # return { - # 'inter_extensions': - # self.interextension_api.get_inter_extensions() - # } - - # @controller.protected() - # def get_inter_extension(self, context, **kw): - # user = self._get_user_from_token(context.get('token_id')) - # return { - # 'inter_extensions': - # self.interextension_api.get_inter_extension(uuid=kw['inter_extension_id']) - # } - - # @controller.protected() - # def create_inter_extension(self, context, **kw): - # user = self._get_user_from_token(context.get('token_id')) - # return self.interextension_api.create_inter_extension(kw) - - # @controller.protected() - # def delete_inter_extension(self, context, **kw): - # user = self._get_user_from_token(context.get('token_id')) - # if 'inter_extension_id' not in kw: - # raise exception.Error - # return self.interextension_api.delete_inter_extension(kw['inter_extension_id']) - - -@dependency.requires('moonlog_api', 'authz_api') -class Logs(controller.V3Controller): - - def __init__(self): - super(Logs, self).__init__() - - def _get_user_id_from_token(self, token_id): - response = self.token_provider_api.validate_token(token_id) - token_ref 
= token_model.KeystoneToken(token_id=token_id, token_data=response) - return token_ref['user'] - - @controller.protected() - def get_logs(self, context, **kw): - user_id = self._get_user_id_from_token(context.get('token_id')) - options = kw.get('options', '') - return self.moonlog_api.get_logs(user_id, options) - - -@dependency.requires('identity_api', "token_provider_api", "resource_api") -class MoonAuth(controller.V3Controller): - - def __init__(self): - super(MoonAuth, self).__init__() - - def _get_project(self, uuid="", name=""): - projects = self.resource_api.list_projects() - for project in projects: - if uuid and uuid == project['id']: - return project - elif name and name == project['name']: - return project - - def get_token(self, context, **kw): - master_url = CONF.moon.master - data_auth = { - "auth": { - "identity": { - "methods": [ - "password" - ], - "password": { - "user": { - "domain": { - "id": "Default" - }, - "name": kw['username'], - "password": kw['password'] - } - } - } - } - } - - message = {} - if "project" in kw: - project = self._get_project(name=kw['project']) - if project: - data_auth["auth"]["scope"] = dict() - data_auth["auth"]["scope"]['project'] = dict() - data_auth["auth"]["scope"]['project']['id'] = project['id'] - else: - message = { - "error": { - "message": "Unable to find project {}".format(kw['project']), - "code": 200, - "title": "UnScopedToken" - }} - - req = requests.post("{}/v3/auth/tokens".format(master_url), - json=data_auth, - headers={"Content-Type": "application/json"} - ) - if req.status_code not in (200, 201): - LOG.error(req.text) - else: - _token = req.headers['X-Subject-Token'] - _data = req.json() - _result = { - "token": _token, - 'message': message - } - try: - _result["roles"] = map(lambda x: x['name'], _data["token"]["roles"]) - except KeyError: - pass - return _result - return {"token": None, 'message': req.json()} - diff --git a/keystone-moon/keystone/contrib/moon/core.py 
b/keystone-moon/keystone/contrib/moon/core.py deleted file mode 100644 index 943b8e78..00000000 --- a/keystone-moon/keystone/contrib/moon/core.py +++ /dev/null @@ -1,2990 +0,0 @@ -# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors -# This software is distributed under the terms and conditions of the 'Apache-2.0' -# license which can be found in the file 'LICENSE' in this package distribution -# or at 'http://www.apache.org/licenses/LICENSE-2.0'. - -from uuid import uuid4 -import os -import json -import copy -import re -import six -import time -import types -import requests - -from keystone.common import manager -from keystone.exception import UserNotFound -from oslo_log import log -from keystone.common import dependency -from keystone import exception -from oslo_config import cfg -from keystone.i18n import _ -from keystone.common import extension - -from keystone.contrib.moon.exception import * -from keystone.contrib.moon.algorithms import * - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - -OPTS = [ - cfg.StrOpt('configuration_driver', - default='keystone.contrib.moon.backends.memory.ConfigurationConnector', - help='Configuration backend driver.'), - cfg.StrOpt('tenant_driver', - default='keystone.contrib.moon.backends.sql.TenantConnector', - help='Tenant backend driver.'), - cfg.StrOpt('authz_driver', - default='keystone.contrib.moon.backends.flat.SuperExtensionConnector', - help='Authorisation backend driver.'), - cfg.StrOpt('intraextension_driver', - default='keystone.contrib.moon.backends.sql.IntraExtensionConnector', - help='IntraExtension backend driver.'), - cfg.StrOpt('interextension_driver', - default='keystone.contrib.moon.backends.sql.InterExtensionConnector', - help='InterExtension backend driver.'), - cfg.StrOpt('log_driver', - default='keystone.contrib.moon.backends.flat.LogConnector', - help='Logs backend driver.'), - cfg.StrOpt('policy_directory', - default='/etc/keystone/policies', - help='Local directory where all 
policies are stored.'), - cfg.StrOpt('root_policy_directory', - default='policy_root', - help='Local directory where Root IntraExtension configuration is stored.'), - cfg.StrOpt('master', - default='http://localhost:35357/', - help='Address of the Moon master (if empty, the current Moon is the master).'), - cfg.StrOpt('master_login', - default='admin', - help='Login of the Moon master.'), - cfg.StrOpt('master_password', - default='nomoresecrete', - help='Password of the Moon master.'), -] - -for option in OPTS: - CONF.register_opt(option, group="moon") - - -def filter_input(func_or_str): - - def __filter(string): - if string and type(string) in (str, unicode): - return "".join(re.findall("[\w\- +]*", string)) - return string - - def __filter_dict(arg): - result = dict() - for key in arg.keys(): - if key == "email": - result["email"] = __filter_email(arg[key]) - elif key == "password": - result["password"] = arg['password'] - else: - result[key] = __filter(arg[key]) - return result - - def __filter_email(string): - if string and type(string) in (str, unicode): - return "".join(re.findall("[\w@\._\- +]*", string)) - return string - - def wrapped(*args, **kwargs): - _args = [] - for arg in args: - if isinstance(arg, str) or isinstance(arg, unicode): - arg = __filter(arg) - elif isinstance(arg, list): - arg = [__filter(item) for item in arg] - elif isinstance(arg, tuple): - arg = (__filter(item) for item in arg) - elif isinstance(arg, dict): - arg = __filter_dict(arg) - _args.append(arg) - for arg in kwargs: - if type(kwargs[arg]) in (unicode, str): - kwargs[arg] = __filter(kwargs[arg]) - if isinstance(kwargs[arg], str) or isinstance(kwargs[arg], unicode): - kwargs[arg] = __filter(kwargs[arg]) - elif isinstance(kwargs[arg], list): - kwargs[arg] = [__filter(item) for item in kwargs[arg]] - elif isinstance(kwargs[arg], tuple): - kwargs[arg] = (__filter(item) for item in kwargs[arg]) - elif isinstance(kwargs[arg], dict): - kwargs[arg] = __filter_dict(kwargs[arg]) - return 
func_or_str(*_args, **kwargs) - - if isinstance(func_or_str, str) or isinstance(func_or_str, unicode): - return __filter(func_or_str) - if isinstance(func_or_str, list): - return [__filter(item) for item in func_or_str] - if isinstance(func_or_str, tuple): - return (__filter(item) for item in func_or_str) - if isinstance(func_or_str, dict): - return __filter_dict(func_or_str) - if isinstance(func_or_str, types.FunctionType): - return wrapped - return None - - -def enforce(action_names, object_name, **extra): - - def wrap(func): - _action_name_list = action_names - _object_name = object_name - dependency.resolve_future_dependencies() - - def wrapped(*args, **kwargs): - root_api = dependency._REGISTRY["root_api"][0] - admin_api = dependency._REGISTRY["admin_api"][0] - moonlog_api = dependency._REGISTRY["moonlog_api"][0] - tenant_api = dependency._REGISTRY["tenant_api"][0] - returned_value_for_func = None - try: - user_id = args[1] - except IndexError: - user_id = kwargs['user_id'] - intra_extension_id = None - intra_admin_extension_id = None - - intra_root_extension_id = root_api.root_extension_id - try: - intra_extension_id = args[2] - except IndexError: - if 'intra_extension_id' in kwargs: - intra_extension_id = kwargs['intra_extension_id'] - else: - intra_extension_id = intra_root_extension_id - - tenants_dict = tenant_api.driver.get_tenants_dict() - if root_api.is_admin_subject(user_id): - # TODO: check if there is no security hole here - # moonlog_api.driver.info("Authorizing because it is the user admin of the root intra-extension") - returned_value_for_func = func(*args, **kwargs) - else: - intra_extensions_dict = admin_api.driver.get_intra_extensions_dict() - if intra_extension_id not in intra_extensions_dict: - # if id is not an intra_extension, maybe it is a tenant id - intra_extension_id = intra_root_extension_id - if intra_extension_id in tenants_dict: - # id is in fact a tenant id so, we must check against the Root intra_extension - intra_extension_id = 
intra_root_extension_id - LOG.warning("intra_extension_id is a tenant ID ({})".format(intra_extension_id)) - else: - # id is not a known tenant ID, so we must check against the Root intra_extension - intra_extension_id = intra_root_extension_id - LOG.warning("Cannot manage because the intra-extension is unknown (fallback to the root intraextension)") - for _tenant_id in tenants_dict: - if tenants_dict[_tenant_id]['intra_authz_extension_id'] == intra_extension_id or \ - tenants_dict[_tenant_id]['intra_admin_extension_id'] == intra_extension_id: - intra_admin_extension_id = tenants_dict[_tenant_id]['intra_admin_extension_id'] - break - if not intra_admin_extension_id: - moonlog_api.driver.warning("No Intra_Admin_Extension found, authorization granted by default.") - returned_value_for_func = func(*args, **kwargs) - else: - objects_dict = admin_api.driver.get_objects_dict(intra_admin_extension_id) - object_name = intra_extensions_dict[intra_extension_id]['genre'] + '.' + _object_name - object_id = None - for _object_id in objects_dict: - if objects_dict[_object_id]['name'] == object_name: - object_id = _object_id - break - if not object_id: - objects_dict = admin_api.driver.get_objects_dict(intra_root_extension_id) - object_name = object_name.split(".")[-1] - for _object_id in objects_dict: - if objects_dict[_object_id]['name'] == object_name: - object_id = _object_id - break - if not object_id: - raise ObjectUnknown("enforce: Unknown object name: {}".format(object_name)) - # if we found the object in intra_root_extension_id, so we change the intra_admin_extension_id - # into intra_root_extension_id and we modify the ID of the subject - subjects_dict = admin_api.driver.get_subjects_dict(intra_admin_extension_id) - try: - subject_name = subjects_dict[user_id]["name"] - except KeyError: - subject_name = None - # Try if user_id is a Keystone ID - try: - for _subject_id in subjects_dict: - if subjects_dict[_subject_id]["keystone_id"] == user_id: - subject_name = 
subjects_dict[_subject_id]["name"] - except KeyError: - raise SubjectUnknown() - intra_admin_extension_id = intra_root_extension_id - subjects_dict = admin_api.driver.get_subjects_dict(intra_admin_extension_id) - user_id = None - for _subject_id in subjects_dict: - if subjects_dict[_subject_id]["name"] == subject_name: - user_id = _subject_id - if not user_id: - raise SubjectUnknown("Subject {} Unknown for Root IntraExtension...".format(subject_name)) - if type(_action_name_list) in (str, unicode): - action_name_list = (_action_name_list, ) - else: - action_name_list = _action_name_list - actions_dict = admin_api.driver.get_actions_dict(intra_admin_extension_id) - action_id_list = list() - for _action_name in action_name_list: - for _action_id in actions_dict: - if actions_dict[_action_id]['name'] == _action_name: - action_id_list.append(_action_id) - break - - authz_result = False - action_id = "" - for action_id in action_id_list: - res = admin_api.authz(intra_admin_extension_id, user_id, object_id, action_id) - moonlog_api.info("res={}".format(res)) - if res: - authz_result = True - else: - moonlog_api.authz("No authorization for ({} {}-{}-{})".format( - intra_admin_extension_id, - user_id, - object_name, - actions_dict[action_id]['name'])) - authz_result = False - break - if authz_result: - returned_value_for_func = func(*args, **kwargs) - else: - raise AuthzException("No authorization for ({} {}-{}-{})".format( - intra_admin_extension_id, - user_id, - object_name, - actions_dict[action_id]['name'])) - return returned_value_for_func - return wrapped - return wrap - - -@dependency.provider('configuration_api') -class ConfigurationManager(manager.Manager): - - driver_namespace = 'keystone.moon.configuration' - - def __init__(self): - super(ConfigurationManager, self).__init__(CONF.moon.configuration_driver) - - @enforce("read", "templates") - def get_policy_templates_dict(self, user_id): - """ - Return a dictionary of all policy templates - :return: { - 
template_id1: {name: template_name, description: template_description}, - template_id2: {name: template_name, description: template_description}, - ... - } - """ - return self.driver.get_policy_templates_dict() - - @enforce("read", "templates") - def get_policy_template_id_from_name(self, user_id, policy_template_name): - policy_templates_dict = self.driver.get_policy_templates_dict() - for policy_template_id in policy_templates_dict: - if policy_templates_dict[policy_template_id]['name'] == policy_template_name: - return policy_template_id - return None - - @enforce("read", "aggregation_algorithms") - def get_aggregation_algorithms_dict(self, user_id): - """ - Return a dictionary of all aggregation algorithms - :return: { - aggre_algo_id1: {name: aggre_name, description: aggre_algo_description}, - aggre_algo_id2: {name: aggre_name, description: aggre_algo_description}, - ... - } - """ - return self.driver.get_aggregation_algorithms_dict() - - @enforce("read", "aggregation_algorithms") - def get_aggregation_algorithm_id_from_name(self, user_id, aggregation_algorithm_name): - aggregation_algorithms_dict = self.driver.get_aggregation_algorithms_dict() - for aggregation_algorithm_id in aggregation_algorithms_dict: - if aggregation_algorithms_dict[aggregation_algorithm_id]['name'] == aggregation_algorithm_name: - return aggregation_algorithm_id - return None - - @enforce("read", "sub_meta_rule_algorithms") - def get_sub_meta_rule_algorithms_dict(self, user_id): - """ - Return a dictionary of sub_meta_rule algorithm - :return: { - sub_meta_rule_id1: {name: sub_meta_rule_name, description: sub_meta_rule_description}, - sub_meta_rule_id2: {name: sub_meta_rule_name, description: sub_meta_rule_description}, - ... 
- } - """ - return self.driver.get_sub_meta_rule_algorithms_dict() - - @enforce("read", "sub_meta_rule_algorithms") - def get_sub_meta_rule_algorithm_id_from_name(self, sub_meta_rule_algorithm_name): - sub_meta_rule_algorithms_dict = self.configuration_api.get_sub_meta_rule_algorithms_dict() - for sub_meta_rule_algorithm_id in sub_meta_rule_algorithms_dict: - if sub_meta_rule_algorithms_dict[sub_meta_rule_algorithm_id]['name'] == sub_meta_rule_algorithm_name: - return sub_meta_rule_algorithm_id - return None - - -@dependency.provider('tenant_api') -@dependency.requires('moonlog_api', 'admin_api', 'root_api', 'resource_api', 'admin_api') -class TenantManager(manager.Manager): - - driver_namespace = 'keystone.moon.tenant' - - def __init__(self): - super(TenantManager, self).__init__(CONF.moon.tenant_driver) - - @filter_input - @enforce("read", "tenants") - def get_tenants_dict(self, user_id): - """ - Return a dictionary with all tenants - :return: { - tenant_id1: { - name: xxx, - description: yyy, - intra_authz_extension_id: zzz, - intra_admin_extension_id: zzz, - }, - tenant_id2: {...}, - ... 
- } - """ - return self.driver.get_tenants_dict() - - def __get_keystone_tenant_dict(self, tenant_id="", tenant_name=""): - tenants = self.resource_api.list_projects() - for tenant in tenants: - if tenant_id and tenant_id == tenant['id']: - return tenant - if tenant_name and tenant_name == tenant['name']: - return tenant - if not tenant_id: - tenant_id = uuid4().hex - if not tenant_name: - tenant_name = tenant_id - tenant = { - "id": tenant_id, - "name": tenant_name, - "description": "Auto generated tenant from Moon platform", - "enabled": True, - "domain_id": "default" - } - keystone_tenant = self.resource_api.create_project(tenant["id"], tenant) - return keystone_tenant - - @filter_input - @enforce(("read", "write"), "tenants") - def add_tenant_dict(self, user_id, tenant_id, tenant_dict): - tenants_dict = self.driver.get_tenants_dict() - for tenant_id in tenants_dict: - if tenants_dict[tenant_id]['name'] == tenant_dict['name']: - raise TenantAddedNameExisting() - - # Check (and eventually sync) Keystone tenant - if 'id' not in tenant_dict: - tenant_dict['id'] = None - keystone_tenant = self.__get_keystone_tenant_dict(tenant_dict['id'], tenant_dict['name']) - for att in keystone_tenant: - if keystone_tenant[att]: - tenant_dict[att] = keystone_tenant[att] - # Sync users between intra_authz_extension and intra_admin_extension - self.moonlog_api.debug("add_tenant_dict {}".format(tenant_dict)) - if 'intra_admin_extension_id' in tenant_dict and tenant_dict['intra_admin_extension_id']: - if 'intra_authz_extension_id' in tenant_dict and tenant_dict['intra_authz_extension_id']: - authz_subjects_dict = self.admin_api.get_subjects_dict(self.root_api.root_admin_id, tenant_dict['intra_authz_extension_id']) - authz_subject_names_list = [authz_subjects_dict[subject_id]["name"] for subject_id in authz_subjects_dict] - admin_subjects_dict = self.admin_api.get_subjects_dict(self.root_api.root_admin_id, tenant_dict['intra_admin_extension_id']) - admin_subject_names_list = 
[admin_subjects_dict[subject_id]["name"] for subject_id in admin_subjects_dict] - for _subject_id in authz_subjects_dict: - if authz_subjects_dict[_subject_id]["name"] not in admin_subject_names_list: - self.admin_api.add_subject_dict(self.root_api.root_admin_id, tenant_dict['intra_admin_extension_id'], authz_subjects_dict[_subject_id]) - for _subject_id in admin_subjects_dict: - if admin_subjects_dict[_subject_id]["name"] not in authz_subject_names_list: - self.admin_api.add_subject_dict(self.root_api.root_admin_id, tenant_dict['intra_authz_extension_id'], admin_subjects_dict[_subject_id]) - - return self.driver.add_tenant_dict(tenant_dict['id'], tenant_dict) - - @filter_input - @enforce("read", "tenants") - def get_tenant_dict(self, user_id, tenant_id): - tenants_dict = self.driver.get_tenants_dict() - if tenant_id not in tenants_dict: - raise TenantUnknown() - return tenants_dict[tenant_id] - - @filter_input - @enforce(("read", "write"), "tenants") - def del_tenant(self, user_id, tenant_id): - if tenant_id not in self.driver.get_tenants_dict(): - raise TenantUnknown() - self.driver.del_tenant(tenant_id) - - @filter_input - @enforce(("read", "write"), "tenants") - def set_tenant_dict(self, user_id, tenant_id, tenant_dict): - tenants_dict = self.driver.get_tenants_dict() - if tenant_id not in tenants_dict: - raise TenantUnknown() - - # Sync users between intra_authz_extension and intra_admin_extension - if 'intra_admin_extension_id' in tenant_dict: - if 'intra_authz_extension_id' in tenant_dict: - authz_subjects_dict = self.admin_api.get_subjects_dict(self.root_api.root_admin_id, tenant_dict['intra_authz_extension_id']) - authz_subject_names_list = [authz_subjects_dict[subject_id]["name"] for subject_id in authz_subjects_dict] - admin_subjects_dict = self.admin_api.get_subjects_dict(self.root_api.root_admin_id, tenant_dict['intra_admin_extension_id']) - admin_subject_names_list = [admin_subjects_dict[subject_id]["name"] for subject_id in admin_subjects_dict] - for 
_subject_id in authz_subjects_dict: - if authz_subjects_dict[_subject_id]["name"] not in admin_subject_names_list: - self.admin_api.add_subject_dict(self.root_api.root_admin_id, tenant_dict['intra_admin_extension_id'], authz_subjects_dict[_subject_id]) - for _subject_id in admin_subjects_dict: - if admin_subjects_dict[_subject_id]["name"] not in authz_subject_names_list: - self.admin_api.add_subject_dict(self.root_api.root_admin_id, tenant_dict['intra_authz_extension_id'], admin_subjects_dict[_subject_id]) - - return self.driver.set_tenant_dict(tenant_id, tenant_dict) - - -@dependency.requires('identity_api', 'tenant_api', 'configuration_api', 'moonlog_api') -class IntraExtensionManager(manager.Manager): - - driver_namespace = 'keystone.moon.intraextension' - - def __init__(self): - super(IntraExtensionManager, self).__init__(CONF.moon.intraextension_driver) - self._root_admin_id = None - self._root_extension_id = None - - def __init_root(self, root_extension_id=None): - if root_extension_id: - self._root_extension_id = root_extension_id - else: - try: - self._root_extension_id = self.get_root_extension_id() - self.aggregation_algorithm_dict = self.configuration_api.driver.get_aggregation_algorithms_dict() - except AttributeError as e: - LOG.warning("Error on root intraextension initialization ({})".format(e)) - self._root_extension_id = None - self.aggregation_algorithm_dict = {} - if self._root_extension_id: - for subject_id, subject_dict in self.driver.get_subjects_dict(self.root_extension_id).iteritems(): - if subject_dict["name"] == "admin": - self._root_admin_id = subject_id - return - raise RootExtensionNotInitialized() - - @property - def root_extension_id(self): - if not self._root_extension_id: - self.__init_root() - return self._root_extension_id - - @root_extension_id.setter - def root_extension_id(self, value): - self._root_extension_id = value - LOG.info("set root_extension_id={}".format(self._root_extension_id)) - - @property - def 
root_admin_id(self): - if not self._root_admin_id: - self.__init_root() - return self._root_admin_id - - def get_root_extension_dict(self): - """ - - :return: {id: {"name": "xxx"}} - """ - return {self.root_extension_id: self.driver.get_intra_extensions_dict()[self.root_extension_id]} - - def get_root_extension_id(self): - extensions = self.driver.get_intra_extensions_dict() - for extension_id, extension_dict in extensions.iteritems(): - if extension_dict["name"] == CONF.moon.root_policy_directory: - return extension_id - else: - extension = self.load_root_intra_extension_dict(CONF.moon.root_policy_directory) - if not extension: - raise IntraExtensionCreationError("The root extension is not created.") - return extension['id'] - - def __get_authz_buffer(self, intra_extension_id, subject_id, object_id, action_id): - """ - :param intra_extension_id: - :param subject_id: - :param object_id: - :param action_id: - :return: authz_buffer = { - 'subject_id': xxx, - 'object_id': yyy, - 'action_id': zzz, - 'subject_assignments': { - 'subject_category1': [], - 'subject_category2': [], - ... 
- }, - 'object_assignments': {}, - 'action_assignments': {}, - } - """ - authz_buffer = dict() - # Sometimes it is not the subject ID but the User Keystone ID, so, we have to check - subjects_dict = self.driver.get_subjects_dict(intra_extension_id) - if subject_id not in subjects_dict.keys(): - for _subject_id in subjects_dict: - if subjects_dict[_subject_id]['keystone_id']: - subject_id = _subject_id - break - authz_buffer['subject_id'] = subject_id - authz_buffer['object_id'] = object_id - authz_buffer['action_id'] = action_id - meta_data_dict = dict() - meta_data_dict["subject_categories"] = self.driver.get_subject_categories_dict(intra_extension_id) - meta_data_dict["object_categories"] = self.driver.get_object_categories_dict(intra_extension_id) - meta_data_dict["action_categories"] = self.driver.get_action_categories_dict(intra_extension_id) - subject_assignment_dict = dict() - for category in meta_data_dict["subject_categories"]: - subject_assignment_dict[category] = self.driver.get_subject_assignment_list( - intra_extension_id, subject_id, category) - object_assignment_dict = dict() - for category in meta_data_dict["object_categories"]: - object_assignment_dict[category] = self.driver.get_object_assignment_list( - intra_extension_id, object_id, category) - action_assignment_dict = dict() - for category in meta_data_dict["action_categories"]: - action_assignment_dict[category] = self.driver.get_action_assignment_list( - intra_extension_id, action_id, category) - authz_buffer['subject_assignments'] = dict() - authz_buffer['object_assignments'] = dict() - authz_buffer['action_assignments'] = dict() - - for _subject_category in meta_data_dict['subject_categories']: - authz_buffer['subject_assignments'][_subject_category] = list(subject_assignment_dict[_subject_category]) - for _object_category in meta_data_dict['object_categories']: - authz_buffer['object_assignments'][_object_category] = list(object_assignment_dict[_object_category]) - for _action_category in 
meta_data_dict['action_categories']: - authz_buffer['action_assignments'][_action_category] = list(action_assignment_dict[_action_category]) - return authz_buffer - - def __authz(self, intra_extension_id, subject_id, object_id, action_id): - """Check authorization for a particular action. - - :param intra_extension_id: UUID of an IntraExtension - :param subject_id: subject UUID of the request - :param object_id: object UUID of the request - :param action_id: action UUID of the request - :return: True or False or raise an exception - :raises: - """ - authz_buffer = self.__get_authz_buffer(intra_extension_id, subject_id, object_id, action_id) - decision_buffer = dict() - decision = False - - meta_rule_dict = self.driver.get_sub_meta_rules_dict(intra_extension_id) - - for sub_meta_rule_id in meta_rule_dict: - if meta_rule_dict[sub_meta_rule_id]['algorithm'] == 'inclusion': - decision_buffer[sub_meta_rule_id] = inclusion( - authz_buffer, - meta_rule_dict[sub_meta_rule_id], - self.driver.get_rules_dict(intra_extension_id, sub_meta_rule_id).values()) - elif meta_rule_dict[sub_meta_rule_id]['algorithm'] == 'comparison': - decision_buffer[sub_meta_rule_id] = comparison( - authz_buffer, - meta_rule_dict[sub_meta_rule_id], - self.driver.get_rules_dict(intra_extension_id, sub_meta_rule_id).values()) - - try: - aggregation_algorithm_id = self.driver.get_aggregation_algorithm_id(intra_extension_id)['aggregation_algorithm'] - except TypeError: - return { - 'authz': False, - 'comment': "Aggregation algorithm not set" - } - if self.aggregation_algorithm_dict[aggregation_algorithm_id]['name'] == 'all_true': - decision = all_true(decision_buffer) - elif self.aggregation_algorithm_dict[aggregation_algorithm_id]['name'] == 'one_true': - decision = one_true(decision_buffer) - if not decision: - raise AuthzException("{} {}-{}-{}".format(intra_extension_id, subject_id, action_id, object_id)) - return { - 'authz': decision, - 'comment': "{} {}-{}-{}".format(intra_extension_id, subject_id, 
action_id, object_id) - } - - def authz(self, intra_extension_id, subject_id, object_id, action_id): - decision_dict = dict() - try: - decision_dict = self.__authz(intra_extension_id, subject_id, object_id, action_id) - except (SubjectUnknown, ObjectUnknown, ActionUnknown) as e: - # maybe we need to synchronize with the master - if CONF.moon.master: - self.get_data_from_master() - decision_dict = self.__authz(intra_extension_id, subject_id, object_id, action_id) - if not decision_dict["authz"]: - raise AuthzException(decision_dict["comment"]) - return {'authz': decision_dict["authz"], 'comment': ''} - - def get_data_from_master(self, subject=None, object=None, action=None): - LOG.info("Synchronising with master") - master_url = CONF.moon.master - master_login = CONF.moon.master_login - master_password = CONF.moon.master_password - headers = { - 'content-type': 'application/json', - 'Accept': 'text/plain,text/html,application/xhtml+xml,application/xml' - } - post = { - 'auth': { - 'scope': { - 'project': { - 'domain': {'id': 'Default'}, - 'name': 'demo'} - }, - 'identity': { - 'password': { - 'user': { - 'domain': {'id': 'Default'}, - 'password': 'nomoresecrete', - 'name': 'admin'} - }, - 'methods': ['password'] - } - } - } - post["auth"]["identity"]["password"]["user"]["name"] = master_login - post["auth"]["identity"]["password"]["user"]["password"] = master_password - req = requests.post('{}/v3/auth/tokens'.format(master_url), data=json.dumps(post), headers=headers) - if req.status_code not in (200, 201): - raise IntraExtensionException("Cannot connect to the Master.") - headers["X-Auth-Token"] = req.headers["x-subject-token"] - # get all intra-extensions - req = requests.get('{}/moon/intra_extensions/'.format(master_url), headers=headers) - extensions = req.json() - for intra_extension_id, intra_extension_value in extensions.iteritems(): - if intra_extension_value["model"] == "policy_root": - continue - - # add the intra-extension - intra_extension_dict = dict() 
- # Force the id of the intra-extension - intra_extension_dict['id'] = intra_extension_id - intra_extension_dict['name'] = intra_extension_value["name"] - intra_extension_dict['model'] = intra_extension_value["model"] - intra_extension_dict['genre'] = intra_extension_value["genre"] - intra_extension_dict['description'] = intra_extension_value["description"] - try: - ref = self.load_intra_extension_dict(self.root_admin_id, intra_extension_dict=intra_extension_dict) - except Exception as e: - LOG.error("(load_intra_extension_dict) Got an unhandled exception: {}".format(e)) - import traceback, sys - traceback.print_exc(file=sys.stdout) - - # Note (asteroide): we use the driver API to bypass authorizations of the internal API - # but in we force overwriting data every time - - # get all categories from master - _url = '{}/moon/intra_extensions/{}/subject_categories'.format(master_url, intra_extension_id) - req = requests.get(_url, headers=headers) - cat = req.json() - _categories_name = map(lambda x: x["name"], - self.driver.get_subject_categories_dict(intra_extension_id).values()) - for _cat_key, _cat_value in cat.iteritems(): - if _cat_value['name'] in _categories_name: - continue - self.driver.set_subject_category_dict(intra_extension_id, _cat_key, _cat_value) - _url = '{}/moon/intra_extensions/{}/object_categories'.format(master_url, intra_extension_id) - req = requests.get(_url, headers=headers) - cat = req.json() - _categories_name = map(lambda x: x["name"], - self.driver.get_object_categories_dict(intra_extension_id).values()) - for _cat_key, _cat_value in cat.iteritems(): - if _cat_value['name'] in _categories_name: - continue - self.driver.set_object_category_dict(intra_extension_id, _cat_key, _cat_value) - _url = '{}/moon/intra_extensions/{}/action_categories'.format(master_url, intra_extension_id) - req = requests.get(_url, headers=headers) - cat = req.json() - _categories_name = map(lambda x: x["name"], - 
self.driver.get_action_categories_dict(intra_extension_id).values()) - for _cat_key, _cat_value in cat.iteritems(): - if _cat_value['name'] in _categories_name: - continue - self.driver.set_action_category_dict(intra_extension_id, _cat_key, _cat_value) - - # get part of subjects, objects, actions from master - _url = '{}/moon/intra_extensions/{}/subjects'.format(master_url, intra_extension_id) - req = requests.get(_url, headers=headers) - sub = req.json() - _subjects_name = map(lambda x: x["name"], self.driver.get_subjects_dict(intra_extension_id).values()) - for _sub_key, _sub_value in sub.iteritems(): - if _sub_value['name'] in _subjects_name: - continue - keystone_user = self.identity_api.get_user_by_name(_sub_value['keystone_name'], "default") - _sub_value['keystone_id'] = keystone_user['id'] - self.driver.set_subject_dict(intra_extension_id, _sub_key, _sub_value) - _url = '{}/moon/intra_extensions/{}/objects'.format(master_url, intra_extension_id) - req = requests.get(_url, headers=headers) - obj = req.json() - _objects_name = map(lambda x: x["name"], self.driver.get_objects_dict(intra_extension_id).values()) - for _obj_key, _obj_value in obj.iteritems(): - if _obj_value['name'] in _objects_name: - continue - _obj_value['id'] = _obj_key - self.driver.set_object_dict(intra_extension_id, _obj_key, _obj_value) - _url = '{}/moon/intra_extensions/{}/actions'.format(master_url, intra_extension_id) - req = requests.get(_url, headers=headers) - act = req.json() - _actions_name = map(lambda x: x["name"], self.driver.get_actions_dict(intra_extension_id).values()) - for _act_key, _act_value in act.iteritems(): - if _act_value['name'] in _actions_name: - continue - self.driver.set_action_dict(intra_extension_id, _act_key, _act_value) - - # get all scopes from master - for s_cat, _value in self.driver.get_subject_categories_dict(intra_extension_id).iteritems(): - _url = '{}/moon/intra_extensions/{}/subject_scopes/{}'.format(master_url, intra_extension_id, s_cat) - req = 
requests.get(_url, headers=headers) - scopes = req.json() - _scopes_name = map(lambda x: x["name"], - self.driver.get_subject_scopes_dict(intra_extension_id, s_cat).values()) - if not _scopes_name: - continue - for _scope_key, _scope_value in scopes.iteritems(): - if _scope_value['name'] in _scopes_name: - continue - self.driver.set_subject_scope_dict(intra_extension_id, s_cat, _scope_key, _scope_value) - for o_cat in self.driver.get_subject_categories_dict(intra_extension_id): - _url = '{}/moon/intra_extensions/{}/object_scopes/{}'.format(master_url, intra_extension_id, o_cat) - req = requests.get(_url, headers=headers) - scopes = req.json() - _scopes_name = map(lambda x: x["name"], - self.driver.get_object_scopes_dict(intra_extension_id, o_cat).values()) - if not _scopes_name: - continue - for _scope_key, _scope_value in scopes.iteritems(): - if _scope_value['name'] in _scopes_name: - continue - self.driver.set_object_scope_dict(intra_extension_id, o_cat, _scope_key, _scope_value) - for a_cat in self.driver.get_subject_categories_dict(intra_extension_id): - _url = '{}/moon/intra_extensions/{}/action_scopes/{}'.format(master_url, intra_extension_id, a_cat) - req = requests.get(_url, headers=headers) - scopes = req.json() - _scopes_name = map(lambda x: x["name"], - self.driver.get_action_scopes_dict(intra_extension_id, a_cat ).values()) - if not _scopes_name: - continue - for _scope_key, _scope_value in scopes.iteritems(): - if _scope_value['name'] in _scopes_name: - continue - self.add_action_scope_dict(intra_extension_id, a_cat, _scope_key, _scope_value) - - # get aggregation algorithm from master - _url = '{}/moon/intra_extensions/{}/aggregation_algorithm'.format(master_url, intra_extension_id) - req = requests.get(_url, headers=headers) - algo = req.json() - self.driver.set_aggregation_algorithm_id(intra_extension_id, algo['aggregation_algorithm']) - - # get meta-rule from master - _url = '{}/moon/intra_extensions/{}/sub_meta_rules'.format(master_url, 
intra_extension_id) - req = requests.get(_url, headers=headers) - sub_meta_rules = req.json() - _sub_meta_rules_name = map(lambda x: x["name"], self.driver.get_sub_meta_rules_dict(intra_extension_id).values()) - for _sub_meta_rules_key, _sub_meta_rules_value in sub_meta_rules.iteritems(): - if _sub_meta_rules_value['name'] in _sub_meta_rules_name: - continue - self.driver.set_sub_meta_rule_dict(intra_extension_id, _sub_meta_rules_key, _sub_meta_rules_value) - - # get all rules from master - _sub_meta_rules_ids = self.driver.get_sub_meta_rules_dict(intra_extension_id).keys() - for _sub_meta_rules_id in _sub_meta_rules_ids: - _url = '{}/moon/intra_extensions/{}/rule/{}'.format(master_url, intra_extension_id, _sub_meta_rules_id) - req = requests.get(_url, headers=headers) - rules = req.json() - _rules = self.driver.get_rules_dict(intra_extension_id, _sub_meta_rules_id).values() - for _rules_key, _rules_value in rules.iteritems(): - if _rules_value in _rules: - continue - self.driver.set_rule_dict(intra_extension_id, _sub_meta_rules_id, _rules_key, _rules_value) - - # get part of assignments from master - _subject_ids = self.driver.get_subjects_dict(intra_extension_id).keys() - _subject_category_ids = self.driver.get_subject_categories_dict(intra_extension_id).keys() - - for _subject_id in _subject_ids: - for _subject_category_id in _subject_category_ids: - _url = '{}/moon/intra_extensions/{}/subject_assignments/{}/{}'.format( - master_url, - intra_extension_id, - _subject_id, - _subject_category_id - ) - req = requests.get(_url, headers=headers) - subject_assignments = req.json() - _assignments = self.driver.get_subject_assignment_list( - intra_extension_id, - _subject_id, - _subject_category_id - ) - for _assignment in subject_assignments: - if _assignment in _assignments: - continue - self.driver.add_subject_assignment_list( - intra_extension_id, - _subject_id, - _subject_category_id, - _assignment - ) - - _object_ids = 
self.driver.get_objects_dict(intra_extension_id).keys() - _object_category_ids = self.driver.get_object_categories_dict(intra_extension_id).keys() - - for _object_id in _object_ids: - for _object_category_id in _object_category_ids: - _url = '{}/moon/intra_extensions/{}/object_assignments/{}/{}'.format( - master_url, - intra_extension_id, - _object_id, - _object_category_id - ) - req = requests.get(_url, headers=headers) - object_assignments = req.json() - _assignments = self.driver.get_object_assignment_list( - intra_extension_id, - _object_id, - _object_category_id - ) - for _assignment in object_assignments: - if _assignment in _assignments: - continue - self.driver.add_object_assignment_list( - intra_extension_id, - _object_id, - _object_category_id, - _assignment - ) - - _action_ids = self.driver.get_actions_dict(intra_extension_id).keys() - _action_category_ids = self.driver.get_action_categories_dict(intra_extension_id).keys() - - for _action_id in _action_ids: - for _action_category_id in _action_category_ids: - _url = '{}/moon/intra_extensions/{}/action_assignments/{}/{}'.format( - master_url, - intra_extension_id, - _action_id, - _action_category_id - ) - req = requests.get(_url, headers=headers) - action_assignments = req.json() - _assignments = self.driver.get_action_assignment_list( - intra_extension_id, - _action_id, - _action_category_id - ) - for _assignment in action_assignments: - if _assignment in _assignments: - continue - self.driver.add_action_assignment_list( - intra_extension_id, - _action_id, - _action_category_id, - _assignment - ) - - @enforce("read", "intra_extensions") - def get_intra_extensions_dict(self, user_id): - """ - :param user_id: - :return: { - intra_extension_id1: { - name: xxx, - model: yyy, - genre, authz, - description: zzz} - }, - intra_extension_id2: {...}, - ...} - """ - return self.driver.get_intra_extensions_dict() - - # load policy from policy directory - - def __load_metadata_file(self, intra_extension_dict, 
policy_dir): - - metadata_path = os.path.join(policy_dir, 'metadata.json') - f = open(metadata_path) - json_perimeter = json.load(f) - - subject_categories = map(lambda x: x["name"], - self.driver.get_subject_categories_dict(intra_extension_dict["id"]).values()) - for _cat in json_perimeter['subject_categories']: - if _cat not in subject_categories: - self.driver.set_subject_category_dict(intra_extension_dict["id"], uuid4().hex, - {"name": _cat, "description": _cat}) - object_categories = map(lambda x: x["name"], - self.driver.get_object_categories_dict(intra_extension_dict["id"]).values()) - for _cat in json_perimeter['object_categories']: - if _cat not in object_categories: - self.driver.set_object_category_dict(intra_extension_dict["id"], uuid4().hex, - {"name": _cat, "description": _cat}) - action_categories = map(lambda x: x["name"], - self.driver.get_action_categories_dict(intra_extension_dict["id"]).values()) - for _cat in json_perimeter['action_categories']: - if _cat not in action_categories: - self.driver.set_action_category_dict(intra_extension_dict["id"], uuid4().hex, - {"name": _cat, "description": _cat}) - - def __load_perimeter_file(self, intra_extension_dict, policy_dir): - - perimeter_path = os.path.join(policy_dir, 'perimeter.json') - f = open(perimeter_path) - json_perimeter = json.load(f) - - subjects_name_list = map(lambda x: x["name"], self.driver.get_subjects_dict(intra_extension_dict["id"]).values()) - subject_dict = dict() - # We suppose that all subjects can be mapped to a true user in Keystone - for _subject in json_perimeter['subjects']: - if _subject in subjects_name_list: - continue - try: - keystone_user = self.identity_api.get_user_by_name(_subject, "default") - except exception.UserNotFound: - # TODO (asteroide): must add a configuration option to allow that exception - # maybe a debug option for unittest - keystone_user = {'id': uuid4().hex, 'name': _subject} - self.moonlog_api.error("Keystone user not found ({})".format(_subject)) 
- subject_id = uuid4().hex - subject_dict[subject_id] = keystone_user - subject_dict[subject_id]['keystone_id'] = keystone_user["id"] - subject_dict[subject_id]['keystone_name'] = keystone_user["name"] - self.driver.set_subject_dict(intra_extension_dict["id"], subject_id, subject_dict[subject_id]) - intra_extension_dict["subjects"] = subject_dict - - # Copy all values for objects and actions - objects_name_list = map(lambda x: x["name"], self.driver.get_objects_dict(intra_extension_dict["id"]).values()) - object_dict = dict() - for _object in json_perimeter['objects']: - if _object in objects_name_list: - continue - _id = uuid4().hex - object_dict[_id] = {"name": _object, "description": _object} - self.driver.set_object_dict(intra_extension_dict["id"], _id, object_dict[_id]) - intra_extension_dict["objects"] = object_dict - - actions_name_list = map(lambda x: x["name"], self.driver.get_objects_dict(intra_extension_dict["id"]).values()) - action_dict = dict() - for _action in json_perimeter['actions']: - if _action in actions_name_list: - continue - _id = uuid4().hex - action_dict[_id] = {"name": _action, "description": _action} - self.driver.set_action_dict(intra_extension_dict["id"], _id, action_dict[_id]) - intra_extension_dict["actions"] = action_dict - - def __load_scope_file(self, intra_extension_dict, policy_dir): - - metadata_path = os.path.join(policy_dir, 'scope.json') - f = open(metadata_path) - json_perimeter = json.load(f) - - intra_extension_dict['subject_scopes'] = dict() - for category, scope in json_perimeter["subject_scopes"].iteritems(): - category_id = self.driver.get_uuid_from_name(intra_extension_dict["id"], category, self.driver.SUBJECT_CATEGORY) - _scope_dict = dict() - for _scope in scope: - _id = uuid4().hex - _scope_dict[_id] = {"name": _scope, "description": _scope} - self.driver.set_subject_scope_dict(intra_extension_dict["id"], category_id, _id, _scope_dict[_id]) - intra_extension_dict['subject_scopes'][category] = _scope_dict - - 
intra_extension_dict['object_scopes'] = dict() - for category, scope in json_perimeter["object_scopes"].iteritems(): - category_id = self.driver.get_uuid_from_name(intra_extension_dict["id"], category, self.driver.OBJECT_CATEGORY) - _scope_dict = dict() - for _scope in scope: - _id = uuid4().hex - _scope_dict[_id] = {"name": _scope, "description": _scope} - self.driver.set_object_scope_dict(intra_extension_dict["id"], category_id, _id, _scope_dict[_id]) - intra_extension_dict['object_scopes'][category] = _scope_dict - - intra_extension_dict['action_scopes'] = dict() - for category, scope in json_perimeter["action_scopes"].iteritems(): - category_id = self.driver.get_uuid_from_name(intra_extension_dict["id"], category, self.driver.ACTION_CATEGORY) - _scope_dict = dict() - for _scope in scope: - _id = uuid4().hex - _scope_dict[_id] = {"name": _scope, "description": _scope} - self.driver.set_action_scope_dict(intra_extension_dict["id"], category_id, _id, _scope_dict[_id]) - intra_extension_dict['action_scopes'][category] = _scope_dict - - def __load_assignment_file(self, intra_extension_dict, policy_dir): - - f = open(os.path.join(policy_dir, 'assignment.json')) - json_assignments = json.load(f) - - subject_assignments = dict() - for category_name, value in json_assignments['subject_assignments'].iteritems(): - category_id = self.driver.get_uuid_from_name(intra_extension_dict["id"], category_name, self.driver.SUBJECT_CATEGORY) - for user_name in value: - subject_id = self.driver.get_uuid_from_name(intra_extension_dict["id"], user_name, self.driver.SUBJECT) - if subject_id not in subject_assignments: - subject_assignments[subject_id] = dict() - if category_id not in subject_assignments[subject_id]: - subject_assignments[subject_id][category_id] = \ - map(lambda x: self.driver.get_uuid_from_name(intra_extension_dict["id"], x, self.driver.SUBJECT_SCOPE, category_name), - value[user_name]) - else: - subject_assignments[subject_id][category_id].extend( - map(lambda x: 
self.driver.get_uuid_from_name(intra_extension_dict["id"], x, self.driver.SUBJECT_SCOPE, category_name), - value[user_name]) - ) - self.driver.set_subject_assignment_list(intra_extension_dict["id"], subject_id, category_id, - subject_assignments[subject_id][category_id]) - - object_assignments = dict() - for category_name, value in json_assignments["object_assignments"].iteritems(): - category_id = self.driver.get_uuid_from_name(intra_extension_dict["id"], category_name, self.driver.OBJECT_CATEGORY) - for object_name in value: - object_id = self.driver.get_uuid_from_name(intra_extension_dict["id"], object_name, self.driver.OBJECT) - if object_name not in object_assignments: - object_assignments[object_id] = dict() - if category_id not in object_assignments[object_id]: - object_assignments[object_id][category_id] = \ - map(lambda x: self.driver.get_uuid_from_name(intra_extension_dict["id"], x, self.driver.OBJECT_SCOPE, category_name), - value[object_name]) - else: - object_assignments[object_id][category_id].extend( - map(lambda x: self.driver.get_uuid_from_name(intra_extension_dict["id"], x, self.driver.OBJECT_SCOPE, category_name), - value[object_name]) - ) - self.driver.set_object_assignment_list(intra_extension_dict["id"], object_id, category_id, - object_assignments[object_id][category_id]) - - action_assignments = dict() - for category_name, value in json_assignments["action_assignments"].iteritems(): - category_id = self.driver.get_uuid_from_name(intra_extension_dict["id"], category_name, self.driver.ACTION_CATEGORY) - for action_name in value: - action_id = self.driver.get_uuid_from_name(intra_extension_dict["id"], action_name, self.driver.ACTION) - if action_name not in action_assignments: - action_assignments[action_id] = dict() - if category_id not in action_assignments[action_id]: - action_assignments[action_id][category_id] = \ - map(lambda x: self.driver.get_uuid_from_name(intra_extension_dict["id"], x, self.driver.ACTION_SCOPE, category_name), - 
value[action_name]) - else: - action_assignments[action_id][category_id].extend( - map(lambda x: self.driver.get_uuid_from_name(intra_extension_dict["id"], x, self.driver.ACTION_SCOPE, category_name), - value[action_name]) - ) - self.driver.set_action_assignment_list(intra_extension_dict["id"], action_id, category_id, - action_assignments[action_id][category_id]) - - def __load_metarule_file(self, intra_extension_dict, policy_dir): - - metadata_path = os.path.join(policy_dir, 'metarule.json') - f = open(metadata_path) - json_metarule = json.load(f) - metarule = dict() - categories = { - "subject_categories": self.driver.SUBJECT_CATEGORY, - "object_categories": self.driver.OBJECT_CATEGORY, - "action_categories": self.driver.ACTION_CATEGORY - } - # Translate value from JSON file to UUID for Database - for metarule_name in json_metarule["sub_meta_rules"]: - _id = uuid4().hex - metarule[_id] = dict() - metarule[_id]["name"] = metarule_name - for item in ("subject_categories", "object_categories", "action_categories"): - metarule[_id][item] = list() - for element in json_metarule["sub_meta_rules"][metarule_name][item]: - metarule[_id][item].append(self.driver.get_uuid_from_name(intra_extension_dict["id"], element, categories[item])) - metarule[_id]["algorithm"] = json_metarule["sub_meta_rules"][metarule_name]["algorithm"] - self.driver.set_sub_meta_rule_dict(intra_extension_dict["id"], _id, metarule[_id]) - submetarules = { - "aggregation": json_metarule["aggregation"], - "sub_meta_rules": metarule - } - for _id, _value in self.configuration_api.driver.get_aggregation_algorithms_dict().iteritems(): - if _value["name"] == json_metarule["aggregation"]: - self.driver.set_aggregation_algorithm_id(intra_extension_dict["id"], _id) - break - else: - LOG.warning("No aggregation_algorithm found for '{}'".format(json_metarule["aggregation"])) - - def __load_rule_file(self, intra_extension_dict, policy_dir): - - metadata_path = os.path.join(policy_dir, 'rule.json') - f = 
open(metadata_path) - json_rules = json.load(f) - intra_extension_dict["rule"] = {"rule": copy.deepcopy(json_rules)} - # Translate value from JSON file to UUID for Database - rules = dict() - sub_meta_rules = self.driver.get_sub_meta_rules_dict(intra_extension_dict["id"]) - for sub_rule_name in json_rules: - sub_rule_id = self.driver.get_uuid_from_name(intra_extension_dict["id"], - sub_rule_name, - self.driver.SUB_META_RULE) - rules[sub_rule_id] = list() - for rule in json_rules[sub_rule_name]: - subrule = list() - _rule = list(rule) - for category_uuid in sub_meta_rules[sub_rule_id]["subject_categories"]: - scope_name = _rule.pop(0) - scope_uuid = self.driver.get_uuid_from_name(intra_extension_dict["id"], - scope_name, - self.driver.SUBJECT_SCOPE, - category_uuid=category_uuid) - subrule.append(scope_uuid) - for category_uuid in sub_meta_rules[sub_rule_id]["action_categories"]: - scope_name = _rule.pop(0) - scope_uuid = self.driver.get_uuid_from_name(intra_extension_dict["id"], - scope_name, - self.driver.ACTION_SCOPE, - category_uuid=category_uuid) - subrule.append(scope_uuid) - for category_uuid in sub_meta_rules[sub_rule_id]["object_categories"]: - scope_name = _rule.pop(0) - scope_uuid = self.driver.get_uuid_from_name(intra_extension_dict["id"], - scope_name, - self.driver.OBJECT_SCOPE, - category_uuid=category_uuid) - subrule.append(scope_uuid) - # if a positive/negative value exists, all item of rule have not be consumed - if len(rule) >= 1 and isinstance(rule[0], bool): - subrule.append(rule[0]) - else: - # if value doesn't exist add a default value - subrule.append(True) - self.driver.set_rule_dict(intra_extension_dict["id"], sub_rule_id, uuid4().hex, subrule) - - @enforce(("read", "write"), "intra_extensions") - def load_intra_extension_dict(self, user_id, intra_extension_dict): - ie_dict = dict() - if "id" in intra_extension_dict: - ie_dict['id'] = filter_input(intra_extension_dict["id"]) - else: - ie_dict['id'] = uuid4().hex - - intraextensions = 
self.get_intra_extensions_dict(user_id) - - ie_dict["name"] = filter_input(intra_extension_dict["name"]) - ie_dict["model"] = filter_input(intra_extension_dict["model"]) - ie_dict["genre"] = filter_input(intra_extension_dict["genre"]) - if not ie_dict["genre"]: - if "admin" in ie_dict["model"] or "root" in ie_dict["model"]: - ie_dict["genre"] = "admin" - else: - ie_dict["genre"] = "authz" - ie_dict["description"] = filter_input(intra_extension_dict["description"]) - ref = self.driver.set_intra_extension_dict(ie_dict['id'], ie_dict) - - # if ie_dict['id'] in intraextensions: - # # note (dthom): if id was in intraextensions, it implies that the intraextension was already there - # # so we don't have to populate with default values - return ref - - def populate_default_data(self, ref): - self.moonlog_api.debug("Creation of IE: {}".format(ref)) - # read the template given by "model" and populate default variables - template_dir = os.path.join(CONF.moon.policy_directory, ref['intra_extension']["model"]) - self.__load_metadata_file(ref['intra_extension'], template_dir) - self.__load_perimeter_file(ref['intra_extension'], template_dir) - self.__load_scope_file(ref['intra_extension'], template_dir) - self.__load_assignment_file(ref['intra_extension'], template_dir) - self.__load_metarule_file(ref['intra_extension'], template_dir) - self.__load_rule_file(ref['intra_extension'], template_dir) - return ref - - def load_root_intra_extension_dict(self, policy_template=CONF.moon.root_policy_directory): - # Note (asteroide): Only one root Extension is authorized - # and this extension is created at the very beginning of the server - # so we don't need to use enforce here - extensions = self.driver.get_intra_extensions_dict() - for extension_id, extension_dict in extensions.iteritems(): - if extension_dict["name"] == CONF.moon.root_policy_directory: - return {'id': extension_id} - ie_dict = dict() - ie_dict['id'] = uuid4().hex - ie_dict["name"] = "policy_root" - ie_dict["model"] = 
filter_input(policy_template) - ie_dict["genre"] = "admin" - ie_dict["description"] = "policy_root" - ref = self.driver.set_intra_extension_dict(ie_dict['id'], ie_dict) - logging.debug("Creation of root IE: {}".format(ref)) - self.moonlog_api.debug("Creation of root IE: {}".format(ref)) - - # read the template given by "model" and populate default variables - template_dir = os.path.join(CONF.moon.policy_directory, ie_dict["model"]) - self.__load_metadata_file(ie_dict, template_dir) - self.__load_perimeter_file(ie_dict, template_dir) - self.__load_scope_file(ie_dict, template_dir) - self.__load_assignment_file(ie_dict, template_dir) - self.__load_metarule_file(ie_dict, template_dir) - self.__load_rule_file(ie_dict, template_dir) - self.__init_root(root_extension_id=ie_dict['id']) - if CONF.moon.master: - LOG.info("Master address: {}".format(CONF.moon.master)) - self.get_data_from_master() - return ref - - @enforce("read", "intra_extensions") - def get_intra_extension_dict(self, user_id, intra_extension_id): - """ - :param user_id: - :return: { - intra_extension_id: { - name: xxx, - model: yyy, - genre: authz, - description: xxx} - } - """ - intra_extensions_dict = self.driver.get_intra_extensions_dict() - if intra_extension_id not in intra_extensions_dict: - raise IntraExtensionUnknown() - return intra_extensions_dict[intra_extension_id] - - @enforce(("read", "write"), "intra_extensions") - def del_intra_extension(self, user_id, intra_extension_id): - if intra_extension_id not in self.driver.get_intra_extensions_dict(): - raise IntraExtensionUnknown() - for sub_meta_rule_id in self.driver.get_sub_meta_rules_dict(intra_extension_id): - for rule_id in self.driver.get_rules_dict(intra_extension_id, sub_meta_rule_id): - self.driver.del_rule(intra_extension_id, sub_meta_rule_id, rule_id) - self.driver.del_sub_meta_rule(intra_extension_id, sub_meta_rule_id) - self.driver.del_aggregation_algorithm(intra_extension_id) - for subject_id in 
self.driver.get_subjects_dict(intra_extension_id): - for subject_category_id in self.driver.get_subject_categories_dict(intra_extension_id): - self.driver.del_subject_scope(intra_extension_id, None, None) - self.driver.del_subject_assignment(intra_extension_id, None, None, None) - self.driver.del_subject_category(intra_extension_id, subject_category_id) - for object_id in self.driver.get_objects_dict(intra_extension_id): - for object_category_id in self.driver.get_object_categories_dict(intra_extension_id): - self.driver.del_object_scope(intra_extension_id, None, None) - self.driver.del_object_assignment(intra_extension_id, None, None, None) - self.driver.del_object_category(intra_extension_id, object_category_id) - for action_id in self.driver.get_actions_dict(intra_extension_id): - for action_category_id in self.driver.get_action_categories_dict(intra_extension_id): - self.driver.del_action_scope(intra_extension_id, None, None) - self.driver.del_action_assignment(intra_extension_id, None, None, None) - self.driver.del_action_category(intra_extension_id, action_category_id) - for subject_id in self.driver.get_subjects_dict(intra_extension_id): - self.driver.del_subject(intra_extension_id, subject_id) - for object_id in self.driver.get_objects_dict(intra_extension_id): - self.driver.del_object(intra_extension_id, object_id) - for action_id in self.driver.get_actions_dict(intra_extension_id): - self.driver.del_action(intra_extension_id, action_id) - return self.driver.del_intra_extension(intra_extension_id) - - @enforce(("read", "write"), "intra_extensions") - def set_intra_extension_dict(self, user_id, intra_extension_id, intra_extension_dict): - if intra_extension_id not in self.driver.get_intra_extensions_dict(): - raise IntraExtensionUnknown() - return self.driver.set_intra_extension_dict(intra_extension_id, intra_extension_dict) - - # Metadata functions - - @filter_input - @enforce("read", "subject_categories") - def get_subject_categories_dict(self, user_id, 
intra_extension_id): - """ - :param user_id: - :param intra_extension_id: - :return: { - subject_category_id1: { - name: xxx, - description: yyy}, - subject_category_id2: {...}, - ...} - """ - return self.driver.get_subject_categories_dict(intra_extension_id) - - @filter_input - @enforce(("read", "write"), "subject_categories") - def add_subject_category_dict(self, user_id, intra_extension_id, subject_category_dict): - subject_categories_dict = self.driver.get_subject_categories_dict(intra_extension_id) - for subject_category_id in subject_categories_dict: - if subject_categories_dict[subject_category_id]['name'] == subject_category_dict['name']: - raise SubjectCategoryNameExisting("Subject category {} already exists!".format(subject_category_dict['name'])) - _id = subject_category_dict.get('id', uuid4().hex) - return self.driver.set_subject_category_dict(intra_extension_id, _id, subject_category_dict) - - @filter_input - @enforce("read", "subject_categories") - def get_subject_category_dict(self, user_id, intra_extension_id, subject_category_id): - subject_categories_dict = self.driver.get_subject_categories_dict(intra_extension_id) - if subject_category_id not in subject_categories_dict: - raise SubjectCategoryUnknown() - return subject_categories_dict[subject_category_id] - - @filter_input - @enforce(("read", "write"), "subject_categories") - @enforce(("read", "write"), "subject_scopes") - @enforce(("read", "write"), "subject_assignments") - def del_subject_category(self, user_id, intra_extension_id, subject_category_id): - if subject_category_id not in self.driver.get_subject_categories_dict(intra_extension_id): - raise SubjectCategoryUnknown() - # Destroy scopes related to this category - for scope in self.driver.get_subject_scopes_dict(intra_extension_id, subject_category_id): - self.del_subject_scope(intra_extension_id, subject_category_id, scope) - # Destroy assignments related to this category - for subject_id in 
self.driver.get_subjects_dict(intra_extension_id): - for assignment_id in self.driver.get_subject_assignment_list(intra_extension_id, subject_id, subject_category_id): - self.driver.del_subject_assignment(intra_extension_id, subject_id, subject_category_id, assignment_id) - self.driver.del_subject_category(intra_extension_id, subject_category_id) - - @filter_input - @enforce(("read", "write"), "subject_categories") - def set_subject_category_dict(self, user_id, intra_extension_id, subject_category_id, subject_category_dict): - if subject_category_id not in self.driver.get_subject_categories_dict(intra_extension_id): - raise SubjectCategoryUnknown() - return self.driver.set_subject_category_dict(intra_extension_id, subject_category_id, subject_category_dict) - - @filter_input - @enforce("read", "object_categories") - def get_object_categories_dict(self, user_id, intra_extension_id): - return self.driver.get_object_categories_dict(intra_extension_id) - - @filter_input - @enforce(("read", "write"), "object_categories") - @enforce(("read", "write"), "object_scopes") - def add_object_category_dict(self, user_id, intra_extension_id, object_category_dict): - object_categories_dict = self.driver.get_object_categories_dict(intra_extension_id) - for object_category_id in object_categories_dict: - if object_categories_dict[object_category_id]["name"] == object_category_dict['name']: - raise ObjectCategoryNameExisting() - _id = object_category_dict.get('id', uuid4().hex) - return self.driver.set_object_category_dict(intra_extension_id, _id, object_category_dict) - - @filter_input - @enforce("read", "object_categories") - def get_object_category_dict(self, user_id, intra_extension_id, object_category_id): - object_categories_dict = self.driver.get_object_categories_dict(intra_extension_id) - if object_category_id not in object_categories_dict: - raise ObjectCategoryUnknown() - return object_categories_dict[object_category_id] - - @filter_input - @enforce(("read", "write"), 
"object_categories") - @enforce(("read", "write"), "object_scopes") - @enforce(("read", "write"), "object_assignments") - def del_object_category(self, user_id, intra_extension_id, object_category_id): - if object_category_id not in self.driver.get_object_categories_dict(intra_extension_id): - raise ObjectCategoryUnknown() - # Destroy scopes related to this category - for scope in self.driver.get_object_scopes_dict(intra_extension_id, object_category_id): - self.del_object_scope(intra_extension_id, object_category_id, scope) - # Destroy assignments related to this category - for object_id in self.driver.get_objects_dict(intra_extension_id): - for assignment_id in self.driver.get_object_assignment_list(intra_extension_id, object_id, object_category_id): - self.driver.del_object_assignment(intra_extension_id, object_id, object_category_id, assignment_id) - self.driver.del_object_category(intra_extension_id, object_category_id) - - @filter_input - @enforce(("read", "write"), "object_categories") - def set_object_category_dict(self, user_id, intra_extension_id, object_category_id, object_category_dict): - if object_category_id not in self.driver.get_object_categories_dict(intra_extension_id): - raise ObjectCategoryUnknown() - return self.driver.set_object_category_dict(intra_extension_id, object_category_id, object_category_dict) - - @filter_input - @enforce("read", "action_categories") - def get_action_categories_dict(self, user_id, intra_extension_id): - return self.driver.get_action_categories_dict(intra_extension_id) - - @filter_input - @enforce(("read", "write"), "action_categories") - @enforce(("read", "write"), "action_scopes") - def add_action_category_dict(self, user_id, intra_extension_id, action_category_dict): - action_categories_dict = self.driver.get_action_categories_dict(intra_extension_id) - for action_category_id in action_categories_dict: - if action_categories_dict[action_category_id]['name'] == action_category_dict['name']: - raise 
ActionCategoryNameExisting() - _id = action_category_dict.get('id', uuid4().hex) - return self.driver.set_action_category_dict(intra_extension_id, _id, action_category_dict) - - @filter_input - @enforce("read", "action_categories") - def get_action_category_dict(self, user_id, intra_extension_id, action_category_id): - action_categories_dict = self.driver.get_action_categories_dict(intra_extension_id) - if action_category_id not in action_categories_dict: - raise ActionCategoryUnknown() - return action_categories_dict[action_category_id] - - @filter_input - @enforce(("read", "write"), "action_categories") - @enforce(("read", "write"), "action_scopes") - def del_action_category(self, user_id, intra_extension_id, action_category_id): - if action_category_id not in self.driver.get_action_categories_dict(intra_extension_id): - raise ActionCategoryUnknown() - # Destroy scopes related to this category - for scope in self.driver.get_action_scopes_dict(intra_extension_id, action_category_id): - self.del_action_scope(intra_extension_id, action_category_id, scope) - # Destroy assignments related to this category - for action_id in self.driver.get_actions_dict(intra_extension_id): - for assignment_id in self.driver.get_action_assignment_list(intra_extension_id, action_id, action_category_id): - self.driver.del_action_assignment(intra_extension_id, action_id, action_category_id, assignment_id) - self.driver.del_action_category(intra_extension_id, action_category_id) - - @filter_input - @enforce(("read", "write"), "action_categories") - def set_action_category_dict(self, user_id, intra_extension_id, action_category_id, action_category_dict): - if action_category_id not in self.driver.get_action_categories_dict(intra_extension_id): - raise ActionCategoryUnknown() - return self.driver.set_action_category_dict(intra_extension_id, action_category_id, action_category_dict) - - # Perimeter functions - - @filter_input - @enforce("read", "subjects") - def get_subjects_dict(self, 
user_id, intra_extension_id): - return self.driver.get_subjects_dict(intra_extension_id) - - @filter_input - @enforce(("read", "write"), "subjects") - def add_subject_dict(self, user_id, intra_extension_id, subject_dict): - subjects_dict = self.driver.get_subjects_dict(intra_extension_id) - for subject_id in subjects_dict: - if subjects_dict[subject_id]["name"] == subject_dict['name']: - raise SubjectNameExisting("Subject {} already exists! [add_subject_dict]".format(subject_dict['name'])) - try: - subject_keystone_dict = self.identity_api.get_user_by_name(subject_dict['name'], "default") - except UserNotFound as e: - if 'domain_id' not in subject_dict: - subject_dict['domain_id'] = "default" - if 'project_id' not in subject_dict: - tenants = self.tenant_api.get_tenants_dict(user_id) - # Get the tenant ID for that intra_extension - for tenant_id, tenant_value in tenants.iteritems(): - if intra_extension_id == tenant_value['intra_admin_extension_id'] or \ - intra_extension_id == tenant_value['intra_authz_extension_id']: - subject_dict['project_id'] = tenant_value['id'] - break - else: - # If no tenant is found default to the admin tenant - for tenant_id, tenant_value in tenants.iteritems(): - if tenant_value['name'] == 'admin': - subject_dict['project_id'] = tenant_value['id'] - if 'email' not in subject_dict: - subject_dict['email'] = "" - if 'password' not in subject_dict: - # Default passord to the name of the new user - subject_dict['password'] = subject_dict['name'] - subject_keystone_dict = self.identity_api.create_user(subject_dict) - subject_dict["keystone_id"] = subject_keystone_dict["id"] - subject_dict["keystone_name"] = subject_keystone_dict["name"] - return self.driver.set_subject_dict(intra_extension_id, uuid4().hex, subject_dict) - - @filter_input - @enforce("read", "subjects") - def get_subject_dict(self, user_id, intra_extension_id, subject_id): - subjects_dict = self.driver.get_subjects_dict(intra_extension_id) - if subject_id not in subjects_dict: 
- raise SubjectUnknown() - return subjects_dict[subject_id] - - @filter_input - @enforce(("read", "write"), "subjects") - def del_subject(self, user_id, intra_extension_id, subject_id): - if subject_id not in self.driver.get_subjects_dict(intra_extension_id): - raise SubjectUnknown() - # Destroy assignments related to this category - for subject_category_id in self.driver.get_subject_categories_dict(intra_extension_id): - for _subject_id in self.driver.get_subjects_dict(intra_extension_id): - for assignment_id in self.driver.get_subject_assignment_list(intra_extension_id, _subject_id, subject_category_id): - self.driver.del_subject_assignment(intra_extension_id, _subject_id, subject_category_id, assignment_id) - self.driver.del_subject(intra_extension_id, subject_id) - - @filter_input - @enforce(("read", "write"), "subjects") - def set_subject_dict(self, user_id, intra_extension_id, subject_id, subject_dict): - subjects_dict = self.driver.get_subjects_dict(intra_extension_id) - for subject_id in subjects_dict: - if subjects_dict[subject_id]["name"] == subject_dict['name']: - raise SubjectNameExisting("Subject {} already exists!".format(subject_dict['name'])) - # Next line will raise an error if user is not present in Keystone database - subject_keystone_dict = self.identity_api.get_user_by_name(subject_dict['name'], "default") - subject_dict["keystone_id"] = subject_keystone_dict["id"] - subject_dict["keystone_name"] = subject_keystone_dict["name"] - return self.driver.set_subject_dict(intra_extension_id, subject_dict["id"], subject_dict) - - @filter_input - def get_subject_dict_from_keystone_id(self, tenant_id, intra_extension_id, keystone_id): - tenants_dict = self.tenant_api.driver.get_tenants_dict() - if tenant_id not in tenants_dict: - raise TenantUnknown() - if intra_extension_id not in (tenants_dict[tenant_id]['intra_authz_extension_id'], - tenants_dict[tenant_id]['intra_admin_extension_id'], ): - raise IntraExtensionUnknown() - # Note (asteroide): We used 
self.root_admin_id because the user requesting this information - # may only know his keystone_id and not the subject ID in the requested intra_extension. - subjects_dict = self.get_subjects_dict(self.root_admin_id, intra_extension_id) - for subject_id in subjects_dict: - if keystone_id == subjects_dict[subject_id]['keystone_id']: - return {subject_id: subjects_dict[subject_id]} - - @filter_input - def get_subject_dict_from_keystone_name(self, tenant_id, intra_extension_id, keystone_name): - tenants_dict = self.tenant_api.driver.get_tenants_dict() - if tenant_id not in tenants_dict: - raise TenantUnknown() - if intra_extension_id not in (tenants_dict[tenant_id]['intra_authz_extension_id'], - tenants_dict[tenant_id]['intra_admin_extension_id'], ): - raise IntraExtensionUnknown() - # Note (asteroide): We used self.root_admin_id because the user requesting this information - # may only know his keystone_name and not the subject ID in the requested intra_extension. - subjects_dict = self.get_subjects_dict(self.root_admin_id, intra_extension_id) - for subject_id in subjects_dict: - if keystone_name == subjects_dict[subject_id]['keystone_name']: - return {subject_id: subjects_dict[subject_id]} - - @filter_input - @enforce("read", "objects") - def get_objects_dict(self, user_id, intra_extension_id): - return self.driver.get_objects_dict(intra_extension_id) - - @filter_input - @enforce(("read", "write"), "objects") - def add_object_dict(self, user_id, intra_extension_id, object_dict): - objects_dict = self.driver.get_objects_dict(intra_extension_id) - object_id = uuid4().hex - if "id" in object_dict: - object_id = object_dict['id'] - for _object_id in objects_dict: - if objects_dict[_object_id]["name"] == object_dict['name']: - raise ObjectNameExisting("Object {} already exist!".format(object_dict['name'])) - return self.driver.set_object_dict(intra_extension_id, object_id, object_dict) - - @filter_input - @enforce("read", "objects") - def get_object_dict(self, user_id, 
intra_extension_id, object_id): - objects_dict = self.driver.get_objects_dict(intra_extension_id) - if object_id not in objects_dict: - raise ObjectUnknown("Unknown object id: {}".format(object_id)) - return objects_dict[object_id] - - @filter_input - @enforce(("read", "write"), "objects") - def del_object(self, user_id, intra_extension_id, object_id): - if object_id not in self.driver.get_objects_dict(intra_extension_id): - raise ObjectUnknown("Unknown object id: {}".format(object_id)) - # Destroy assignments related to this category - for object_category_id in self.driver.get_object_categories_dict(intra_extension_id): - for _object_id in self.driver.get_objects_dict(intra_extension_id): - for assignment_id in self.driver.get_object_assignment_list(intra_extension_id, _object_id, object_category_id): - self.driver.del_object_assignment(intra_extension_id, _object_id, object_category_id, assignment_id) - self.driver.del_object(intra_extension_id, object_id) - - @filter_input - @enforce(("read", "write"), "objects") - def set_object_dict(self, user_id, intra_extension_id, object_id, object_dict): - objects_dict = self.driver.get_objects_dict(intra_extension_id) - for object_id in objects_dict: - if objects_dict[object_id]["name"] == object_dict['name']: - raise ObjectNameExisting() - return self.driver.set_object_dict(intra_extension_id, object_id, object_dict) - - @filter_input - @enforce("read", "actions") - def get_actions_dict(self, user_id, intra_extension_id): - return self.driver.get_actions_dict(intra_extension_id) - - @filter_input - @enforce(("read", "write"), "actions") - def add_action_dict(self, user_id, intra_extension_id, action_dict): - actions_dict = self.driver.get_actions_dict(intra_extension_id) - for action_id in actions_dict: - if actions_dict[action_id]["name"] == action_dict['name']: - raise ActionNameExisting() - return self.driver.set_action_dict(intra_extension_id, uuid4().hex, action_dict) - - @filter_input - @enforce("read", "actions") 
- def get_action_dict(self, user_id, intra_extension_id, action_id): - actions_dict = self.driver.get_actions_dict(intra_extension_id) - if action_id not in actions_dict: - raise ActionUnknown() - return actions_dict[action_id] - - @filter_input - @enforce(("read", "write"), "actions") - def del_action(self, user_id, intra_extension_id, action_id): - if action_id not in self.driver.get_actions_dict(intra_extension_id): - raise ActionUnknown() - # Destroy assignments related to this category - for action_category_id in self.driver.get_action_categories_dict(intra_extension_id): - for _action_id in self.driver.get_actions_dict(intra_extension_id): - for assignment_id in self.driver.get_action_assignment_list(intra_extension_id, _action_id, action_category_id): - self.driver.del_action_assignment(intra_extension_id, _action_id, action_category_id, assignment_id) - return self.driver.del_action(intra_extension_id, action_id) - - @filter_input - @enforce(("read", "write"), "actions") - def set_action_dict(self, user_id, intra_extension_id, action_id, action_dict): - actions_dict = self.driver.get_actions_dict(intra_extension_id) - for action_id in actions_dict: - if actions_dict[action_id]["name"] == action_dict['name']: - raise ActionNameExisting() - return self.driver.set_action_dict(intra_extension_id, action_id, action_dict) - - # Scope functions - - @filter_input - @enforce("read", "subject_scopes") - @enforce("read", "subject_categories") - def get_subject_scopes_dict(self, user_id, intra_extension_id, subject_category_id): - """ - :param user_id: - :param intra_extension_id: - :param subject_category_id: - :return: { - subject_scope_id1: { - name: xxx, - des: aaa}, - subject_scope_id2: { - name: yyy, - des: bbb}, - ...} - """ - if subject_category_id not in self.driver.get_subject_categories_dict(intra_extension_id): - raise SubjectCategoryUnknown() - return self.driver.get_subject_scopes_dict(intra_extension_id, subject_category_id) - - @filter_input - 
@enforce(("read", "write"), "subject_scopes") - @enforce("read", "subject_categories") - def add_subject_scope_dict(self, user_id, intra_extension_id, subject_category_id, subject_scope_dict): - if subject_category_id not in self.driver.get_subject_categories_dict(intra_extension_id): - raise SubjectCategoryUnknown() - subject_scopes_dict = self.driver.get_subject_scopes_dict(intra_extension_id, subject_category_id) - for _subject_scope_id in subject_scopes_dict: - if subject_scope_dict['name'] == subject_scopes_dict[_subject_scope_id]['name']: - raise SubjectScopeNameExisting() - return self.driver.set_subject_scope_dict(intra_extension_id, subject_category_id, uuid4().hex, subject_scope_dict) - - @filter_input - @enforce("read", "subject_scopes") - @enforce("read", "subject_categories") - def get_subject_scope_dict(self, user_id, intra_extension_id, subject_category_id, subject_scope_id): - if subject_category_id not in self.driver.get_subject_categories_dict(intra_extension_id): - raise SubjectCategoryUnknown() - subject_scopes_dict = self.driver.get_subject_scopes_dict(intra_extension_id, subject_category_id) - if subject_scope_id not in subject_scopes_dict: - raise SubjectScopeUnknown() - return subject_scopes_dict[subject_scope_id] - - @filter_input - @enforce(("read", "write"), "subject_scopes") - @enforce("read", "subject_categories") - def del_subject_scope(self, user_id, intra_extension_id, subject_category_id, subject_scope_id): - if subject_category_id not in self.driver.get_subject_categories_dict(intra_extension_id): - raise SubjectCategoryUnknown() - if subject_scope_id not in self.driver.get_subject_scopes_dict(intra_extension_id, subject_category_id): - raise SubjectScopeUnknown() - # Destroy scope-related assignment - for subject_id in self.driver.get_subjects_dict(intra_extension_id): - for assignment_id in self.driver.get_subject_assignment_list(intra_extension_id, subject_id, subject_category_id): - 
self.driver.del_subject_assignment(intra_extension_id, subject_id, subject_category_id, assignment_id) - # Destroy scope-related rule - for sub_meta_rule_id in self.driver.get_sub_meta_rules_dict(intra_extension_id): - rules_dict = self.driver.get_rules_dict(intra_extension_id, sub_meta_rule_id) - for rule_id in rules_dict: - if subject_scope_id in rules_dict[rule_id]: - self.driver.del_rule(intra_extension_id, sub_meta_rule_id, rule_id) - self.driver.del_subject_scope(intra_extension_id, subject_category_id, subject_scope_id) - - @filter_input - @enforce(("read", "write"), "subject_scopes") - @enforce("read", "subject_categories") - def set_subject_scope_dict(self, user_id, intra_extension_id, subject_category_id, subject_scope_id, subject_scope_dict): - if subject_category_id not in self.driver.get_subject_categories_dict(intra_extension_id): - raise SubjectCategoryUnknown() - subject_scopes_dict = self.driver.get_subject_scopes_dict(intra_extension_id, subject_category_id) - for _subject_scope_id in subject_scopes_dict: - if subject_scopes_dict[_subject_scope_id]['name'] == subject_scope_dict['name']: - raise SubjectScopeNameExisting() - return self.driver.set_subject_scope_dict(intra_extension_id, subject_category_id, subject_scope_id, subject_scope_dict) - - @filter_input - @enforce("read", "object_scopes") - @enforce("read", "object_categories") - def get_object_scopes_dict(self, user_id, intra_extension_id, object_category_id): - if object_category_id not in self.driver.get_object_categories_dict(intra_extension_id): - raise ObjectCategoryUnknown() - return self.driver.get_object_scopes_dict(intra_extension_id, object_category_id) - - @filter_input - @enforce(("read", "write"), "object_scopes") - @enforce("read", "object_categories") - def add_object_scope_dict(self, user_id, intra_extension_id, object_category_id, object_scope_dict): - if object_category_id not in self.driver.get_object_categories_dict(intra_extension_id): - raise ObjectCategoryUnknown() - 
object_scopes_dict = self.driver.get_object_scopes_dict(intra_extension_id, object_category_id) - for _object_scope_id in object_scopes_dict: - if object_scopes_dict[_object_scope_id]['name'] == object_scope_dict['name']: - raise ObjectScopeNameExisting() - return self.driver.set_object_scope_dict(intra_extension_id, object_category_id, uuid4().hex, object_scope_dict) - - @filter_input - @enforce("read", "object_scopes") - @enforce("read", "object_categories") - def get_object_scope_dict(self, user_id, intra_extension_id, object_category_id, object_scope_id): - if object_category_id not in self.driver.get_object_categories_dict(intra_extension_id): - raise ObjectCategoryUnknown() - object_scopes_dict = self.driver.get_object_scopes_dict(intra_extension_id, object_category_id) - if object_scope_id not in object_scopes_dict: - raise ObjectScopeUnknown() - return object_scopes_dict[object_scope_id] - - @filter_input - @enforce(("read", "write"), "object_scopes") - @enforce("read", "object_categories") - def del_object_scope(self, user_id, intra_extension_id, object_category_id, object_scope_id): - if object_category_id not in self.driver.get_object_categories_dict(intra_extension_id): - raise ObjectCategoryUnknown() - if object_scope_id not in self.driver.get_object_scopes_dict(intra_extension_id, object_category_id): - raise ObjectScopeUnknown() - # Destroy scope-related assignment - for object_id in self.driver.get_objects_dict(intra_extension_id): - for assignment_id in self.driver.get_object_assignment_list(intra_extension_id, object_id, object_category_id): - self.driver.del_object_assignment(intra_extension_id, object_id, object_category_id, assignment_id) - # Destroy scope-related rule - for sub_meta_rule_id in self.driver.get_sub_meta_rules_dict(intra_extension_id): - rules_dict = self.driver.get_rules_dict(intra_extension_id, sub_meta_rule_id) - for rule_id in rules_dict: - if object_scope_id in rules_dict[rule_id]: - self.driver.del_rule(intra_extension_id, 
sub_meta_rule_id, rule_id) - self.driver.del_object_scope(intra_extension_id, object_category_id, object_scope_id) - - @filter_input - @enforce(("read", "write"), "object_scopes") - @enforce("read", "object_categories") - def set_object_scope_dict(self, user_id, intra_extension_id, object_category_id, object_scope_id, object_scope_dict): - if object_category_id not in self.driver.get_object_categories_dict(intra_extension_id): - raise ObjectCategoryUnknown() - object_scopes_dict = self.driver.get_object_scopes_dict(intra_extension_id, object_category_id) - for _object_scope_id in object_scopes_dict: - if object_scopes_dict[_object_scope_id]['name'] == object_scope_dict['name']: - raise ObjectScopeNameExisting() - return self.driver.set_object_scope_dict(intra_extension_id, object_category_id, object_scope_id, object_scope_dict) - - @filter_input - @enforce("read", "action_scopes") - @enforce("read", "action_categories") - def get_action_scopes_dict(self, user_id, intra_extension_id, action_category_id): - if action_category_id not in self.driver.get_action_categories_dict(intra_extension_id): - raise ActionCategoryUnknown() - return self.driver.get_action_scopes_dict(intra_extension_id, action_category_id) - - @filter_input - @enforce(("read", "write"), "action_scopes") - @enforce("read", "action_categories") - def add_action_scope_dict(self, user_id, intra_extension_id, action_category_id, action_scope_dict): - if action_category_id not in self.driver.get_action_categories_dict(intra_extension_id): - raise ActionCategoryUnknown() - action_scopes_dict = self.driver.get_action_scopes_dict(intra_extension_id, action_category_id) - for _action_scope_id in action_scopes_dict: - if action_scopes_dict[_action_scope_id]['name'] == action_scope_dict['name']: - raise ActionScopeNameExisting() - return self.driver.set_action_scope_dict(intra_extension_id, action_category_id, uuid4().hex, action_scope_dict) - - @filter_input - @enforce("read", "action_scopes") - 
@enforce("read", "action_categories") - def get_action_scope_dict(self, user_id, intra_extension_id, action_category_id, action_scope_id): - if action_category_id not in self.driver.get_action_categories_dict(intra_extension_id): - raise ActionCategoryUnknown() - action_scopes_dict = self.driver.get_action_scopes_dict(intra_extension_id, action_category_id) - if action_scope_id not in action_scopes_dict: - raise ActionScopeUnknown() - return action_scopes_dict[action_scope_id] - - @filter_input - @enforce(("read", "write"), "action_scopes") - @enforce("read", "action_categories") - def del_action_scope(self, user_id, intra_extension_id, action_category_id, action_scope_id): - if action_category_id not in self.driver.get_action_categories_dict(intra_extension_id): - raise ActionCategoryUnknown() - if action_scope_id not in self.driver.get_action_scopes_dict(intra_extension_id, action_category_id): - raise ActionScopeUnknown() - # Destroy scope-related assignment - for action_id in self.driver.get_actions_dict(intra_extension_id): - for assignment_id in self.driver.get_action_assignment_list(intra_extension_id, action_id, action_category_id): - self.driver.del_action_assignment(intra_extension_id, action_id, action_category_id, assignment_id) - # Destroy scope-related rule - for sub_meta_rule_id in self.driver.get_sub_meta_rules_dict(intra_extension_id): - rules_dict = self.driver.get_rules_dict(intra_extension_id, sub_meta_rule_id) - for rule_id in rules_dict: - if action_scope_id in rules_dict[rule_id]: - self.driver.del_rule(intra_extension_id, sub_meta_rule_id, rule_id) - self.driver.del_action_scope(intra_extension_id, action_category_id, action_scope_id) - - @filter_input - @enforce(("read", "write"), "action_scopes") - @enforce("read", "action_categories") - def set_action_scope_dict(self, user_id, intra_extension_id, action_category_id, action_scope_id, action_scope_dict): - if action_category_id not in 
self.driver.get_action_categories_dict(intra_extension_id): - raise ActionCategoryUnknown() - action_scopes_dict = self.driver.get_action_scopes_dict(intra_extension_id, action_category_id) - for _action_scope_id in action_scopes_dict: - if action_scopes_dict[_action_scope_id]['name'] == action_scope_dict['name']: - raise ActionScopeNameExisting() - return self.driver.set_action_scope_dict(intra_extension_id, action_category_id, action_scope_id, action_scope_dict) - - # Assignment functions - - @filter_input - @enforce("read", "subject_assignments") - @enforce("read", "subjects") - @enforce("read", "subject_categories") - def get_subject_assignment_list(self, user_id, intra_extension_id, subject_id, subject_category_id): - """ - :param user_id: - :param intra_extension_id: - :param subject_id: - :param subject_category_id: - :return: [ - subject_scope_id1, ..., subject_scope_idn - ] - """ - if subject_id not in self.driver.get_subjects_dict(intra_extension_id): - raise SubjectUnknown() - if subject_category_id not in self.driver.get_subject_categories_dict(intra_extension_id): - raise SubjectCategoryUnknown() - return self.driver.get_subject_assignment_list(intra_extension_id, subject_id, subject_category_id) - - @filter_input - @enforce(("read", "write"), "subject_assignments") - @enforce("read", "subjects") - @enforce("read", "subject_categories") - @enforce("read", "subject_scopes") - def add_subject_assignment_list(self, user_id, intra_extension_id, subject_id, subject_category_id, subject_scope_id): - if subject_id not in self.driver.get_subjects_dict(intra_extension_id): - raise SubjectUnknown() - if subject_category_id not in self.driver.get_subject_categories_dict(intra_extension_id): - raise SubjectCategoryUnknown() - if subject_scope_id not in self.driver.get_subject_scopes_dict(intra_extension_id, subject_category_id): - raise SubjectScopeUnknown() - elif subject_scope_id in self.driver.get_subject_assignment_list(intra_extension_id, subject_id, 
subject_category_id): - raise SubjectAssignmentExisting() - return self.driver.add_subject_assignment_list(intra_extension_id, subject_id, subject_category_id, subject_scope_id) - - @filter_input - @enforce(("read", "write"), "subject_assignments") - @enforce("read", "subjects") - @enforce("read", "subject_categories") - @enforce("read", "subject_scopes") - def del_subject_assignment(self, user_id, intra_extension_id, subject_id, subject_category_id, subject_scope_id): - if subject_id not in self.driver.get_subjects_dict(intra_extension_id): - raise SubjectUnknown() - if subject_category_id not in self.driver.get_subject_categories_dict(intra_extension_id): - raise SubjectCategoryUnknown() - if subject_scope_id not in self.driver.get_subject_scopes_dict(intra_extension_id, subject_category_id): - raise SubjectScopeUnknown() - elif subject_scope_id not in self.driver.get_subject_assignment_list(intra_extension_id, subject_id, subject_category_id): - raise SubjectAssignmentUnknown() - self.driver.del_subject_assignment(intra_extension_id, subject_id, subject_category_id, subject_scope_id) - - @filter_input - @enforce("read", "object_assignments") - @enforce("read", "objects") - @enforce("read", "object_categories") - def get_object_assignment_list(self, user_id, intra_extension_id, object_id, object_category_id): - if object_id not in self.driver.get_objects_dict(intra_extension_id): - raise ObjectUnknown("Unknown object id: {}".format(object_id)) - if object_category_id not in self.driver.get_object_categories_dict(intra_extension_id): - raise ObjectCategoryUnknown() - return self.driver.get_object_assignment_list(intra_extension_id, object_id, object_category_id) - - @filter_input - @enforce(("read", "write"), "object_assignments") - @enforce("read", "objects") - @enforce("read", "object_categories") - def add_object_assignment_list(self, user_id, intra_extension_id, object_id, object_category_id, object_scope_id): - if object_id not in 
self.driver.get_objects_dict(intra_extension_id): - raise ObjectUnknown("Unknown object id: {}".format(object_id)) - if object_category_id not in self.driver.get_object_categories_dict(intra_extension_id): - raise ObjectCategoryUnknown() - if object_scope_id not in self.driver.get_object_scopes_dict(intra_extension_id, object_category_id): - raise ObjectScopeUnknown() - elif object_scope_id in self.driver.get_object_assignment_list(intra_extension_id, object_id, object_category_id): - raise ObjectAssignmentExisting() - return self.driver.add_object_assignment_list(intra_extension_id, object_id, object_category_id, object_scope_id) - - @filter_input - @enforce(("read", "write"), "object_assignments") - @enforce("read", "objects") - @enforce("read", "object_categories") - @enforce("read", "object_scopes") - def del_object_assignment(self, user_id, intra_extension_id, object_id, object_category_id, object_scope_id): - if object_id not in self.driver.get_objects_dict(intra_extension_id): - raise ObjectUnknown("Unknown object id: {}".format(object_id)) - if object_category_id not in self.driver.get_object_categories_dict(intra_extension_id): - raise ObjectCategoryUnknown() - if object_scope_id not in self.driver.get_object_scopes_dict(intra_extension_id, object_category_id): - raise ObjectScopeUnknown() - elif object_scope_id not in self.driver.get_object_assignment_list(intra_extension_id, object_id, object_category_id): - raise ObjectAssignmentUnknown() - self.driver.del_object_assignment(intra_extension_id, object_id, object_category_id, object_scope_id) - - @filter_input - @enforce("read", "action_assignments") - @enforce("read", "actions") - @enforce("read", "action_categories") - def get_action_assignment_list(self, user_id, intra_extension_id, action_id, action_category_id): - if action_id not in self.driver.get_actions_dict(intra_extension_id): - raise ActionUnknown() - if action_category_id not in self.driver.get_action_categories_dict(intra_extension_id): - 
raise ActionCategoryUnknown() - return self.driver.get_action_assignment_list(intra_extension_id, action_id, action_category_id) - - @filter_input - @enforce(("read", "write"), "action_assignments") - @enforce("read", "actions") - @enforce("read", "action_categories") - def add_action_assignment_list(self, user_id, intra_extension_id, action_id, action_category_id, action_scope_id): - if action_id not in self.driver.get_actions_dict(intra_extension_id): - raise ActionUnknown() - if action_category_id not in self.driver.get_action_categories_dict(intra_extension_id): - raise ActionCategoryUnknown() - if action_scope_id not in self.driver.get_action_scopes_dict(intra_extension_id, action_category_id): - raise ActionScopeUnknown() - elif action_scope_id in self.driver.get_action_assignment_list(intra_extension_id, action_id, action_category_id): - raise ObjectAssignmentExisting() - return self.driver.add_action_assignment_list(intra_extension_id, action_id, action_category_id, action_scope_id) - - @filter_input - @enforce(("read", "write"), "action_assignments") - @enforce("read", "actions") - @enforce("read", "action_categories") - @enforce("read", "action_scopes") - def del_action_assignment(self, user_id, intra_extension_id, action_id, action_category_id, action_scope_id): - if action_id not in self.driver.get_actions_dict(intra_extension_id): - raise ActionUnknown() - if action_category_id not in self.driver.get_action_categories_dict(intra_extension_id): - raise ActionCategoryUnknown() - if action_scope_id not in self.driver.get_action_scopes_dict(intra_extension_id, action_category_id): - raise ActionScopeUnknown() - elif action_scope_id not in self.driver.get_action_assignment_list(intra_extension_id, action_id, action_category_id): - raise ActionAssignmentUnknown() - self.driver.del_action_assignment(intra_extension_id, action_id, action_category_id, action_scope_id) - - # Metarule functions - - @filter_input - @enforce("read", "aggregation_algorithm") - def 
get_aggregation_algorithm_id(self, user_id, intra_extension_id): - """ - :param user_id: - :param intra_extension_id: - :return: { - aggregation_algorithm_id: {name: xxx, description: yyy} - } - """ - aggregation_algorithm_id = self.driver.get_aggregation_algorithm_id(intra_extension_id) - if not aggregation_algorithm_id: - raise AggregationAlgorithmNotExisting() - return aggregation_algorithm_id - - @filter_input - @enforce(("read", "write"), "aggregation_algorithm") - def set_aggregation_algorithm_id(self, user_id, intra_extension_id, aggregation_algorithm_id): - if aggregation_algorithm_id: - if aggregation_algorithm_id not in self.configuration_api.get_aggregation_algorithms_dict( - self.root_admin_id): - raise AggregationAlgorithmUnknown() - return self.driver.set_aggregation_algorithm_id(intra_extension_id, aggregation_algorithm_id) - - @filter_input - @enforce("read", "sub_meta_rules") - def get_sub_meta_rules_dict(self, user_id, intra_extension_id): - """ - :param user_id: - :param intra_extension_id: - :return: { - sub_meta_rule_id_1: { - "name": xxx, - "algorithm": yyy, - "subject_categories": [subject_category_id1, subject_category_id2,...], - "object_categories": [object_category_id1, object_category_id2,...], - "action_categories": [action_category_id1, action_category_id2,...] - sub_meta_rule_id_2: {...} - ... 
- } - """ - return self.driver.get_sub_meta_rules_dict(intra_extension_id) - - @filter_input - @enforce(("read", "write"), "sub_meta_rules") - @enforce("write", "rules") - def add_sub_meta_rule_dict(self, user_id, intra_extension_id, sub_meta_rule_dict): - LOG.info("add_sub_meta_rule_dict = {}".format(self.driver.get_sub_meta_rules_dict(intra_extension_id))) - LOG.info("add_sub_meta_rule_dict = {}".format(sub_meta_rule_dict)) - sub_meta_rules_dict = self.driver.get_sub_meta_rules_dict(intra_extension_id) - for _sub_meta_rule_id in sub_meta_rules_dict: - if sub_meta_rule_dict['name'] == sub_meta_rules_dict[_sub_meta_rule_id]["name"]: - raise SubMetaRuleNameExisting() - if sub_meta_rule_dict['subject_categories'] == sub_meta_rules_dict[_sub_meta_rule_id]["subject_categories"] and \ - sub_meta_rule_dict['object_categories'] == sub_meta_rules_dict[_sub_meta_rule_id]["object_categories"] and \ - sub_meta_rule_dict['action_categories'] == sub_meta_rules_dict[_sub_meta_rule_id]["action_categories"] and \ - sub_meta_rule_dict['algorithm'] == sub_meta_rules_dict[_sub_meta_rule_id]["algorithm"]: - raise SubMetaRuleExisting() - algorithm_names = map(lambda x: x['name'], - self.configuration_api.get_sub_meta_rule_algorithms_dict(user_id).values()) - if sub_meta_rule_dict['algorithm'] not in algorithm_names: - raise SubMetaRuleAlgorithmNotExisting() - sub_meta_rule_id = uuid4().hex - # TODO (dthom): add new sub-meta-rule to rule dict - # self.driver.add_rule(intra_extension_id, sub_meta_rule_id, []) - return self.driver.set_sub_meta_rule_dict(intra_extension_id, sub_meta_rule_id, sub_meta_rule_dict) - - @filter_input - @enforce(("read", "write"), "sub_meta_rules") - def get_sub_meta_rule_dict(self, user_id, intra_extension_id, sub_meta_rule_id): - sub_meta_rule_dict = self.driver.get_sub_meta_rules_dict(intra_extension_id) - if sub_meta_rule_id not in sub_meta_rule_dict: - raise SubMetaRuleUnknown() - return sub_meta_rule_dict[sub_meta_rule_id] - - @filter_input - 
@enforce(("read", "write"), "sub_meta_rules") - @enforce(("read", "write"), "rules") - def del_sub_meta_rule(self, user_id, intra_extension_id, sub_meta_rule_id): - if sub_meta_rule_id not in self.driver.get_sub_meta_rules_dict(intra_extension_id): - raise SubMetaRuleUnknown() - for rule_id in self.driver.get_rules_dict(intra_extension_id, sub_meta_rule_id): - self.del_rule(intra_extension_id, sub_meta_rule_id, rule_id) - self.driver.del_sub_meta_rule(intra_extension_id, sub_meta_rule_id) - - @filter_input - @enforce(("read", "write"), "sub_meta_rules") - @enforce("write", "rules") - def set_sub_meta_rule_dict(self, user_id, intra_extension_id, sub_meta_rule_id, sub_meta_rule_dict): - LOG.info("set_sub_meta_rule_dict = {}".format(self.driver.get_sub_meta_rules_dict(intra_extension_id))) - LOG.info("set_sub_meta_rule_dict = {} {}".format(sub_meta_rule_id, sub_meta_rule_dict)) - if sub_meta_rule_id not in self.driver.get_sub_meta_rules_dict(intra_extension_id): - raise SubMetaRuleUnknown() - for attribute in sub_meta_rule_dict.keys(): - if not sub_meta_rule_dict[attribute]: - sub_meta_rule_dict.pop(attribute) - return self.driver.set_sub_meta_rule_dict(intra_extension_id, sub_meta_rule_id, sub_meta_rule_dict) - - # Rule functions - @filter_input - @enforce("read", "rules") - def get_rules_dict(self, user_id, intra_extension_id, sub_meta_rule_id): - """ - :param user_id: - :param intra_extension_id: - :param sub_meta_rule_id: - :return: { - rule_id1: [subject_scope1, subject_scope2, ..., action_scope1, ..., object_scope1, ... ], - rule_id2: [subject_scope3, subject_scope4, ..., action_scope3, ..., object_scope3, ... 
], - ...} - """ - return self.driver.get_rules_dict(intra_extension_id, sub_meta_rule_id) - - @filter_input - @enforce("read", "sub_meta_rules") - @enforce(("read", "write"), "rules") - def add_rule_dict(self, user_id, intra_extension_id, sub_meta_rule_id, rule_list): - if sub_meta_rule_id not in self.driver.get_sub_meta_rules_dict(intra_extension_id): - raise SubMetaRuleUnknown() - if rule_list in self.driver.get_rules_dict(intra_extension_id, sub_meta_rule_id).values(): - raise RuleExisting() - return self.driver.set_rule_dict(intra_extension_id, sub_meta_rule_id, uuid4().hex, rule_list) - - @filter_input - @enforce("read", "sub_meta_rules") - @enforce("read", "rules") - def get_rule_dict(self, user_id, intra_extension_id, sub_meta_rule_id, rule_id): - if sub_meta_rule_id not in self.driver.get_sub_meta_rules_dict(intra_extension_id): - raise SubMetaRuleUnknown() - rules_dict = self.driver.get_rules_dict(intra_extension_id, sub_meta_rule_id) - if rule_id not in rules_dict: - raise RuleUnknown() - return rules_dict[rule_id] - - @filter_input - @enforce("read", "sub_meta_rules") - @enforce(("read", "write"), "rules") - def del_rule(self, user_id, intra_extension_id, sub_meta_rule_id, rule_id): - if sub_meta_rule_id not in self.driver.get_sub_meta_rules_dict(intra_extension_id): - raise SubMetaRuleUnknown() - if rule_id not in self.driver.get_rules_dict(intra_extension_id, sub_meta_rule_id): - raise RuleUnknown() - self.driver.del_rule(intra_extension_id, sub_meta_rule_id, rule_id) - - @filter_input - @enforce("read", "sub_meta_rules") - @enforce(("read", "write"), "rules") - def set_rule_dict(self, user_id, intra_extension_id, sub_meta_rule_id, rule_id, rule_list): - if sub_meta_rule_id not in self.driver.get_sub_meta_rules_dict(intra_extension_id): - raise SubMetaRuleUnknown() - if rule_id not in self.driver.get_rules_dict(intra_extension_id, sub_meta_rule_id): - raise RuleUnknown() - return self.driver.set_rule_dict(intra_extension_id, sub_meta_rule_id, rule_id, 
rule_list) - - -@dependency.provider('authz_api') -class IntraExtensionAuthzManager(IntraExtensionManager): - - def __init__(self): - super(IntraExtensionAuthzManager, self).__init__() - - def __authz(self, tenant_id, subject_k_id, object_name, action_name, genre="authz"): - """Check authorization for a particular action. - :return: True or False or raise an exception - """ - if genre == "authz": - genre = "intra_authz_extension_id" - elif genre == "admin": - genre = "intra_admin_extension_id" - - tenants_dict = self.tenant_api.get_tenants_dict(self.root_admin_id) - - if tenant_id not in tenants_dict: - # raise TenantUnknown("Cannot authz because Tenant is unknown {}".format(tenant_id)) - LOG.warning("Cannot authz because Tenant is not managed by Moon {}".format(tenant_id)) - return {'authz': True, 'comment': "Cannot authz because Tenant is not managed by Moon {}".format(tenant_id)} - intra_extension_id = tenants_dict[tenant_id][genre] - if not intra_extension_id: - raise TenantNoIntraExtension() - subjects_dict = self.driver.get_subjects_dict(intra_extension_id) - subject_id = None - for _subject_id in subjects_dict: - if subjects_dict[_subject_id]['keystone_id'] == subject_k_id: - subject_id = _subject_id - break - if not subject_id: - raise SubjectUnknown("Unknown subject id: {}".format(subject_k_id)) - objects_dict = self.driver.get_objects_dict(intra_extension_id) - object_id = None - for _object_id in objects_dict: - if objects_dict[_object_id]['name'] == object_name: - object_id = _object_id - break - if not object_id: - raise ObjectUnknown("Unknown object name: {}".format(object_name)) - - actions_dict = self.driver.get_actions_dict(intra_extension_id) - action_id = None - for _action_id in actions_dict: - if actions_dict[_action_id]['name'] == action_name: - action_id = _action_id - break - if not action_id: - raise ActionUnknown("Unknown action name: {}".format(action_name)) - return super(IntraExtensionAuthzManager, self).authz(intra_extension_id, 
subject_id, object_id, action_id) - - def authz(self, tenant_id, subject_k_id, object_name, action_name, genre="authz"): - try: - return self.__authz(tenant_id, subject_k_id, object_name, action_name, genre="authz") - except (SubjectUnknown, ObjectUnknown, ActionUnknown) as e: - # maybe we need to synchronize with the master - if CONF.moon.master: - self.get_data_from_master() - return self.__authz(tenant_id, subject_k_id, object_name, action_name, genre="authz") - raise e - except TenantNoIntraExtension: - return {'authz': True, 'comment': "Cannot authz because Tenant is not managed by Moon {}".format(tenant_id)} - - def add_subject_dict(self, user_id, intra_extension_id, subject_dict): - subject = super(IntraExtensionAuthzManager, self).add_subject_dict(user_id, intra_extension_id, subject_dict) - subject_id, subject_value = subject.iteritems().next() - tenants_dict = self.tenant_api.get_tenants_dict(self.root_admin_id) - for tenant_id in tenants_dict: - if tenants_dict[tenant_id]["intra_admin_extension_id"] and \ - tenants_dict[tenant_id]["intra_authz_extension_id"] == intra_extension_id: - _subjects = self.driver.get_subjects_dict(tenants_dict[tenant_id]["intra_admin_extension_id"]) - if subject_value["name"] not in [_subjects[_id]["name"] for _id in _subjects]: - self.driver.set_subject_dict(tenants_dict[tenant_id]["intra_admin_extension_id"], uuid4().hex, subject_value) - break - if tenants_dict[tenant_id]["intra_authz_extension_id"] and \ - tenants_dict[tenant_id]["intra_admin_extension_id"] == intra_extension_id: - _subjects = self.driver.get_subjects_dict(tenants_dict[tenant_id]["intra_authz_extension_id"]) - if subject_value["name"] not in [_subjects[_id]["name"] for _id in _subjects]: - self.driver.set_subject_dict(tenants_dict[tenant_id]["intra_authz_extension_id"], uuid4().hex, subject_value) - break - return subject - - def del_subject(self, user_id, intra_extension_id, subject_id): - subject_name = 
self.driver.get_subjects_dict(intra_extension_id)[subject_id]["name"] - super(IntraExtensionAuthzManager, self).del_subject(user_id, intra_extension_id, subject_id) - tenants_dict = self.tenant_api.get_tenants_dict(self.root_admin_id) - for tenant_id in tenants_dict: - if tenants_dict[tenant_id]["intra_authz_extension_id"] == intra_extension_id and \ - tenants_dict[tenant_id]["intra_admin_extension_id"]: - subject_id = self.driver.get_uuid_from_name(tenants_dict[tenant_id]["intra_admin_extension_id"], - subject_name, - self.driver.SUBJECT) - self.driver.del_subject(tenants_dict[tenant_id]["intra_admin_extension_id"], subject_id) - break - if tenants_dict[tenant_id]["intra_admin_extension_id"] == intra_extension_id and \ - tenants_dict[tenant_id]["intra_authz_extension_id"]: - subject_id = self.driver.get_uuid_from_name(tenants_dict[tenant_id]["intra_authz_extension_id"], - subject_name, - self.driver.SUBJECT) - self.driver.del_subject(tenants_dict[tenant_id]["intra_authz_extension_id"], subject_id) - break - - def set_subject_dict(self, user_id, intra_extension_id, subject_id, subject_dict): - subject = super(IntraExtensionAuthzManager, self).set_subject_dict(user_id, intra_extension_id, subject_dict) - subject_id, subject_value = subject.iteritems().next() - tenants_dict = self.tenant_api.get_tenants_dict(self.root_admin_id) - for tenant_id in tenants_dict: - if tenants_dict[tenant_id]["intra_authz_extension_id"] == intra_extension_id: - self.driver.set_subject_dict(tenants_dict[tenant_id]["intra_admin_extension_id"], uuid4().hex, subject_value) - break - if tenants_dict[tenant_id]["intra_admin_extension_id"] == intra_extension_id: - self.driver.set_subject_dict(tenants_dict[tenant_id]["intra_authz_extension_id"], uuid4().hex, subject_value) - break - return subject - - def add_subject_category(self, user_id, intra_extension_id, subject_category_dict): - raise AuthzException("add_subject_category") - - def del_subject_category(self, user_id, intra_extension_id, 
subject_category_id): - raise AuthzException("del_subject_category") - - def set_subject_category(self, user_id, intra_extension_id, subject_category_id, subject_category_dict): - raise AuthzException("set_subject_category") - - def add_object_category(self, user_id, intra_extension_id, object_category_dict): - raise AuthzException("add_object_category") - - def del_object_category(self, user_id, intra_extension_id, object_category_id): - raise AuthzException("del_object_category") - - def add_action_category(self, user_id, intra_extension_id, action_category_name): - raise AuthzException("add_action_category") - - def del_action_category(self, user_id, intra_extension_id, action_category_id): - raise AuthzException("del_action_category") - - def add_object_dict(self, user_id, intra_extension_id, object_name): - raise AuthzException("add_object_dict") - - def set_object_dict(self, user_id, intra_extension_id, object_id, object_dict): - raise AuthzException("set_object_dict") - - def del_object(self, user_id, intra_extension_id, object_id): - raise AuthzException("del_object") - - def add_action_dict(self, user_id, intra_extension_id, action_name): - raise AuthzException("add_action_dict") - - def set_action_dict(self, user_id, intra_extension_id, action_id, action_dict): - raise AuthzException("set_action_dict") - - def del_action(self, user_id, intra_extension_id, action_id): - raise AuthzException("del_action") - - def add_subject_scope_dict(self, user_id, intra_extension_id, subject_category_id, subject_scope_dict): - raise AuthzException("add_subject_scope_dict") - - def del_subject_scope(self, user_id, intra_extension_id, subject_category_id, subject_scope_id): - raise AuthzException("del_subject_scope") - - def set_subject_scope_dict(self, user_id, intra_extension_id, subject_category_id, subject_scope_id, subject_scope_name): - raise AuthzException("set_subject_scope_dict") - - def add_object_scope_dict(self, user_id, intra_extension_id, object_category_id, 
object_scope_name): - raise AuthzException("add_object_scope_dict") - - def del_object_scope(self, user_id, intra_extension_id, object_category_id, object_scope_id): - raise AuthzException("del_object_scope") - - def set_object_scope_dict(self, user_id, intra_extension_id, object_category_id, object_scope_id, object_scope_name): - raise AuthzException("set_object_scope_dict") - - def add_action_scope_dict(self, user_id, intra_extension_id, action_category_id, action_scope_name): - raise AuthzException("add_action_scope_dict") - - def del_action_scope(self, user_id, intra_extension_id, action_category_id, action_scope_id): - raise AuthzException("del_action_scope") - - def add_subject_assignment_list(self, user_id, intra_extension_id, subject_id, subject_category_id, subject_scope_id): - raise AuthzException("add_subject_assignment_list") - - def del_subject_assignment(self, user_id, intra_extension_id, subject_id, subject_category_id, subject_scope_id): - raise AuthzException("del_subject_assignment") - - def add_object_assignment_list(self, user_id, intra_extension_id, object_id, object_category_id, object_scope_id): - raise AuthzException("add_object_assignment_list") - - def del_object_assignment(self, user_id, intra_extension_id, object_id, object_category_id, object_scope_id): - raise AuthzException("del_object_assignment") - - def add_action_assignment_list(self, user_id, intra_extension_id, action_id, action_category_id, action_scope_id): - raise AuthzException("add_action_assignment_list") - - def del_action_assignment(self, user_id, intra_extension_id, action_id, action_category_id, action_scope_id): - raise AuthzException("del_action_assignment") - - def set_aggregation_algorithm_id(self, user_id, intra_extension_id, aggregation_algorithm_id): - raise AuthzException("set_aggregation_algorithm_id") - - def del_aggregation_algorithm_(self, user_id, intra_extension_id): - raise AuthzException("del_aggregation_algorithm_") - - def add_sub_meta_rule_dict(self, 
user_id, intra_extension_id, sub_meta_rule_dict): - raise AuthzException("add_sub_meta_rule_dict") - - def del_sub_meta_rule(self, user_id, intra_extension_id, sub_meta_rule_id): - raise AuthzException("del_sub_meta_rule") - - def set_sub_meta_rule_dict(self, user_id, intra_extension_id, sub_meta_rule_id, sub_meta_rule_dict): - raise AuthzException("set_sub_meta_rule_dict") - - def add_rule_dict(self, user_id, intra_extension_id, sub_meta_rule_id, rule_list): - raise AuthzException("add_rule_dict") - - def del_rule(self, user_id, intra_extension_id, sub_meta_rule_id, rule_id): - raise AuthzException("del_rule") - - def set_rule_dict(self, user_id, intra_extension_id, sub_meta_rule_id, rule_id, rule_list): - raise AuthzException("set_rule_dict") - - -@dependency.provider('admin_api') -class IntraExtensionAdminManager(IntraExtensionManager): - - def __init__(self): - super(IntraExtensionAdminManager, self).__init__() - - def add_subject_dict(self, user_id, intra_extension_id, subject_dict): - subject = super(IntraExtensionAdminManager, self).add_subject_dict(user_id, intra_extension_id, subject_dict) - subject_id, subject_value = subject.iteritems().next() - tenants_dict = self.tenant_api.get_tenants_dict(self.root_admin_id) - for tenant_id in tenants_dict: - if tenants_dict[tenant_id]["intra_admin_extension_id"] and \ - tenants_dict[tenant_id]["intra_authz_extension_id"] == intra_extension_id: - _subjects = self.driver.get_subjects_dict(tenants_dict[tenant_id]["intra_admin_extension_id"]) - if subject_value["name"] not in [_subjects[_id]["name"] for _id in _subjects]: - self.driver.set_subject_dict(tenants_dict[tenant_id]["intra_admin_extension_id"], uuid4().hex, subject_value) - break - if tenants_dict[tenant_id]["intra_authz_extension_id"] and \ - tenants_dict[tenant_id]["intra_admin_extension_id"] == intra_extension_id: - _subjects = self.driver.get_subjects_dict(tenants_dict[tenant_id]["intra_authz_extension_id"]) - if subject_value["name"] not in 
[_subjects[_id]["name"] for _id in _subjects]: - self.driver.set_subject_dict(tenants_dict[tenant_id]["intra_authz_extension_id"], uuid4().hex, subject_value) - break - return subject - - def del_subject(self, user_id, intra_extension_id, subject_id): - subject_name = self.driver.get_subjects_dict(intra_extension_id)[subject_id]["name"] - super(IntraExtensionAdminManager, self).del_subject(user_id, intra_extension_id, subject_id) - tenants_dict = self.tenant_api.get_tenants_dict(self.root_admin_id) - for tenant_id in tenants_dict: - if tenants_dict[tenant_id]["intra_authz_extension_id"] == intra_extension_id and \ - tenants_dict[tenant_id]["intra_admin_extension_id"]: - subject_id = self.driver.get_uuid_from_name(tenants_dict[tenant_id]["intra_admin_extension_id"], - subject_name, - self.driver.SUBJECT) - self.driver.del_subject(tenants_dict[tenant_id]["intra_admin_extension_id"], subject_id) - break - if tenants_dict[tenant_id]["intra_admin_extension_id"] == intra_extension_id and \ - tenants_dict[tenant_id]["intra_authz_extension_id"]: - subject_id = self.driver.get_uuid_from_name(tenants_dict[tenant_id]["intra_authz_extension_id"], - subject_name, - self.driver.SUBJECT) - self.driver.del_subject(tenants_dict[tenant_id]["intra_authz_extension_id"], subject_id) - break - - def set_subject_dict(self, user_id, intra_extension_id, subject_id, subject_dict): - subject = super(IntraExtensionAdminManager, self).set_subject_dict(user_id, intra_extension_id, subject_dict) - subject_id, subject_value = subject.iteritems().next() - tenants_dict = self.tenant_api.get_tenants_dict(self.root_admin_id) - for tenant_id in tenants_dict: - if tenants_dict[tenant_id]["intra_authz_extension_id"] == intra_extension_id: - self.driver.set_subject_dict(tenants_dict[tenant_id]["intra_admin_extension_id"], uuid4().hex, subject_value) - break - if tenants_dict[tenant_id]["intra_admin_extension_id"] == intra_extension_id: - 
self.driver.set_subject_dict(tenants_dict[tenant_id]["intra_authz_extension_id"], uuid4().hex, subject_value) - break - return subject - - def add_object_dict(self, user_id, intra_extension_id, object_name): - if "admin" == self.get_intra_extension_dict(self.root_admin_id, intra_extension_id)['genre']: - raise ObjectsWriteNoAuthorized() - return super(IntraExtensionAdminManager, self).add_object_dict(user_id, intra_extension_id, object_name) - - def set_object_dict(self, user_id, intra_extension_id, object_id, object_dict): - if "admin" == self.get_intra_extension_dict(self.root_admin_id, intra_extension_id)['genre']: - raise ObjectsWriteNoAuthorized() - return super(IntraExtensionAdminManager, self).set_object_dict(user_id, intra_extension_id, object_id, object_dict) - - def del_object(self, user_id, intra_extension_id, object_id): - if "admin" == self.get_intra_extension_dict(self.root_admin_id, intra_extension_id)['genre']: - raise ObjectsWriteNoAuthorized() - return super(IntraExtensionAdminManager, self).del_object(user_id, intra_extension_id, object_id) - - def add_action_dict(self, user_id, intra_extension_id, action_name): - if "admin" == self.get_intra_extension_dict(self.root_admin_id, intra_extension_id)['genre']: - raise ActionsWriteNoAuthorized() - return super(IntraExtensionAdminManager, self).add_action_dict(user_id, intra_extension_id, action_name) - - def set_action_dict(self, user_id, intra_extension_id, action_id, action_dict): - if "admin" == self.get_intra_extension_dict(self.root_admin_id, intra_extension_id)['genre']: - raise ActionsWriteNoAuthorized() - return super(IntraExtensionAdminManager, self).set_action_dict(user_id, intra_extension_id, action_id, action_dict) - - def del_action(self, user_id, intra_extension_id, action_id): - if "admin" == self.get_intra_extension_dict(self.root_admin_id, intra_extension_id)['genre']: - raise ActionsWriteNoAuthorized() - return super(IntraExtensionAdminManager, self).del_action(user_id, 
@dependency.provider('root_api')
class IntraExtensionRootManager(IntraExtensionManager):
    """Manager for the root intra-extension."""

    def __init__(self):
        super(IntraExtensionRootManager, self).__init__()

    def is_admin_subject(self, keystone_id):
        """Return True if keystone_id maps to a subject of the root extension.

        Matches either a subject whose id equals keystone_id, or the subject
        named "admin" whose stored keystone_id equals keystone_id.
        """
        # BUG FIX: .iteritems() is Python-2-only; .items() works on both.
        for subject_id, subject_dict in self.driver.get_subjects_dict(self.root_extension_id).items():
            if subject_id == keystone_id:
                # subject_id may be a true id from an intra_extension
                return True
            if subject_dict["name"] == "admin" and subject_dict["keystone_id"] == keystone_id:
                return True
        return False


@dependency.provider('moonlog_api')
class LogManager(manager.Manager):
    """Facade over the configured Moon log driver."""

    driver_namespace = 'keystone.moon.log'

    # Timestamp format accepted in the "from=..."/"to=..." options (matches
    # the format documented on LogDriver.get_logs).
    # BUG FIX: self.TIME_FORMAT was referenced below but never defined, so
    # any from/to filter raised AttributeError.
    TIME_FORMAT = '%Y-%m-%d-%H:%M:%S'

    def __init__(self):
        driver = CONF.moon.log_driver
        super(LogManager, self).__init__(driver)

    def get_logs(self, logger="authz", options="", event_number=None, time_from=None, time_to=None, filter_str=None):
        """Return log events from the driver, optionally filtered.

        :param logger: which log to read ("authz" or "sys")
        :param options: comma-separated string such as
            "event_number=10,from=2014-01-01-10:10:10,to=...,filter=expr";
            when non-empty it OVERRIDES the keyword arguments.
        """
        if len(options) > 0:
            options = options.split(",")
            event_number = None
            time_from = None
            time_to = None
            filter_str = None
            for opt in options:
                if "event_number" in opt:
                    event_number = "".join(re.findall(r"\d*", opt.split("=")[-1]))
                    try:
                        event_number = int(event_number)
                    except ValueError:
                        event_number = None
                elif "from" in opt:
                    time_from = "".join(re.findall(r"[\w\-:]*", opt.split("=")[-1]))
                    try:
                        time_from = time.strptime(time_from, self.TIME_FORMAT)
                    except ValueError:
                        time_from = None
                elif "to" in opt:
                    # BUG FIX: pattern was "[\w\-:] *" (stray space), which
                    # differed from the "from" branch for no reason.
                    time_to = "".join(re.findall(r"[\w\-:]*", opt.split("=")[-1]))
                    try:
                        time_to = time.strptime(time_to, self.TIME_FORMAT)
                    except ValueError:
                        time_to = None
                elif "filter" in opt:
                    filter_str = "".join(re.findall(r"\w*", opt.split("=")[-1]))
        return self.driver.get_logs(logger, event_number, time_from, time_to, filter_str)

    def get_authz_logs(self, options="", event_number=None, time_from=None, time_to=None, filter_str=None):
        """Return authorization log events.

        BUG FIX: the arguments are now forwarded; previously every call
        passed hard-coded defaults, silently ignoring caller filters.
        """
        return self.get_logs(
            logger="authz",
            options=options,
            event_number=event_number,
            time_from=time_from,
            time_to=time_to,
            filter_str=filter_str)

    def get_sys_logs(self, options="", event_number=None, time_from=None, time_to=None, filter_str=None):
        """Return system log events (arguments forwarded; see get_authz_logs)."""
        return self.get_logs(
            logger="sys",
            options=options,
            event_number=event_number,
            time_from=time_from,
            time_to=time_to,
            filter_str=filter_str)

    # Plain pass-throughs to the driver, one per log level.

    def authz(self, message):
        return self.driver.authz(message)

    def debug(self, message):
        return self.driver.debug(message)

    def info(self, message):
        return self.driver.info(message)

    def warning(self, message):
        return self.driver.warning(message)

    def error(self, message):
        return self.driver.error(message)

    def critical(self, message):
        return self.driver.critical(message)


class ConfigurationDriver(object):
    """Abstract interface for configuration back ends."""

    def get_policy_templates_dict(self):
        raise exception.NotImplemented()  # pragma: no cover

    def get_aggregation_algorithm_id(self):
        raise exception.NotImplemented()  # pragma: no cover

    def get_sub_meta_rule_algorithms_dict(self):
        raise exception.NotImplemented()  # pragma: no cover


class TenantDriver(object):
    """Abstract interface for tenant back ends."""

    def get_tenants_dict(self):
        raise exception.NotImplemented()  # pragma: no cover

    def add_tenant_dict(self, tenant_id, tenant_dict):
        raise exception.NotImplemented()  # pragma: no cover

    def del_tenant_dict(self, tenant_id):
        raise exception.NotImplemented()  # pragma: no cover

    def set_tenant_dict(self, tenant_id, tenant_dict):
        raise exception.NotImplemented()  # pragma: no cover
data_dict[key]["name"] - except KeyError: - for key2 in data_dict[key]: - yield data_dict[key][key2]["name"] - - data_values = list() - - if data_name == self.SUBJECT: - data_values = self.get_subjects_dict(intra_extension_uuid) - if (name and name not in extract_name(data_values)) or \ - (uuid and uuid not in data_values.keys()): - raise SubjectUnknown("{} / {}".format(name, data_values)) - elif data_name == self.OBJECT: - data_values = self.get_objects_dict(intra_extension_uuid) - if (name and name not in extract_name(data_values)) or \ - (uuid and uuid not in data_values.keys()): - raise ObjectUnknown("{} / {}".format(name, data_values)) - elif data_name == self.ACTION: - data_values = self.get_actions_dict(intra_extension_uuid) - if (name and name not in extract_name(data_values)) or \ - (uuid and uuid not in data_values.keys()): - raise ActionUnknown("{} / {}".format(name, data_values)) - elif data_name == self.SUBJECT_CATEGORY: - data_values = self.get_subject_categories_dict(intra_extension_uuid) - if (name and name not in extract_name(data_values)) or \ - (uuid and uuid not in data_values.keys()): - raise SubjectCategoryUnknown("{} / {}".format(name, data_values)) - elif data_name == self.OBJECT_CATEGORY: - data_values = self.get_object_categories_dict(intra_extension_uuid) - if (name and name not in extract_name(data_values)) or \ - (uuid and uuid not in data_values.keys()): - raise ObjectCategoryUnknown("{} / {}".format(name, data_values)) - elif data_name == self.ACTION_CATEGORY: - data_values = self.get_action_categories_dict(intra_extension_uuid) - if (name and name not in extract_name(data_values)) or \ - (uuid and uuid not in data_values.keys()): - raise ActionCategoryUnknown("{} / {}".format(name, data_values)) - elif data_name == self.SUBJECT_SCOPE: - if not category_uuid: - category_uuid = self.get_uuid_from_name(intra_extension_uuid, category_name, self.SUBJECT_CATEGORY) - data_values = self.get_subject_scopes_dict(intra_extension_uuid, - 
category_uuid) - if (name and name not in extract_name(data_values)) or \ - (uuid and uuid not in data_values.keys()): - raise SubjectScopeUnknown("{} / {}".format(name, data_values)) - elif data_name == self.OBJECT_SCOPE: - if not category_uuid: - category_uuid = self.get_uuid_from_name(intra_extension_uuid, category_name, self.OBJECT_CATEGORY) - data_values = self.get_object_scopes_dict(intra_extension_uuid, - category_uuid) - if (name and name not in extract_name(data_values)) or \ - (uuid and uuid not in data_values.keys()): - raise ObjectScopeUnknown("{} / {}".format(name, data_values)) - elif data_name == self.ACTION_SCOPE: - if not category_uuid: - category_uuid = self.get_uuid_from_name(intra_extension_uuid, category_name, self.ACTION_CATEGORY) - data_values = self.get_action_scopes_dict(intra_extension_uuid, - category_uuid) - if (name and name not in extract_name(data_values)) or \ - (uuid and uuid not in data_values.keys()): - raise ActionScopeUnknown("{} / {}".format(name, data_values)) - elif data_name == self.SUB_META_RULE: - data_values = self.get_sub_meta_rules_dict(intra_extension_uuid) - if (name and name not in extract_name(data_values)) or \ - (uuid and uuid not in data_values.keys()): - raise SubMetaRuleUnknown("{} / {}".format(name, data_values)) - # if data_name in ( - # self.SUBJECT_SCOPE, - # self.OBJECT_SCOPE, - # self.ACTION_SCOPE - # ): - # return data_values[category_uuid] - return data_values - - def get_uuid_from_name(self, intra_extension_uuid, name, data_name, category_name=None, category_uuid=None): - data_values = self.__get_data_from_type( - intra_extension_uuid=intra_extension_uuid, - name=name, - data_name=data_name, - category_name=category_name, - category_uuid=category_uuid, - ) - return filter(lambda v: v[1]["name"] == name, data_values.iteritems())[0][0] - - def get_name_from_uuid(self, intra_extension_uuid, uuid, data_name, category_name=None, category_uuid=None): - data_values = self.__get_data_from_type( - 
intra_extension_uuid=intra_extension_uuid, - uuid=uuid, - data_name=data_name, - category_name=category_name, - category_uuid=category_uuid, - ) - return data_values[uuid] - - # Getter and Setter for intra_extension - - def get_intra_extensions_dict(self): - raise exception.NotImplemented() # pragma: no cover - - def del_intra_extension(self, intra_extension_id): - raise exception.NotImplemented() # pragma: no cover - - def set_intra_extension_dict(self, intra_extension_id, intra_extension_dict): - raise exception.NotImplemented() # pragma: no cover - - # Metadata functions - - def get_subject_categories_dict(self, intra_extension_id): - raise exception.NotImplemented() # pragma: no cover - - def set_subject_category_dict(self, intra_extension_id, subject_category_id, subject_category_dict): - raise exception.NotImplemented() # pragma: no cover - - def del_subject_category(self, intra_extension_id, subject_category_id): - raise exception.NotImplemented() # pragma: no cover - - def get_object_categories_dict(self, intra_extension_id): - """Get a list of all object categories - - :param intra_extension_id: IntraExtension UUID - :type intra_extension_id: string - :return: a dictionary containing all object categories {"uuid1": "name1", "uuid2": "name2"} - """ - raise exception.NotImplemented() # pragma: no cover - - def set_object_category_dict(self, intra_extension_id, object_category_id, object_category_dict): - raise exception.NotImplemented() # pragma: no cover - - def del_object_category(self, intra_extension_id, object_category_id): - raise exception.NotImplemented() # pragma: no cover - - def get_action_categories_dict(self, intra_extension_id): - raise exception.NotImplemented() # pragma: no cover - - def set_action_category_dict(self, intra_extension_id, action_category_id, action_category_dict): - raise exception.NotImplemented() # pragma: no cover - - def del_action_category(self, intra_extension_id, action_category_id): - raise exception.NotImplemented() # 
    # Perimeter functions: subjects, objects and actions of an
    # intra-extension, each stored as {uuid: element_dict}.

    def get_subjects_dict(self, intra_extension_id):
        raise exception.NotImplemented()  # pragma: no cover

    def set_subject_dict(self, intra_extension_id, subject_id, subject_dict):
        raise exception.NotImplemented()  # pragma: no cover

    def del_subject(self, intra_extension_id, subject_id):
        raise exception.NotImplemented()  # pragma: no cover

    def get_objects_dict(self, intra_extension_id):
        raise exception.NotImplemented()  # pragma: no cover

    def set_object_dict(self, intra_extension_id, object_id, object_dict):
        raise exception.NotImplemented()  # pragma: no cover

    def del_object(self, intra_extension_id, object_id):
        raise exception.NotImplemented()  # pragma: no cover

    def get_actions_dict(self, intra_extension_id):
        raise exception.NotImplemented()  # pragma: no cover

    def set_action_dict(self, intra_extension_id, action_id, action_dict):
        raise exception.NotImplemented()  # pragma: no cover

    def del_action(self, intra_extension_id, action_id):
        raise exception.NotImplemented()  # pragma: no cover

    # Scope functions: scopes are stored per category
    # ({scope_uuid: scope_dict} keyed under a category id).

    def get_subject_scopes_dict(self, intra_extension_id, subject_category_id):
        raise exception.NotImplemented()  # pragma: no cover

    def set_subject_scope_dict(self, intra_extension_id, subject_category_id, subject_scope_id, subject_scope_dict):
        raise exception.NotImplemented()  # pragma: no cover

    def del_subject_scope(self, intra_extension_id, subject_category_id, subject_scope_id):
        raise exception.NotImplemented()  # pragma: no cover

    def get_object_scopes_dict(self, intra_extension_id, object_category_id):
        raise exception.NotImplemented()  # pragma: no cover

    def set_object_scope_dict(self, intra_extension_id, object_category_id, object_scope_id, object_scope_dict):
        raise exception.NotImplemented()  # pragma: no cover

    def del_object_scope(self, intra_extension_id, object_category_id, object_scope_id):
        raise exception.NotImplemented()  # pragma: no cover

    def get_action_scopes_dict(self, intra_extension_id, action_category_id):
        raise exception.NotImplemented()  # pragma: no cover

    def set_action_scope_dict(self, intra_extension_id, action_category_id, action_scope_id, action_scope_dict):
        raise exception.NotImplemented()  # pragma: no cover

    def del_action_scope(self, intra_extension_id, action_category_id, action_scope_id):
        raise exception.NotImplemented()  # pragma: no cover

    # Assignment functions: link a perimeter element to scopes of a
    # category; assignments are lists of scope ids.

    def get_subject_assignment_list(self, intra_extension_id, subject_id, subject_category_id):
        raise exception.NotImplemented()  # pragma: no cover

    def set_subject_assignment_list(self, intra_extension_id, subject_id, subject_category_id, subject_assignment_list):
        raise exception.NotImplemented()  # pragma: no cover

    def add_subject_assignment_list(self, intra_extension_id, subject_id, subject_category_id, subject_scope_id):
        raise exception.NotImplemented()  # pragma: no cover

    def del_subject_assignment(self, intra_extension_id, subject_id, subject_category_id, subject_scope_id):
        raise exception.NotImplemented()  # pragma: no cover

    def get_object_assignment_list(self, intra_extension_id, object_id, object_category_id):
        raise exception.NotImplemented()  # pragma: no cover

    def set_object_assignment_list(self, intra_extension_id, object_id, object_category_id, object_assignment_list):
        raise exception.NotImplemented()  # pragma: no cover

    def add_object_assignment_list(self, intra_extension_id, object_id, object_category_id, object_scope_id):
        raise exception.NotImplemented()  # pragma: no cover

    def del_object_assignment(self, intra_extension_id, object_id, object_category_id, object_scope_id):
        raise exception.NotImplemented()  # pragma: no cover

    def get_action_assignment_list(self, intra_extension_id, action_id, action_category_id):
        raise exception.NotImplemented()  # pragma: no cover

    def set_action_assignment_list(self, intra_extension_id, action_id, action_category_id, action_assignment_list):
        raise exception.NotImplemented()  # pragma: no cover

    def add_action_assignment_list(self, intra_extension_id, action_id, action_category_id, action_scope_id):
        raise exception.NotImplemented()  # pragma: no cover

    def del_action_assignment(self, intra_extension_id, action_id, action_category_id, action_scope_id):
        raise exception.NotImplemented()  # pragma: no cover

    # Meta_rule functions

    def set_aggregation_algorithm_id(self, intra_extension_id, aggregation_algorithm_id):
        raise exception.NotImplemented()  # pragma: no cover

    def get_aggregation_algorithm_id(self, intra_extension_id):
        raise exception.NotImplemented()  # pragma: no cover

    def del_aggregation_algorithm(self, intra_extension_id):
        raise exception.NotImplemented()  # pragma: no cover

    def get_sub_meta_rules_dict(self, intra_extension_id):
        raise exception.NotImplemented()  # pragma: no cover

    def set_sub_meta_rule_dict(self, intra_extension_id, sub_meta_rule_id, meta_rule_dict):
        raise exception.NotImplemented()  # pragma: no cover

    def del_sub_meta_rule(self, intra_extension_id, sub_meta_rule_id):
        raise exception.NotImplemented()  # pragma: no cover

    # Rule functions

    def get_rules_dict(self, intra_extension_id, sub_meta_rule_id):
        raise exception.NotImplemented()  # pragma: no cover

    def set_rule_dict(self, intra_extension_id, sub_meta_rule_id, rule_id, rule_list):
        raise exception.NotImplemented()  # pragma: no cover

    def del_rule(self, intra_extension_id, sub_meta_rule_id, rule_id):
        raise exception.NotImplemented()  # pragma: no cover
exception.NotImplemented() # pragma: no cover - - def info(self, message): - """Log informational message - - :param message: the message to log - :type message: string - :return: None - """ - raise exception.NotImplemented() # pragma: no cover - - def warning(self, message): - """Log warning message - - :param message: the message to log - :type message: string - :return: None - """ - raise exception.NotImplemented() # pragma: no cover - - def error(self, message): - """Log error message - - :param message: the message to log - :type message: string - :return: None - """ - raise exception.NotImplemented() # pragma: no cover - - def critical(self, message): - """Log critical message - - :param message: the message to log - :type message: string - :return: None - """ - raise exception.NotImplemented() # pragma: no cover - - def get_logs(self, options): - """Get logs - - :param options: options to filter log events - :type options: string eg: "event_number=10,from=2014-01-01-10:10:10,to=2014-01-01-12:10:10,filter=expression" - :return: a list of log events - - TIME_FORMAT is '%Y-%m-%d-%H:%M:%S' - """ - raise exception.NotImplemented() # pragma: no cover - - -# @dependency.provider('interextension_api') -# @dependency.requires('identity_api') -# class InterExtensionManager(manager.Manager): -# -# def __init__(self): -# driver = CONF.moon.interextension_driver -# super(InterExtensionManager, self).__init__(driver) -# -# def check_inter_extension(self, uuid): -# if uuid not in self.get_inter_extensions(): -# LOG.error("Unknown InterExtension {}".format(uuid)) -# raise exception.NotFound("InterExtension not found.") -# -# def get_inter_extensions(self): -# return self.driver.get_inter_extensions() -# -# def get_inter_extension(self, uuid): -# return self.driver.get_inter_extension(uuid) -# -# def create_inter_extension(self, inter_extension): -# ie = dict() -# ie['id'] = uuid4().hex -# ie["requesting_intra_extension_uuid"] = 
filter_input(inter_extension["requesting_intra_extension_uuid"]) -# ie["requested_intra_extension_uuid"] = filter_input(inter_extension["requested_intra_extension_uuid"]) -# ie["description"] = filter_input(inter_extension["description"]) -# ie["virtual_entity_uuid"] = filter_input(inter_extension["virtual_entity_uuid"]) -# ie["genre"] = filter_input(inter_extension["genre"]) -# -# ref = self.driver.create_inter_extensions(ie['id'], ie) -# return ref -# -# def delete_inter_extension(self, inter_extension_id): -# LOG.error("Deleting {}".format(inter_extension_id)) -# ref = self.driver.delete_inter_extensions(inter_extension_id) -# return ref -# -# -# class InterExtensionDriver(object): -# -# # Getter and Setter for InterExtensions -# -# def get_inter_extensions(self): -# raise exception.NotImplemented() # pragma: no cover -# -# def get_inter_extension(self, uuid): -# raise exception.NotImplemented() # pragma: no cover -# -# def create_inter_extensions(self, intra_id, intra_extension): -# raise exception.NotImplemented() # pragma: no cover -# -# def delete_inter_extensions(self, intra_extension_id): -# raise exception.NotImplemented() # pragma: no cover -# -# -# class VirtualEntityDriver(object): -# -# # Getter and Setter for InterExtensions -# -# def get_virtual_entities(self): -# raise exception.NotImplemented() # pragma: no cover -# -# def create_virtual_entities(self, ve_id, virtual_entity): -# raise exception.NotImplemented() # pragma: no cover - diff --git a/keystone-moon/keystone/contrib/moon/exception.py b/keystone-moon/keystone/contrib/moon/exception.py deleted file mode 100644 index 4e9bf7c9..00000000 --- a/keystone-moon/keystone/contrib/moon/exception.py +++ /dev/null @@ -1,422 +0,0 @@ -# Copyright 2015 Open Platform for NFV Project, Inc. 
and its contributors -# This software is distributed under the terms and conditions of the 'Apache-2.0' -# license which can be found in the file 'LICENSE' in this package distribution -# or at 'http://www.apache.org/licenses/LICENSE-2.0'. - -from keystone.common import dependency -from keystone.exception import Error -from keystone.i18n import _, _LW -import logging -from oslo_log import log - -LOG = log.getLogger(__name__) - - -class MoonErrorMetaClass(type): - - def __init__(cls, name, bases, dct): - super(MoonErrorMetaClass, cls).__init__(name, bases, dct) - cls.hierarchy += "/"+str(name) - - -@dependency.requires('moonlog_api') -class MoonError(Error): - __metaclass__ = MoonErrorMetaClass - hierarchy = "" - message_format = _("There is an error requesting the Moon platform.") - code = 400 - title = 'Moon Error' - logger = "ERROR" - - def __init__(self, message=""): - if message: - self.message_format = message - super(MoonError, self).__init__() - - def __del__(self): - message = "{} ({})".format(self.hierarchy, self.message_format) - if self.logger == "ERROR": - try: - self.moonlog_api.error(message) - except AttributeError: - LOG.error(message) - elif self.logger == "WARNING": - try: - self.moonlog_api.warning(message) - except AttributeError: - LOG.warning(message) - elif self.logger == "CRITICAL": - try: - self.moonlog_api.critical(message) - except AttributeError: - LOG.critical(message) - elif self.logger == "AUTHZ": - try: - self.moonlog_api.authz(self.hierarchy) - self.moonlog_api.error(message) - except AttributeError: - LOG.error(message) - else: - try: - self.moonlog_api.info(message) - except AttributeError: - LOG.info(message) - - -# Exceptions for Tenant - -class TenantException(MoonError): - message_format = _("There is an error requesting this tenant.") - code = 400 - title = 'Tenant Error' - logger = "ERROR" - - -class TenantUnknown(TenantException): - message_format = _("The tenant is unknown.") - code = 400 - title = 'Tenant Unknown' - 
logger = "ERROR" - - -class TenantAddedNameExisting(TenantException): - message_format = _("The tenant name is existing.") - code = 400 - title = 'Added Tenant Name Existing' - logger = "ERROR" - - -class TenantNoIntraExtension(TenantException): - message_format = _("The tenant has not intra_extension.") - code = 400 - title = 'Tenant No Intra_Extension' - logger = "ERROR" - - -class TenantNoIntraAuthzExtension(TenantNoIntraExtension): - message_format = _("The tenant has not intra_admin_extension.") - code = 400 - title = 'Tenant No Intra_Admin_Extension' - logger = "ERROR" - -# Exceptions for IntraExtension - - -class IntraExtensionException(MoonError): - message_format = _("There is an error requesting this IntraExtension.") - code = 400 - title = 'Extension Error' - - -class IntraExtensionUnknown(IntraExtensionException): - message_format = _("The intra_extension is unknown.") - code = 400 - title = 'Intra Extension Unknown' - logger = "Error" - - -class RootExtensionUnknown(IntraExtensionUnknown): - message_format = _("The root_extension is unknown.") - code = 400 - title = 'Root Extension Unknown' - logger = "Error" - - -class RootExtensionNotInitialized(IntraExtensionException): - message_format = _("The root_extension is not initialized.") - code = 400 - title = 'Root Extension Not Initialized' - logger = "Error" - - -class IntraExtensionCreationError(IntraExtensionException): - message_format = _("The arguments for the creation of this Extension were malformed.") - code = 400 - title = 'Intra Extension Creation Error' - - -# Authz exceptions - -class AuthzException(MoonError): - message_format = _("There is an authorization error requesting this IntraExtension.") - code = 403 - title = 'Authz Exception' - logger = "AUTHZ" - - -# Admin exceptions - -class AdminException(MoonError): - message_format = _("There is an error requesting this Authz IntraExtension.") - code = 400 - title = 'Authz Exception' - logger = "AUTHZ" - - -class 
AdminMetaData(AdminException): - code = 400 - title = 'Metadata Exception' - - -class AdminPerimeter(AdminException): - code = 400 - title = 'Perimeter Exception' - - -class AdminScope(AdminException): - code = 400 - title = 'Scope Exception' - - -class AdminAssignment(AdminException): - code = 400 - title = 'Assignment Exception' - - -class AdminMetaRule(AdminException): - code = 400 - title = 'Aggregation Algorithm Exception' - - -class AdminRule(AdminException): - code = 400 - title = 'Rule Exception' - - -class SubjectCategoryNameExisting(AdminMetaData): - message_format = _("The given subject category name is existing.") - code = 400 - title = 'Subject Category Name Existing' - logger = "ERROR" - - -class ObjectCategoryNameExisting(AdminMetaData): - message_format = _("The given object category name is existing.") - code = 400 - title = 'Object Category Name Existing' - logger = "ERROR" - - -class ActionCategoryNameExisting(AdminMetaData): - message_format = _("The given action category name is existing.") - code = 400 - title = 'Action Category Name Existing' - logger = "ERROR" - - -class SubjectCategoryUnknown(AdminMetaData): - message_format = _("The given subject category is unknown.") - code = 400 - title = 'Subject Category Unknown' - logger = "ERROR" - - -class ObjectCategoryUnknown(AdminMetaData): - message_format = _("The given object category is unknown.") - code = 400 - title = 'Object Category Unknown' - logger = "ERROR" - - -class ActionCategoryUnknown(AdminMetaData): - message_format = _("The given action category is unknown.") - code = 400 - title = 'Action Category Unknown' - logger = "ERROR" - - -class SubjectUnknown(AdminPerimeter): - message_format = _("The given subject is unknown.") - code = 400 - title = 'Subject Unknown' - logger = "ERROR" - - -class ObjectUnknown(AdminPerimeter): - message_format = _("The given object is unknown.") - code = 400 - title = 'Object Unknown' - logger = "ERROR" - - -class ActionUnknown(AdminPerimeter): - 
message_format = _("The given action is unknown.") - code = 400 - title = 'Action Unknown' - logger = "ERROR" - - -class SubjectNameExisting(AdminPerimeter): - message_format = _("The given subject name is existing.") - code = 400 - title = 'Subject Name Existing' - logger = "ERROR" - - -class ObjectNameExisting(AdminPerimeter): - message_format = _("The given object name is existing.") - code = 400 - title = 'Object Name Existing' - logger = "ERROR" - - -class ActionNameExisting(AdminPerimeter): - message_format = _("The given action name is existing.") - code = 400 - title = 'Action Name Existing' - logger = "ERROR" - - -class ObjectsWriteNoAuthorized(AdminPerimeter): - message_format = _("The modification on Objects is not authorized.") - code = 400 - title = 'Objects Write No Authorized' - logger = "AUTHZ" - - -class ActionsWriteNoAuthorized(AdminPerimeter): - message_format = _("The modification on Actions is not authorized.") - code = 400 - title = 'Actions Write No Authorized' - logger = "AUTHZ" - - -class SubjectScopeUnknown(AdminScope): - message_format = _("The given subject scope is unknown.") - code = 400 - title = 'Subject Scope Unknown' - logger = "ERROR" - - -class ObjectScopeUnknown(AdminScope): - message_format = _("The given object scope is unknown.") - code = 400 - title = 'Object Scope Unknown' - logger = "ERROR" - - -class ActionScopeUnknown(AdminScope): - message_format = _("The given action scope is unknown.") - code = 400 - title = 'Action Scope Unknown' - logger = "ERROR" - - -class SubjectScopeNameExisting(AdminScope): - message_format = _("The given subject scope name is existing.") - code = 400 - title = 'Subject Scope Name Existing' - logger = "ERROR" - - -class ObjectScopeNameExisting(AdminScope): - message_format = _("The given object scope name is existing.") - code = 400 - title = 'Object Scope Name Existing' - logger = "ERROR" - - -class ActionScopeNameExisting(AdminScope): - message_format = _("The given action scope name is 
existing.") - code = 400 - title = 'Action Scope Name Existing' - logger = "ERROR" - - -class SubjectAssignmentUnknown(AdminAssignment): - message_format = _("The given subject assignment value is unknown.") - code = 400 - title = 'Subject Assignment Unknown' - logger = "ERROR" - - -class ObjectAssignmentUnknown(AdminAssignment): - message_format = _("The given object assignment value is unknown.") - code = 400 - title = 'Object Assignment Unknown' - logger = "ERROR" - - -class ActionAssignmentUnknown(AdminAssignment): - message_format = _("The given action assignment value is unknown.") - code = 400 - title = 'Action Assignment Unknown' - logger = "ERROR" - - -class SubjectAssignmentExisting(AdminAssignment): - message_format = _("The given subject assignment value is existing.") - code = 400 - title = 'Subject Assignment Existing' - logger = "ERROR" - - -class ObjectAssignmentExisting(AdminAssignment): - message_format = _("The given object assignment value is existing.") - code = 400 - title = 'Object Assignment Existing' - logger = "ERROR" - - -class ActionAssignmentExisting(AdminAssignment): - message_format = _("The given action assignment value is existing.") - code = 400 - title = 'Action Assignment Existing' - logger = "ERROR" - - -class AggregationAlgorithmNotExisting(AdminMetaRule): - message_format = _("The given aggregation algorithm is not existing.") - code = 400 - title = 'Aggregation Algorithm Not Existing' - logger = "ERROR" - - -class AggregationAlgorithmUnknown(AdminMetaRule): - message_format = _("The given aggregation algorithm is unknown.") - code = 400 - title = 'Aggregation Algorithm Unknown' - logger = "ERROR" - - -class SubMetaRuleAlgorithmNotExisting(AdminMetaRule): - message_format = _("The given sub_meta_rule algorithm is unknown.") - code = 400 - title = 'Sub_meta_rule Algorithm Unknown' - logger = "ERROR" - - -class SubMetaRuleUnknown(AdminMetaRule): - message_format = _("The given sub meta rule is unknown.") - code = 400 - title = 
'Sub Meta Rule Unknown' - logger = "ERROR" - - -class SubMetaRuleNameExisting(AdminMetaRule): - message_format = _("The sub meta rule name already exists.") - code = 400 - title = 'Sub Meta Rule Name Existing' - logger = "ERROR" - - -class SubMetaRuleExisting(AdminMetaRule): - message_format = _("The sub meta rule already exists.") - code = 400 - title = 'Sub Meta Rule Existing' - logger = "ERROR" - - -class RuleExisting(AdminRule): - message_format = _("The rule already exists.") - code = 400 - title = 'Rule Existing' - logger = "ERROR" - - -class RuleUnknown(AdminRule): - message_format = _("The rule for that request doesn't exist.") - code = 400 - title = 'Rule Unknown' - logger = "ERROR" - diff --git a/keystone-moon/keystone/contrib/moon/migrate_repo/__init__.py b/keystone-moon/keystone/contrib/moon/migrate_repo/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/contrib/moon/migrate_repo/migrate.cfg b/keystone-moon/keystone/contrib/moon/migrate_repo/migrate.cfg deleted file mode 100644 index 7a7bd1f8..00000000 --- a/keystone-moon/keystone/contrib/moon/migrate_repo/migrate.cfg +++ /dev/null @@ -1,25 +0,0 @@ -[db_settings] -# Used to identify which repository this database is versioned under. -# You can use the name of your project. -repository_id=moon - -# The name of the database table used to track the schema version. -# This name shouldn't already be used by your project. -# If this is changed once a database is under version control, you'll need to -# change the table name in each database too. -version_table=migrate_version - -# When committing a change script, Migrate will attempt to generate the -# sql for all supported databases; normally, if one of them fails - probably -# because you don't have that database installed - it is ignored and the -# commit continues, perhaps ending successfully. -# Databases in this list MUST compile successfully during a commit, or the -# entire commit will fail. 
List the databases your application will actually -# be using to ensure your updates to that database work properly. -# This must be a list; example: ['postgres','sqlite'] -required_dbs=[] - -# When creating new change scripts, Migrate will stamp the new script with -# a version number. By default this is latest_version + 1. You can set this -# to 'true' to tell Migrate to use the UTC timestamp instead. -use_timestamp_numbering=False diff --git a/keystone-moon/keystone/contrib/moon/migrate_repo/versions/001_moon.py b/keystone-moon/keystone/contrib/moon/migrate_repo/versions/001_moon.py deleted file mode 100644 index bcd334fa..00000000 --- a/keystone-moon/keystone/contrib/moon/migrate_repo/versions/001_moon.py +++ /dev/null @@ -1,211 +0,0 @@ -# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors -# This software is distributed under the terms and conditions of the 'Apache-2.0' -# license which can be found in the file 'LICENSE' in this package distribution -# or at 'http://www.apache.org/licenses/LICENSE-2.0'. 
- -import sqlalchemy as sql -from keystone.common import sql as k_sql - - -def upgrade(migrate_engine): - meta = sql.MetaData() - meta.bind = migrate_engine - - intra_extension_table = sql.Table( - 'intra_extensions', - meta, - sql.Column('id', sql.String(64), primary_key=True), - sql.Column('intra_extension', k_sql.JsonBlob(), nullable=True), - mysql_engine='InnoDB', - mysql_charset='utf8') - intra_extension_table.create(migrate_engine, checkfirst=True) - - tenant_table = sql.Table( - 'tenants', - meta, - sql.Column('id', sql.String(64), primary_key=True), - sql.Column('tenant', k_sql.JsonBlob(), nullable=True), - mysql_engine='InnoDB', - mysql_charset='utf8') - tenant_table.create(migrate_engine, checkfirst=True) - - subject_categories_table = sql.Table( - 'subject_categories', - meta, - sql.Column('id', sql.String(64), primary_key=True), - sql.Column('subject_category', k_sql.JsonBlob(), nullable=True), - sql.Column('intra_extension_id', sql.ForeignKey("intra_extensions.id"), nullable=False), - mysql_engine='InnoDB', - mysql_charset='utf8') - subject_categories_table.create(migrate_engine, checkfirst=True) - - object_categories_table = sql.Table( - 'object_categories', - meta, - sql.Column('id', sql.String(64), primary_key=True), - sql.Column('object_category', k_sql.JsonBlob(), nullable=True), - sql.Column('intra_extension_id', sql.ForeignKey("intra_extensions.id"), nullable=False), - mysql_engine='InnoDB', - mysql_charset='utf8') - object_categories_table.create(migrate_engine, checkfirst=True) - - action_categories_table = sql.Table( - 'action_categories', - meta, - sql.Column('id', sql.String(64), primary_key=True), - sql.Column('action_category', k_sql.JsonBlob(), nullable=True), - sql.Column('intra_extension_id', sql.ForeignKey("intra_extensions.id"), nullable=False), - mysql_engine='InnoDB', - mysql_charset='utf8') - action_categories_table.create(migrate_engine, checkfirst=True) - - subjects_table = sql.Table( - 'subjects', - meta, - sql.Column('id', 
sql.String(64), primary_key=True), - sql.Column('subject', k_sql.JsonBlob(), nullable=True), - sql.Column('intra_extension_id', sql.ForeignKey("intra_extensions.id"), nullable=False), - mysql_engine='InnoDB', - mysql_charset='utf8') - subjects_table.create(migrate_engine, checkfirst=True) - - objects_table = sql.Table( - 'objects', - meta, - sql.Column('id', sql.String(64), primary_key=True), - sql.Column('object', k_sql.JsonBlob(), nullable=True), - sql.Column('intra_extension_id', sql.ForeignKey("intra_extensions.id"), nullable=False), - mysql_engine='InnoDB', - mysql_charset='utf8') - objects_table.create(migrate_engine, checkfirst=True) - - actions_table = sql.Table( - 'actions', - meta, - sql.Column('id', sql.String(64), primary_key=True), - sql.Column('action', k_sql.JsonBlob(), nullable=True), - sql.Column('intra_extension_id', sql.ForeignKey("intra_extensions.id"), nullable=False), - mysql_engine='InnoDB', - mysql_charset='utf8') - actions_table.create(migrate_engine, checkfirst=True) - - subject_scopes_table = sql.Table( - 'subject_scopes', - meta, - sql.Column('id', sql.String(64), primary_key=True), - sql.Column('subject_scope', k_sql.JsonBlob(), nullable=True), - sql.Column('intra_extension_id', sql.ForeignKey("intra_extensions.id"), nullable=False), - sql.Column('subject_category_id', sql.ForeignKey("subject_categories.id"), nullable=False), - mysql_engine='InnoDB', - mysql_charset='utf8') - subject_scopes_table.create(migrate_engine, checkfirst=True) - - object_scopes_table = sql.Table( - 'object_scopes', - meta, - sql.Column('id', sql.String(64), primary_key=True), - sql.Column('object_scope', k_sql.JsonBlob(), nullable=True), - sql.Column('intra_extension_id', sql.ForeignKey("intra_extensions.id"), nullable=False), - sql.Column('object_category_id', sql.ForeignKey("object_categories.id"), nullable=False), - mysql_engine='InnoDB', - mysql_charset='utf8') - object_scopes_table.create(migrate_engine, checkfirst=True) - - action_scopes_table = 
sql.Table( - 'action_scopes', - meta, - sql.Column('id', sql.String(64), primary_key=True), - sql.Column('action_scope', k_sql.JsonBlob(), nullable=True), - sql.Column('intra_extension_id', sql.ForeignKey("intra_extensions.id"), nullable=False), - sql.Column('action_category_id', sql.ForeignKey("action_categories.id"), nullable=False), - mysql_engine='InnoDB', - mysql_charset='utf8') - action_scopes_table.create(migrate_engine, checkfirst=True) - - subject_assignments_table = sql.Table( - 'subject_assignments', - meta, - sql.Column('id', sql.String(64), primary_key=True), - sql.Column('subject_assignment', k_sql.JsonBlob(), nullable=True), - sql.Column('intra_extension_id', sql.ForeignKey("intra_extensions.id"), nullable=False), - sql.Column('subject_id', sql.ForeignKey("subjects.id"), nullable=False), - sql.Column('subject_category_id', sql.ForeignKey("subject_categories.id"), nullable=False), - mysql_engine='InnoDB', - mysql_charset='utf8') - subject_assignments_table.create(migrate_engine, checkfirst=True) - - object_assignments_table = sql.Table( - 'object_assignments', - meta, - sql.Column('id', sql.String(64), primary_key=True), - sql.Column('object_assignment', k_sql.JsonBlob(), nullable=True), - sql.Column('intra_extension_id', sql.ForeignKey("intra_extensions.id"), nullable=False), - sql.Column('object_id', sql.ForeignKey("objects.id"), nullable=False), - sql.Column('object_category_id', sql.ForeignKey("object_categories.id"), nullable=False), - mysql_engine='InnoDB', - mysql_charset='utf8') - object_assignments_table.create(migrate_engine, checkfirst=True) - - action_assignments_table = sql.Table( - 'action_assignments', - meta, - sql.Column('id', sql.String(64), primary_key=True), - sql.Column('action_assignment', k_sql.JsonBlob(), nullable=True), - sql.Column('intra_extension_id', sql.ForeignKey("intra_extensions.id"), nullable=False), - sql.Column('action_id', sql.ForeignKey("actions.id"), nullable=False), - sql.Column('action_category_id', 
sql.ForeignKey("action_categories.id"), nullable=False), - mysql_engine='InnoDB', - mysql_charset='utf8') - action_assignments_table.create(migrate_engine, checkfirst=True) - - sub_meta_rules_table = sql.Table( - 'sub_meta_rules', - meta, - sql.Column('id', sql.String(64), primary_key=True), - sql.Column('sub_meta_rule', k_sql.JsonBlob(), nullable=True), - sql.Column('intra_extension_id', sql.ForeignKey("intra_extensions.id"), nullable=False), - mysql_engine='InnoDB', - mysql_charset='utf8') - sub_meta_rules_table.create(migrate_engine, checkfirst=True) - - rules_table = sql.Table( - 'rules', - meta, - sql.Column('id', sql.String(64), primary_key=True), - sql.Column('rule', k_sql.JsonBlob(), nullable=True), - sql.Column('intra_extension_id', sql.ForeignKey("intra_extensions.id"), nullable=False), - sql.Column('sub_meta_rule_id', sql.ForeignKey("sub_meta_rules.id"), nullable=False), - mysql_engine='InnoDB', - mysql_charset='utf8') - rules_table.create(migrate_engine, checkfirst=True) - - -def downgrade(migrate_engine): - meta = sql.MetaData() - meta.bind = migrate_engine - - for _table in ( - 'rules', - 'sub_meta_rules', - 'action_assignments', - 'object_assignments', - 'subject_assignments', - 'action_scopes', - 'object_scopes', - 'subject_scopes', - 'actions', - 'objects', - 'subjects', - 'action_categories', - 'object_categories', - 'subject_categories', - 'tenants', - 'intra_extensions' - ): - try: - table = sql.Table(_table, meta, autoload=True) - table.drop(migrate_engine, checkfirst=True) - except Exception as e: - print(e.message) - - diff --git a/keystone-moon/keystone/contrib/moon/migrate_repo/versions/002_moon.py b/keystone-moon/keystone/contrib/moon/migrate_repo/versions/002_moon.py deleted file mode 100644 index 14e22fc4..00000000 --- a/keystone-moon/keystone/contrib/moon/migrate_repo/versions/002_moon.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2015 Open Platform for NFV Project, Inc. 
and its contributors -# This software is distributed under the terms and conditions of the 'Apache-2.0' -# license which can be found in the file 'LICENSE' in this package distribution -# or at 'http://www.apache.org/licenses/LICENSE-2.0'. - -import sqlalchemy as sql -from keystone.common import sql as k_sql - - -def upgrade(migrate_engine): - meta = sql.MetaData() - meta.bind = migrate_engine - -# region_table = sql.Table( -# 'inter_extension', -# meta, -# sql.Column('id', sql.String(64), primary_key=True), -# sql.Column('requesting_intra_extension_uuid', sql.String(64), nullable=False), -# sql.Column('requested_intra_extension_uuid', sql.String(64), nullable=False), -# sql.Column('virtual_entity_uuid', sql.String(64), nullable=False), -# sql.Column('genre', sql.String(64), nullable=False), -# sql.Column('description', sql.Text(), nullable=True), -# -# mysql_engine='InnoDB', -# mysql_charset='utf8') -# region_table.create(migrate_engine, checkfirst=True) -# -# -def downgrade(migrate_engine): - meta = sql.MetaData() - meta.bind = migrate_engine - -# table = sql.Table('inter_extension', meta, autoload=True) -# table.drop(migrate_engine, checkfirst=True) diff --git a/keystone-moon/keystone/contrib/moon/migrate_repo/versions/003_moon.py b/keystone-moon/keystone/contrib/moon/migrate_repo/versions/003_moon.py deleted file mode 100644 index f11fb2fb..00000000 --- a/keystone-moon/keystone/contrib/moon/migrate_repo/versions/003_moon.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors -# This software is distributed under the terms and conditions of the 'Apache-2.0' -# license which can be found in the file 'LICENSE' in this package distribution -# or at 'http://www.apache.org/licenses/LICENSE-2.0'. 
- -import sqlalchemy as sql -from keystone.common import sql as k_sql - - -def upgrade(migrate_engine): - meta = sql.MetaData() - meta.bind = migrate_engine - -# region_table = sql.Table( -# 'tenants', -# meta, -# sql.Column('id', sql.String(64), primary_key=True), -# sql.Column('name', sql.String(128), nullable=True), -# sql.Column('authz', sql.String(64), nullable=True), -# sql.Column('admin', sql.String(64), nullable=True), -# -# mysql_engine='InnoDB', -# mysql_charset='utf8') -# region_table.create(migrate_engine, checkfirst=True) -# - -def downgrade(migrate_engine): - meta = sql.MetaData() - meta.bind = migrate_engine -# -# table = sql.Table('tenants', meta, autoload=True) -# table.drop(migrate_engine, checkfirst=True) diff --git a/keystone-moon/keystone/contrib/moon/migrate_repo/versions/__init__.py b/keystone-moon/keystone/contrib/moon/migrate_repo/versions/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/contrib/moon/routers.py b/keystone-moon/keystone/contrib/moon/routers.py deleted file mode 100644 index c3bb7df0..00000000 --- a/keystone-moon/keystone/contrib/moon/routers.py +++ /dev/null @@ -1,507 +0,0 @@ -# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors -# This software is distributed under the terms and conditions of the 'Apache-2.0' -# license which can be found in the file 'LICENSE' in this package distribution -# or at 'http://www.apache.org/licenses/LICENSE-2.0'. - -"""WSGI Routers for the Moon service.""" - -from keystone.contrib.moon import controllers -from keystone.common import wsgi -from oslo_log import log - -LOG = log.getLogger(__name__) - - -class Routers(wsgi.ComposableRouter): - """API Endpoints for the Moon extension. 
- """ - - PATH_PREFIX = '' - - def __init__(self, description): - self.description = description - - @staticmethod - def _get_rel(component): - return 'http://docs.openstack.org/api/openstack-authz/3/param/{}'.format(component) - - @staticmethod - def _get_path(component): - return 'http://docs.openstack.org/api/openstack-authz/3/param/{}'.format(component) - - def add_routes(self, mapper): - # Controllers creation - authz_controller = controllers.Authz_v3() - configuration_controller = controllers.Configuration() - intra_ext_controller = controllers.IntraExtensions() - tenants_controller = controllers.Tenants() - logs_controller = controllers.Logs() - auth_controller = controllers.MoonAuth() - inter_ext_controller = controllers.InterExtensions() - - # Configuration route - mapper.connect( - self.PATH_PREFIX+'/configuration/templates', - controller=configuration_controller, - action='get_policy_templates', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/configuration/aggregation_algorithms', - controller=configuration_controller, - action='get_aggregation_algorithms', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/configuration/sub_meta_rule_algorithms', - controller=configuration_controller, - action='get_sub_meta_rule_algorithms', - conditions=dict(method=['GET'])) - - # Tenants route - mapper.connect( - self.PATH_PREFIX+'/tenants', - controller=tenants_controller, - action='get_tenants', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/tenants', - controller=tenants_controller, - action='add_tenant', - conditions=dict(method=['POST'])) - mapper.connect( - self.PATH_PREFIX+'/tenants/{tenant_id}', - controller=tenants_controller, - action='get_tenant', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/tenants/{tenant_id}', - controller=tenants_controller, - action='del_tenant', - conditions=dict(method=['DELETE'])) - mapper.connect( - 
self.PATH_PREFIX+'/tenants/{tenant_id}', - controller=tenants_controller, - action='set_tenant', - conditions=dict(method=['POST'])) - - # Authz route - mapper.connect( - self.PATH_PREFIX+'/authz/{tenant_id}/{subject_k_id}/{object_name}/{action_name}', - controller=authz_controller, - action='get_authz', - conditions=dict(method=['GET'])) - - # IntraExtensions/Admin route - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/init', - controller=intra_ext_controller, - action='load_root_intra_extension', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions', - controller=intra_ext_controller, - action='get_intra_extensions', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions', - controller=intra_ext_controller, - action='add_intra_extension', - conditions=dict(method=['POST'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}', - controller=intra_ext_controller, - action='get_intra_extension', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}', - controller=intra_ext_controller, - action='set_intra_extension', - conditions=dict(method=['POST'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}', - controller=intra_ext_controller, - action='del_intra_extension', - conditions=dict(method=['DELETE'])) - - # Metadata route - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/subject_categories', - controller=intra_ext_controller, - action='get_subject_categories', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/subject_categories', - controller=intra_ext_controller, - action='add_subject_category', - conditions=dict(method=['POST'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/subject_categories/{subject_category_id}', - controller=intra_ext_controller, - 
action='get_subject_category', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/subject_categories/{subject_category_id}', - controller=intra_ext_controller, - action='del_subject_category', - conditions=dict(method=['DELETE'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/subject_categories/{subject_category_id}', - controller=intra_ext_controller, - action='set_subject_category', - conditions=dict(method=['POST'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/object_categories', - controller=intra_ext_controller, - action='get_object_categories', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/object_categories', - controller=intra_ext_controller, - action='add_object_category', - conditions=dict(method=['POST'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/object_categories/{object_category_id}', - controller=intra_ext_controller, - action='get_object_category', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/object_categories/{object_category_id}', - controller=intra_ext_controller, - action='del_object_category', - conditions=dict(method=['DELETE'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/object_categories/{object_category_id}', - controller=intra_ext_controller, - action='set_object_category', - conditions=dict(method=['POST'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/action_categories', - controller=intra_ext_controller, - action='get_action_categories', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/action_categories', - controller=intra_ext_controller, - action='add_action_category', - conditions=dict(method=['POST'])) - mapper.connect( 
- self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/action_categories/{action_category_id}', - controller=intra_ext_controller, - action='get_action_category', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/action_categories/{action_category_id}', - controller=intra_ext_controller, - action='del_action_category', - conditions=dict(method=['DELETE'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/action_categories/{action_category_id}', - controller=intra_ext_controller, - action='set_action_category', - conditions=dict(method=['POST'])) - - # Perimeter route - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/subjects', - controller=intra_ext_controller, - action='get_subjects', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/subjects', - controller=intra_ext_controller, - action='add_subject', - conditions=dict(method=['POST'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/subjects/{subject_id}', - controller=intra_ext_controller, - action='get_subject', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/subjects/{subject_id}', - controller=intra_ext_controller, - action='del_subject', - conditions=dict(method=['DELETE'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/subjects/{subject_id}', - controller=intra_ext_controller, - action='set_subject', - conditions=dict(method=['POST'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/objects', - controller=intra_ext_controller, - action='get_objects', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/objects', - controller=intra_ext_controller, - action='add_object', - conditions=dict(method=['POST'])) - 
mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/objects/{object_id}', - controller=intra_ext_controller, - action='get_object', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/objects/{object_id}', - controller=intra_ext_controller, - action='del_object', - conditions=dict(method=['DELETE'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/objects/{object_id}', - controller=intra_ext_controller, - action='set_object', - conditions=dict(method=['POST'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/actions', - controller=intra_ext_controller, - action='get_actions', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/actions', - controller=intra_ext_controller, - action='add_action', - conditions=dict(method=['POST'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/actions/{action_id}', - controller=intra_ext_controller, - action='get_action', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/actions/{action_id}', - controller=intra_ext_controller, - action='del_action', - conditions=dict(method=['DELETE'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/actions/{action_id}', - controller=intra_ext_controller, - action='set_action', - conditions=dict(method=['POST'])) - - # Scope route - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/subject_scopes/{subject_category_id}', - controller=intra_ext_controller, - action='get_subject_scopes', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/subject_scopes/{subject_category_id}', - controller=intra_ext_controller, - action='add_subject_scope', - conditions=dict(method=['POST'])) - mapper.connect( - 
self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/subject_scopes/{subject_category_id}/{subject_scope_id}', - controller=intra_ext_controller, - action='get_subject_scope', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/subject_scopes/{subject_category_id}/{subject_scope_id}', - controller=intra_ext_controller, - action='del_subject_scope', - conditions=dict(method=['DELETE'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/subject_scopes/{subject_category_id}/{subject_scope_id}', - controller=intra_ext_controller, - action='set_subject_scope', - conditions=dict(method=['POST'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/object_scopes/{object_category_id}', - controller=intra_ext_controller, - action='get_object_scopes', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/object_scopes/{object_category_id}', - controller=intra_ext_controller, - action='add_object_scope', - conditions=dict(method=['POST'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/object_scopes/{object_category_id}/{object_scope_id}', - controller=intra_ext_controller, - action='get_object_scope', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/object_scopes/{object_category_id}/{object_scope_id}', - controller=intra_ext_controller, - action='del_object_scope', - conditions=dict(method=['DELETE'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/object_scopes/{object_category_id}/{object_scope_id}', - controller=intra_ext_controller, - action='set_object_scope', - conditions=dict(method=['POST'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/action_scopes/{action_category_id}', - controller=intra_ext_controller, - action='get_action_scopes', - 
conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/action_scopes/{action_category_id}', - controller=intra_ext_controller, - action='add_action_scope', - conditions=dict(method=['POST'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/action_scopes/{action_category_id}/{action_scope_id}', - controller=intra_ext_controller, - action='get_action_scope', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/action_scopes/{action_category_id}/{action_scope_id}', - controller=intra_ext_controller, - action='del_action_scope', - conditions=dict(method=['DELETE'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/action_scopes/{action_category_id}/{action_scope_id}', - controller=intra_ext_controller, - action='set_action_scope', - conditions=dict(method=['POST'])) - - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/subject_assignments', - controller=intra_ext_controller, - action='add_subject_assignment', - conditions=dict(method=['POST'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/' - 'subject_assignments/{subject_id}/{subject_category_id}', - controller=intra_ext_controller, - action='get_subject_assignment', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/' - 'subject_assignments/{subject_id}/{subject_category_id}/{subject_scope_id}', - controller=intra_ext_controller, - action='del_subject_assignment', - conditions=dict(method=['DELETE'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/object_assignments', - controller=intra_ext_controller, - action='add_object_assignment', - conditions=dict(method=['POST'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/' - 'object_assignments/{object_id}/{object_category_id}', 
- controller=intra_ext_controller, - action='get_object_assignment', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/' - 'object_assignments/{object_id}/{object_category_id}/{object_scope_id}', - controller=intra_ext_controller, - action='del_object_assignment', - conditions=dict(method=['DELETE'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/action_assignments', - controller=intra_ext_controller, - action='add_action_assignment', - conditions=dict(method=['POST'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/' - 'action_assignments/{action_id}/{action_category_id}', - controller=intra_ext_controller, - action='get_action_assignment', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/' - 'action_assignments/{action_id}/{action_category_id}/{action_scope_id}', - controller=intra_ext_controller, - action='del_action_assignment', - conditions=dict(method=['DELETE'])) - - # Metarule route - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/aggregation_algorithm', - controller=intra_ext_controller, - action='get_aggregation_algorithm', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/aggregation_algorithm', - controller=intra_ext_controller, - action='set_aggregation_algorithm', - conditions=dict(method=['POST'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/sub_meta_rules', - controller=intra_ext_controller, - action='get_sub_meta_rules', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/sub_meta_rules', - controller=intra_ext_controller, - action='add_sub_meta_rule', - conditions=dict(method=['POST'])) - mapper.connect( - 
self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/sub_meta_rules/{sub_meta_rule_id}', - controller=intra_ext_controller, - action='get_sub_meta_rule', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/sub_meta_rules/{sub_meta_rule_id}', - controller=intra_ext_controller, - action='del_sub_meta_rule', - conditions=dict(method=['DELETE'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/sub_meta_rules/{sub_meta_rule_id}', - controller=intra_ext_controller, - action='set_sub_meta_rule', - conditions=dict(method=['POST'])) - - # Rules route - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/rule/{sub_meta_rule_id}', - controller=intra_ext_controller, - action='get_rules', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/rule/{sub_meta_rule_id}', - controller=intra_ext_controller, - action='add_rule', - conditions=dict(method=['POST'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/rule/{sub_meta_rule_id}/{rule_id}', - controller=intra_ext_controller, - action='get_rule', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/rule/{sub_meta_rule_id}/{rule_id}', - controller=intra_ext_controller, - action='del_rule', - conditions=dict(method=['DELETE'])) - mapper.connect( - self.PATH_PREFIX+'/intra_extensions/{intra_extension_id}/rule/{sub_meta_rule_id}/{rule_id}', - controller=intra_ext_controller, - action='set_rule', - conditions=dict(method=['POST'])) - - # Logs route - mapper.connect( - self.PATH_PREFIX+'/logs', - controller=logs_controller, - action='get_logs', - conditions=dict(method=['GET'])) - mapper.connect( - self.PATH_PREFIX+'/logs/{options}', - controller=logs_controller, - action='get_logs', - conditions=dict(method=['GET'])) - - # Auth route - mapper.connect( - 
self.PATH_PREFIX+'/auth/tokens', - controller=auth_controller, - action='get_token', - conditions=dict(method=['POST'])) - - # InterExtensions route - # mapper.connect( - # controller=inter_ext_controller, - # self.PATH_PREFIX+'/inter_extensions', - # action='get_inter_extensions', - # action='create_inter_extension', - # rel=self._get_rel('inter_extensions'), - # path_vars={}) - # mapper.connect( - # controller=inter_ext_controller, - # self.PATH_PREFIX+'/inter_extensions/{inter_extension_id}', - # action='get_inter_extension', - # action='delete_inter_extension', - # rel=self._get_rel('inter_extensions'), - # path_vars={ - # 'inter_extension_id': self._get_path('inter_extensions'), - # }) diff --git a/keystone-moon/keystone/contrib/moon/service.py b/keystone-moon/keystone/contrib/moon/service.py deleted file mode 100644 index cd68e98a..00000000 --- a/keystone-moon/keystone/contrib/moon/service.py +++ /dev/null @@ -1,57 +0,0 @@ -import functools -import sys - -from oslo_config import cfg -from oslo_log import log -from paste import deploy -import routes -from keystone.contrib.moon.routers import Routers - -from keystone import assignment -from keystone import auth -from keystone import catalog -from keystone.common import wsgi -from keystone import controllers -from keystone import credential -from keystone import endpoint_policy -from keystone import identity -from keystone import policy -from keystone import resource -from keystone import routers -from keystone import token -from keystone import trust - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -# def loadapp(conf, name): -# # NOTE(blk-u): Save the application being loaded in the controllers module. -# # This is similar to how public_app_factory() and v3_app_factory() -# # register the version with the controllers module. 
-# controllers.latest_app = deploy.loadapp(conf, name=name) -# return controllers.latest_app - - -def fail_gracefully(f): - """Logs exceptions and aborts.""" - @functools.wraps(f) - def wrapper(*args, **kw): - try: - return f(*args, **kw) - except Exception as e: - LOG.debug(e, exc_info=True) - - # exception message is printed to all logs - LOG.critical(e) - sys.exit(1) - - return wrapper - - -@fail_gracefully -def moon_app_factory(global_conf, **local_conf): - return wsgi.ComposingRouter(routes.Mapper(), - [Routers('moon_service')]) - diff --git a/keystone-moon/keystone/contrib/moon/wsgi.py b/keystone-moon/keystone/contrib/moon/wsgi.py deleted file mode 100644 index f2a99633..00000000 --- a/keystone-moon/keystone/contrib/moon/wsgi.py +++ /dev/null @@ -1,8 +0,0 @@ -from keystone.server import wsgi -from oslo_log import log - -LOG = log.getLogger(__name__) - - -def initialize_moon_application(): - return wsgi.initialize_application('moon_service') diff --git a/keystone-moon/keystone/contrib/oauth1/__init__.py b/keystone-moon/keystone/contrib/oauth1/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/contrib/oauth1/backends/__init__.py b/keystone-moon/keystone/contrib/oauth1/backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/contrib/oauth1/backends/sql.py b/keystone-moon/keystone/contrib/oauth1/backends/sql.py deleted file mode 100644 index 31b6ce3b..00000000 --- a/keystone-moon/keystone/contrib/oauth1/backends/sql.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_log import versionutils - -from keystone.oauth1.backends import sql - - -_OLD = "keystone.contrib.oauth1.backends.sql.OAuth1" -_NEW = "sql" - - -class OAuth1(sql.OAuth1): - - @versionutils.deprecated(versionutils.deprecated.MITAKA, - in_favor_of=_NEW, - what=_OLD) - def __init__(self, *args, **kwargs): - super(OAuth1, self).__init__(*args, **kwargs) diff --git a/keystone-moon/keystone/contrib/oauth1/controllers.py b/keystone-moon/keystone/contrib/oauth1/controllers.py deleted file mode 100644 index d12fc96b..00000000 --- a/keystone-moon/keystone/contrib/oauth1/controllers.py +++ /dev/null @@ -1,411 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Extensions supporting OAuth1.""" - -from oslo_config import cfg -from oslo_serialization import jsonutils -from oslo_utils import timeutils - -from keystone.common import controller -from keystone.common import dependency -from keystone.common import utils -from keystone.common import wsgi -from keystone.contrib.oauth1 import core as oauth1 -from keystone.contrib.oauth1 import validator -from keystone import exception -from keystone.i18n import _ -from keystone import notifications - - -CONF = cfg.CONF - - -@notifications.internal(notifications.INVALIDATE_USER_OAUTH_CONSUMER_TOKENS, - resource_id_arg_index=0) -def _emit_user_oauth_consumer_token_invalidate(payload): - # This is a special case notification that expect the payload to be a dict - # containing the user_id and the consumer_id. This is so that the token - # provider can invalidate any tokens in the token persistence if - # token persistence is enabled - pass - - -@dependency.requires('oauth_api', 'token_provider_api') -class ConsumerCrudV3(controller.V3Controller): - collection_name = 'consumers' - member_name = 'consumer' - - @classmethod - def base_url(cls, context, path=None): - """Construct a path and pass it to V3Controller.base_url method.""" - - # NOTE(stevemar): Overriding path to /OS-OAUTH1/consumers so that - # V3Controller.base_url handles setting the self link correctly. 
- path = '/OS-OAUTH1/' + cls.collection_name - return controller.V3Controller.base_url(context, path=path) - - @controller.protected() - def create_consumer(self, context, consumer): - ref = self._assign_unique_id(self._normalize_dict(consumer)) - initiator = notifications._get_request_audit_info(context) - consumer_ref = self.oauth_api.create_consumer(ref, initiator) - return ConsumerCrudV3.wrap_member(context, consumer_ref) - - @controller.protected() - def update_consumer(self, context, consumer_id, consumer): - self._require_matching_id(consumer_id, consumer) - ref = self._normalize_dict(consumer) - self._validate_consumer_ref(ref) - initiator = notifications._get_request_audit_info(context) - ref = self.oauth_api.update_consumer(consumer_id, ref, initiator) - return ConsumerCrudV3.wrap_member(context, ref) - - @controller.protected() - def list_consumers(self, context): - ref = self.oauth_api.list_consumers() - return ConsumerCrudV3.wrap_collection(context, ref) - - @controller.protected() - def get_consumer(self, context, consumer_id): - ref = self.oauth_api.get_consumer(consumer_id) - return ConsumerCrudV3.wrap_member(context, ref) - - @controller.protected() - def delete_consumer(self, context, consumer_id): - user_token_ref = utils.get_token_ref(context) - payload = {'user_id': user_token_ref.user_id, - 'consumer_id': consumer_id} - _emit_user_oauth_consumer_token_invalidate(payload) - initiator = notifications._get_request_audit_info(context) - self.oauth_api.delete_consumer(consumer_id, initiator) - - def _validate_consumer_ref(self, consumer): - if 'secret' in consumer: - msg = _('Cannot change consumer secret') - raise exception.ValidationError(message=msg) - - -@dependency.requires('oauth_api') -class AccessTokenCrudV3(controller.V3Controller): - collection_name = 'access_tokens' - member_name = 'access_token' - - @classmethod - def _add_self_referential_link(cls, context, ref): - # NOTE(lwolf): overriding method to add proper path to self link - 
ref.setdefault('links', {}) - path = '/users/%(user_id)s/OS-OAUTH1/access_tokens' % { - 'user_id': cls._get_user_id(ref) - } - ref['links']['self'] = cls.base_url(context, path) + '/' + ref['id'] - - @controller.protected() - def get_access_token(self, context, user_id, access_token_id): - access_token = self.oauth_api.get_access_token(access_token_id) - if access_token['authorizing_user_id'] != user_id: - raise exception.NotFound() - access_token = self._format_token_entity(context, access_token) - return AccessTokenCrudV3.wrap_member(context, access_token) - - @controller.protected() - def list_access_tokens(self, context, user_id): - auth_context = context.get('environment', - {}).get('KEYSTONE_AUTH_CONTEXT', {}) - if auth_context.get('is_delegated_auth'): - raise exception.Forbidden( - _('Cannot list request tokens' - ' with a token issued via delegation.')) - refs = self.oauth_api.list_access_tokens(user_id) - formatted_refs = ([self._format_token_entity(context, x) - for x in refs]) - return AccessTokenCrudV3.wrap_collection(context, formatted_refs) - - @controller.protected() - def delete_access_token(self, context, user_id, access_token_id): - access_token = self.oauth_api.get_access_token(access_token_id) - consumer_id = access_token['consumer_id'] - payload = {'user_id': user_id, 'consumer_id': consumer_id} - _emit_user_oauth_consumer_token_invalidate(payload) - initiator = notifications._get_request_audit_info(context) - return self.oauth_api.delete_access_token( - user_id, access_token_id, initiator) - - @staticmethod - def _get_user_id(entity): - return entity.get('authorizing_user_id', '') - - def _format_token_entity(self, context, entity): - - formatted_entity = entity.copy() - access_token_id = formatted_entity['id'] - user_id = self._get_user_id(formatted_entity) - if 'role_ids' in entity: - formatted_entity.pop('role_ids') - if 'access_secret' in entity: - formatted_entity.pop('access_secret') - - url = 
('/users/%(user_id)s/OS-OAUTH1/access_tokens/%(access_token_id)s' - '/roles' % {'user_id': user_id, - 'access_token_id': access_token_id}) - - formatted_entity.setdefault('links', {}) - formatted_entity['links']['roles'] = (self.base_url(context, url)) - - return formatted_entity - - -@dependency.requires('oauth_api', 'role_api') -class AccessTokenRolesV3(controller.V3Controller): - collection_name = 'roles' - member_name = 'role' - - @controller.protected() - def list_access_token_roles(self, context, user_id, access_token_id): - access_token = self.oauth_api.get_access_token(access_token_id) - if access_token['authorizing_user_id'] != user_id: - raise exception.NotFound() - authed_role_ids = access_token['role_ids'] - authed_role_ids = jsonutils.loads(authed_role_ids) - refs = ([self._format_role_entity(x) for x in authed_role_ids]) - return AccessTokenRolesV3.wrap_collection(context, refs) - - @controller.protected() - def get_access_token_role(self, context, user_id, - access_token_id, role_id): - access_token = self.oauth_api.get_access_token(access_token_id) - if access_token['authorizing_user_id'] != user_id: - raise exception.Unauthorized(_('User IDs do not match')) - authed_role_ids = access_token['role_ids'] - authed_role_ids = jsonutils.loads(authed_role_ids) - for authed_role_id in authed_role_ids: - if authed_role_id == role_id: - role = self._format_role_entity(role_id) - return AccessTokenRolesV3.wrap_member(context, role) - raise exception.RoleNotFound(_('Could not find role')) - - def _format_role_entity(self, role_id): - role = self.role_api.get_role(role_id) - formatted_entity = role.copy() - if 'description' in role: - formatted_entity.pop('description') - if 'enabled' in role: - formatted_entity.pop('enabled') - return formatted_entity - - -@dependency.requires('assignment_api', 'oauth_api', - 'resource_api', 'token_provider_api') -class OAuthControllerV3(controller.V3Controller): - collection_name = 'not_used' - member_name = 'not_used' - - 
def create_request_token(self, context): - headers = context['headers'] - oauth_headers = oauth1.get_oauth_headers(headers) - consumer_id = oauth_headers.get('oauth_consumer_key') - requested_project_id = headers.get('Requested-Project-Id') - - if not consumer_id: - raise exception.ValidationError( - attribute='oauth_consumer_key', target='request') - if not requested_project_id: - raise exception.ValidationError( - attribute='requested_project_id', target='request') - - # NOTE(stevemar): Ensure consumer and requested project exist - self.resource_api.get_project(requested_project_id) - self.oauth_api.get_consumer(consumer_id) - - url = self.base_url(context, context['path']) - - req_headers = {'Requested-Project-Id': requested_project_id} - req_headers.update(headers) - request_verifier = oauth1.RequestTokenEndpoint( - request_validator=validator.OAuthValidator(), - token_generator=oauth1.token_generator) - h, b, s = request_verifier.create_request_token_response( - url, - http_method='POST', - body=context['query_string'], - headers=req_headers) - - if (not b) or int(s) > 399: - msg = _('Invalid signature') - raise exception.Unauthorized(message=msg) - - request_token_duration = CONF.oauth1.request_token_duration - initiator = notifications._get_request_audit_info(context) - token_ref = self.oauth_api.create_request_token(consumer_id, - requested_project_id, - request_token_duration, - initiator) - - result = ('oauth_token=%(key)s&oauth_token_secret=%(secret)s' - % {'key': token_ref['id'], - 'secret': token_ref['request_secret']}) - - if CONF.oauth1.request_token_duration: - expiry_bit = '&oauth_expires_at=%s' % token_ref['expires_at'] - result += expiry_bit - - headers = [('Content-Type', 'application/x-www-urlformencoded')] - response = wsgi.render_response(result, - status=(201, 'Created'), - headers=headers) - - return response - - def create_access_token(self, context): - headers = context['headers'] - oauth_headers = oauth1.get_oauth_headers(headers) - 
consumer_id = oauth_headers.get('oauth_consumer_key') - request_token_id = oauth_headers.get('oauth_token') - oauth_verifier = oauth_headers.get('oauth_verifier') - - if not consumer_id: - raise exception.ValidationError( - attribute='oauth_consumer_key', target='request') - if not request_token_id: - raise exception.ValidationError( - attribute='oauth_token', target='request') - if not oauth_verifier: - raise exception.ValidationError( - attribute='oauth_verifier', target='request') - - req_token = self.oauth_api.get_request_token( - request_token_id) - - expires_at = req_token['expires_at'] - if expires_at: - now = timeutils.utcnow() - expires = timeutils.normalize_time( - timeutils.parse_isotime(expires_at)) - if now > expires: - raise exception.Unauthorized(_('Request token is expired')) - - url = self.base_url(context, context['path']) - - access_verifier = oauth1.AccessTokenEndpoint( - request_validator=validator.OAuthValidator(), - token_generator=oauth1.token_generator) - h, b, s = access_verifier.create_access_token_response( - url, - http_method='POST', - body=context['query_string'], - headers=headers) - params = oauth1.extract_non_oauth_params(b) - if len(params) != 0: - msg = _('There should not be any non-oauth parameters') - raise exception.Unauthorized(message=msg) - - if req_token['consumer_id'] != consumer_id: - msg = _('provided consumer key does not match stored consumer key') - raise exception.Unauthorized(message=msg) - - if req_token['verifier'] != oauth_verifier: - msg = _('provided verifier does not match stored verifier') - raise exception.Unauthorized(message=msg) - - if req_token['id'] != request_token_id: - msg = _('provided request key does not match stored request key') - raise exception.Unauthorized(message=msg) - - if not req_token.get('authorizing_user_id'): - msg = _('Request Token does not have an authorizing user id') - raise exception.Unauthorized(message=msg) - - access_token_duration = CONF.oauth1.access_token_duration - 
initiator = notifications._get_request_audit_info(context) - token_ref = self.oauth_api.create_access_token(request_token_id, - access_token_duration, - initiator) - - result = ('oauth_token=%(key)s&oauth_token_secret=%(secret)s' - % {'key': token_ref['id'], - 'secret': token_ref['access_secret']}) - - if CONF.oauth1.access_token_duration: - expiry_bit = '&oauth_expires_at=%s' % (token_ref['expires_at']) - result += expiry_bit - - headers = [('Content-Type', 'application/x-www-urlformencoded')] - response = wsgi.render_response(result, - status=(201, 'Created'), - headers=headers) - - return response - - @controller.protected() - def authorize_request_token(self, context, request_token_id, roles): - """An authenticated user is going to authorize a request token. - - As a security precaution, the requested roles must match those in - the request token. Because this is in a CLI-only world at the moment, - there is not another easy way to make sure the user knows which roles - are being requested before authorizing. 
- """ - auth_context = context.get('environment', - {}).get('KEYSTONE_AUTH_CONTEXT', {}) - if auth_context.get('is_delegated_auth'): - raise exception.Forbidden( - _('Cannot authorize a request token' - ' with a token issued via delegation.')) - - req_token = self.oauth_api.get_request_token(request_token_id) - - expires_at = req_token['expires_at'] - if expires_at: - now = timeutils.utcnow() - expires = timeutils.normalize_time( - timeutils.parse_isotime(expires_at)) - if now > expires: - raise exception.Unauthorized(_('Request token is expired')) - - # put the roles in a set for easy comparison - authed_roles = set() - for role in roles: - authed_roles.add(role['id']) - - # verify the authorizing user has the roles - user_token = utils.get_token_ref(context) - user_id = user_token.user_id - project_id = req_token['requested_project_id'] - user_roles = self.assignment_api.get_roles_for_user_and_project( - user_id, project_id) - cred_set = set(user_roles) - - if not cred_set.issuperset(authed_roles): - msg = _('authorizing user does not have role required') - raise exception.Unauthorized(message=msg) - - # create list of just the id's for the backend - role_list = list(authed_roles) - - # verify the user has the project too - req_project_id = req_token['requested_project_id'] - user_projects = self.assignment_api.list_projects_for_user(user_id) - for user_project in user_projects: - if user_project['id'] == req_project_id: - break - else: - msg = _("User is not a member of the requested project") - raise exception.Unauthorized(message=msg) - - # finally authorize the token - authed_token = self.oauth_api.authorize_request_token( - request_token_id, user_id, role_list) - - to_return = {'token': {'oauth_verifier': authed_token['verifier']}} - return to_return diff --git a/keystone-moon/keystone/contrib/oauth1/core.py b/keystone-moon/keystone/contrib/oauth1/core.py deleted file mode 100644 index 6406a803..00000000 --- a/keystone-moon/keystone/contrib/oauth1/core.py 
+++ /dev/null @@ -1,367 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Main entry point into the OAuth1 service.""" - -from __future__ import absolute_import - -import abc -import string -import uuid - -import oauthlib.common -from oauthlib import oauth1 -from oslo_config import cfg -from oslo_log import log -import six - -from keystone.common import dependency -from keystone.common import extension -from keystone.common import manager -from keystone import exception -from keystone.i18n import _LE -from keystone import notifications - - -RequestValidator = oauth1.RequestValidator -Client = oauth1.Client -AccessTokenEndpoint = oauth1.AccessTokenEndpoint -ResourceEndpoint = oauth1.ResourceEndpoint -AuthorizationEndpoint = oauth1.AuthorizationEndpoint -SIG_HMAC = oauth1.SIGNATURE_HMAC -RequestTokenEndpoint = oauth1.RequestTokenEndpoint -oRequest = oauthlib.common.Request -# The characters used to generate verifiers are limited to alphanumerical -# values for ease of manual entry. Commonly confused characters are omitted. 
-VERIFIER_CHARS = string.ascii_letters + string.digits -CONFUSED_CHARS = 'jiIl1oO0' -VERIFIER_CHARS = ''.join(c for c in VERIFIER_CHARS if c not in CONFUSED_CHARS) - - -class Token(object): - def __init__(self, key, secret): - self.key = key - self.secret = secret - self.verifier = None - - def set_verifier(self, verifier): - self.verifier = verifier - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -def token_generator(*args, **kwargs): - return uuid.uuid4().hex - - -EXTENSION_DATA = { - 'name': 'OpenStack OAUTH1 API', - 'namespace': 'http://docs.openstack.org/identity/api/ext/' - 'OS-OAUTH1/v1.0', - 'alias': 'OS-OAUTH1', - 'updated': '2013-07-07T12:00:0-00:00', - 'description': 'OpenStack OAuth 1.0a Delegated Auth Mechanism.', - 'links': [ - { - 'rel': 'describedby', - # TODO(dolph): link needs to be revised after - # bug 928059 merges - 'type': 'text/html', - 'href': 'https://github.com/openstack/identity-api', - } - ]} -extension.register_admin_extension(EXTENSION_DATA['alias'], EXTENSION_DATA) -extension.register_public_extension(EXTENSION_DATA['alias'], EXTENSION_DATA) - - -def filter_consumer(consumer_ref): - """Filter out private items in a consumer dict. - - 'secret' is never returned. - - :returns: consumer_ref - - """ - if consumer_ref: - consumer_ref = consumer_ref.copy() - consumer_ref.pop('secret', None) - return consumer_ref - - -def filter_token(access_token_ref): - """Filter out private items in an access token dict. - - 'access_secret' is never returned. - - :returns: access_token_ref - - """ - if access_token_ref: - access_token_ref = access_token_ref.copy() - access_token_ref.pop('access_secret', None) - return access_token_ref - - -def get_oauth_headers(headers): - parameters = {} - - # The incoming headers variable is your usual heading from context - # In an OAuth signed req, where the oauth variables are in the header, - # they with the key 'Authorization'. 
- - if headers and 'Authorization' in headers: - # A typical value for Authorization is seen below - # 'OAuth realm="", oauth_body_hash="2jm%3D", oauth_nonce="14475435" - # along with other oauth variables, the 'OAuth ' part is trimmed - # to split the rest of the headers. - - auth_header = headers['Authorization'] - params = oauth1.rfc5849.utils.parse_authorization_header(auth_header) - parameters.update(dict(params)) - return parameters - else: - msg = _LE('Cannot retrieve Authorization headers') - LOG.error(msg) - raise exception.OAuthHeadersMissingError() - - -def extract_non_oauth_params(query_string): - params = oauthlib.common.extract_params(query_string) - return {k: v for k, v in params if not k.startswith('oauth_')} - - -@dependency.provider('oauth_api') -class Manager(manager.Manager): - """Default pivot point for the OAuth1 backend. - - See :mod:`keystone.common.manager.Manager` for more details on how this - dynamically calls the backend. - - """ - - driver_namespace = 'keystone.oauth1' - - _ACCESS_TOKEN = "OS-OAUTH1:access_token" - _REQUEST_TOKEN = "OS-OAUTH1:request_token" - _CONSUMER = "OS-OAUTH1:consumer" - - def __init__(self): - super(Manager, self).__init__(CONF.oauth1.driver) - - def create_consumer(self, consumer_ref, initiator=None): - ret = self.driver.create_consumer(consumer_ref) - notifications.Audit.created(self._CONSUMER, ret['id'], initiator) - return ret - - def update_consumer(self, consumer_id, consumer_ref, initiator=None): - ret = self.driver.update_consumer(consumer_id, consumer_ref) - notifications.Audit.updated(self._CONSUMER, consumer_id, initiator) - return ret - - def delete_consumer(self, consumer_id, initiator=None): - ret = self.driver.delete_consumer(consumer_id) - notifications.Audit.deleted(self._CONSUMER, consumer_id, initiator) - return ret - - def create_access_token(self, request_id, access_token_duration, - initiator=None): - ret = self.driver.create_access_token(request_id, - access_token_duration) - 
notifications.Audit.created(self._ACCESS_TOKEN, ret['id'], initiator) - return ret - - def delete_access_token(self, user_id, access_token_id, initiator=None): - ret = self.driver.delete_access_token(user_id, access_token_id) - notifications.Audit.deleted(self._ACCESS_TOKEN, access_token_id, - initiator) - return ret - - def create_request_token(self, consumer_id, requested_project, - request_token_duration, initiator=None): - ret = self.driver.create_request_token( - consumer_id, requested_project, request_token_duration) - notifications.Audit.created(self._REQUEST_TOKEN, ret['id'], - initiator) - return ret - - -@six.add_metaclass(abc.ABCMeta) -class Oauth1DriverV8(object): - """Interface description for an OAuth1 driver.""" - - @abc.abstractmethod - def create_consumer(self, consumer_ref): - """Create consumer. - - :param consumer_ref: consumer ref with consumer name - :type consumer_ref: dict - :returns: consumer_ref - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def update_consumer(self, consumer_id, consumer_ref): - """Update consumer. - - :param consumer_id: id of consumer to update - :type consumer_id: string - :param consumer_ref: new consumer ref with consumer name - :type consumer_ref: dict - :returns: consumer_ref - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_consumers(self): - """List consumers. - - :returns: list of consumers - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_consumer(self, consumer_id): - """Get consumer, returns the consumer id (key) - and description. - - :param consumer_id: id of consumer to get - :type consumer_id: string - :returns: consumer_ref - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_consumer_with_secret(self, consumer_id): - """Like get_consumer() but returned consumer_ref includes - the consumer secret. 
- - Secrets should only be shared upon consumer creation; the - consumer secret is required to verify incoming OAuth requests. - - :param consumer_id: id of consumer to get - :type consumer_id: string - :returns: consumer_ref - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_consumer(self, consumer_id): - """Delete consumer. - - :param consumer_id: id of consumer to get - :type consumer_id: string - :returns: None. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_access_tokens(self, user_id): - """List access tokens. - - :param user_id: search for access tokens authorized by given user id - :type user_id: string - :returns: list of access tokens the user has authorized - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_access_token(self, user_id, access_token_id): - """Delete access token. - - :param user_id: authorizing user id - :type user_id: string - :param access_token_id: access token to delete - :type access_token_id: string - :returns: None - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def create_request_token(self, consumer_id, requested_project, - request_token_duration): - """Create request token. - - :param consumer_id: the id of the consumer - :type consumer_id: string - :param requested_project_id: requested project id - :type requested_project_id: string - :param request_token_duration: duration of request token - :type request_token_duration: string - :returns: request_token_ref - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_request_token(self, request_token_id): - """Get request token. 
- - :param request_token_id: the id of the request token - :type request_token_id: string - :returns: request_token_ref - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_access_token(self, access_token_id): - """Get access token. - - :param access_token_id: the id of the access token - :type access_token_id: string - :returns: access_token_ref - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def authorize_request_token(self, request_id, user_id, role_ids): - """Authorize request token. - - :param request_id: the id of the request token, to be authorized - :type request_id: string - :param user_id: the id of the authorizing user - :type user_id: string - :param role_ids: list of role ids to authorize - :type role_ids: list - :returns: verifier - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def create_access_token(self, request_id, access_token_duration): - """Create access token. - - :param request_id: the id of the request token, to be deleted - :type request_id: string - :param access_token_duration: duration of an access token - :type access_token_duration: string - :returns: access_token_ref - - """ - raise exception.NotImplemented() # pragma: no cover - - -Driver = manager.create_legacy_driver(Oauth1DriverV8) diff --git a/keystone-moon/keystone/contrib/oauth1/migrate_repo/__init__.py b/keystone-moon/keystone/contrib/oauth1/migrate_repo/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/contrib/oauth1/migrate_repo/migrate.cfg b/keystone-moon/keystone/contrib/oauth1/migrate_repo/migrate.cfg deleted file mode 100644 index 97ca7810..00000000 --- a/keystone-moon/keystone/contrib/oauth1/migrate_repo/migrate.cfg +++ /dev/null @@ -1,25 +0,0 @@ -[db_settings] -# Used to identify which repository this database is versioned under. -# You can use the name of your project. 
-repository_id=oauth1 - -# The name of the database table used to track the schema version. -# This name shouldn't already be used by your project. -# If this is changed once a database is under version control, you'll need to -# change the table name in each database too. -version_table=migrate_version - -# When committing a change script, Migrate will attempt to generate the -# sql for all supported databases; normally, if one of them fails - probably -# because you don't have that database installed - it is ignored and the -# commit continues, perhaps ending successfully. -# Databases in this list MUST compile successfully during a commit, or the -# entire commit will fail. List the databases your application will actually -# be using to ensure your updates to that database work properly. -# This must be a list; example: ['postgres','sqlite'] -required_dbs=[] - -# When creating new change scripts, Migrate will stamp the new script with -# a version number. By default this is latest_version + 1. You can set this -# to 'true' to tell Migrate to use the UTC timestamp instead. -use_timestamp_numbering=False diff --git a/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/001_add_oauth_tables.py b/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/001_add_oauth_tables.py deleted file mode 100644 index fe0212d7..00000000 --- a/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/001_add_oauth_tables.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone import exception - - -def upgrade(migrate_engine): - raise exception.MigrationMovedFailure(extension='oauth1') diff --git a/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/002_fix_oauth_tables_fk.py b/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/002_fix_oauth_tables_fk.py deleted file mode 100644 index fe0212d7..00000000 --- a/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/002_fix_oauth_tables_fk.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone import exception - - -def upgrade(migrate_engine): - raise exception.MigrationMovedFailure(extension='oauth1') diff --git a/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/003_consumer_description_nullalbe.py b/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/003_consumer_description_nullalbe.py deleted file mode 100644 index fe0212d7..00000000 --- a/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/003_consumer_description_nullalbe.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone import exception - - -def upgrade(migrate_engine): - raise exception.MigrationMovedFailure(extension='oauth1') diff --git a/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/004_request_token_roles_nullable.py b/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/004_request_token_roles_nullable.py deleted file mode 100644 index fe0212d7..00000000 --- a/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/004_request_token_roles_nullable.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone import exception - - -def upgrade(migrate_engine): - raise exception.MigrationMovedFailure(extension='oauth1') diff --git a/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/005_consumer_id_index.py b/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/005_consumer_id_index.py deleted file mode 100644 index a4681e16..00000000 --- a/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/005_consumer_id_index.py +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright 2014 Mirantis.inc -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone import exception - - -def upgrade(migrate_engine): - raise exception.MigrationMovedFailure(extension='oauth1') diff --git a/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/__init__.py b/keystone-moon/keystone/contrib/oauth1/migrate_repo/versions/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/contrib/oauth1/routers.py b/keystone-moon/keystone/contrib/oauth1/routers.py deleted file mode 100644 index 42a26c10..00000000 --- a/keystone-moon/keystone/contrib/oauth1/routers.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_log import log -from oslo_log import versionutils - -from keystone.common import wsgi -from keystone.i18n import _ - - -LOG = log.getLogger(__name__) - - -class OAuth1Extension(wsgi.Middleware): - - def __init__(self, *args, **kwargs): - super(OAuth1Extension, self).__init__(*args, **kwargs) - msg = _("Remove oauth1_extension from the paste pipeline, the " - "oauth1 extension is now always available. Update the " - "[pipeline:api_v3] section in keystone-paste.ini accordingly, " - "as it will be removed in the O release.") - versionutils.report_deprecated_feature(LOG, msg) diff --git a/keystone-moon/keystone/contrib/oauth1/validator.py b/keystone-moon/keystone/contrib/oauth1/validator.py deleted file mode 100644 index 8f44059e..00000000 --- a/keystone-moon/keystone/contrib/oauth1/validator.py +++ /dev/null @@ -1,179 +0,0 @@ -# Copyright 2014 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""oAuthlib request validator.""" - -from oslo_log import log -import six - -from keystone.common import dependency -from keystone.contrib.oauth1 import core as oauth1 -from keystone import exception - - -METHOD_NAME = 'oauth_validator' -LOG = log.getLogger(__name__) - - -@dependency.requires('oauth_api') -class OAuthValidator(oauth1.RequestValidator): - - # TODO(mhu) set as option probably? - @property - def enforce_ssl(self): - return False - - @property - def safe_characters(self): - # oauth tokens are generated from a uuid hex value - return set("abcdef0123456789") - - def _check_token(self, token): - # generic token verification when they're obtained from a uuid hex - return (set(token) <= self.safe_characters and - len(token) == 32) - - def check_client_key(self, client_key): - return self._check_token(client_key) - - def check_request_token(self, request_token): - return self._check_token(request_token) - - def check_access_token(self, access_token): - return self._check_token(access_token) - - def check_nonce(self, nonce): - # Assuming length is not a concern - return set(nonce) <= self.safe_characters - - def check_verifier(self, verifier): - return (all(i in oauth1.VERIFIER_CHARS for i in verifier) and - len(verifier) == 8) - - def get_client_secret(self, client_key, request): - client = self.oauth_api.get_consumer_with_secret(client_key) - return client['secret'] - - def get_request_token_secret(self, client_key, token, request): - token_ref = self.oauth_api.get_request_token(token) - return token_ref['request_secret'] - - def get_access_token_secret(self, client_key, token, request): - access_token = self.oauth_api.get_access_token(token) - return access_token['access_secret'] - - def get_default_realms(self, client_key, request): - # realms weren't implemented with the previous library - return [] - - def get_realms(self, token, request): - return [] - - def get_redirect_uri(self, token, request): - # OOB (out of band) is supposed to be the default 
value to use - return 'oob' - - def get_rsa_key(self, client_key, request): - # HMAC signing is used, so return a dummy value - return '' - - def invalidate_request_token(self, client_key, request_token, request): - # this method is invoked when an access token is generated out of a - # request token, to make sure that request token cannot be consumed - # anymore. This is done in the backend, so we do nothing here. - pass - - def validate_client_key(self, client_key, request): - try: - return self.oauth_api.get_consumer(client_key) is not None - except exception.NotFound: - return False - - def validate_request_token(self, client_key, token, request): - try: - return self.oauth_api.get_request_token(token) is not None - except exception.NotFound: - return False - - def validate_access_token(self, client_key, token, request): - try: - return self.oauth_api.get_access_token(token) is not None - except exception.NotFound: - return False - - def validate_timestamp_and_nonce(self, - client_key, - timestamp, - nonce, - request, - request_token=None, - access_token=None): - return True - - def validate_redirect_uri(self, client_key, redirect_uri, request): - # we expect OOB, we don't really care - return True - - def validate_requested_realms(self, client_key, realms, request): - # realms are not used - return True - - def validate_realms(self, - client_key, - token, - request, - uri=None, - realms=None): - return True - - def validate_verifier(self, client_key, token, verifier, request): - try: - req_token = self.oauth_api.get_request_token(token) - return req_token['verifier'] == verifier - except exception.NotFound: - return False - - def verify_request_token(self, token, request): - # there aren't strong expectations on the request token format - return isinstance(token, six.string_types) - - def verify_realms(self, token, realms, request): - return True - - # The following save_XXX methods are called to create tokens. 
I chose to - # keep the original logic, but the comments below show how that could be - # implemented. The real implementation logic is in the backend. - def save_access_token(self, token, request): - pass -# token_duration = CONF.oauth1.request_token_duration -# request_token_id = request.client_key -# self.oauth_api.create_access_token(request_token_id, -# token_duration, -# token["oauth_token"], -# token["oauth_token_secret"]) - - def save_request_token(self, token, request): - pass -# project_id = request.headers.get('Requested-Project-Id') -# token_duration = CONF.oauth1.request_token_duration -# self.oauth_api.create_request_token(request.client_key, -# project_id, -# token_duration, -# token["oauth_token"], -# token["oauth_token_secret"]) - - def save_verifier(self, token, verifier, request): - # keep the old logic for this, as it is done in two steps and requires - # information that the request validator has no access to - pass diff --git a/keystone-moon/keystone/contrib/revoke/__init__.py b/keystone-moon/keystone/contrib/revoke/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/contrib/revoke/backends/__init__.py b/keystone-moon/keystone/contrib/revoke/backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/contrib/revoke/backends/kvs.py b/keystone-moon/keystone/contrib/revoke/backends/kvs.py deleted file mode 100644 index 086becb0..00000000 --- a/keystone-moon/keystone/contrib/revoke/backends/kvs.py +++ /dev/null @@ -1,74 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import datetime - -from oslo_config import cfg -from oslo_log import versionutils -from oslo_utils import timeutils - -from keystone.common import kvs -from keystone.contrib import revoke -from keystone import exception - - -CONF = cfg.CONF - -_EVENT_KEY = 'os-revoke-events' -_KVS_BACKEND = 'openstack.kvs.Memory' - - -class Revoke(revoke.RevokeDriverV8): - - @versionutils.deprecated( - versionutils.deprecated.JUNO, - in_favor_of='keystone.contrib.revoke.backends.sql', - remove_in=+1, - what='keystone.contrib.revoke.backends.kvs') - def __init__(self, **kwargs): - super(Revoke, self).__init__() - self._store = kvs.get_key_value_store('os-revoke-driver') - self._store.configure(backing_store=_KVS_BACKEND, **kwargs) - - def _list_events(self): - try: - return self._store.get(_EVENT_KEY) - except exception.NotFound: - return [] - - def list_events(self, last_fetch=None): - results = [] - - with self._store.get_lock(_EVENT_KEY): - events = self._list_events() - - for event in events: - revoked_at = event.revoked_at - if last_fetch is None or revoked_at > last_fetch: - results.append(event) - return results - - def revoke(self, event): - pruned = [] - expire_delta = datetime.timedelta(seconds=CONF.token.expiration) - oldest = timeutils.utcnow() - expire_delta - - with self._store.get_lock(_EVENT_KEY) as lock: - events = self._list_events() - if event: - events.append(event) - - for event in events: - revoked_at = event.revoked_at - if revoked_at > oldest: - pruned.append(event) - self._store.set(_EVENT_KEY, pruned, lock) diff --git 
a/keystone-moon/keystone/contrib/revoke/backends/sql.py b/keystone-moon/keystone/contrib/revoke/backends/sql.py deleted file mode 100644 index 0bf493ae..00000000 --- a/keystone-moon/keystone/contrib/revoke/backends/sql.py +++ /dev/null @@ -1,28 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_log import versionutils - -from keystone.revoke.backends import sql - - -_OLD = "keystone.contrib.revoke.backends.sql.Revoke" -_NEW = "sql" - - -class Revoke(sql.Revoke): - - @versionutils.deprecated(versionutils.deprecated.MITAKA, - in_favor_of=_NEW, - what=_OLD) - def __init__(self, *args, **kwargs): - super(Revoke, self).__init__(*args, **kwargs) diff --git a/keystone-moon/keystone/contrib/revoke/controllers.py b/keystone-moon/keystone/contrib/revoke/controllers.py deleted file mode 100644 index 40151bae..00000000 --- a/keystone-moon/keystone/contrib/revoke/controllers.py +++ /dev/null @@ -1,44 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_utils import timeutils - -from keystone.common import controller -from keystone.common import dependency -from keystone import exception -from keystone.i18n import _ - - -@dependency.requires('revoke_api') -class RevokeController(controller.V3Controller): - @controller.protected() - def list_revoke_events(self, context): - since = context['query_string'].get('since') - last_fetch = None - if since: - try: - last_fetch = timeutils.normalize_time( - timeutils.parse_isotime(since)) - except ValueError: - raise exception.ValidationError( - message=_('invalid date format %s') % since) - events = self.revoke_api.list_events(last_fetch=last_fetch) - # Build the links by hand as the standard controller calls require ids - response = {'events': [event.to_dict() for event in events], - 'links': { - 'next': None, - 'self': RevokeController.base_url( - context, - path=context['path']), - 'previous': None} - } - return response diff --git a/keystone-moon/keystone/contrib/revoke/core.py b/keystone-moon/keystone/contrib/revoke/core.py deleted file mode 100644 index 3b108c9e..00000000 --- a/keystone-moon/keystone/contrib/revoke/core.py +++ /dev/null @@ -1,262 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Main entry point into the Revoke service.""" - -import abc -import datetime - -from oslo_config import cfg -from oslo_log import log -from oslo_log import versionutils -from oslo_utils import timeutils -import six - -from keystone.common import cache -from keystone.common import dependency -from keystone.common import extension -from keystone.common import manager -from keystone.contrib.revoke import model -from keystone import exception -from keystone.i18n import _ -from keystone import notifications - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -EXTENSION_DATA = { - 'name': 'OpenStack Revoke API', - 'namespace': 'http://docs.openstack.org/identity/api/ext/' - 'OS-REVOKE/v1.0', - 'alias': 'OS-REVOKE', - 'updated': '2014-02-24T20:51:0-00:00', - 'description': 'OpenStack revoked token reporting mechanism.', - 'links': [ - { - 'rel': 'describedby', - 'type': 'text/html', - 'href': ('https://github.com/openstack/identity-api/blob/master/' - 'openstack-identity-api/v3/src/markdown/' - 'identity-api-v3-os-revoke-ext.md'), - } - ]} -extension.register_admin_extension(EXTENSION_DATA['alias'], EXTENSION_DATA) -extension.register_public_extension(EXTENSION_DATA['alias'], EXTENSION_DATA) - -MEMOIZE = cache.get_memoization_decorator(section='revoke') - - -def revoked_before_cutoff_time(): - expire_delta = datetime.timedelta( - seconds=CONF.token.expiration + CONF.revoke.expiration_buffer) - oldest = timeutils.utcnow() - expire_delta - return oldest - - -@dependency.provider('revoke_api') -class Manager(manager.Manager): - """Default pivot point for the Revoke backend. - - Performs common logic for recording revocations. - - See :mod:`keystone.common.manager.Manager` for more details on - how this dynamically calls the backend. 
- - """ - - driver_namespace = 'keystone.revoke' - - def __init__(self): - super(Manager, self).__init__(CONF.revoke.driver) - self._register_listeners() - self.model = model - - def _user_callback(self, service, resource_type, operation, - payload): - self.revoke_by_user(payload['resource_info']) - - def _role_callback(self, service, resource_type, operation, - payload): - self.revoke( - model.RevokeEvent(role_id=payload['resource_info'])) - - def _project_callback(self, service, resource_type, operation, - payload): - self.revoke( - model.RevokeEvent(project_id=payload['resource_info'])) - - def _domain_callback(self, service, resource_type, operation, - payload): - self.revoke( - model.RevokeEvent(domain_id=payload['resource_info'])) - - def _trust_callback(self, service, resource_type, operation, - payload): - self.revoke( - model.RevokeEvent(trust_id=payload['resource_info'])) - - def _consumer_callback(self, service, resource_type, operation, - payload): - self.revoke( - model.RevokeEvent(consumer_id=payload['resource_info'])) - - def _access_token_callback(self, service, resource_type, operation, - payload): - self.revoke( - model.RevokeEvent(access_token_id=payload['resource_info'])) - - def _role_assignment_callback(self, service, resource_type, operation, - payload): - info = payload['resource_info'] - self.revoke_by_grant(role_id=info['role_id'], user_id=info['user_id'], - domain_id=info.get('domain_id'), - project_id=info.get('project_id')) - - def _register_listeners(self): - callbacks = { - notifications.ACTIONS.deleted: [ - ['OS-TRUST:trust', self._trust_callback], - ['OS-OAUTH1:consumer', self._consumer_callback], - ['OS-OAUTH1:access_token', self._access_token_callback], - ['role', self._role_callback], - ['user', self._user_callback], - ['project', self._project_callback], - ['role_assignment', self._role_assignment_callback] - ], - notifications.ACTIONS.disabled: [ - ['user', self._user_callback], - ['project', self._project_callback], - 
['domain', self._domain_callback], - ], - notifications.ACTIONS.internal: [ - [notifications.INVALIDATE_USER_TOKEN_PERSISTENCE, - self._user_callback], - ] - } - - for event, cb_info in callbacks.items(): - for resource_type, callback_fns in cb_info: - notifications.register_event_callback(event, resource_type, - callback_fns) - - def revoke_by_user(self, user_id): - return self.revoke(model.RevokeEvent(user_id=user_id)) - - def _assert_not_domain_and_project_scoped(self, domain_id=None, - project_id=None): - if domain_id is not None and project_id is not None: - msg = _('The revoke call must not have both domain_id and ' - 'project_id. This is a bug in the Keystone server. The ' - 'current request is aborted.') - raise exception.UnexpectedError(exception=msg) - - @versionutils.deprecated(as_of=versionutils.deprecated.JUNO, - remove_in=0) - def revoke_by_expiration(self, user_id, expires_at, - domain_id=None, project_id=None): - - self._assert_not_domain_and_project_scoped(domain_id=domain_id, - project_id=project_id) - - self.revoke( - model.RevokeEvent(user_id=user_id, - expires_at=expires_at, - domain_id=domain_id, - project_id=project_id)) - - def revoke_by_audit_id(self, audit_id): - self.revoke(model.RevokeEvent(audit_id=audit_id)) - - def revoke_by_audit_chain_id(self, audit_chain_id, project_id=None, - domain_id=None): - - self._assert_not_domain_and_project_scoped(domain_id=domain_id, - project_id=project_id) - - self.revoke(model.RevokeEvent(audit_chain_id=audit_chain_id, - domain_id=domain_id, - project_id=project_id)) - - def revoke_by_grant(self, role_id, user_id=None, - domain_id=None, project_id=None): - self.revoke( - model.RevokeEvent(user_id=user_id, - role_id=role_id, - domain_id=domain_id, - project_id=project_id)) - - def revoke_by_user_and_project(self, user_id, project_id): - self.revoke( - model.RevokeEvent(project_id=project_id, user_id=user_id)) - - def revoke_by_project_role_assignment(self, project_id, role_id): - 
self.revoke(model.RevokeEvent(project_id=project_id, role_id=role_id)) - - def revoke_by_domain_role_assignment(self, domain_id, role_id): - self.revoke(model.RevokeEvent(domain_id=domain_id, role_id=role_id)) - - @MEMOIZE - def _get_revoke_tree(self): - events = self.driver.list_events() - revoke_tree = model.RevokeTree(revoke_events=events) - - return revoke_tree - - def check_token(self, token_values): - """Checks the values from a token against the revocation list - - :param token_values: dictionary of values from a token, - normalized for differences between v2 and v3. The checked values are a - subset of the attributes of model.TokenEvent - - :raises exception.TokenNotFound: if the token is invalid - - """ - if self._get_revoke_tree().is_revoked(token_values): - raise exception.TokenNotFound(_('Failed to validate token')) - - def revoke(self, event): - self.driver.revoke(event) - self._get_revoke_tree.invalidate(self) - - -@six.add_metaclass(abc.ABCMeta) -class RevokeDriverV8(object): - """Interface for recording and reporting revocation events.""" - - @abc.abstractmethod - def list_events(self, last_fetch=None): - """return the revocation events, as a list of objects - - :param last_fetch: Time of last fetch. Return all events newer. - :returns: A list of keystone.contrib.revoke.model.RevokeEvent - newer than `last_fetch.` - If no last_fetch is specified, returns all events - for tokens issued after the expiration cutoff. 
- - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def revoke(self, event): - """register a revocation event - - :param event: An instance of - keystone.contrib.revoke.model.RevocationEvent - - """ - raise exception.NotImplemented() # pragma: no cover - - -Driver = manager.create_legacy_driver(RevokeDriverV8) diff --git a/keystone-moon/keystone/contrib/revoke/migrate_repo/__init__.py b/keystone-moon/keystone/contrib/revoke/migrate_repo/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/contrib/revoke/migrate_repo/migrate.cfg b/keystone-moon/keystone/contrib/revoke/migrate_repo/migrate.cfg deleted file mode 100644 index 0e61bcaa..00000000 --- a/keystone-moon/keystone/contrib/revoke/migrate_repo/migrate.cfg +++ /dev/null @@ -1,25 +0,0 @@ -[db_settings] -# Used to identify which repository this database is versioned under. -# You can use the name of your project. -repository_id=revoke - -# The name of the database table used to track the schema version. -# This name shouldn't already be used by your project. -# If this is changed once a database is under version control, you'll need to -# change the table name in each database too. -version_table=migrate_version - -# When committing a change script, Migrate will attempt to generate the -# sql for all supported databases; normally, if one of them fails - probably -# because you don't have that database installed - it is ignored and the -# commit continues, perhaps ending successfully. -# Databases in this list MUST compile successfully during a commit, or the -# entire commit will fail. List the databases your application will actually -# be using to ensure your updates to that database work properly. -# This must be a list; example: ['postgres','sqlite'] -required_dbs=[] - -# When creating new change scripts, Migrate will stamp the new script with -# a version number. By default this is latest_version + 1. 
You can set this -# to 'true' to tell Migrate to use the UTC timestamp instead. -use_timestamp_numbering=False diff --git a/keystone-moon/keystone/contrib/revoke/migrate_repo/versions/001_revoke_table.py b/keystone-moon/keystone/contrib/revoke/migrate_repo/versions/001_revoke_table.py deleted file mode 100644 index 81c535e1..00000000 --- a/keystone-moon/keystone/contrib/revoke/migrate_repo/versions/001_revoke_table.py +++ /dev/null @@ -1,17 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone import exception - - -def upgrade(migrate_engine): - raise exception.MigrationMovedFailure(extension='revoke') diff --git a/keystone-moon/keystone/contrib/revoke/migrate_repo/versions/002_add_audit_id_and_chain_to_revoke_table.py b/keystone-moon/keystone/contrib/revoke/migrate_repo/versions/002_add_audit_id_and_chain_to_revoke_table.py deleted file mode 100644 index 81c535e1..00000000 --- a/keystone-moon/keystone/contrib/revoke/migrate_repo/versions/002_add_audit_id_and_chain_to_revoke_table.py +++ /dev/null @@ -1,17 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone import exception - - -def upgrade(migrate_engine): - raise exception.MigrationMovedFailure(extension='revoke') diff --git a/keystone-moon/keystone/contrib/revoke/migrate_repo/versions/__init__.py b/keystone-moon/keystone/contrib/revoke/migrate_repo/versions/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/contrib/revoke/model.py b/keystone-moon/keystone/contrib/revoke/model.py deleted file mode 100644 index e677bfb5..00000000 --- a/keystone-moon/keystone/contrib/revoke/model.py +++ /dev/null @@ -1,371 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_utils import timeutils -from six.moves import map - -from keystone.common import utils - - -# The set of attributes common between the RevokeEvent -# and the dictionaries created from the token Data. -_NAMES = ['trust_id', - 'consumer_id', - 'access_token_id', - 'audit_id', - 'audit_chain_id', - 'expires_at', - 'domain_id', - 'project_id', - 'user_id', - 'role_id'] - - -# Additional arguments for creating a RevokeEvent -_EVENT_ARGS = ['issued_before', 'revoked_at'] - -# Names of attributes in the RevocationEvent, including "virtual" attributes. -# Virtual attributes are those added based on other values. -_EVENT_NAMES = _NAMES + ['domain_scope_id'] - -# Values that will be in the token data but not in the event. 
-# These will compared with event values that have different names. -# For example: both trustor_id and trustee_id are compared against user_id -_TOKEN_KEYS = ['identity_domain_id', - 'assignment_domain_id', - 'issued_at', - 'trustor_id', - 'trustee_id'] - -# Alternative names to be checked in token for every field in -# revoke tree. -ALTERNATIVES = { - 'user_id': ['user_id', 'trustor_id', 'trustee_id'], - 'domain_id': ['identity_domain_id', 'assignment_domain_id'], - # For a domain-scoped token, the domain is in assignment_domain_id. - 'domain_scope_id': ['assignment_domain_id', ], -} - - -REVOKE_KEYS = _NAMES + _EVENT_ARGS - - -def blank_token_data(issued_at): - token_data = dict() - for name in _NAMES: - token_data[name] = None - for name in _TOKEN_KEYS: - token_data[name] = None - # required field - token_data['issued_at'] = issued_at - return token_data - - -class RevokeEvent(object): - def __init__(self, **kwargs): - for k in REVOKE_KEYS: - v = kwargs.get(k, None) - setattr(self, k, v) - - if self.domain_id and self.expires_at: - # This is revoking a domain-scoped token. - self.domain_scope_id = self.domain_id - self.domain_id = None - else: - # This is revoking all tokens for a domain. - self.domain_scope_id = None - - if self.expires_at is not None: - # Trim off the expiration time because MySQL timestamps are only - # accurate to the second. 
- self.expires_at = self.expires_at.replace(microsecond=0) - - if self.revoked_at is None: - self.revoked_at = timeutils.utcnow() - if self.issued_before is None: - self.issued_before = self.revoked_at - - def to_dict(self): - keys = ['user_id', - 'role_id', - 'domain_id', - 'domain_scope_id', - 'project_id', - 'audit_id', - 'audit_chain_id', - ] - event = {key: self.__dict__[key] for key in keys - if self.__dict__[key] is not None} - if self.trust_id is not None: - event['OS-TRUST:trust_id'] = self.trust_id - if self.consumer_id is not None: - event['OS-OAUTH1:consumer_id'] = self.consumer_id - if self.consumer_id is not None: - event['OS-OAUTH1:access_token_id'] = self.access_token_id - if self.expires_at is not None: - event['expires_at'] = utils.isotime(self.expires_at) - if self.issued_before is not None: - event['issued_before'] = utils.isotime(self.issued_before, - subsecond=True) - return event - - def key_for_name(self, name): - return "%s=%s" % (name, getattr(self, name) or '*') - - -def attr_keys(event): - return list(map(event.key_for_name, _EVENT_NAMES)) - - -class RevokeTree(object): - """Fast Revocation Checking Tree Structure - - The Tree is an index to quickly match tokens against events. - Each node is a hashtable of key=value combinations from revocation events. - The - - """ - - def __init__(self, revoke_events=None): - self.revoke_map = dict() - self.add_events(revoke_events) - - def add_event(self, event): - """Updates the tree based on a revocation event. - - Creates any necessary internal nodes in the tree corresponding to the - fields of the revocation event. The leaf node will always be set to - the latest 'issued_before' for events that are otherwise identical. - - :param: Event to add to the tree - - :returns: the event that was passed in. 
- - """ - revoke_map = self.revoke_map - for key in attr_keys(event): - revoke_map = revoke_map.setdefault(key, {}) - revoke_map['issued_before'] = max( - event.issued_before, revoke_map.get( - 'issued_before', event.issued_before)) - return event - - def remove_event(self, event): - """Update the tree based on the removal of a Revocation Event - - Removes empty nodes from the tree from the leaf back to the root. - - If multiple events trace the same path, but have different - 'issued_before' values, only the last is ever stored in the tree. - So only an exact match on 'issued_before' ever triggers a removal - - :param: Event to remove from the tree - - """ - stack = [] - revoke_map = self.revoke_map - for name in _EVENT_NAMES: - key = event.key_for_name(name) - nxt = revoke_map.get(key) - if nxt is None: - break - stack.append((revoke_map, key, nxt)) - revoke_map = nxt - else: - if event.issued_before == revoke_map['issued_before']: - revoke_map.pop('issued_before') - for parent, key, child in reversed(stack): - if not any(child): - del parent[key] - - def add_events(self, revoke_events): - return list(map(self.add_event, revoke_events or [])) - - @staticmethod - def _next_level_keys(name, token_data): - """Generate keys based on current field name and token data - - Generate all keys to look for in the next iteration of revocation - event tree traversal. - """ - yield '*' - if name == 'role_id': - # Roles are very special since a token has a list of them. - # If the revocation event matches any one of them, - # revoke the token. - for role_id in token_data.get('roles', []): - yield role_id - else: - # For other fields we try to get any branch that concur - # with any alternative field in the token. 
- for alt_name in ALTERNATIVES.get(name, [name]): - yield token_data[alt_name] - - def _search(self, revoke_map, names, token_data): - """Search for revocation event by token_data - - Traverse the revocation events tree looking for event matching token - data issued after the token. - """ - if not names: - # The last (leaf) level is checked in a special way because we - # verify issued_at field differently. - try: - return revoke_map['issued_before'] >= token_data['issued_at'] - except KeyError: - return False - - name, remaining_names = names[0], names[1:] - - for key in self._next_level_keys(name, token_data): - subtree = revoke_map.get('%s=%s' % (name, key)) - if subtree and self._search(subtree, remaining_names, token_data): - return True - - # If we made it out of the loop then no element in revocation tree - # corresponds to our token and it is good. - return False - - def is_revoked(self, token_data): - """Check if a token matches the revocation event - - Compare the values for each level of the tree with the values from - the token, accounting for attributes that have alternative - keys, and for wildcard matches. - if there is a match, continue down the tree. - if there is no match, exit early. - - token_data is a map based on a flattened view of token. - The required fields are: - - 'expires_at','user_id', 'project_id', 'identity_domain_id', - 'assignment_domain_id', 'trust_id', 'trustor_id', 'trustee_id' - 'consumer_id', 'access_token_id' - - """ - return self._search(self.revoke_map, _EVENT_NAMES, token_data) - - -def build_token_values_v2(access, default_domain_id): - token_data = access['token'] - - token_expires_at = timeutils.parse_isotime(token_data['expires']) - - # Trim off the microseconds because the revocation event only has - # expirations accurate to the second. 
- token_expires_at = token_expires_at.replace(microsecond=0) - - token_values = { - 'expires_at': timeutils.normalize_time(token_expires_at), - 'issued_at': timeutils.normalize_time( - timeutils.parse_isotime(token_data['issued_at'])), - 'audit_id': token_data.get('audit_ids', [None])[0], - 'audit_chain_id': token_data.get('audit_ids', [None])[-1], - } - - token_values['user_id'] = access.get('user', {}).get('id') - - project = token_data.get('tenant') - if project is not None: - token_values['project_id'] = project['id'] - else: - token_values['project_id'] = None - - token_values['identity_domain_id'] = default_domain_id - token_values['assignment_domain_id'] = default_domain_id - - trust = token_data.get('trust') - if trust is None: - token_values['trust_id'] = None - token_values['trustor_id'] = None - token_values['trustee_id'] = None - else: - token_values['trust_id'] = trust['id'] - token_values['trustor_id'] = trust['trustor_id'] - token_values['trustee_id'] = trust['trustee_id'] - - token_values['consumer_id'] = None - token_values['access_token_id'] = None - - role_list = [] - # Roles are by ID in metadata and by name in the user section - roles = access.get('metadata', {}).get('roles', []) - for role in roles: - role_list.append(role) - token_values['roles'] = role_list - return token_values - - -def build_token_values(token_data): - - token_expires_at = timeutils.parse_isotime(token_data['expires_at']) - - # Trim off the microseconds because the revocation event only has - # expirations accurate to the second. 
- token_expires_at = token_expires_at.replace(microsecond=0) - - token_values = { - 'expires_at': timeutils.normalize_time(token_expires_at), - 'issued_at': timeutils.normalize_time( - timeutils.parse_isotime(token_data['issued_at'])), - 'audit_id': token_data.get('audit_ids', [None])[0], - 'audit_chain_id': token_data.get('audit_ids', [None])[-1], - } - - user = token_data.get('user') - if user is not None: - token_values['user_id'] = user['id'] - # Federated users do not have a domain, be defensive and get the user - # domain set to None in the federated user case. - token_values['identity_domain_id'] = user.get('domain', {}).get('id') - else: - token_values['user_id'] = None - token_values['identity_domain_id'] = None - - project = token_data.get('project', token_data.get('tenant')) - if project is not None: - token_values['project_id'] = project['id'] - token_values['assignment_domain_id'] = project['domain']['id'] - else: - token_values['project_id'] = None - - domain = token_data.get('domain') - if domain is not None: - token_values['assignment_domain_id'] = domain['id'] - else: - token_values['assignment_domain_id'] = None - - role_list = [] - roles = token_data.get('roles') - if roles is not None: - for role in roles: - role_list.append(role['id']) - token_values['roles'] = role_list - - trust = token_data.get('OS-TRUST:trust') - if trust is None: - token_values['trust_id'] = None - token_values['trustor_id'] = None - token_values['trustee_id'] = None - else: - token_values['trust_id'] = trust['id'] - token_values['trustor_id'] = trust['trustor_user']['id'] - token_values['trustee_id'] = trust['trustee_user']['id'] - - oauth1 = token_data.get('OS-OAUTH1') - if oauth1 is None: - token_values['consumer_id'] = None - token_values['access_token_id'] = None - else: - token_values['consumer_id'] = oauth1['consumer_id'] - token_values['access_token_id'] = oauth1['access_token_id'] - return token_values diff --git a/keystone-moon/keystone/contrib/revoke/routers.py 
b/keystone-moon/keystone/contrib/revoke/routers.py deleted file mode 100644 index a44c6194..00000000 --- a/keystone-moon/keystone/contrib/revoke/routers.py +++ /dev/null @@ -1,31 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_log import log -from oslo_log import versionutils - -from keystone.common import wsgi -from keystone.i18n import _ - - -LOG = log.getLogger(__name__) - - -class RevokeExtension(wsgi.Middleware): - - def __init__(self, *args, **kwargs): - super(RevokeExtension, self).__init__(*args, **kwargs) - msg = _("Remove revoke_extension from the paste pipeline, the " - "revoke extension is now always available. Update the " - "[pipeline:api_v3] section in keystone-paste.ini accordingly, " - "as it will be removed in the O release.") - versionutils.report_deprecated_feature(LOG, msg) diff --git a/keystone-moon/keystone/contrib/s3/__init__.py b/keystone-moon/keystone/contrib/s3/__init__.py deleted file mode 100644 index eec77c72..00000000 --- a/keystone-moon/keystone/contrib/s3/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.contrib.s3.core import * # noqa diff --git a/keystone-moon/keystone/contrib/s3/core.py b/keystone-moon/keystone/contrib/s3/core.py deleted file mode 100644 index c497f5d5..00000000 --- a/keystone-moon/keystone/contrib/s3/core.py +++ /dev/null @@ -1,125 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Main entry point into the S3 Credentials service. - -This service provides S3 token validation for services configured with the -s3_token middleware to authorize S3 requests. - -This service uses the same credentials used by EC2. Refer to the documentation -for the EC2 module for how to generate the required credentials. 
-""" - -import base64 -import hashlib -import hmac - -import six - -from keystone.common import extension -from keystone.common import json_home -from keystone.common import utils -from keystone.common import wsgi -from keystone.contrib.ec2 import controllers -from keystone import exception -from keystone.i18n import _ - - -EXTENSION_DATA = { - 'name': 'OpenStack S3 API', - 'namespace': 'http://docs.openstack.org/identity/api/ext/' - 's3tokens/v1.0', - 'alias': 's3tokens', - 'updated': '2013-07-07T12:00:0-00:00', - 'description': 'OpenStack S3 API.', - 'links': [ - { - 'rel': 'describedby', - 'type': 'text/html', - 'href': 'http://developer.openstack.org/' - 'api-ref-identity-v2-ext.html', - } - ]} -extension.register_admin_extension(EXTENSION_DATA['alias'], EXTENSION_DATA) - - -class S3Extension(wsgi.V3ExtensionRouter): - def add_routes(self, mapper): - controller = S3Controller() - # validation - self._add_resource( - mapper, controller, - path='/s3tokens', - post_action='authenticate', - rel=json_home.build_v3_extension_resource_relation( - 's3tokens', '1.0', 's3tokens')) - - -class S3Controller(controllers.Ec2Controller): - def check_signature(self, creds_ref, credentials): - string_to_sign = base64.urlsafe_b64decode(str(credentials['token'])) - - if string_to_sign[0:4] != b'AWS4': - signature = self._calculate_signature_v1(string_to_sign, - creds_ref['secret']) - else: - signature = self._calculate_signature_v4(string_to_sign, - creds_ref['secret']) - - if not utils.auth_str_equal(credentials['signature'], signature): - raise exception.Unauthorized( - message=_('Credential signature mismatch')) - - def _calculate_signature_v1(self, string_to_sign, secret_key): - """Calculates a v1 signature. 
- - :param bytes string_to_sign: String that contains request params and - is used for calculate signature of request - :param text secret_key: Second auth key of EC2 account that is used to - sign requests - """ - key = str(secret_key).encode('utf-8') - if six.PY2: - b64_encode = base64.encodestring - else: - b64_encode = base64.encodebytes - signed = b64_encode(hmac.new(key, string_to_sign, hashlib.sha1) - .digest()).decode('utf-8').strip() - return signed - - def _calculate_signature_v4(self, string_to_sign, secret_key): - """Calculates a v4 signature. - - :param bytes string_to_sign: String that contains request params and - is used for calculate signature of request - :param text secret_key: Second auth key of EC2 account that is used to - sign requests - """ - parts = string_to_sign.split(b'\n') - if len(parts) != 4 or parts[0] != b'AWS4-HMAC-SHA256': - raise exception.Unauthorized(message=_('Invalid EC2 signature.')) - scope = parts[2].split(b'/') - if len(scope) != 4 or scope[2] != b's3' or scope[3] != b'aws4_request': - raise exception.Unauthorized(message=_('Invalid EC2 signature.')) - - def _sign(key, msg): - return hmac.new(key, msg, hashlib.sha256).digest() - - signed = _sign(('AWS4' + secret_key).encode('utf-8'), scope[0]) - signed = _sign(signed, scope[1]) - signed = _sign(signed, scope[2]) - signed = _sign(signed, b'aws4_request') - - signature = hmac.new(signed, string_to_sign, hashlib.sha256) - return signature.hexdigest() diff --git a/keystone-moon/keystone/contrib/simple_cert/__init__.py b/keystone-moon/keystone/contrib/simple_cert/__init__.py deleted file mode 100644 index 2e5f9928..00000000 --- a/keystone-moon/keystone/contrib/simple_cert/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.contrib.simple_cert.routers import SimpleCertExtension # noqa diff --git a/keystone-moon/keystone/contrib/simple_cert/controllers.py b/keystone-moon/keystone/contrib/simple_cert/controllers.py deleted file mode 100644 index d34c03a6..00000000 --- a/keystone-moon/keystone/contrib/simple_cert/controllers.py +++ /dev/null @@ -1,42 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_config import cfg -import webob - -from keystone.common import controller -from keystone.common import dependency -from keystone import exception - -CONF = cfg.CONF - - -@dependency.requires('token_provider_api') -class SimpleCert(controller.V3Controller): - - def _get_certificate(self, name): - try: - with open(name, 'r') as f: - body = f.read() - except IOError: - raise exception.CertificateFilesUnavailable() - - # NOTE(jamielennox): We construct the webob Response ourselves here so - # that we don't pass through the JSON encoding process. 
- headers = [('Content-Type', 'application/x-pem-file')] - return webob.Response(body=body, headerlist=headers, status="200 OK") - - def get_ca_certificate(self, context): - return self._get_certificate(CONF.signing.ca_certs) - - def list_certificates(self, context): - return self._get_certificate(CONF.signing.certfile) diff --git a/keystone-moon/keystone/contrib/simple_cert/core.py b/keystone-moon/keystone/contrib/simple_cert/core.py deleted file mode 100644 index 531c6aae..00000000 --- a/keystone-moon/keystone/contrib/simple_cert/core.py +++ /dev/null @@ -1,32 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone.common import extension - -EXTENSION_DATA = { - 'name': 'OpenStack Simple Certificate API', - 'namespace': 'http://docs.openstack.org/identity/api/ext/' - 'OS-SIMPLE-CERT/v1.0', - 'alias': 'OS-SIMPLE-CERT', - 'updated': '2014-01-20T12:00:0-00:00', - 'description': 'OpenStack simple certificate retrieval extension', - 'links': [ - { - 'rel': 'describedby', - # TODO(dolph): link needs to be revised after - # bug 928059 merges - 'type': 'text/html', - 'href': 'https://github.com/openstack/identity-api', - } - ]} -extension.register_admin_extension(EXTENSION_DATA['alias'], EXTENSION_DATA) -extension.register_public_extension(EXTENSION_DATA['alias'], EXTENSION_DATA) diff --git a/keystone-moon/keystone/contrib/simple_cert/routers.py b/keystone-moon/keystone/contrib/simple_cert/routers.py deleted file mode 100644 index b1d509e7..00000000 --- a/keystone-moon/keystone/contrib/simple_cert/routers.py +++ /dev/null @@ -1,33 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_log import log -from oslo_log import versionutils - -from keystone.common import wsgi -from keystone.i18n import _ - - -LOG = log.getLogger(__name__) - - -class SimpleCertExtension(wsgi.Middleware): - - def __init__(self, application): - super(SimpleCertExtension, self).__init__(application) - msg = _("Remove simple_cert from the paste pipeline, the " - "PKI and PKIz token providers are now deprecated and " - "simple_cert was only used insupport of these token " - "providers. 
Update the [pipeline:api_v3] section in " - "keystone-paste.ini accordingly, as it will be removed in the " - "O release.") - versionutils.report_deprecated_feature(LOG, msg) diff --git a/keystone-moon/keystone/contrib/user_crud/__init__.py b/keystone-moon/keystone/contrib/user_crud/__init__.py deleted file mode 100644 index 271ceee6..00000000 --- a/keystone-moon/keystone/contrib/user_crud/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright 2012 Red Hat, Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.contrib.user_crud.core import * # noqa diff --git a/keystone-moon/keystone/contrib/user_crud/core.py b/keystone-moon/keystone/contrib/user_crud/core.py deleted file mode 100644 index b37157ea..00000000 --- a/keystone-moon/keystone/contrib/user_crud/core.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright 2012 Red Hat, Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_log import log -from oslo_log import versionutils - -from keystone.common import wsgi -from keystone.i18n import _ - - -LOG = log.getLogger(__name__) - - -class CrudExtension(wsgi.Middleware): - def __init__(self, application): - super(CrudExtension, self).__init__(application) - msg = _("Remove user_crud_extension from the paste pipeline, the " - "user_crud extension is now always available. Update" - "the [pipeline:public_api] section in keystone-paste.ini " - "accordingly, as it will be removed in the O release.") - versionutils.report_deprecated_feature(LOG, msg) diff --git a/keystone-moon/keystone/controllers.py b/keystone-moon/keystone/controllers.py deleted file mode 100644 index 085c1fb0..00000000 --- a/keystone-moon/keystone/controllers.py +++ /dev/null @@ -1,218 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_log import log -from oslo_serialization import jsonutils -import webob - -from keystone.common import extension -from keystone.common import json_home -from keystone.common import wsgi -from keystone import exception - - -LOG = log.getLogger(__name__) - -MEDIA_TYPE_JSON = 'application/vnd.openstack.identity-%s+json' - -_VERSIONS = [] - -# NOTE(blk-u): latest_app will be set by keystone.service.loadapp(). It gets -# set to the application that was just loaded. In the case of keystone-all, -# loadapp() gets called twice, once for the public app and once for the admin -# app. 
In the case of httpd/keystone, loadapp() gets called once for the public -# app if this is the public instance or loadapp() gets called for the admin app -# if it's the admin instance. -# This is used to fetch the /v3 JSON Home response. The /v3 JSON Home response -# is the same whether it's the admin or public service so either admin or -# public works. -latest_app = None - - -def request_v3_json_home(new_prefix): - if 'v3' not in _VERSIONS: - # No V3 support, so return an empty JSON Home document. - return {'resources': {}} - - req = webob.Request.blank( - '/v3', headers={'Accept': 'application/json-home'}) - v3_json_home_str = req.get_response(latest_app).body - v3_json_home = jsonutils.loads(v3_json_home_str) - json_home.translate_urls(v3_json_home, new_prefix) - - return v3_json_home - - -class Extensions(wsgi.Application): - """Base extensions controller to be extended by public and admin API's.""" - - # extend in subclass to specify the set of extensions - @property - def extensions(self): - return None - - def get_extensions_info(self, context): - return {'extensions': {'values': list(self.extensions.values())}} - - def get_extension_info(self, context, extension_alias): - try: - return {'extension': self.extensions[extension_alias]} - except KeyError: - raise exception.NotFound(target=extension_alias) - - -class AdminExtensions(Extensions): - @property - def extensions(self): - return extension.ADMIN_EXTENSIONS - - -class PublicExtensions(Extensions): - @property - def extensions(self): - return extension.PUBLIC_EXTENSIONS - - -def register_version(version): - _VERSIONS.append(version) - - -class MimeTypes(object): - JSON = 'application/json' - JSON_HOME = 'application/json-home' - - -def v3_mime_type_best_match(context): - - # accept_header is a WebOb MIMEAccept object so supports best_match. 
- accept_header = context['accept_header'] - - if not accept_header: - return MimeTypes.JSON - - SUPPORTED_TYPES = [MimeTypes.JSON, MimeTypes.JSON_HOME] - return accept_header.best_match(SUPPORTED_TYPES) - - -class Version(wsgi.Application): - - def __init__(self, version_type, routers=None): - self.endpoint_url_type = version_type - self._routers = routers - - super(Version, self).__init__() - - def _get_identity_url(self, context, version): - """Returns a URL to keystone's own endpoint.""" - url = self.base_url(context, self.endpoint_url_type) - return '%s/%s/' % (url, version) - - def _get_versions_list(self, context): - """The list of versions is dependent on the context.""" - versions = {} - if 'v2.0' in _VERSIONS: - versions['v2.0'] = { - 'id': 'v2.0', - 'status': 'stable', - 'updated': '2014-04-17T00:00:00Z', - 'links': [ - { - 'rel': 'self', - 'href': self._get_identity_url(context, 'v2.0'), - }, { - 'rel': 'describedby', - 'type': 'text/html', - 'href': 'http://docs.openstack.org/' - } - ], - 'media-types': [ - { - 'base': 'application/json', - 'type': MEDIA_TYPE_JSON % 'v2.0' - } - ] - } - - if 'v3' in _VERSIONS: - versions['v3'] = { - 'id': 'v3.4', - 'status': 'stable', - 'updated': '2015-03-30T00:00:00Z', - 'links': [ - { - 'rel': 'self', - 'href': self._get_identity_url(context, 'v3'), - } - ], - 'media-types': [ - { - 'base': 'application/json', - 'type': MEDIA_TYPE_JSON % 'v3' - } - ] - } - - return versions - - def get_versions(self, context): - - req_mime_type = v3_mime_type_best_match(context) - if req_mime_type == MimeTypes.JSON_HOME: - v3_json_home = request_v3_json_home('/v3') - return wsgi.render_response( - body=v3_json_home, - headers=(('Content-Type', MimeTypes.JSON_HOME),)) - - versions = self._get_versions_list(context) - return wsgi.render_response(status=(300, 'Multiple Choices'), body={ - 'versions': { - 'values': list(versions.values()) - } - }) - - def get_version_v2(self, context): - versions = self._get_versions_list(context) - if 
'v2.0' in _VERSIONS: - return wsgi.render_response(body={ - 'version': versions['v2.0'] - }) - else: - raise exception.VersionNotFound(version='v2.0') - - def _get_json_home_v3(self): - - def all_resources(): - for router in self._routers: - for resource in router.v3_resources: - yield resource - - return { - 'resources': dict(all_resources()) - } - - def get_version_v3(self, context): - versions = self._get_versions_list(context) - if 'v3' in _VERSIONS: - req_mime_type = v3_mime_type_best_match(context) - - if req_mime_type == MimeTypes.JSON_HOME: - return wsgi.render_response( - body=self._get_json_home_v3(), - headers=(('Content-Type', MimeTypes.JSON_HOME),)) - - return wsgi.render_response(body={ - 'version': versions['v3'] - }) - else: - raise exception.VersionNotFound(version='v3') diff --git a/keystone-moon/keystone/credential/__init__.py b/keystone-moon/keystone/credential/__init__.py deleted file mode 100644 index ea9d906c..00000000 --- a/keystone-moon/keystone/credential/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone.credential import controllers # noqa -from keystone.credential.core import * # noqa diff --git a/keystone-moon/keystone/credential/backends/__init__.py b/keystone-moon/keystone/credential/backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/credential/backends/sql.py b/keystone-moon/keystone/credential/backends/sql.py deleted file mode 100644 index dfb9d20a..00000000 --- a/keystone-moon/keystone/credential/backends/sql.py +++ /dev/null @@ -1,100 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone.common import driver_hints -from keystone.common import sql -from keystone import credential -from keystone import exception - - -class CredentialModel(sql.ModelBase, sql.DictBase): - __tablename__ = 'credential' - attributes = ['id', 'user_id', 'project_id', 'blob', 'type'] - id = sql.Column(sql.String(64), primary_key=True) - user_id = sql.Column(sql.String(64), - nullable=False) - project_id = sql.Column(sql.String(64)) - blob = sql.Column(sql.JsonBlob(), nullable=False) - type = sql.Column(sql.String(255), nullable=False) - extra = sql.Column(sql.JsonBlob()) - - -class Credential(credential.CredentialDriverV8): - - # credential crud - - @sql.handle_conflicts(conflict_type='credential') - def create_credential(self, credential_id, credential): - with sql.session_for_write() as session: - ref = CredentialModel.from_dict(credential) - session.add(ref) - return ref.to_dict() - - @driver_hints.truncated - def list_credentials(self, hints): - with sql.session_for_read() as session: - credentials = session.query(CredentialModel) - credentials = sql.filter_limit_query(CredentialModel, - credentials, hints) - return [s.to_dict() for s in credentials] - - def list_credentials_for_user(self, user_id, type=None): - with sql.session_for_read() as session: - query = session.query(CredentialModel) - query = query.filter_by(user_id=user_id) - if type: - query = query.filter_by(type=type) - refs = query.all() - return [ref.to_dict() for ref in refs] - - def _get_credential(self, session, credential_id): - ref = session.query(CredentialModel).get(credential_id) - if ref is None: - raise exception.CredentialNotFound(credential_id=credential_id) - return ref - - def get_credential(self, credential_id): - with sql.session_for_read() as session: - return self._get_credential(session, credential_id).to_dict() - - @sql.handle_conflicts(conflict_type='credential') - def update_credential(self, credential_id, credential): - with sql.session_for_write() as session: - ref 
= self._get_credential(session, credential_id) - old_dict = ref.to_dict() - for k in credential: - old_dict[k] = credential[k] - new_credential = CredentialModel.from_dict(old_dict) - for attr in CredentialModel.attributes: - if attr != 'id': - setattr(ref, attr, getattr(new_credential, attr)) - ref.extra = new_credential.extra - return ref.to_dict() - - def delete_credential(self, credential_id): - with sql.session_for_write() as session: - ref = self._get_credential(session, credential_id) - session.delete(ref) - - def delete_credentials_for_project(self, project_id): - with sql.session_for_write() as session: - query = session.query(CredentialModel) - query = query.filter_by(project_id=project_id) - query.delete() - - def delete_credentials_for_user(self, user_id): - with sql.session_for_write() as session: - query = session.query(CredentialModel) - query = query.filter_by(user_id=user_id) - query.delete() diff --git a/keystone-moon/keystone/credential/controllers.py b/keystone-moon/keystone/credential/controllers.py deleted file mode 100644 index 321acc48..00000000 --- a/keystone-moon/keystone/credential/controllers.py +++ /dev/null @@ -1,108 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import hashlib - -from oslo_serialization import jsonutils - -from keystone.common import controller -from keystone.common import dependency -from keystone.common import validation -from keystone.credential import schema -from keystone import exception -from keystone.i18n import _ - - -@dependency.requires('credential_api') -class CredentialV3(controller.V3Controller): - collection_name = 'credentials' - member_name = 'credential' - - def __init__(self): - super(CredentialV3, self).__init__() - self.get_member_from_driver = self.credential_api.get_credential - - def _assign_unique_id(self, ref, trust_id=None): - # Generates and assigns a unique identifier to - # a credential reference. - if ref.get('type', '').lower() == 'ec2': - try: - blob = jsonutils.loads(ref.get('blob')) - except (ValueError, TypeError): - raise exception.ValidationError( - message=_('Invalid blob in credential')) - if not blob or not isinstance(blob, dict): - raise exception.ValidationError(attribute='blob', - target='credential') - if blob.get('access') is None: - raise exception.ValidationError(attribute='access', - target='blob') - ret_ref = ref.copy() - ret_ref['id'] = hashlib.sha256(blob['access']).hexdigest() - # Update the blob with the trust_id, so credentials created - # with a trust scoped token will result in trust scoped - # tokens when authentication via ec2tokens happens - if trust_id is not None: - blob['trust_id'] = trust_id - ret_ref['blob'] = jsonutils.dumps(blob) - return ret_ref - else: - return super(CredentialV3, self)._assign_unique_id(ref) - - @controller.protected() - @validation.validated(schema.credential_create, 'credential') - def create_credential(self, context, credential): - trust_id = self._get_trust_id_for_request(context) - ref = self._assign_unique_id(self._normalize_dict(credential), - trust_id) - ref = self.credential_api.create_credential(ref['id'], ref) - return CredentialV3.wrap_member(context, ref) - - @staticmethod - def _blob_to_json(ref): - # 
credentials stored via ec2tokens before the fix for #1259584 - # need json serializing, as that's the documented API format - blob = ref.get('blob') - if isinstance(blob, dict): - new_ref = ref.copy() - new_ref['blob'] = jsonutils.dumps(blob) - return new_ref - else: - return ref - - @controller.filterprotected('user_id', 'type') - def list_credentials(self, context, filters): - hints = CredentialV3.build_driver_hints(context, filters) - refs = self.credential_api.list_credentials(hints) - ret_refs = [self._blob_to_json(r) for r in refs] - return CredentialV3.wrap_collection(context, ret_refs, - hints=hints) - - @controller.protected() - def get_credential(self, context, credential_id): - ref = self.credential_api.get_credential(credential_id) - ret_ref = self._blob_to_json(ref) - return CredentialV3.wrap_member(context, ret_ref) - - @controller.protected() - @validation.validated(schema.credential_update, 'credential') - def update_credential(self, context, credential_id, credential): - self._require_matching_id(credential_id, credential) - - ref = self.credential_api.update_credential(credential_id, credential) - return CredentialV3.wrap_member(context, ref) - - @controller.protected() - def delete_credential(self, context, credential_id): - return self.credential_api.delete_credential(credential_id) diff --git a/keystone-moon/keystone/credential/core.py b/keystone-moon/keystone/credential/core.py deleted file mode 100644 index 1550fc99..00000000 --- a/keystone-moon/keystone/credential/core.py +++ /dev/null @@ -1,149 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Main entry point into the Credential service.""" - -import abc - -from oslo_config import cfg -from oslo_log import log -import six - -from keystone.common import dependency -from keystone.common import driver_hints -from keystone.common import manager -from keystone import exception - - -CONF = cfg.CONF - -LOG = log.getLogger(__name__) - - -@dependency.provider('credential_api') -class Manager(manager.Manager): - """Default pivot point for the Credential backend. - - See :mod:`keystone.common.manager.Manager` for more details on how this - dynamically calls the backend. - - """ - - driver_namespace = 'keystone.credential' - - def __init__(self): - super(Manager, self).__init__(CONF.credential.driver) - - @manager.response_truncated - def list_credentials(self, hints=None): - return self.driver.list_credentials(hints or driver_hints.Hints()) - - -@six.add_metaclass(abc.ABCMeta) -class CredentialDriverV8(object): - # credential crud - - @abc.abstractmethod - def create_credential(self, credential_id, credential): - """Creates a new credential. - - :raises keystone.exception.Conflict: If a duplicate credential exists. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_credentials(self, hints): - """List all credentials. - - :param hints: contains the list of filters yet to be satisfied. - Any filters satisfied here will be removed so that - the caller will know if any filters remain. - - :returns: a list of credential_refs or an empty list. 
- - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_credentials_for_user(self, user_id, type=None): - """List credentials for a user. - - :param user_id: ID of a user to filter credentials by. - :param type: type of credentials to filter on. - - :returns: a list of credential_refs or an empty list. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_credential(self, credential_id): - """Get a credential by ID. - - :returns: credential_ref - :raises keystone.exception.CredentialNotFound: If credential doesn't - exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def update_credential(self, credential_id, credential): - """Updates an existing credential. - - :raises keystone.exception.CredentialNotFound: If credential doesn't - exist. - :raises keystone.exception.Conflict: If a duplicate credential exists. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_credential(self, credential_id): - """Deletes an existing credential. - - :raises keystone.exception.CredentialNotFound: If credential doesn't - exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_credentials_for_project(self, project_id): - """Deletes all credentials for a project.""" - self._delete_credentials(lambda cr: cr['project_id'] == project_id) - - @abc.abstractmethod - def delete_credentials_for_user(self, user_id): - """Deletes all credentials for a user.""" - self._delete_credentials(lambda cr: cr['user_id'] == user_id) - - def _delete_credentials(self, match_fn): - """Do the actual credential deletion work (default implementation). - - :param match_fn: function that takes a credential dict as the - parameter and returns true or false if the - identifier matches the credential dict. 
- """ - for cr in self.list_credentials(): - if match_fn(cr): - try: - self.credential_api.delete_credential(cr['id']) - except exception.CredentialNotFound: - LOG.debug('Deletion of credential is not required: %s', - cr['id']) - - -Driver = manager.create_legacy_driver(CredentialDriverV8) diff --git a/keystone-moon/keystone/credential/routers.py b/keystone-moon/keystone/credential/routers.py deleted file mode 100644 index db3651f4..00000000 --- a/keystone-moon/keystone/credential/routers.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""WSGI Routers for the Credentials service.""" - -from keystone.common import router -from keystone.common import wsgi -from keystone.credential import controllers - - -class Routers(wsgi.RoutersBase): - - def append_v3_routers(self, mapper, routers): - routers.append( - router.Router(controllers.CredentialV3(), - 'credentials', 'credential', - resource_descriptions=self.v3_resources)) diff --git a/keystone-moon/keystone/credential/schema.py b/keystone-moon/keystone/credential/schema.py deleted file mode 100644 index 749f0c0a..00000000 --- a/keystone-moon/keystone/credential/schema.py +++ /dev/null @@ -1,62 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -_credential_properties = { - 'blob': { - 'type': 'string' - }, - 'project_id': { - 'type': 'string' - }, - 'type': { - 'type': 'string' - }, - 'user_id': { - 'type': 'string' - } -} - -credential_create = { - 'type': 'object', - 'properties': _credential_properties, - 'additionalProperties': True, - 'oneOf': [ - { - 'title': 'ec2 credential requires project_id', - 'required': ['blob', 'type', 'user_id', 'project_id'], - 'properties': { - 'type': { - 'enum': ['ec2'] - } - } - }, - { - 'title': 'non-ec2 credential does not require project_id', - 'required': ['blob', 'type', 'user_id'], - 'properties': { - 'type': { - 'not': { - 'enum': ['ec2'] - } - } - } - } - ] -} - -credential_update = { - 'type': 'object', - 'properties': _credential_properties, - 'minProperties': 1, - 'additionalProperties': True -} diff --git a/keystone-moon/keystone/endpoint_policy/__init__.py b/keystone-moon/keystone/endpoint_policy/__init__.py deleted file mode 100644 index 36c016a1..00000000 --- a/keystone-moon/keystone/endpoint_policy/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.endpoint_policy.core import * # noqa diff --git a/keystone-moon/keystone/endpoint_policy/backends/__init__.py b/keystone-moon/keystone/endpoint_policy/backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/endpoint_policy/backends/sql.py b/keystone-moon/keystone/endpoint_policy/backends/sql.py deleted file mode 100644 index aacbb083..00000000 --- a/keystone-moon/keystone/endpoint_policy/backends/sql.py +++ /dev/null @@ -1,140 +0,0 @@ -# Copyright 2014 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import uuid - -import sqlalchemy - -from keystone.common import sql -from keystone import exception - - -class PolicyAssociation(sql.ModelBase, sql.ModelDictMixin): - __tablename__ = 'policy_association' - attributes = ['policy_id', 'endpoint_id', 'region_id', 'service_id'] - # The id column is never exposed outside this module. 
It only exists to - # provide a primary key, given that the real columns we would like to use - # (endpoint_id, service_id, region_id) can be null - id = sql.Column(sql.String(64), primary_key=True) - policy_id = sql.Column(sql.String(64), nullable=False) - endpoint_id = sql.Column(sql.String(64), nullable=True) - service_id = sql.Column(sql.String(64), nullable=True) - region_id = sql.Column(sql.String(64), nullable=True) - __table_args__ = (sql.UniqueConstraint('endpoint_id', 'service_id', - 'region_id'),) - - def to_dict(self): - """Returns the model's attributes as a dictionary. - - We override the standard method in order to hide the id column, - since this only exists to provide the table with a primary key. - - """ - d = {} - for attr in self.__class__.attributes: - d[attr] = getattr(self, attr) - return d - - -class EndpointPolicy(object): - - def create_policy_association(self, policy_id, endpoint_id=None, - service_id=None, region_id=None): - with sql.session_for_write() as session: - try: - # See if there is already a row for this association, and if - # so, update it with the new policy_id - query = session.query(PolicyAssociation) - query = query.filter_by(endpoint_id=endpoint_id) - query = query.filter_by(service_id=service_id) - query = query.filter_by(region_id=region_id) - association = query.one() - association.policy_id = policy_id - except sql.NotFound: - association = PolicyAssociation(id=uuid.uuid4().hex, - policy_id=policy_id, - endpoint_id=endpoint_id, - service_id=service_id, - region_id=region_id) - session.add(association) - - def check_policy_association(self, policy_id, endpoint_id=None, - service_id=None, region_id=None): - sql_constraints = sqlalchemy.and_( - PolicyAssociation.policy_id == policy_id, - PolicyAssociation.endpoint_id == endpoint_id, - PolicyAssociation.service_id == service_id, - PolicyAssociation.region_id == region_id) - - # NOTE(henry-nash): Getting a single value to save object - # management overhead. 
- with sql.session_for_read() as session: - if session.query(PolicyAssociation.id).filter( - sql_constraints).distinct().count() == 0: - raise exception.PolicyAssociationNotFound() - - def delete_policy_association(self, policy_id, endpoint_id=None, - service_id=None, region_id=None): - with sql.session_for_write() as session: - query = session.query(PolicyAssociation) - query = query.filter_by(policy_id=policy_id) - query = query.filter_by(endpoint_id=endpoint_id) - query = query.filter_by(service_id=service_id) - query = query.filter_by(region_id=region_id) - query.delete() - - def get_policy_association(self, endpoint_id=None, - service_id=None, region_id=None): - sql_constraints = sqlalchemy.and_( - PolicyAssociation.endpoint_id == endpoint_id, - PolicyAssociation.service_id == service_id, - PolicyAssociation.region_id == region_id) - - try: - with sql.session_for_read() as session: - policy_id = session.query(PolicyAssociation.policy_id).filter( - sql_constraints).distinct().one() - return {'policy_id': policy_id} - except sql.NotFound: - raise exception.PolicyAssociationNotFound() - - def list_associations_for_policy(self, policy_id): - with sql.session_for_read() as session: - query = session.query(PolicyAssociation) - query = query.filter_by(policy_id=policy_id) - return [ref.to_dict() for ref in query.all()] - - def delete_association_by_endpoint(self, endpoint_id): - with sql.session_for_write() as session: - query = session.query(PolicyAssociation) - query = query.filter_by(endpoint_id=endpoint_id) - query.delete() - - def delete_association_by_service(self, service_id): - with sql.session_for_write() as session: - query = session.query(PolicyAssociation) - query = query.filter_by(service_id=service_id) - query.delete() - - def delete_association_by_region(self, region_id): - with sql.session_for_write() as session: - query = session.query(PolicyAssociation) - query = query.filter_by(region_id=region_id) - query.delete() - - def 
delete_association_by_policy(self, policy_id): - with sql.session_for_write() as session: - query = session.query(PolicyAssociation) - query = query.filter_by(policy_id=policy_id) - query.delete() diff --git a/keystone-moon/keystone/endpoint_policy/controllers.py b/keystone-moon/keystone/endpoint_policy/controllers.py deleted file mode 100644 index b96834dc..00000000 --- a/keystone-moon/keystone/endpoint_policy/controllers.py +++ /dev/null @@ -1,166 +0,0 @@ -# Copyright 2014 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone.common import controller -from keystone.common import dependency -from keystone import notifications - - -@dependency.requires('policy_api', 'catalog_api', 'endpoint_policy_api') -class EndpointPolicyV3Controller(controller.V3Controller): - collection_name = 'endpoints' - member_name = 'endpoint' - - def __init__(self): - super(EndpointPolicyV3Controller, self).__init__() - notifications.register_event_callback( - 'deleted', 'endpoint', self._on_endpoint_delete) - notifications.register_event_callback( - 'deleted', 'service', self._on_service_delete) - notifications.register_event_callback( - 'deleted', 'region', self._on_region_delete) - notifications.register_event_callback( - 'deleted', 'policy', self._on_policy_delete) - - def _on_endpoint_delete(self, service, resource_type, operation, payload): - self.endpoint_policy_api.delete_association_by_endpoint( - payload['resource_info']) - - def _on_service_delete(self, service, resource_type, operation, payload): - self.endpoint_policy_api.delete_association_by_service( - payload['resource_info']) - - def _on_region_delete(self, service, resource_type, operation, payload): - self.endpoint_policy_api.delete_association_by_region( - payload['resource_info']) - - def _on_policy_delete(self, service, resource_type, operation, payload): - self.endpoint_policy_api.delete_association_by_policy( - payload['resource_info']) - - @controller.protected() - def create_policy_association_for_endpoint(self, context, - policy_id, endpoint_id): - """Create an association between a policy and an endpoint.""" - self.policy_api.get_policy(policy_id) - self.catalog_api.get_endpoint(endpoint_id) - self.endpoint_policy_api.create_policy_association( - policy_id, endpoint_id=endpoint_id) - - @controller.protected() - def check_policy_association_for_endpoint(self, context, - policy_id, endpoint_id): - """Check an association between a policy and an endpoint.""" - self.policy_api.get_policy(policy_id) - 
self.catalog_api.get_endpoint(endpoint_id) - self.endpoint_policy_api.check_policy_association( - policy_id, endpoint_id=endpoint_id) - - @controller.protected() - def delete_policy_association_for_endpoint(self, context, - policy_id, endpoint_id): - """Delete an association between a policy and an endpoint.""" - self.policy_api.get_policy(policy_id) - self.catalog_api.get_endpoint(endpoint_id) - self.endpoint_policy_api.delete_policy_association( - policy_id, endpoint_id=endpoint_id) - - @controller.protected() - def create_policy_association_for_service(self, context, - policy_id, service_id): - """Create an association between a policy and a service.""" - self.policy_api.get_policy(policy_id) - self.catalog_api.get_service(service_id) - self.endpoint_policy_api.create_policy_association( - policy_id, service_id=service_id) - - @controller.protected() - def check_policy_association_for_service(self, context, - policy_id, service_id): - """Check an association between a policy and a service.""" - self.policy_api.get_policy(policy_id) - self.catalog_api.get_service(service_id) - self.endpoint_policy_api.check_policy_association( - policy_id, service_id=service_id) - - @controller.protected() - def delete_policy_association_for_service(self, context, - policy_id, service_id): - """Delete an association between a policy and a service.""" - self.policy_api.get_policy(policy_id) - self.catalog_api.get_service(service_id) - self.endpoint_policy_api.delete_policy_association( - policy_id, service_id=service_id) - - @controller.protected() - def create_policy_association_for_region_and_service( - self, context, policy_id, service_id, region_id): - """Create an association between a policy and region+service.""" - self.policy_api.get_policy(policy_id) - self.catalog_api.get_service(service_id) - self.catalog_api.get_region(region_id) - self.endpoint_policy_api.create_policy_association( - policy_id, service_id=service_id, region_id=region_id) - - @controller.protected() - 
def check_policy_association_for_region_and_service( - self, context, policy_id, service_id, region_id): - """Check an association between a policy and region+service.""" - self.policy_api.get_policy(policy_id) - self.catalog_api.get_service(service_id) - self.catalog_api.get_region(region_id) - self.endpoint_policy_api.check_policy_association( - policy_id, service_id=service_id, region_id=region_id) - - @controller.protected() - def delete_policy_association_for_region_and_service( - self, context, policy_id, service_id, region_id): - """Delete an association between a policy and region+service.""" - self.policy_api.get_policy(policy_id) - self.catalog_api.get_service(service_id) - self.catalog_api.get_region(region_id) - self.endpoint_policy_api.delete_policy_association( - policy_id, service_id=service_id, region_id=region_id) - - @controller.protected() - def get_policy_for_endpoint(self, context, endpoint_id): - """Get the effective policy for an endpoint.""" - self.catalog_api.get_endpoint(endpoint_id) - ref = self.endpoint_policy_api.get_policy_for_endpoint(endpoint_id) - # NOTE(henry-nash): since the collection and member for this class is - # set to endpoints, we have to handle wrapping this policy entity - # ourselves. - self._add_self_referential_link(context, ref) - return {'policy': ref} - - # NOTE(henry-nash): As in the catalog controller, we must ensure that the - # legacy_endpoint_id does not escape. 
- - @classmethod - def filter_endpoint(cls, ref): - if 'legacy_endpoint_id' in ref: - ref.pop('legacy_endpoint_id') - return ref - - @classmethod - def wrap_member(cls, context, ref): - ref = cls.filter_endpoint(ref) - return super(EndpointPolicyV3Controller, cls).wrap_member(context, ref) - - @controller.protected() - def list_endpoints_for_policy(self, context, policy_id): - """List endpoints with the effective association to a policy.""" - self.policy_api.get_policy(policy_id) - refs = self.endpoint_policy_api.list_endpoints_for_policy(policy_id) - return EndpointPolicyV3Controller.wrap_collection(context, refs) diff --git a/keystone-moon/keystone/endpoint_policy/core.py b/keystone-moon/keystone/endpoint_policy/core.py deleted file mode 100644 index 6243f26b..00000000 --- a/keystone-moon/keystone/endpoint_policy/core.py +++ /dev/null @@ -1,439 +0,0 @@ -# Copyright 2014 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import abc - -from oslo_config import cfg -from oslo_log import log -import six - -from keystone.common import dependency -from keystone.common import manager -from keystone import exception -from keystone.i18n import _, _LE, _LW - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -@dependency.provider('endpoint_policy_api') -@dependency.requires('catalog_api', 'policy_api') -class Manager(manager.Manager): - """Default pivot point for the Endpoint Policy backend. 
- - See :mod:`keystone.common.manager.Manager` for more details on how this - dynamically calls the backend. - - """ - - driver_namespace = 'keystone.endpoint_policy' - - def __init__(self): - super(Manager, self).__init__(CONF.endpoint_policy.driver) - - def _assert_valid_association(self, endpoint_id, service_id, region_id): - """Assert that the association is supported. - - There are three types of association supported: - - - Endpoint (in which case service and region must be None) - - Service and region (in which endpoint must be None) - - Service (in which case endpoint and region must be None) - - """ - if (endpoint_id is not None and - service_id is None and region_id is None): - return - if (service_id is not None and region_id is not None and - endpoint_id is None): - return - if (service_id is not None and - endpoint_id is None and region_id is None): - return - - raise exception.InvalidPolicyAssociation(endpoint_id=endpoint_id, - service_id=service_id, - region_id=region_id) - - def create_policy_association(self, policy_id, endpoint_id=None, - service_id=None, region_id=None): - self._assert_valid_association(endpoint_id, service_id, region_id) - self.driver.create_policy_association(policy_id, endpoint_id, - service_id, region_id) - - def check_policy_association(self, policy_id, endpoint_id=None, - service_id=None, region_id=None): - self._assert_valid_association(endpoint_id, service_id, region_id) - self.driver.check_policy_association(policy_id, endpoint_id, - service_id, region_id) - - def delete_policy_association(self, policy_id, endpoint_id=None, - service_id=None, region_id=None): - self._assert_valid_association(endpoint_id, service_id, region_id) - self.driver.delete_policy_association(policy_id, endpoint_id, - service_id, region_id) - - def list_endpoints_for_policy(self, policy_id): - - def _get_endpoint(endpoint_id, policy_id): - try: - return self.catalog_api.get_endpoint(endpoint_id) - except exception.EndpointNotFound: - msg = 
_LW('Endpoint %(endpoint_id)s referenced in ' - 'association for policy %(policy_id)s not found.') - LOG.warning(msg, {'policy_id': policy_id, - 'endpoint_id': endpoint_id}) - raise - - def _get_endpoints_for_service(service_id, endpoints): - # TODO(henry-nash): Consider optimizing this in the future by - # adding an explicit list_endpoints_for_service to the catalog API. - return [ep for ep in endpoints if ep['service_id'] == service_id] - - def _get_endpoints_for_service_and_region( - service_id, region_id, endpoints, regions): - # TODO(henry-nash): Consider optimizing this in the future. - # The lack of a two-way pointer in the region tree structure - # makes this somewhat inefficient. - - def _recursively_get_endpoints_for_region( - region_id, service_id, endpoint_list, region_list, - endpoints_found, regions_examined): - """Recursively search down a region tree for endpoints. - - :param region_id: the point in the tree to examine - :param service_id: the service we are interested in - :param endpoint_list: list of all endpoints - :param region_list: list of all regions - :param endpoints_found: list of matching endpoints found so - far - which will be updated if more are - found in this iteration - :param regions_examined: list of regions we have already looked - at - used to spot illegal circular - references in the tree to avoid never - completing search - :returns: list of endpoints that match - - """ - if region_id in regions_examined: - msg = _LE('Circular reference or a repeated entry found ' - 'in region tree - %(region_id)s.') - LOG.error(msg, {'region_id': ref.region_id}) - return - - regions_examined.append(region_id) - endpoints_found += ( - [ep for ep in endpoint_list if - ep['service_id'] == service_id and - ep['region_id'] == region_id]) - - for region in region_list: - if region['parent_region_id'] == region_id: - _recursively_get_endpoints_for_region( - region['id'], service_id, endpoints, regions, - endpoints_found, regions_examined) - - 
endpoints_found = [] - regions_examined = [] - - # Now walk down the region tree - _recursively_get_endpoints_for_region( - region_id, service_id, endpoints, regions, - endpoints_found, regions_examined) - - return endpoints_found - - matching_endpoints = [] - endpoints = self.catalog_api.list_endpoints() - regions = self.catalog_api.list_regions() - for ref in self.list_associations_for_policy(policy_id): - if ref.get('endpoint_id') is not None: - matching_endpoints.append( - _get_endpoint(ref['endpoint_id'], policy_id)) - continue - - if (ref.get('service_id') is not None and - ref.get('region_id') is None): - matching_endpoints += _get_endpoints_for_service( - ref['service_id'], endpoints) - continue - - if (ref.get('service_id') is not None and - ref.get('region_id') is not None): - matching_endpoints += ( - _get_endpoints_for_service_and_region( - ref['service_id'], ref['region_id'], - endpoints, regions)) - continue - - msg = _LW('Unsupported policy association found - ' - 'Policy %(policy_id)s, Endpoint %(endpoint_id)s, ' - 'Service %(service_id)s, Region %(region_id)s, ') - LOG.warning(msg, {'policy_id': policy_id, - 'endpoint_id': ref['endpoint_id'], - 'service_id': ref['service_id'], - 'region_id': ref['region_id']}) - - return matching_endpoints - - def get_policy_for_endpoint(self, endpoint_id): - - def _get_policy(policy_id, endpoint_id): - try: - return self.policy_api.get_policy(policy_id) - except exception.PolicyNotFound: - msg = _LW('Policy %(policy_id)s referenced in association ' - 'for endpoint %(endpoint_id)s not found.') - LOG.warning(msg, {'policy_id': policy_id, - 'endpoint_id': endpoint_id}) - raise - - def _look_for_policy_for_region_and_service(endpoint): - """Look in the region and its parents for a policy. - - Examine the region of the endpoint for a policy appropriate for - the service of the endpoint. If there isn't a match, then chase up - the region tree to find one. 
- - """ - region_id = endpoint['region_id'] - regions_examined = [] - while region_id is not None: - try: - ref = self.get_policy_association( - service_id=endpoint['service_id'], - region_id=region_id) - return ref['policy_id'] - except exception.PolicyAssociationNotFound: # nosec - # There wasn't one for that region & service, handle below. - pass - - # There wasn't one for that region & service, let's - # chase up the region tree - regions_examined.append(region_id) - region = self.catalog_api.get_region(region_id) - region_id = None - if region.get('parent_region_id') is not None: - region_id = region['parent_region_id'] - if region_id in regions_examined: - msg = _LE('Circular reference or a repeated entry ' - 'found in region tree - %(region_id)s.') - LOG.error(msg, {'region_id': region_id}) - break - - # First let's see if there is a policy explicitly defined for - # this endpoint. - - try: - ref = self.get_policy_association(endpoint_id=endpoint_id) - return _get_policy(ref['policy_id'], endpoint_id) - except exception.PolicyAssociationNotFound: # nosec - # There wasn't a policy explicitly defined for this endpoint, - # handled below. - pass - - # There wasn't a policy explicitly defined for this endpoint, so - # now let's see if there is one for the Region & Service. - - endpoint = self.catalog_api.get_endpoint(endpoint_id) - policy_id = _look_for_policy_for_region_and_service(endpoint) - if policy_id is not None: - return _get_policy(policy_id, endpoint_id) - - # Finally, just check if there is one for the service. - try: - ref = self.get_policy_association( - service_id=endpoint['service_id']) - return _get_policy(ref['policy_id'], endpoint_id) - except exception.PolicyAssociationNotFound: # nosec - # No policy is associated with endpoint, handled below. 
- pass - - msg = _('No policy is associated with endpoint ' - '%(endpoint_id)s.') % {'endpoint_id': endpoint_id} - raise exception.NotFound(msg) - - -@six.add_metaclass(abc.ABCMeta) -class EndpointPolicyDriverV8(object): - """Interface description for an Endpoint Policy driver.""" - - @abc.abstractmethod - def create_policy_association(self, policy_id, endpoint_id=None, - service_id=None, region_id=None): - """Creates a policy association. - - :param policy_id: identity of policy that is being associated - :type policy_id: string - :param endpoint_id: identity of endpoint to associate - :type endpoint_id: string - :param service_id: identity of the service to associate - :type service_id: string - :param region_id: identity of the region to associate - :type region_id: string - :returns: None - - There are three types of association permitted: - - - Endpoint (in which case service and region must be None) - - Service and region (in which endpoint must be None) - - Service (in which case endpoint and region must be None) - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def check_policy_association(self, policy_id, endpoint_id=None, - service_id=None, region_id=None): - """Checks existence a policy association. - - :param policy_id: identity of policy that is being associated - :type policy_id: string - :param endpoint_id: identity of endpoint to associate - :type endpoint_id: string - :param service_id: identity of the service to associate - :type service_id: string - :param region_id: identity of the region to associate - :type region_id: string - :raises keystone.exception.PolicyAssociationNotFound: If there is no - match for the specified association. - :returns: None - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_policy_association(self, policy_id, endpoint_id=None, - service_id=None, region_id=None): - """Deletes a policy association. 
- - :param policy_id: identity of policy that is being associated - :type policy_id: string - :param endpoint_id: identity of endpoint to associate - :type endpoint_id: string - :param service_id: identity of the service to associate - :type service_id: string - :param region_id: identity of the region to associate - :type region_id: string - :returns: None - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_policy_association(self, endpoint_id=None, - service_id=None, region_id=None): - """Gets the policy for an explicit association. - - This method is not exposed as a public API, but is used by - get_policy_for_endpoint(). - - :param endpoint_id: identity of endpoint - :type endpoint_id: string - :param service_id: identity of the service - :type service_id: string - :param region_id: identity of the region - :type region_id: string - :raises keystone.exception.PolicyAssociationNotFound: If there is no - match for the specified association. - :returns: dict containing policy_id - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_associations_for_policy(self, policy_id): - """List the associations for a policy. - - This method is not exposed as a public API, but is used by - list_endpoints_for_policy(). - - :param policy_id: identity of policy - :type policy_id: string - :returns: List of association dicts - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_endpoints_for_policy(self, policy_id): - """List all the endpoints using a given policy. - - :param policy_id: identity of policy that is being associated - :type policy_id: string - :returns: list of endpoints that have an effective association with - that policy - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_policy_for_endpoint(self, endpoint_id): - """Get the appropriate policy for a given endpoint. 
- - :param endpoint_id: identity of endpoint - :type endpoint_id: string - :returns: Policy entity for the endpoint - - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_association_by_endpoint(self, endpoint_id): - """Removes all the policy associations with the specific endpoint. - - :param endpoint_id: identity of endpoint to check - :type endpoint_id: string - :returns: None - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_association_by_service(self, service_id): - """Removes all the policy associations with the specific service. - - :param service_id: identity of endpoint to check - :type service_id: string - :returns: None - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_association_by_region(self, region_id): - """Removes all the policy associations with the specific region. - - :param region_id: identity of endpoint to check - :type region_id: string - :returns: None - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_association_by_policy(self, policy_id): - """Removes all the policy associations with the specific policy. - - :param policy_id: identity of endpoint to check - :type policy_id: string - :returns: None - - """ - raise exception.NotImplemented() # pragma: no cover - - -Driver = manager.create_legacy_driver(EndpointPolicyDriverV8) diff --git a/keystone-moon/keystone/endpoint_policy/routers.py b/keystone-moon/keystone/endpoint_policy/routers.py deleted file mode 100644 index 4846bb18..00000000 --- a/keystone-moon/keystone/endpoint_policy/routers.py +++ /dev/null @@ -1,85 +0,0 @@ -# Copyright 2014 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import functools - -from keystone.common import json_home -from keystone.common import wsgi -from keystone.endpoint_policy import controllers - - -build_resource_relation = functools.partial( - json_home.build_v3_extension_resource_relation, - extension_name='OS-ENDPOINT-POLICY', extension_version='1.0') - - -class Routers(wsgi.RoutersBase): - - PATH_PREFIX = '/OS-ENDPOINT-POLICY' - - def append_v3_routers(self, mapper, routers): - endpoint_policy_controller = controllers.EndpointPolicyV3Controller() - - self._add_resource( - mapper, endpoint_policy_controller, - path='/endpoints/{endpoint_id}' + self.PATH_PREFIX + '/policy', - get_head_action='get_policy_for_endpoint', - rel=build_resource_relation(resource_name='endpoint_policy'), - path_vars={'endpoint_id': json_home.Parameters.ENDPOINT_ID}) - self._add_resource( - mapper, endpoint_policy_controller, - path='/policies/{policy_id}' + self.PATH_PREFIX + '/endpoints', - get_action='list_endpoints_for_policy', - rel=build_resource_relation(resource_name='policy_endpoints'), - path_vars={'policy_id': json_home.Parameters.POLICY_ID}) - self._add_resource( - mapper, endpoint_policy_controller, - path=('/policies/{policy_id}' + self.PATH_PREFIX + - '/endpoints/{endpoint_id}'), - get_head_action='check_policy_association_for_endpoint', - put_action='create_policy_association_for_endpoint', - delete_action='delete_policy_association_for_endpoint', - rel=build_resource_relation( - resource_name='endpoint_policy_association'), - path_vars={ - 'policy_id': json_home.Parameters.POLICY_ID, - 'endpoint_id': 
json_home.Parameters.ENDPOINT_ID, - }) - self._add_resource( - mapper, endpoint_policy_controller, - path=('/policies/{policy_id}' + self.PATH_PREFIX + - '/services/{service_id}'), - get_head_action='check_policy_association_for_service', - put_action='create_policy_association_for_service', - delete_action='delete_policy_association_for_service', - rel=build_resource_relation( - resource_name='service_policy_association'), - path_vars={ - 'policy_id': json_home.Parameters.POLICY_ID, - 'service_id': json_home.Parameters.SERVICE_ID, - }) - self._add_resource( - mapper, endpoint_policy_controller, - path=('/policies/{policy_id}' + self.PATH_PREFIX + - '/services/{service_id}/regions/{region_id}'), - get_head_action='check_policy_association_for_region_and_service', - put_action='create_policy_association_for_region_and_service', - delete_action='delete_policy_association_for_region_and_service', - rel=build_resource_relation( - resource_name='region_and_service_policy_association'), - path_vars={ - 'policy_id': json_home.Parameters.POLICY_ID, - 'service_id': json_home.Parameters.SERVICE_ID, - 'region_id': json_home.Parameters.REGION_ID, - }) diff --git a/keystone-moon/keystone/exception.py b/keystone-moon/keystone/exception.py deleted file mode 100644 index e347d345..00000000 --- a/keystone-moon/keystone/exception.py +++ /dev/null @@ -1,544 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_config import cfg -from oslo_log import log -from oslo_utils import encodeutils -import six - -from keystone.i18n import _, _LW - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - -# Tests use this to make exception message format errors fatal -_FATAL_EXCEPTION_FORMAT_ERRORS = False - - -def _format_with_unicode_kwargs(msg_format, kwargs): - try: - return msg_format % kwargs - except UnicodeDecodeError: - try: - kwargs = {k: encodeutils.safe_decode(v) - for k, v in kwargs.items()} - except UnicodeDecodeError: - # NOTE(jamielennox): This is the complete failure case - # at least by showing the template we have some idea - # of where the error is coming from - return msg_format - - return msg_format % kwargs - - -class Error(Exception): - """Base error class. - - Child classes should define an HTTP status code, title, and a - message_format. - - """ - - code = None - title = None - message_format = None - - def __init__(self, message=None, **kwargs): - try: - message = self._build_message(message, **kwargs) - except KeyError: - # if you see this warning in your logs, please raise a bug report - if _FATAL_EXCEPTION_FORMAT_ERRORS: - raise - else: - LOG.warning(_LW('missing exception kwargs (programmer error)')) - message = self.message_format - - super(Error, self).__init__(message) - - def _build_message(self, message, **kwargs): - """Builds and returns an exception message. - - :raises KeyError: given insufficient kwargs - - """ - if message: - return message - return _format_with_unicode_kwargs(self.message_format, kwargs) - - -class ValidationError(Error): - message_format = _("Expecting to find %(attribute)s in %(target)s -" - " the server could not comply with the request" - " since it is either malformed or otherwise" - " incorrect. 
The client is assumed to be in error.") - code = 400 - title = 'Bad Request' - - -class URLValidationError(ValidationError): - message_format = _("Cannot create an endpoint with an invalid URL:" - " %(url)s") - - -class SchemaValidationError(ValidationError): - # NOTE(lbragstad): For whole OpenStack message consistency, this error - # message has been written in a format consistent with WSME. - message_format = _("%(detail)s") - - -class ValidationTimeStampError(Error): - message_format = _("Timestamp not in expected format." - " The server could not comply with the request" - " since it is either malformed or otherwise" - " incorrect. The client is assumed to be in error.") - code = 400 - title = 'Bad Request' - - -class ValidationExpirationError(Error): - message_format = _("The 'expires_at' must not be before now." - " The server could not comply with the request" - " since it is either malformed or otherwise" - " incorrect. The client is assumed to be in error.") - code = 400 - title = 'Bad Request' - - -class StringLengthExceeded(ValidationError): - message_format = _("String length exceeded.The length of" - " string '%(string)s' exceeded the limit" - " of column %(type)s(CHAR(%(length)d)).") - - -class ValidationSizeError(Error): - message_format = _("Request attribute %(attribute)s must be" - " less than or equal to %(size)i. The server" - " could not comply with the request because" - " the attribute size is invalid (too large)." - " The client is assumed to be in error.") - code = 400 - title = 'Bad Request' - - -class CircularRegionHierarchyError(Error): - message_format = _("The specified parent region %(parent_region_id)s " - "would create a circular region hierarchy.") - code = 400 - title = 'Bad Request' - - -class ForbiddenNotSecurity(Error): - """When you want to return a 403 Forbidden response but not security. - - Use this for errors where the message is always safe to present to the user - and won't give away extra information. 
- - """ - - code = 403 - title = 'Forbidden' - - -class PasswordVerificationError(ForbiddenNotSecurity): - message_format = _("The password length must be less than or equal " - "to %(size)i. The server could not comply with the " - "request because the password is invalid.") - - -class RegionDeletionError(ForbiddenNotSecurity): - message_format = _("Unable to delete region %(region_id)s because it or " - "its child regions have associated endpoints.") - - -class PKITokenExpected(ForbiddenNotSecurity): - message_format = _('The certificates you requested are not available. ' - 'It is likely that this server does not use PKI tokens ' - 'otherwise this is the result of misconfiguration.') - - -class SecurityError(Error): - """Security error exception. - - Avoids exposing details of security errors, unless in insecure_debug mode. - - """ - - amendment = _('(Disable insecure_debug mode to suppress these details.)') - - def _build_message(self, message, **kwargs): - """Only returns detailed messages in insecure_debug mode.""" - if message and CONF.insecure_debug: - if isinstance(message, six.string_types): - # Only do replacement if message is string. The message is - # sometimes a different exception or bytes, which would raise - # TypeError. 
- message = _format_with_unicode_kwargs(message, kwargs) - return _('%(message)s %(amendment)s') % { - 'message': message, - 'amendment': self.amendment} - - return _format_with_unicode_kwargs(self.message_format, kwargs) - - -class Unauthorized(SecurityError): - message_format = _("The request you have made requires authentication.") - code = 401 - title = 'Unauthorized' - - -class AuthPluginException(Unauthorized): - message_format = _("Authentication plugin error.") - - def __init__(self, *args, **kwargs): - super(AuthPluginException, self).__init__(*args, **kwargs) - self.authentication = {} - - -class MissingGroups(Unauthorized): - message_format = _("Unable to find valid groups while using " - "mapping %(mapping_id)s") - - -class AuthMethodNotSupported(AuthPluginException): - message_format = _("Attempted to authenticate with an unsupported method.") - - def __init__(self, *args, **kwargs): - super(AuthMethodNotSupported, self).__init__(*args, **kwargs) - self.authentication = {'methods': CONF.auth.methods} - - -class AdditionalAuthRequired(AuthPluginException): - message_format = _("Additional authentications steps required.") - - def __init__(self, auth_response=None, **kwargs): - super(AdditionalAuthRequired, self).__init__(message=None, **kwargs) - self.authentication = auth_response - - -class Forbidden(SecurityError): - message_format = _("You are not authorized to perform the" - " requested action.") - code = 403 - title = 'Forbidden' - - -class ForbiddenAction(Forbidden): - message_format = _("You are not authorized to perform the" - " requested action: %(action)s") - - -class ImmutableAttributeError(Forbidden): - message_format = _("Could not change immutable attribute(s) " - "'%(attributes)s' in target %(target)s") - - -class CrossBackendNotAllowed(Forbidden): - message_format = _("Group membership across backend boundaries is not " - "allowed, group in question is %(group_id)s, " - "user is %(user_id)s") - - -class 
InvalidPolicyAssociation(Forbidden): - message_format = _("Invalid mix of entities for policy association - " - "only Endpoint, Service or Region+Service allowed. " - "Request was - Endpoint: %(endpoint_id)s, " - "Service: %(service_id)s, Region: %(region_id)s") - - -class InvalidDomainConfig(Forbidden): - message_format = _("Invalid domain specific configuration: %(reason)s") - - -class NotFound(Error): - message_format = _("Could not find: %(target)s") - code = 404 - title = 'Not Found' - - -class EndpointNotFound(NotFound): - message_format = _("Could not find endpoint: %(endpoint_id)s") - - -class MetadataNotFound(NotFound): - # NOTE (dolph): metadata is not a user-facing concept, - # so this exception should not be exposed. - - message_format = _("An unhandled exception has occurred:" - " Could not find metadata.") - - -class PolicyNotFound(NotFound): - message_format = _("Could not find policy: %(policy_id)s") - - -class PolicyAssociationNotFound(NotFound): - message_format = _("Could not find policy association") - - -class RoleNotFound(NotFound): - message_format = _("Could not find role: %(role_id)s") - - -class ImpliedRoleNotFound(NotFound): - message_format = _("%(prior_role_id)s does not imply %(implied_role_id)s") - - -class InvalidImpliedRole(Forbidden): - message_format = _("%(role_id)s cannot be an implied roles") - - -class RoleAssignmentNotFound(NotFound): - message_format = _("Could not find role assignment with role: " - "%(role_id)s, user or group: %(actor_id)s, " - "project or domain: %(target_id)s") - - -class RegionNotFound(NotFound): - message_format = _("Could not find region: %(region_id)s") - - -class ServiceNotFound(NotFound): - message_format = _("Could not find service: %(service_id)s") - - -class DomainNotFound(NotFound): - message_format = _("Could not find domain: %(domain_id)s") - - -class ProjectNotFound(NotFound): - message_format = _("Could not find project: %(project_id)s") - - -class InvalidParentProject(NotFound): - 
message_format = _("Cannot create project with parent: %(project_id)s") - - -class TokenNotFound(NotFound): - message_format = _("Could not find token: %(token_id)s") - - -class UserNotFound(NotFound): - message_format = _("Could not find user: %(user_id)s") - - -class GroupNotFound(NotFound): - message_format = _("Could not find group: %(group_id)s") - - -class MappingNotFound(NotFound): - message_format = _("Could not find mapping: %(mapping_id)s") - - -class TrustNotFound(NotFound): - message_format = _("Could not find trust: %(trust_id)s") - - -class TrustUseLimitReached(Forbidden): - message_format = _("No remaining uses for trust: %(trust_id)s") - - -class CredentialNotFound(NotFound): - message_format = _("Could not find credential: %(credential_id)s") - - -class VersionNotFound(NotFound): - message_format = _("Could not find version: %(version)s") - - -class EndpointGroupNotFound(NotFound): - message_format = _("Could not find Endpoint Group: %(endpoint_group_id)s") - - -class IdentityProviderNotFound(NotFound): - message_format = _("Could not find Identity Provider: %(idp_id)s") - - -class ServiceProviderNotFound(NotFound): - message_format = _("Could not find Service Provider: %(sp_id)s") - - -class FederatedProtocolNotFound(NotFound): - message_format = _("Could not find federated protocol %(protocol_id)s for" - " Identity Provider: %(idp_id)s") - - -class PublicIDNotFound(NotFound): - # This is used internally and mapped to either User/GroupNotFound or, - # Assertion before the exception leaves Keystone. - message_format = "%(id)s" - - -class DomainConfigNotFound(NotFound): - message_format = _('Could not find %(group_or_option)s in domain ' - 'configuration for domain %(domain_id)s') - - -class ConfigRegistrationNotFound(Exception): - # This is used internally between the domain config backend and the - # manager, so should not escape to the client. If it did, it is a coding - # error on our part, and would end up, appropriately, as a 500 error. 
- pass - - -class KeystoneConfigurationError(Exception): - # This is an exception to be used in the case that Keystone config is - # invalid and Keystone should not start. - pass - - -class Conflict(Error): - message_format = _("Conflict occurred attempting to store %(type)s -" - " %(details)s") - code = 409 - title = 'Conflict' - - -class UnexpectedError(SecurityError): - """Avoids exposing details of failures, unless in insecure_debug mode.""" - - message_format = _("An unexpected error prevented the server " - "from fulfilling your request.") - - debug_message_format = _("An unexpected error prevented the server " - "from fulfilling your request: %(exception)s") - - def _build_message(self, message, **kwargs): - - # Ensure that exception has a value to be extra defensive for - # substitutions and make sure the exception doesn't raise an - # exception. - kwargs.setdefault('exception', '') - - return super(UnexpectedError, self)._build_message( - message or self.debug_message_format, **kwargs) - - code = 500 - title = 'Internal Server Error' - - -class TrustConsumeMaximumAttempt(UnexpectedError): - debug_message_format = _("Unable to consume trust %(trust_id)s, unable to " - "acquire lock.") - - -class CertificateFilesUnavailable(UnexpectedError): - debug_message_format = _("Expected signing certificates are not available " - "on the server. 
Please check Keystone " - "configuration.") - - -class MalformedEndpoint(UnexpectedError): - debug_message_format = _("Malformed endpoint URL (%(endpoint)s)," - " see ERROR log for details.") - - -class MappedGroupNotFound(UnexpectedError): - debug_message_format = _("Group %(group_id)s returned by mapping " - "%(mapping_id)s was not found in the backend.") - - -class MetadataFileError(UnexpectedError): - debug_message_format = _("Error while reading metadata file, %(reason)s") - - -class DirectMappingError(UnexpectedError): - message_format = _("Local section in mapping %(mapping_id)s refers to a " - "remote match that doesn't exist " - "(e.g. {0} in a local section).") - - -class AssignmentTypeCalculationError(UnexpectedError): - debug_message_format = _( - 'Unexpected combination of grant attributes - ' - 'User: %(user_id)s, Group: %(group_id)s, Project: %(project_id)s, ' - 'Domain: %(domain_id)s') - - -class NotImplemented(Error): - message_format = _("The action you have requested has not" - " been implemented.") - code = 501 - title = 'Not Implemented' - - -class Gone(Error): - message_format = _("The service you have requested is no" - " longer available on this server.") - code = 410 - title = 'Gone' - - -class ConfigFileNotFound(UnexpectedError): - debug_message_format = _("The Keystone configuration file %(config_file)s " - "could not be found.") - - -class KeysNotFound(UnexpectedError): - debug_message_format = _('No encryption keys found; run keystone-manage ' - 'fernet_setup to bootstrap one.') - - -class MultipleSQLDriversInConfig(UnexpectedError): - debug_message_format = _('The Keystone domain-specific configuration has ' - 'specified more than one SQL driver (only one is ' - 'permitted): %(source)s.') - - -class MigrationNotProvided(Exception): - def __init__(self, mod_name, path): - super(MigrationNotProvided, self).__init__(_( - "%(mod_name)s doesn't provide database migrations. 
The migration" - " repository path at %(path)s doesn't exist or isn't a directory." - ) % {'mod_name': mod_name, 'path': path}) - - -class UnsupportedTokenVersionException(UnexpectedError): - debug_message_format = _('Token version is unrecognizable or ' - 'unsupported.') - - -class SAMLSigningError(UnexpectedError): - debug_message_format = _('Unable to sign SAML assertion. It is likely ' - 'that this server does not have xmlsec1 ' - 'installed, or this is the result of ' - 'misconfiguration. Reason %(reason)s') - - -class OAuthHeadersMissingError(UnexpectedError): - debug_message_format = _('No Authorization headers found, cannot proceed ' - 'with OAuth related calls, if running under ' - 'HTTPd or Apache, ensure WSGIPassAuthorization ' - 'is set to On.') - - -class TokenlessAuthConfigError(ValidationError): - message_format = _('Could not determine Identity Provider ID. The ' - 'configuration option %(issuer_attribute)s ' - 'was not found in the request environment.') - - -class MigrationMovedFailure(RuntimeError): - def __init__(self, extension): - self.extension = extension - msg = _("The %s extension has been moved into keystone core and as " - "such its migrations are maintained by the main keystone " - "database control. 
Use the command: keystone-manage " - "db_sync") % self.extension - super(MigrationMovedFailure, self).__init__(msg) - - -class UnsupportedDriverVersion(UnexpectedError): - debug_message_format = _('%(driver)s is not supported driver version') diff --git a/keystone-moon/keystone/federation/V8_backends/__init__.py b/keystone-moon/keystone/federation/V8_backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/federation/V8_backends/sql.py b/keystone-moon/keystone/federation/V8_backends/sql.py deleted file mode 100644 index d6b42aa0..00000000 --- a/keystone-moon/keystone/federation/V8_backends/sql.py +++ /dev/null @@ -1,389 +0,0 @@ -# Copyright 2014 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_log import log -from oslo_serialization import jsonutils -import six -from sqlalchemy import orm - -from keystone.common import sql -from keystone import exception -from keystone.federation import core -from keystone.i18n import _ - - -LOG = log.getLogger(__name__) - - -class FederationProtocolModel(sql.ModelBase, sql.DictBase): - __tablename__ = 'federation_protocol' - attributes = ['id', 'idp_id', 'mapping_id'] - mutable_attributes = frozenset(['mapping_id']) - - id = sql.Column(sql.String(64), primary_key=True) - idp_id = sql.Column(sql.String(64), sql.ForeignKey('identity_provider.id', - ondelete='CASCADE'), primary_key=True) - mapping_id = sql.Column(sql.String(64), nullable=False) - - @classmethod - def from_dict(cls, dictionary): - new_dictionary = dictionary.copy() - return cls(**new_dictionary) - - def to_dict(self): - """Return a dictionary with model's attributes.""" - d = dict() - for attr in self.__class__.attributes: - d[attr] = getattr(self, attr) - return d - - -class IdentityProviderModel(sql.ModelBase, sql.DictBase): - __tablename__ = 'identity_provider' - attributes = ['id', 'enabled', 'description', 'remote_ids'] - mutable_attributes = frozenset(['description', 'enabled', 'remote_ids']) - - id = sql.Column(sql.String(64), primary_key=True) - enabled = sql.Column(sql.Boolean, nullable=False) - description = sql.Column(sql.Text(), nullable=True) - remote_ids = orm.relationship('IdPRemoteIdsModel', - order_by='IdPRemoteIdsModel.remote_id', - cascade='all, delete-orphan') - - @classmethod - def from_dict(cls, dictionary): - new_dictionary = dictionary.copy() - remote_ids_list = new_dictionary.pop('remote_ids', None) - if not remote_ids_list: - remote_ids_list = [] - identity_provider = cls(**new_dictionary) - remote_ids = [] - # NOTE(fmarco76): the remote_ids_list contains only remote ids - # associated with the IdP because of the "relationship" established in - # sqlalchemy and corresponding to the FK in the idp_remote_ids table - for 
remote in remote_ids_list: - remote_ids.append(IdPRemoteIdsModel(remote_id=remote)) - identity_provider.remote_ids = remote_ids - return identity_provider - - def to_dict(self): - """Return a dictionary with model's attributes.""" - d = dict() - for attr in self.__class__.attributes: - d[attr] = getattr(self, attr) - d['remote_ids'] = [] - for remote in self.remote_ids: - d['remote_ids'].append(remote.remote_id) - return d - - -class IdPRemoteIdsModel(sql.ModelBase, sql.DictBase): - __tablename__ = 'idp_remote_ids' - attributes = ['idp_id', 'remote_id'] - mutable_attributes = frozenset(['idp_id', 'remote_id']) - - idp_id = sql.Column(sql.String(64), - sql.ForeignKey('identity_provider.id', - ondelete='CASCADE')) - remote_id = sql.Column(sql.String(255), - primary_key=True) - - @classmethod - def from_dict(cls, dictionary): - new_dictionary = dictionary.copy() - return cls(**new_dictionary) - - def to_dict(self): - """Return a dictionary with model's attributes.""" - d = dict() - for attr in self.__class__.attributes: - d[attr] = getattr(self, attr) - return d - - -class MappingModel(sql.ModelBase, sql.DictBase): - __tablename__ = 'mapping' - attributes = ['id', 'rules'] - - id = sql.Column(sql.String(64), primary_key=True) - rules = sql.Column(sql.JsonBlob(), nullable=False) - - @classmethod - def from_dict(cls, dictionary): - new_dictionary = dictionary.copy() - new_dictionary['rules'] = jsonutils.dumps(new_dictionary['rules']) - return cls(**new_dictionary) - - def to_dict(self): - """Return a dictionary with model's attributes.""" - d = dict() - for attr in self.__class__.attributes: - d[attr] = getattr(self, attr) - d['rules'] = jsonutils.loads(d['rules']) - return d - - -class ServiceProviderModel(sql.ModelBase, sql.DictBase): - __tablename__ = 'service_provider' - attributes = ['auth_url', 'id', 'enabled', 'description', - 'relay_state_prefix', 'sp_url'] - mutable_attributes = frozenset(['auth_url', 'description', 'enabled', - 'relay_state_prefix', 'sp_url']) 
- - id = sql.Column(sql.String(64), primary_key=True) - enabled = sql.Column(sql.Boolean, nullable=False) - description = sql.Column(sql.Text(), nullable=True) - auth_url = sql.Column(sql.String(256), nullable=False) - sp_url = sql.Column(sql.String(256), nullable=False) - relay_state_prefix = sql.Column(sql.String(256), nullable=False) - - @classmethod - def from_dict(cls, dictionary): - new_dictionary = dictionary.copy() - return cls(**new_dictionary) - - def to_dict(self): - """Return a dictionary with model's attributes.""" - d = dict() - for attr in self.__class__.attributes: - d[attr] = getattr(self, attr) - return d - - -class Federation(core.FederationDriverV8): - - _CONFLICT_LOG_MSG = 'Conflict %(conflict_type)s: %(details)s' - - def _handle_idp_conflict(self, e): - conflict_type = 'identity_provider' - details = six.text_type(e) - LOG.debug(self._CONFLICT_LOG_MSG, {'conflict_type': conflict_type, - 'details': details}) - if 'remote_id' in details: - msg = _('Duplicate remote ID: %s') - else: - msg = _('Duplicate entry: %s') - msg = msg % e.value - raise exception.Conflict(type=conflict_type, details=msg) - - # Identity Provider CRUD - @sql.handle_conflicts(conflict_type='identity_provider') - def create_idp(self, idp_id, idp): - idp['id'] = idp_id - with sql.session_for_write() as session: - idp_ref = IdentityProviderModel.from_dict(idp) - session.add(idp_ref) - return idp_ref.to_dict() - - def delete_idp(self, idp_id): - with sql.session_for_write() as session: - self._delete_assigned_protocols(session, idp_id) - idp_ref = self._get_idp(session, idp_id) - session.delete(idp_ref) - - def _get_idp(self, session, idp_id): - idp_ref = session.query(IdentityProviderModel).get(idp_id) - if not idp_ref: - raise exception.IdentityProviderNotFound(idp_id=idp_id) - return idp_ref - - def _get_idp_from_remote_id(self, session, remote_id): - q = session.query(IdPRemoteIdsModel) - q = q.filter_by(remote_id=remote_id) - try: - return q.one() - except sql.NotFound: - 
raise exception.IdentityProviderNotFound(idp_id=remote_id) - - def list_idps(self): - with sql.session_for_read() as session: - idps = session.query(IdentityProviderModel) - idps_list = [idp.to_dict() for idp in idps] - return idps_list - - def get_idp(self, idp_id): - with sql.session_for_read() as session: - idp_ref = self._get_idp(session, idp_id) - return idp_ref.to_dict() - - def get_idp_from_remote_id(self, remote_id): - with sql.session_for_read() as session: - ref = self._get_idp_from_remote_id(session, remote_id) - return ref.to_dict() - - def update_idp(self, idp_id, idp): - try: - with sql.session_for_write() as session: - idp_ref = self._get_idp(session, idp_id) - old_idp = idp_ref.to_dict() - old_idp.update(idp) - new_idp = IdentityProviderModel.from_dict(old_idp) - for attr in IdentityProviderModel.mutable_attributes: - setattr(idp_ref, attr, getattr(new_idp, attr)) - return idp_ref.to_dict() - except sql.DBDuplicateEntry as e: - self._handle_idp_conflict(e) - - # Protocol CRUD - def _get_protocol(self, session, idp_id, protocol_id): - q = session.query(FederationProtocolModel) - q = q.filter_by(id=protocol_id, idp_id=idp_id) - try: - return q.one() - except sql.NotFound: - kwargs = {'protocol_id': protocol_id, - 'idp_id': idp_id} - raise exception.FederatedProtocolNotFound(**kwargs) - - @sql.handle_conflicts(conflict_type='federation_protocol') - def create_protocol(self, idp_id, protocol_id, protocol): - protocol['id'] = protocol_id - protocol['idp_id'] = idp_id - with sql.session_for_write() as session: - self._get_idp(session, idp_id) - protocol_ref = FederationProtocolModel.from_dict(protocol) - session.add(protocol_ref) - return protocol_ref.to_dict() - - def update_protocol(self, idp_id, protocol_id, protocol): - with sql.session_for_write() as session: - proto_ref = self._get_protocol(session, idp_id, protocol_id) - old_proto = proto_ref.to_dict() - old_proto.update(protocol) - new_proto = FederationProtocolModel.from_dict(old_proto) - for 
attr in FederationProtocolModel.mutable_attributes: - setattr(proto_ref, attr, getattr(new_proto, attr)) - return proto_ref.to_dict() - - def get_protocol(self, idp_id, protocol_id): - with sql.session_for_read() as session: - protocol_ref = self._get_protocol(session, idp_id, protocol_id) - return protocol_ref.to_dict() - - def list_protocols(self, idp_id): - with sql.session_for_read() as session: - q = session.query(FederationProtocolModel) - q = q.filter_by(idp_id=idp_id) - protocols = [protocol.to_dict() for protocol in q] - return protocols - - def delete_protocol(self, idp_id, protocol_id): - with sql.session_for_write() as session: - key_ref = self._get_protocol(session, idp_id, protocol_id) - session.delete(key_ref) - - def _delete_assigned_protocols(self, session, idp_id): - query = session.query(FederationProtocolModel) - query = query.filter_by(idp_id=idp_id) - query.delete() - - # Mapping CRUD - def _get_mapping(self, session, mapping_id): - mapping_ref = session.query(MappingModel).get(mapping_id) - if not mapping_ref: - raise exception.MappingNotFound(mapping_id=mapping_id) - return mapping_ref - - @sql.handle_conflicts(conflict_type='mapping') - def create_mapping(self, mapping_id, mapping): - ref = {} - ref['id'] = mapping_id - ref['rules'] = mapping.get('rules') - with sql.session_for_write() as session: - mapping_ref = MappingModel.from_dict(ref) - session.add(mapping_ref) - return mapping_ref.to_dict() - - def delete_mapping(self, mapping_id): - with sql.session_for_write() as session: - mapping_ref = self._get_mapping(session, mapping_id) - session.delete(mapping_ref) - - def list_mappings(self): - with sql.session_for_read() as session: - mappings = session.query(MappingModel) - return [x.to_dict() for x in mappings] - - def get_mapping(self, mapping_id): - with sql.session_for_read() as session: - mapping_ref = self._get_mapping(session, mapping_id) - return mapping_ref.to_dict() - - @sql.handle_conflicts(conflict_type='mapping') - def 
update_mapping(self, mapping_id, mapping): - ref = {} - ref['id'] = mapping_id - ref['rules'] = mapping.get('rules') - with sql.session_for_write() as session: - mapping_ref = self._get_mapping(session, mapping_id) - old_mapping = mapping_ref.to_dict() - old_mapping.update(ref) - new_mapping = MappingModel.from_dict(old_mapping) - for attr in MappingModel.attributes: - setattr(mapping_ref, attr, getattr(new_mapping, attr)) - return mapping_ref.to_dict() - - def get_mapping_from_idp_and_protocol(self, idp_id, protocol_id): - with sql.session_for_read() as session: - protocol_ref = self._get_protocol(session, idp_id, protocol_id) - mapping_id = protocol_ref.mapping_id - mapping_ref = self._get_mapping(session, mapping_id) - return mapping_ref.to_dict() - - # Service Provider CRUD - @sql.handle_conflicts(conflict_type='service_provider') - def create_sp(self, sp_id, sp): - sp['id'] = sp_id - with sql.session_for_write() as session: - sp_ref = ServiceProviderModel.from_dict(sp) - session.add(sp_ref) - return sp_ref.to_dict() - - def delete_sp(self, sp_id): - with sql.session_for_write() as session: - sp_ref = self._get_sp(session, sp_id) - session.delete(sp_ref) - - def _get_sp(self, session, sp_id): - sp_ref = session.query(ServiceProviderModel).get(sp_id) - if not sp_ref: - raise exception.ServiceProviderNotFound(sp_id=sp_id) - return sp_ref - - def list_sps(self): - with sql.session_for_read() as session: - sps = session.query(ServiceProviderModel) - sps_list = [sp.to_dict() for sp in sps] - return sps_list - - def get_sp(self, sp_id): - with sql.session_for_read() as session: - sp_ref = self._get_sp(session, sp_id) - return sp_ref.to_dict() - - def update_sp(self, sp_id, sp): - with sql.session_for_write() as session: - sp_ref = self._get_sp(session, sp_id) - old_sp = sp_ref.to_dict() - old_sp.update(sp) - new_sp = ServiceProviderModel.from_dict(old_sp) - for attr in ServiceProviderModel.mutable_attributes: - setattr(sp_ref, attr, getattr(new_sp, attr)) - return 
sp_ref.to_dict() - - def get_enabled_service_providers(self): - with sql.session_for_read() as session: - service_providers = session.query(ServiceProviderModel) - service_providers = service_providers.filter_by(enabled=True) - return service_providers diff --git a/keystone-moon/keystone/federation/__init__.py b/keystone-moon/keystone/federation/__init__.py deleted file mode 100644 index b62cfb6f..00000000 --- a/keystone-moon/keystone/federation/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright 2014 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.federation.core import * # noqa diff --git a/keystone-moon/keystone/federation/backends/__init__.py b/keystone-moon/keystone/federation/backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/federation/backends/sql.py b/keystone-moon/keystone/federation/backends/sql.py deleted file mode 100644 index add409e6..00000000 --- a/keystone-moon/keystone/federation/backends/sql.py +++ /dev/null @@ -1,393 +0,0 @@ -# Copyright 2014 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_log import log -from oslo_serialization import jsonutils -import six -from sqlalchemy import orm - -from keystone.common import sql -from keystone import exception -from keystone.federation import core -from keystone.i18n import _ - - -LOG = log.getLogger(__name__) - - -class FederationProtocolModel(sql.ModelBase, sql.DictBase): - __tablename__ = 'federation_protocol' - attributes = ['id', 'idp_id', 'mapping_id'] - mutable_attributes = frozenset(['mapping_id']) - - id = sql.Column(sql.String(64), primary_key=True) - idp_id = sql.Column(sql.String(64), sql.ForeignKey('identity_provider.id', - ondelete='CASCADE'), primary_key=True) - mapping_id = sql.Column(sql.String(64), nullable=False) - - @classmethod - def from_dict(cls, dictionary): - new_dictionary = dictionary.copy() - return cls(**new_dictionary) - - def to_dict(self): - """Return a dictionary with model's attributes.""" - d = dict() - for attr in self.__class__.attributes: - d[attr] = getattr(self, attr) - return d - - -class IdentityProviderModel(sql.ModelBase, sql.DictBase): - __tablename__ = 'identity_provider' - attributes = ['id', 'enabled', 'description', 'remote_ids'] - mutable_attributes = frozenset(['description', 'enabled', 'remote_ids']) - - id = sql.Column(sql.String(64), primary_key=True) - enabled = sql.Column(sql.Boolean, nullable=False) - description = sql.Column(sql.Text(), nullable=True) - remote_ids = orm.relationship('IdPRemoteIdsModel', - order_by='IdPRemoteIdsModel.remote_id', - cascade='all, delete-orphan') - - @classmethod - def from_dict(cls, dictionary): - 
new_dictionary = dictionary.copy() - remote_ids_list = new_dictionary.pop('remote_ids', None) - if not remote_ids_list: - remote_ids_list = [] - identity_provider = cls(**new_dictionary) - remote_ids = [] - # NOTE(fmarco76): the remote_ids_list contains only remote ids - # associated with the IdP because of the "relationship" established in - # sqlalchemy and corresponding to the FK in the idp_remote_ids table - for remote in remote_ids_list: - remote_ids.append(IdPRemoteIdsModel(remote_id=remote)) - identity_provider.remote_ids = remote_ids - return identity_provider - - def to_dict(self): - """Return a dictionary with model's attributes.""" - d = dict() - for attr in self.__class__.attributes: - d[attr] = getattr(self, attr) - d['remote_ids'] = [] - for remote in self.remote_ids: - d['remote_ids'].append(remote.remote_id) - return d - - -class IdPRemoteIdsModel(sql.ModelBase, sql.DictBase): - __tablename__ = 'idp_remote_ids' - attributes = ['idp_id', 'remote_id'] - mutable_attributes = frozenset(['idp_id', 'remote_id']) - - idp_id = sql.Column(sql.String(64), - sql.ForeignKey('identity_provider.id', - ondelete='CASCADE')) - remote_id = sql.Column(sql.String(255), - primary_key=True) - - @classmethod - def from_dict(cls, dictionary): - new_dictionary = dictionary.copy() - return cls(**new_dictionary) - - def to_dict(self): - """Return a dictionary with model's attributes.""" - d = dict() - for attr in self.__class__.attributes: - d[attr] = getattr(self, attr) - return d - - -class MappingModel(sql.ModelBase, sql.DictBase): - __tablename__ = 'mapping' - attributes = ['id', 'rules'] - - id = sql.Column(sql.String(64), primary_key=True) - rules = sql.Column(sql.JsonBlob(), nullable=False) - - @classmethod - def from_dict(cls, dictionary): - new_dictionary = dictionary.copy() - new_dictionary['rules'] = jsonutils.dumps(new_dictionary['rules']) - return cls(**new_dictionary) - - def to_dict(self): - """Return a dictionary with model's attributes.""" - d = dict() - for 
attr in self.__class__.attributes: - d[attr] = getattr(self, attr) - d['rules'] = jsonutils.loads(d['rules']) - return d - - -class ServiceProviderModel(sql.ModelBase, sql.DictBase): - __tablename__ = 'service_provider' - attributes = ['auth_url', 'id', 'enabled', 'description', - 'relay_state_prefix', 'sp_url'] - mutable_attributes = frozenset(['auth_url', 'description', 'enabled', - 'relay_state_prefix', 'sp_url']) - - id = sql.Column(sql.String(64), primary_key=True) - enabled = sql.Column(sql.Boolean, nullable=False) - description = sql.Column(sql.Text(), nullable=True) - auth_url = sql.Column(sql.String(256), nullable=False) - sp_url = sql.Column(sql.String(256), nullable=False) - relay_state_prefix = sql.Column(sql.String(256), nullable=False) - - @classmethod - def from_dict(cls, dictionary): - new_dictionary = dictionary.copy() - return cls(**new_dictionary) - - def to_dict(self): - """Return a dictionary with model's attributes.""" - d = dict() - for attr in self.__class__.attributes: - d[attr] = getattr(self, attr) - return d - - -class Federation(core.FederationDriverV9): - - _CONFLICT_LOG_MSG = 'Conflict %(conflict_type)s: %(details)s' - - def _handle_idp_conflict(self, e): - conflict_type = 'identity_provider' - details = six.text_type(e) - LOG.debug(self._CONFLICT_LOG_MSG, {'conflict_type': conflict_type, - 'details': details}) - if 'remote_id' in details: - msg = _('Duplicate remote ID: %s') - else: - msg = _('Duplicate entry: %s') - msg = msg % e.value - raise exception.Conflict(type=conflict_type, details=msg) - - # Identity Provider CRUD - def create_idp(self, idp_id, idp): - idp['id'] = idp_id - try: - with sql.session_for_write() as session: - idp_ref = IdentityProviderModel.from_dict(idp) - session.add(idp_ref) - return idp_ref.to_dict() - except sql.DBDuplicateEntry as e: - self._handle_idp_conflict(e) - - def delete_idp(self, idp_id): - with sql.session_for_write() as session: - self._delete_assigned_protocols(session, idp_id) - idp_ref = 
self._get_idp(session, idp_id) - session.delete(idp_ref) - - def _get_idp(self, session, idp_id): - idp_ref = session.query(IdentityProviderModel).get(idp_id) - if not idp_ref: - raise exception.IdentityProviderNotFound(idp_id=idp_id) - return idp_ref - - def _get_idp_from_remote_id(self, session, remote_id): - q = session.query(IdPRemoteIdsModel) - q = q.filter_by(remote_id=remote_id) - try: - return q.one() - except sql.NotFound: - raise exception.IdentityProviderNotFound(idp_id=remote_id) - - def list_idps(self, hints=None): - with sql.session_for_read() as session: - query = session.query(IdentityProviderModel) - idps = sql.filter_limit_query(IdentityProviderModel, query, hints) - idps_list = [idp.to_dict() for idp in idps] - return idps_list - - def get_idp(self, idp_id): - with sql.session_for_read() as session: - idp_ref = self._get_idp(session, idp_id) - return idp_ref.to_dict() - - def get_idp_from_remote_id(self, remote_id): - with sql.session_for_read() as session: - ref = self._get_idp_from_remote_id(session, remote_id) - return ref.to_dict() - - def update_idp(self, idp_id, idp): - try: - with sql.session_for_write() as session: - idp_ref = self._get_idp(session, idp_id) - old_idp = idp_ref.to_dict() - old_idp.update(idp) - new_idp = IdentityProviderModel.from_dict(old_idp) - for attr in IdentityProviderModel.mutable_attributes: - setattr(idp_ref, attr, getattr(new_idp, attr)) - return idp_ref.to_dict() - except sql.DBDuplicateEntry as e: - self._handle_idp_conflict(e) - - # Protocol CRUD - def _get_protocol(self, session, idp_id, protocol_id): - q = session.query(FederationProtocolModel) - q = q.filter_by(id=protocol_id, idp_id=idp_id) - try: - return q.one() - except sql.NotFound: - kwargs = {'protocol_id': protocol_id, - 'idp_id': idp_id} - raise exception.FederatedProtocolNotFound(**kwargs) - - @sql.handle_conflicts(conflict_type='federation_protocol') - def create_protocol(self, idp_id, protocol_id, protocol): - protocol['id'] = protocol_id - 
protocol['idp_id'] = idp_id - with sql.session_for_write() as session: - self._get_idp(session, idp_id) - protocol_ref = FederationProtocolModel.from_dict(protocol) - session.add(protocol_ref) - return protocol_ref.to_dict() - - def update_protocol(self, idp_id, protocol_id, protocol): - with sql.session_for_write() as session: - proto_ref = self._get_protocol(session, idp_id, protocol_id) - old_proto = proto_ref.to_dict() - old_proto.update(protocol) - new_proto = FederationProtocolModel.from_dict(old_proto) - for attr in FederationProtocolModel.mutable_attributes: - setattr(proto_ref, attr, getattr(new_proto, attr)) - return proto_ref.to_dict() - - def get_protocol(self, idp_id, protocol_id): - with sql.session_for_read() as session: - protocol_ref = self._get_protocol(session, idp_id, protocol_id) - return protocol_ref.to_dict() - - def list_protocols(self, idp_id): - with sql.session_for_read() as session: - q = session.query(FederationProtocolModel) - q = q.filter_by(idp_id=idp_id) - protocols = [protocol.to_dict() for protocol in q] - return protocols - - def delete_protocol(self, idp_id, protocol_id): - with sql.session_for_write() as session: - key_ref = self._get_protocol(session, idp_id, protocol_id) - session.delete(key_ref) - - def _delete_assigned_protocols(self, session, idp_id): - query = session.query(FederationProtocolModel) - query = query.filter_by(idp_id=idp_id) - query.delete() - - # Mapping CRUD - def _get_mapping(self, session, mapping_id): - mapping_ref = session.query(MappingModel).get(mapping_id) - if not mapping_ref: - raise exception.MappingNotFound(mapping_id=mapping_id) - return mapping_ref - - @sql.handle_conflicts(conflict_type='mapping') - def create_mapping(self, mapping_id, mapping): - ref = {} - ref['id'] = mapping_id - ref['rules'] = mapping.get('rules') - with sql.session_for_write() as session: - mapping_ref = MappingModel.from_dict(ref) - session.add(mapping_ref) - return mapping_ref.to_dict() - - def delete_mapping(self, 
mapping_id): - with sql.session_for_write() as session: - mapping_ref = self._get_mapping(session, mapping_id) - session.delete(mapping_ref) - - def list_mappings(self): - with sql.session_for_read() as session: - mappings = session.query(MappingModel) - return [x.to_dict() for x in mappings] - - def get_mapping(self, mapping_id): - with sql.session_for_read() as session: - mapping_ref = self._get_mapping(session, mapping_id) - return mapping_ref.to_dict() - - @sql.handle_conflicts(conflict_type='mapping') - def update_mapping(self, mapping_id, mapping): - ref = {} - ref['id'] = mapping_id - ref['rules'] = mapping.get('rules') - with sql.session_for_write() as session: - mapping_ref = self._get_mapping(session, mapping_id) - old_mapping = mapping_ref.to_dict() - old_mapping.update(ref) - new_mapping = MappingModel.from_dict(old_mapping) - for attr in MappingModel.attributes: - setattr(mapping_ref, attr, getattr(new_mapping, attr)) - return mapping_ref.to_dict() - - def get_mapping_from_idp_and_protocol(self, idp_id, protocol_id): - with sql.session_for_read() as session: - protocol_ref = self._get_protocol(session, idp_id, protocol_id) - mapping_id = protocol_ref.mapping_id - mapping_ref = self._get_mapping(session, mapping_id) - return mapping_ref.to_dict() - - # Service Provider CRUD - @sql.handle_conflicts(conflict_type='service_provider') - def create_sp(self, sp_id, sp): - sp['id'] = sp_id - with sql.session_for_write() as session: - sp_ref = ServiceProviderModel.from_dict(sp) - session.add(sp_ref) - return sp_ref.to_dict() - - def delete_sp(self, sp_id): - with sql.session_for_write() as session: - sp_ref = self._get_sp(session, sp_id) - session.delete(sp_ref) - - def _get_sp(self, session, sp_id): - sp_ref = session.query(ServiceProviderModel).get(sp_id) - if not sp_ref: - raise exception.ServiceProviderNotFound(sp_id=sp_id) - return sp_ref - - def list_sps(self, hints=None): - with sql.session_for_read() as session: - query = 
session.query(ServiceProviderModel) - sps = sql.filter_limit_query(ServiceProviderModel, query, hints) - sps_list = [sp.to_dict() for sp in sps] - return sps_list - - def get_sp(self, sp_id): - with sql.session_for_read() as session: - sp_ref = self._get_sp(session, sp_id) - return sp_ref.to_dict() - - def update_sp(self, sp_id, sp): - with sql.session_for_write() as session: - sp_ref = self._get_sp(session, sp_id) - old_sp = sp_ref.to_dict() - old_sp.update(sp) - new_sp = ServiceProviderModel.from_dict(old_sp) - for attr in ServiceProviderModel.mutable_attributes: - setattr(sp_ref, attr, getattr(new_sp, attr)) - return sp_ref.to_dict() - - def get_enabled_service_providers(self): - with sql.session_for_read() as session: - service_providers = session.query(ServiceProviderModel) - service_providers = service_providers.filter_by(enabled=True) - return service_providers diff --git a/keystone-moon/keystone/federation/constants.py b/keystone-moon/keystone/federation/constants.py deleted file mode 100644 index afb38494..00000000 --- a/keystone-moon/keystone/federation/constants.py +++ /dev/null @@ -1,15 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -FEDERATION = 'OS-FEDERATION' -IDENTITY_PROVIDER = 'OS-FEDERATION:identity_provider' -PROTOCOL = 'OS-FEDERATION:protocol' diff --git a/keystone-moon/keystone/federation/controllers.py b/keystone-moon/keystone/federation/controllers.py deleted file mode 100644 index b9e2d883..00000000 --- a/keystone-moon/keystone/federation/controllers.py +++ /dev/null @@ -1,519 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Workflow logic for the Federation service.""" - -import string - -from oslo_config import cfg -from oslo_log import log -import six -from six.moves import urllib -import webob - -from keystone.auth import controllers as auth_controllers -from keystone.common import authorization -from keystone.common import controller -from keystone.common import dependency -from keystone.common import utils as k_utils -from keystone.common import validation -from keystone.common import wsgi -from keystone import exception -from keystone.federation import idp as keystone_idp -from keystone.federation import schema -from keystone.federation import utils -from keystone.i18n import _ -from keystone.models import token_model - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -class _ControllerBase(controller.V3Controller): - """Base behaviors for federation controllers.""" - - @classmethod - def base_url(cls, context, path=None): - """Construct a path and pass it to V3Controller.base_url method.""" - path = '/OS-FEDERATION/' + cls.collection_name - return 
super(_ControllerBase, cls).base_url(context, path=path) - - -@dependency.requires('federation_api') -class IdentityProvider(_ControllerBase): - """Identity Provider representation.""" - - collection_name = 'identity_providers' - member_name = 'identity_provider' - - _public_parameters = frozenset(['id', 'enabled', 'description', - 'remote_ids', 'links' - ]) - - @classmethod - def _add_related_links(cls, context, ref): - """Add URLs for entities related with Identity Provider. - - Add URLs pointing to: - - protocols tied to the Identity Provider - - """ - ref.setdefault('links', {}) - base_path = ref['links'].get('self') - if base_path is None: - base_path = '/'.join([IdentityProvider.base_url(context), - ref['id']]) - for name in ['protocols']: - ref['links'][name] = '/'.join([base_path, name]) - - @classmethod - def _add_self_referential_link(cls, context, ref): - id = ref['id'] - self_path = '/'.join([cls.base_url(context), id]) - ref.setdefault('links', {}) - ref['links']['self'] = self_path - - @classmethod - def wrap_member(cls, context, ref): - cls._add_self_referential_link(context, ref) - cls._add_related_links(context, ref) - ref = cls.filter_params(ref) - return {cls.member_name: ref} - - @controller.protected() - @validation.validated(schema.identity_provider_create, 'identity_provider') - def create_identity_provider(self, context, idp_id, identity_provider): - identity_provider = self._normalize_dict(identity_provider) - identity_provider.setdefault('enabled', False) - idp_ref = self.federation_api.create_idp(idp_id, identity_provider) - response = IdentityProvider.wrap_member(context, idp_ref) - return wsgi.render_response(body=response, status=('201', 'Created')) - - @controller.filterprotected('id', 'enabled') - def list_identity_providers(self, context, filters): - hints = self.build_driver_hints(context, filters) - ref = self.federation_api.list_idps(hints=hints) - ref = [self.filter_params(x) for x in ref] - return 
IdentityProvider.wrap_collection(context, ref, hints=hints) - - @controller.protected() - def get_identity_provider(self, context, idp_id): - ref = self.federation_api.get_idp(idp_id) - return IdentityProvider.wrap_member(context, ref) - - @controller.protected() - def delete_identity_provider(self, context, idp_id): - self.federation_api.delete_idp(idp_id) - - @controller.protected() - @validation.validated(schema.identity_provider_update, 'identity_provider') - def update_identity_provider(self, context, idp_id, identity_provider): - identity_provider = self._normalize_dict(identity_provider) - idp_ref = self.federation_api.update_idp(idp_id, identity_provider) - return IdentityProvider.wrap_member(context, idp_ref) - - -@dependency.requires('federation_api') -class FederationProtocol(_ControllerBase): - """A federation protocol representation. - - See keystone.common.controller.V3Controller docstring for explanation - on _public_parameters class attributes. - - """ - - collection_name = 'protocols' - member_name = 'protocol' - - _public_parameters = frozenset(['id', 'mapping_id', 'links']) - - @classmethod - def _add_self_referential_link(cls, context, ref): - """Add 'links' entry to the response dictionary. - - Calls IdentityProvider.base_url() class method, as it constructs - proper URL along with the 'identity providers' part included. - - :param ref: response dictionary - - """ - ref.setdefault('links', {}) - base_path = ref['links'].get('identity_provider') - if base_path is None: - base_path = [IdentityProvider.base_url(context), ref['idp_id']] - base_path = '/'.join(base_path) - self_path = [base_path, 'protocols', ref['id']] - self_path = '/'.join(self_path) - ref['links']['self'] = self_path - - @classmethod - def _add_related_links(cls, context, ref): - """Add new entries to the 'links' subdictionary in the response. - - Adds 'identity_provider' key with URL pointing to related identity - provider as a value. 
- - :param ref: response dictionary - - """ - ref.setdefault('links', {}) - base_path = '/'.join([IdentityProvider.base_url(context), - ref['idp_id']]) - ref['links']['identity_provider'] = base_path - - @classmethod - def wrap_member(cls, context, ref): - cls._add_related_links(context, ref) - cls._add_self_referential_link(context, ref) - ref = cls.filter_params(ref) - return {cls.member_name: ref} - - @controller.protected() - @validation.validated(schema.federation_protocol_schema, 'protocol') - def create_protocol(self, context, idp_id, protocol_id, protocol): - ref = self._normalize_dict(protocol) - ref = self.federation_api.create_protocol(idp_id, protocol_id, ref) - response = FederationProtocol.wrap_member(context, ref) - return wsgi.render_response(body=response, status=('201', 'Created')) - - @controller.protected() - @validation.validated(schema.federation_protocol_schema, 'protocol') - def update_protocol(self, context, idp_id, protocol_id, protocol): - ref = self._normalize_dict(protocol) - ref = self.federation_api.update_protocol(idp_id, protocol_id, - protocol) - return FederationProtocol.wrap_member(context, ref) - - @controller.protected() - def get_protocol(self, context, idp_id, protocol_id): - ref = self.federation_api.get_protocol(idp_id, protocol_id) - return FederationProtocol.wrap_member(context, ref) - - @controller.protected() - def list_protocols(self, context, idp_id): - protocols_ref = self.federation_api.list_protocols(idp_id) - protocols = list(protocols_ref) - return FederationProtocol.wrap_collection(context, protocols) - - @controller.protected() - def delete_protocol(self, context, idp_id, protocol_id): - self.federation_api.delete_protocol(idp_id, protocol_id) - - -@dependency.requires('federation_api') -class MappingController(_ControllerBase): - collection_name = 'mappings' - member_name = 'mapping' - - @controller.protected() - def create_mapping(self, context, mapping_id, mapping): - ref = self._normalize_dict(mapping) - 
utils.validate_mapping_structure(ref) - mapping_ref = self.federation_api.create_mapping(mapping_id, ref) - response = MappingController.wrap_member(context, mapping_ref) - return wsgi.render_response(body=response, status=('201', 'Created')) - - @controller.protected() - def list_mappings(self, context): - ref = self.federation_api.list_mappings() - return MappingController.wrap_collection(context, ref) - - @controller.protected() - def get_mapping(self, context, mapping_id): - ref = self.federation_api.get_mapping(mapping_id) - return MappingController.wrap_member(context, ref) - - @controller.protected() - def delete_mapping(self, context, mapping_id): - self.federation_api.delete_mapping(mapping_id) - - @controller.protected() - def update_mapping(self, context, mapping_id, mapping): - mapping = self._normalize_dict(mapping) - utils.validate_mapping_structure(mapping) - mapping_ref = self.federation_api.update_mapping(mapping_id, mapping) - return MappingController.wrap_member(context, mapping_ref) - - -@dependency.requires('federation_api') -class Auth(auth_controllers.Auth): - - def _get_sso_origin_host(self, context): - """Validate and return originating dashboard URL. - - Make sure the parameter is specified in the request's URL as well its - value belongs to a list of trusted dashboards. - - :param context: request's context - :raises keystone.exception.ValidationError: ``origin`` query parameter - was not specified. The URL is deemed invalid. - :raises keystone.exception.Unauthorized: URL specified in origin query - parameter does not exist in list of websso trusted dashboards. 
- :returns: URL with the originating dashboard - - """ - if 'origin' in context['query_string']: - origin = context['query_string']['origin'] - host = urllib.parse.unquote_plus(origin) - else: - msg = _('Request must have an origin query parameter') - LOG.error(msg) - raise exception.ValidationError(msg) - - # change trusted_dashboard hostnames to lowercase before comparison - trusted_dashboards = [k_utils.lower_case_hostname(trusted) - for trusted in CONF.federation.trusted_dashboard] - - if host not in trusted_dashboards: - msg = _('%(host)s is not a trusted dashboard host') - msg = msg % {'host': host} - LOG.error(msg) - raise exception.Unauthorized(msg) - - return host - - def federated_authentication(self, context, idp_id, protocol_id): - """Authenticate from dedicated url endpoint. - - Build HTTP request body for federated authentication and inject - it into the ``authenticate_for_token`` function. - - """ - auth = { - 'identity': { - 'methods': [protocol_id], - protocol_id: { - 'identity_provider': idp_id, - 'protocol': protocol_id - } - } - } - - return self.authenticate_for_token(context, auth=auth) - - def federated_sso_auth(self, context, protocol_id): - try: - remote_id_name = utils.get_remote_id_parameter(protocol_id) - remote_id = context['environment'][remote_id_name] - except KeyError: - msg = _('Missing entity ID from environment') - LOG.error(msg) - raise exception.Unauthorized(msg) - - host = self._get_sso_origin_host(context) - - ref = self.federation_api.get_idp_from_remote_id(remote_id) - # NOTE(stevemar): the returned object is a simple dict that - # contains the idp_id and remote_id. 
- identity_provider = ref['idp_id'] - res = self.federated_authentication(context, identity_provider, - protocol_id) - token_id = res.headers['X-Subject-Token'] - return self.render_html_response(host, token_id) - - def federated_idp_specific_sso_auth(self, context, idp_id, protocol_id): - host = self._get_sso_origin_host(context) - - # NOTE(lbragstad): We validate that the Identity Provider actually - # exists in the Mapped authentication plugin. - res = self.federated_authentication(context, idp_id, protocol_id) - token_id = res.headers['X-Subject-Token'] - return self.render_html_response(host, token_id) - - def render_html_response(self, host, token_id): - """Forms an HTML Form from a template with autosubmit.""" - headers = [('Content-Type', 'text/html')] - - with open(CONF.federation.sso_callback_template) as template: - src = string.Template(template.read()) - - subs = {'host': host, 'token': token_id} - body = src.substitute(subs) - return webob.Response(body=body, status='200', - headerlist=headers) - - def _create_base_saml_assertion(self, context, auth): - issuer = CONF.saml.idp_entity_id - sp_id = auth['scope']['service_provider']['id'] - service_provider = self.federation_api.get_sp(sp_id) - utils.assert_enabled_service_provider_object(service_provider) - sp_url = service_provider['sp_url'] - - token_id = auth['identity']['token']['id'] - token_data = self.token_provider_api.validate_token(token_id) - token_ref = token_model.KeystoneToken(token_id, token_data) - - if not token_ref.project_scoped: - action = _('Use a project scoped token when attempting to create ' - 'a SAML assertion') - raise exception.ForbiddenAction(action=action) - - subject = token_ref.user_name - roles = token_ref.role_names - project = token_ref.project_name - # NOTE(rodrigods): the domain name is necessary in order to distinguish - # between projects and users with the same name in different domains. 
- project_domain_name = token_ref.project_domain_name - subject_domain_name = token_ref.user_domain_name - - generator = keystone_idp.SAMLGenerator() - response = generator.samlize_token( - issuer, sp_url, subject, subject_domain_name, - roles, project, project_domain_name) - return (response, service_provider) - - def _build_response_headers(self, service_provider): - return [('Content-Type', 'text/xml'), - ('X-sp-url', six.binary_type(service_provider['sp_url'])), - ('X-auth-url', six.binary_type(service_provider['auth_url']))] - - @validation.validated(schema.saml_create, 'auth') - def create_saml_assertion(self, context, auth): - """Exchange a scoped token for a SAML assertion. - - :param auth: Dictionary that contains a token and service provider ID - :returns: SAML Assertion based on properties from the token - """ - t = self._create_base_saml_assertion(context, auth) - (response, service_provider) = t - - headers = self._build_response_headers(service_provider) - return wsgi.render_response(body=response.to_string(), - status=('200', 'OK'), - headers=headers) - - @validation.validated(schema.saml_create, 'auth') - def create_ecp_assertion(self, context, auth): - """Exchange a scoped token for an ECP assertion. 
- - :param auth: Dictionary that contains a token and service provider ID - :returns: ECP Assertion based on properties from the token - """ - t = self._create_base_saml_assertion(context, auth) - (saml_assertion, service_provider) = t - relay_state_prefix = service_provider['relay_state_prefix'] - - generator = keystone_idp.ECPGenerator() - ecp_assertion = generator.generate_ecp(saml_assertion, - relay_state_prefix) - - headers = self._build_response_headers(service_provider) - return wsgi.render_response(body=ecp_assertion.to_string(), - status=('200', 'OK'), - headers=headers) - - -@dependency.requires('assignment_api', 'resource_api') -class DomainV3(controller.V3Controller): - collection_name = 'domains' - member_name = 'domain' - - def __init__(self): - super(DomainV3, self).__init__() - self.get_member_from_driver = self.resource_api.get_domain - - @controller.protected() - def list_domains_for_groups(self, context): - """List all domains available to an authenticated user's groups. - - :param context: request context - :returns: list of accessible domains - - """ - auth_context = context['environment'][authorization.AUTH_CONTEXT_ENV] - domains = self.assignment_api.list_domains_for_groups( - auth_context['group_ids']) - return DomainV3.wrap_collection(context, domains) - - -@dependency.requires('assignment_api', 'resource_api') -class ProjectAssignmentV3(controller.V3Controller): - collection_name = 'projects' - member_name = 'project' - - def __init__(self): - super(ProjectAssignmentV3, self).__init__() - self.get_member_from_driver = self.resource_api.get_project - - @controller.protected() - def list_projects_for_groups(self, context): - """List all projects available to an authenticated user's groups. 
- - :param context: request context - :returns: list of accessible projects - - """ - auth_context = context['environment'][authorization.AUTH_CONTEXT_ENV] - projects = self.assignment_api.list_projects_for_groups( - auth_context['group_ids']) - return ProjectAssignmentV3.wrap_collection(context, projects) - - -@dependency.requires('federation_api') -class ServiceProvider(_ControllerBase): - """Service Provider representation.""" - - collection_name = 'service_providers' - member_name = 'service_provider' - - _public_parameters = frozenset(['auth_url', 'id', 'enabled', 'description', - 'links', 'relay_state_prefix', 'sp_url']) - - @controller.protected() - @validation.validated(schema.service_provider_create, 'service_provider') - def create_service_provider(self, context, sp_id, service_provider): - service_provider = self._normalize_dict(service_provider) - service_provider.setdefault('enabled', False) - service_provider.setdefault('relay_state_prefix', - CONF.saml.relay_state_prefix) - sp_ref = self.federation_api.create_sp(sp_id, service_provider) - response = ServiceProvider.wrap_member(context, sp_ref) - return wsgi.render_response(body=response, status=('201', 'Created')) - - @controller.filterprotected('id', 'enabled') - def list_service_providers(self, context, filters): - hints = self.build_driver_hints(context, filters) - ref = self.federation_api.list_sps(hints=hints) - ref = [self.filter_params(x) for x in ref] - return ServiceProvider.wrap_collection(context, ref, hints=hints) - - @controller.protected() - def get_service_provider(self, context, sp_id): - ref = self.federation_api.get_sp(sp_id) - return ServiceProvider.wrap_member(context, ref) - - @controller.protected() - def delete_service_provider(self, context, sp_id): - self.federation_api.delete_sp(sp_id) - - @controller.protected() - @validation.validated(schema.service_provider_update, 'service_provider') - def update_service_provider(self, context, sp_id, service_provider): - 
service_provider = self._normalize_dict(service_provider) - sp_ref = self.federation_api.update_sp(sp_id, service_provider) - return ServiceProvider.wrap_member(context, sp_ref) - - -class SAMLMetadataV3(_ControllerBase): - member_name = 'metadata' - - def get_metadata(self, context): - metadata_path = CONF.saml.idp_metadata_path - try: - with open(metadata_path, 'r') as metadata_handler: - metadata = metadata_handler.read() - except IOError as e: - # Raise HTTP 500 in case Metadata file cannot be read. - raise exception.MetadataFileError(reason=e) - return wsgi.render_response(body=metadata, status=('200', 'OK'), - headers=[('Content-Type', 'text/xml')]) diff --git a/keystone-moon/keystone/federation/core.py b/keystone-moon/keystone/federation/core.py deleted file mode 100644 index 23028dfd..00000000 --- a/keystone-moon/keystone/federation/core.py +++ /dev/null @@ -1,611 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Main entry point into the Federation service.""" - -import abc - -from oslo_config import cfg -from oslo_log import versionutils -import six - -from keystone.common import dependency -from keystone.common import extension -from keystone.common import manager -from keystone import exception -from keystone.federation import utils - - -CONF = cfg.CONF -EXTENSION_DATA = { - 'name': 'OpenStack Federation APIs', - 'namespace': 'http://docs.openstack.org/identity/api/ext/' - 'OS-FEDERATION/v1.0', - 'alias': 'OS-FEDERATION', - 'updated': '2013-12-17T12:00:0-00:00', - 'description': 'OpenStack Identity Providers Mechanism.', - 'links': [{ - 'rel': 'describedby', - 'type': 'text/html', - 'href': 'http://specs.openstack.org/openstack/keystone-specs/api/v3/' - 'identity-api-v3-os-federation-ext.html', - }]} -extension.register_admin_extension(EXTENSION_DATA['alias'], EXTENSION_DATA) -extension.register_public_extension(EXTENSION_DATA['alias'], EXTENSION_DATA) - - -@dependency.provider('federation_api') -class Manager(manager.Manager): - """Default pivot point for the Federation backend. - - See :mod:`keystone.common.manager.Manager` for more details on how this - dynamically calls the backend. - - """ - - driver_namespace = 'keystone.federation' - - def __init__(self): - super(Manager, self).__init__(CONF.federation.driver) - - # Make sure it is a driver version we support, and if it is a legacy - # driver, then wrap it. 
- if isinstance(self.driver, FederationDriverV8): - self.driver = V9FederationWrapperForV8Driver(self.driver) - elif not isinstance(self.driver, FederationDriverV9): - raise exception.UnsupportedDriverVersion( - driver=CONF.federation.driver) - - def get_enabled_service_providers(self): - """List enabled service providers for Service Catalog - - Service Provider in a catalog contains three attributes: ``id``, - ``auth_url``, ``sp_url``, where: - - - id is a unique, user defined identifier for service provider object - - auth_url is an authentication URL of remote Keystone - - sp_url a URL accessible at the remote service provider where SAML - assertion is transmitted. - - :returns: list of dictionaries with enabled service providers - :rtype: list of dicts - - """ - def normalize(sp): - ref = { - 'auth_url': sp.auth_url, - 'id': sp.id, - 'sp_url': sp.sp_url - } - return ref - - service_providers = self.driver.get_enabled_service_providers() - return [normalize(sp) for sp in service_providers] - - def evaluate(self, idp_id, protocol_id, assertion_data): - mapping = self.get_mapping_from_idp_and_protocol(idp_id, protocol_id) - rules = mapping['rules'] - rule_processor = utils.RuleProcessor(mapping['id'], rules) - mapped_properties = rule_processor.process(assertion_data) - return mapped_properties, mapping['id'] - - -# The FederationDriverBase class is the set of driver methods from earlier -# drivers that we still support, that have not been removed or modified. This -# class is then used to created the augmented V8 and V9 version abstract driver -# classes, without having to duplicate a lot of abstract method signatures. -# If you remove a method from V9, then move the abstract methods from this Base -# class to the V8 class. Do not modify any of the method signatures in the Base -# class - changes should only be made in the V8 and subsequent classes. 
- -@six.add_metaclass(abc.ABCMeta) -class FederationDriverBase(object): - - @abc.abstractmethod - def create_idp(self, idp_id, idp): - """Create an identity provider. - - :param idp_id: ID of IdP object - :type idp_id: string - :param idp: idp object - :type idp: dict - :returns: idp ref - :rtype: dict - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_idp(self, idp_id): - """Delete an identity provider. - - :param idp_id: ID of IdP object - :type idp_id: string - :raises keystone.exception.IdentityProviderNotFound: If the IdP - doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_idp(self, idp_id): - """Get an identity provider by ID. - - :param idp_id: ID of IdP object - :type idp_id: string - :raises keystone.exception.IdentityProviderNotFound: If the IdP - doesn't exist. - :returns: idp ref - :rtype: dict - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_idp_from_remote_id(self, remote_id): - """Get an identity provider by remote ID. - - :param remote_id: ID of remote IdP - :type idp_id: string - :raises keystone.exception.IdentityProviderNotFound: If the IdP - doesn't exist. - :returns: idp ref - :rtype: dict - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def update_idp(self, idp_id, idp): - """Update an identity provider by ID. - - :param idp_id: ID of IdP object - :type idp_id: string - :param idp: idp object - :type idp: dict - :raises keystone.exception.IdentityProviderNotFound: If the IdP - doesn't exist. - :returns: idp ref - :rtype: dict - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def create_protocol(self, idp_id, protocol_id, protocol): - """Add an IdP-Protocol configuration. 
- - :param idp_id: ID of IdP object - :type idp_id: string - :param protocol_id: ID of protocol object - :type protocol_id: string - :param protocol: protocol object - :type protocol: dict - :raises keystone.exception.IdentityProviderNotFound: If the IdP - doesn't exist. - :returns: protocol ref - :rtype: dict - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def update_protocol(self, idp_id, protocol_id, protocol): - """Change an IdP-Protocol configuration. - - :param idp_id: ID of IdP object - :type idp_id: string - :param protocol_id: ID of protocol object - :type protocol_id: string - :param protocol: protocol object - :type protocol: dict - :raises keystone.exception.IdentityProviderNotFound: If the IdP - doesn't exist. - :raises keystone.exception.FederatedProtocolNotFound: If the federated - protocol cannot be found. - :returns: protocol ref - :rtype: dict - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_protocol(self, idp_id, protocol_id): - """Get an IdP-Protocol configuration. - - :param idp_id: ID of IdP object - :type idp_id: string - :param protocol_id: ID of protocol object - :type protocol_id: string - :raises keystone.exception.IdentityProviderNotFound: If the IdP - doesn't exist. - :raises keystone.exception.FederatedProtocolNotFound: If the federated - protocol cannot be found. - :returns: protocol ref - :rtype: dict - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_protocols(self, idp_id): - """List an IdP's supported protocols. - - :param idp_id: ID of IdP object - :type idp_id: string - :raises keystone.exception.IdentityProviderNotFound: If the IdP - doesn't exist. - :returns: list of protocol ref - :rtype: list of dict - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_protocol(self, idp_id, protocol_id): - """Delete an IdP-Protocol configuration. 
- - :param idp_id: ID of IdP object - :type idp_id: string - :param protocol_id: ID of protocol object - :type protocol_id: string - :raises keystone.exception.IdentityProviderNotFound: If the IdP - doesn't exist. - :raises keystone.exception.FederatedProtocolNotFound: If the federated - protocol cannot be found. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def create_mapping(self, mapping_id, mapping): - """Create a mapping. - - :param mapping_id: ID of mapping object - :type mapping_id: string - :param mapping: mapping ref with mapping name - :type mapping: dict - :returns: mapping ref - :rtype: dict - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_mapping(self, mapping_id): - """Delete a mapping. - - :param mapping_id: id of mapping to delete - :type mapping_ref: string - :returns: None - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def update_mapping(self, mapping_id, mapping_ref): - """Update a mapping. - - :param mapping_id: id of mapping to update - :type mapping_id: string - :param mapping_ref: new mapping ref - :type mapping_ref: dict - :returns: mapping ref - :rtype: dict - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_mappings(self): - """List all mappings. - - :returns: list of mapping refs - :rtype: list of dicts - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_mapping(self, mapping_id): - """Get a mapping, returns the mapping based on mapping_id. - - :param mapping_id: id of mapping to get - :type mapping_ref: string - :raises keystone.exception.MappingNotFound: If the mapping cannot - be found. 
- :returns: mapping ref - :rtype: dict - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_mapping_from_idp_and_protocol(self, idp_id, protocol_id): - """Get mapping based on idp_id and protocol_id. - - :param idp_id: id of the identity provider - :type idp_id: string - :param protocol_id: id of the protocol - :type protocol_id: string - :raises keystone.exception.IdentityProviderNotFound: If the IdP - doesn't exist. - :raises keystone.exception.FederatedProtocolNotFound: If the federated - protocol cannot be found. - :returns: mapping ref - :rtype: dict - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def create_sp(self, sp_id, sp): - """Create a service provider. - - :param sp_id: id of the service provider - :type sp_id: string - :param sp: service prvider object - :type sp: dict - - :returns: service provider ref - :rtype: dict - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_sp(self, sp_id): - """Delete a service provider. - - :param sp_id: id of the service provider - :type sp_id: string - - :raises keystone.exception.ServiceProviderNotFound: If the service - provider doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_sp(self, sp_id): - """Get a service provider. - - :param sp_id: id of the service provider - :type sp_id: string - :returns: service provider ref - :rtype: dict - - :raises keystone.exception.ServiceProviderNotFound: If the service - provider doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def update_sp(self, sp_id, sp): - """Update a service provider. 
- - :param sp_id: id of the service provider - :type sp_id: string - :param sp: service prvider object - :type sp: dict - - :returns: service provider ref - :rtype: dict - - :raises keystone.exception.ServiceProviderNotFound: If the service - provider doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - def get_enabled_service_providers(self): - """List enabled service providers for Service Catalog - - Service Provider in a catalog contains three attributes: ``id``, - ``auth_url``, ``sp_url``, where: - - - id is a unique, user defined identifier for service provider object - - auth_url is an authentication URL of remote Keystone - - sp_url a URL accessible at the remote service provider where SAML - assertion is transmitted. - - :returns: list of dictionaries with enabled service providers - :rtype: list of dicts - - """ - raise exception.NotImplemented() # pragma: no cover - - -class FederationDriverV8(FederationDriverBase): - """Removed or redefined methods from V8. - - Move the abstract methods of any methods removed or modified in later - versions of the driver from FederationDriverBase to here. We maintain this - so that legacy drivers, which will be a subclass of FederationDriverV8, can - still reference them. - - """ - - @abc.abstractmethod - def list_idps(self): - """List all identity providers. - - :returns: list of idp refs - :rtype: list of dicts - - :raises keystone.exception.IdentityProviderNotFound: If the IdP - doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_sps(self): - """List all service providers. - - :returns: List of service provider ref objects - :rtype: list of dicts - - """ - raise exception.NotImplemented() # pragma: no cover - - -class FederationDriverV9(FederationDriverBase): - """New or redefined methods from V8. - - Add any new V9 abstract methods (or those with modified signatures) to - this class. 
- - """ - - @abc.abstractmethod - def list_idps(self, hints): - """List all identity providers. - - :param hints: filter hints which the driver should - implement if at all possible. - :returns: list of idp refs - :rtype: list of dicts - - :raises keystone.exception.IdentityProviderNotFound: If the IdP - doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_sps(self, hints): - """List all service providers. - - :param hints: filter hints which the driver should - implement if at all possible. - :returns: List of service provider ref objects - :rtype: list of dicts - - :raises keystone.exception.ServiceProviderNotFound: If the SP - doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - -class V9FederationWrapperForV8Driver(FederationDriverV9): - """Wrapper class to supported a V8 legacy driver. - - In order to support legacy drivers without having to make the manager code - driver-version aware, we wrap legacy drivers so that they look like the - latest version. For the various changes made in a new driver, here are the - actions needed in this wrapper: - - Method removed from new driver - remove the call-through method from this - class, since the manager will no longer be - calling it. - Method signature (or meaning) changed - wrap the old method in a new - signature here, and munge the input - and output parameters accordingly. - New method added to new driver - add a method to implement the new - functionality here if possible. If that is - not possible, then return NotImplemented, - since we do not guarantee to support new - functionality with legacy drivers. 
- - """ - - @versionutils.deprecated( - as_of=versionutils.deprecated.MITAKA, - what='keystone.federation.FederationDriverV8', - in_favor_of='keystone.federation.FederationDriverV9', - remove_in=+2) - def __init__(self, wrapped_driver): - self.driver = wrapped_driver - - def create_idp(self, idp_id, idp): - return self.driver.create_idp(idp_id, idp) - - def delete_idp(self, idp_id): - self.driver.delete_idp(idp_id) - - # NOTE(davechen): The hints is ignored here to support legacy drivers, - # but the filters in hints will be remain unsatisfied and V3Controller - # wrapper will apply these filters at the end. So that the result get - # returned for list IdP will still be filtered with the legacy drivers. - def list_idps(self, hints): - return self.driver.list_idps() - - def get_idp(self, idp_id): - return self.driver.get_idp(idp_id) - - def get_idp_from_remote_id(self, remote_id): - return self.driver.get_idp_from_remote_id(remote_id) - - def update_idp(self, idp_id, idp): - return self.driver.update_idp(idp_id, idp) - - def create_protocol(self, idp_id, protocol_id, protocol): - return self.driver.create_protocol(idp_id, protocol_id, protocol) - - def update_protocol(self, idp_id, protocol_id, protocol): - return self.driver.update_protocol(idp_id, protocol_id, protocol) - - def get_protocol(self, idp_id, protocol_id): - return self.driver.get_protocol(idp_id, protocol_id) - - def list_protocols(self, idp_id): - return self.driver.list_protocols(idp_id) - - def delete_protocol(self, idp_id, protocol_id): - self.driver.delete_protocol(idp_id, protocol_id) - - def create_mapping(self, mapping_id, mapping): - return self.driver.create_mapping(mapping_id, mapping) - - def delete_mapping(self, mapping_id): - self.driver.delete_mapping(mapping_id) - - def update_mapping(self, mapping_id, mapping_ref): - return self.driver.update_mapping(mapping_id, mapping_ref) - - def list_mappings(self): - return self.driver.list_mappings() - - def get_mapping(self, mapping_id): - 
return self.driver.get_mapping(mapping_id) - - def get_mapping_from_idp_and_protocol(self, idp_id, protocol_id): - return self.driver.get_mapping_from_idp_and_protocol( - idp_id, protocol_id) - - def create_sp(self, sp_id, sp): - return self.driver.create_sp(sp_id, sp) - - def delete_sp(self, sp_id): - self.driver.delete_sp(sp_id) - - # NOTE(davechen): The hints is ignored here to support legacy drivers, - # but the filters in hints will be remain unsatisfied and V3Controller - # wrapper will apply these filters at the end. So that the result get - # returned for list SPs will still be filtered with the legacy drivers. - def list_sps(self, hints): - return self.driver.list_sps() - - def get_sp(self, sp_id): - return self.driver.get_sp(sp_id) - - def update_sp(self, sp_id, sp): - return self.driver.update_sp(sp_id, sp) - - def get_enabled_service_providers(self): - return self.driver.get_enabled_service_providers() - - -Driver = manager.create_legacy_driver(FederationDriverV8) diff --git a/keystone-moon/keystone/federation/idp.py b/keystone-moon/keystone/federation/idp.py deleted file mode 100644 index 494d58b9..00000000 --- a/keystone-moon/keystone/federation/idp.py +++ /dev/null @@ -1,615 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import datetime -import os -import uuid - -from oslo_config import cfg -from oslo_log import log -from oslo_utils import fileutils -from oslo_utils import importutils -from oslo_utils import timeutils -import saml2 -from saml2 import client_base -from saml2 import md -from saml2.profile import ecp -from saml2 import saml -from saml2 import samlp -from saml2.schema import soapenv -from saml2 import sigver -xmldsig = importutils.try_import("saml2.xmldsig") -if not xmldsig: - xmldsig = importutils.try_import("xmldsig") - -from keystone.common import environment -from keystone.common import utils -from keystone import exception -from keystone.i18n import _, _LE - - -LOG = log.getLogger(__name__) -CONF = cfg.CONF - - -class SAMLGenerator(object): - """A class to generate SAML assertions.""" - - def __init__(self): - self.assertion_id = uuid.uuid4().hex - - def samlize_token(self, issuer, recipient, user, user_domain_name, roles, - project, project_domain_name, expires_in=None): - """Convert Keystone attributes to a SAML assertion. 
- - :param issuer: URL of the issuing party - :type issuer: string - :param recipient: URL of the recipient - :type recipient: string - :param user: User name - :type user: string - :param user_domain_name: User Domain name - :type user_domain_name: string - :param roles: List of role names - :type roles: list - :param project: Project name - :type project: string - :param project_domain_name: Project Domain name - :type project_domain_name: string - :param expires_in: Sets how long the assertion is valid for, in seconds - :type expires_in: int - - :returns: XML object - - """ - expiration_time = self._determine_expiration_time(expires_in) - status = self._create_status() - saml_issuer = self._create_issuer(issuer) - subject = self._create_subject(user, expiration_time, recipient) - attribute_statement = self._create_attribute_statement( - user, user_domain_name, roles, project, project_domain_name) - authn_statement = self._create_authn_statement(issuer, expiration_time) - signature = self._create_signature() - - assertion = self._create_assertion(saml_issuer, signature, - subject, authn_statement, - attribute_statement) - - assertion = _sign_assertion(assertion) - - response = self._create_response(saml_issuer, status, assertion, - recipient) - return response - - def _determine_expiration_time(self, expires_in): - if expires_in is None: - expires_in = CONF.saml.assertion_expiration_time - now = timeutils.utcnow() - future = now + datetime.timedelta(seconds=expires_in) - return utils.isotime(future, subsecond=True) - - def _create_status(self): - """Create an object that represents a SAML Status. - - - - - - :returns: XML object - - """ - status = samlp.Status() - status_code = samlp.StatusCode() - status_code.value = samlp.STATUS_SUCCESS - status_code.set_text('') - status.status_code = status_code - return status - - def _create_issuer(self, issuer_url): - """Create an object that represents a SAML Issuer. 
- - - https://acme.com/FIM/sps/openstack/saml20 - - :returns: XML object - - """ - issuer = saml.Issuer() - issuer.format = saml.NAMEID_FORMAT_ENTITY - issuer.set_text(issuer_url) - return issuer - - def _create_subject(self, user, expiration_time, recipient): - """Create an object that represents a SAML Subject. - - - - john@smith.com - - - - - - :returns: XML object - - """ - name_id = saml.NameID() - name_id.set_text(user) - subject_conf_data = saml.SubjectConfirmationData() - subject_conf_data.recipient = recipient - subject_conf_data.not_on_or_after = expiration_time - subject_conf = saml.SubjectConfirmation() - subject_conf.method = saml.SCM_BEARER - subject_conf.subject_confirmation_data = subject_conf_data - subject = saml.Subject() - subject.subject_confirmation = subject_conf - subject.name_id = name_id - return subject - - def _create_attribute_statement(self, user, user_domain_name, roles, - project, project_domain_name): - """Create an object that represents a SAML AttributeStatement. 
- - - - test_user - - - Default - - - admin - member - - - development - - - Default - - - - :returns: XML object - - """ - def _build_attribute(attribute_name, attribute_values): - attribute = saml.Attribute() - attribute.name = attribute_name - - for value in attribute_values: - attribute_value = saml.AttributeValue() - attribute_value.set_text(value) - attribute.attribute_value.append(attribute_value) - - return attribute - - user_attribute = _build_attribute('openstack_user', [user]) - roles_attribute = _build_attribute('openstack_roles', roles) - project_attribute = _build_attribute('openstack_project', [project]) - project_domain_attribute = _build_attribute( - 'openstack_project_domain', [project_domain_name]) - user_domain_attribute = _build_attribute( - 'openstack_user_domain', [user_domain_name]) - - attribute_statement = saml.AttributeStatement() - attribute_statement.attribute.append(user_attribute) - attribute_statement.attribute.append(roles_attribute) - attribute_statement.attribute.append(project_attribute) - attribute_statement.attribute.append(project_domain_attribute) - attribute_statement.attribute.append(user_domain_attribute) - return attribute_statement - - def _create_authn_statement(self, issuer, expiration_time): - """Create an object that represents a SAML AuthnStatement. 
- - - - - urn:oasis:names:tc:SAML:2.0:ac:classes:Password - - - https://acme.com/FIM/sps/openstack/saml20 - - - - - :returns: XML object - - """ - authn_statement = saml.AuthnStatement() - authn_statement.authn_instant = utils.isotime() - authn_statement.session_index = uuid.uuid4().hex - authn_statement.session_not_on_or_after = expiration_time - - authn_context = saml.AuthnContext() - authn_context_class = saml.AuthnContextClassRef() - authn_context_class.set_text(saml.AUTHN_PASSWORD) - - authn_authority = saml.AuthenticatingAuthority() - authn_authority.set_text(issuer) - authn_context.authn_context_class_ref = authn_context_class - authn_context.authenticating_authority = authn_authority - - authn_statement.authn_context = authn_context - - return authn_statement - - def _create_assertion(self, issuer, signature, subject, authn_statement, - attribute_statement): - """Create an object that represents a SAML Assertion. - - - ... - ... - ... - ... - ... - - - :returns: XML object - - """ - assertion = saml.Assertion() - assertion.id = self.assertion_id - assertion.issue_instant = utils.isotime() - assertion.version = '2.0' - assertion.issuer = issuer - assertion.signature = signature - assertion.subject = subject - assertion.authn_statement = authn_statement - assertion.attribute_statement = attribute_statement - return assertion - - def _create_response(self, issuer, status, assertion, recipient): - """Create an object that represents a SAML Response. - - - ... - ... - ... - - - :returns: XML object - - """ - response = samlp.Response() - response.id = uuid.uuid4().hex - response.destination = recipient - response.issue_instant = utils.isotime() - response.version = '2.0' - response.issuer = issuer - response.status = status - response.assertion = assertion - return response - - def _create_signature(self): - """Create an object that represents a SAML . 
- - This must be filled with algorithms that the signing binary will apply - in order to sign the whole message. - Currently we enforce X509 signing. - Example of the template:: - - - - - - - - - - - - - - - - - - - - - :returns: XML object - - """ - canonicalization_method = xmldsig.CanonicalizationMethod() - canonicalization_method.algorithm = xmldsig.ALG_EXC_C14N - signature_method = xmldsig.SignatureMethod( - algorithm=xmldsig.SIG_RSA_SHA1) - - transforms = xmldsig.Transforms() - envelope_transform = xmldsig.Transform( - algorithm=xmldsig.TRANSFORM_ENVELOPED) - - c14_transform = xmldsig.Transform(algorithm=xmldsig.ALG_EXC_C14N) - transforms.transform = [envelope_transform, c14_transform] - - digest_method = xmldsig.DigestMethod(algorithm=xmldsig.DIGEST_SHA1) - digest_value = xmldsig.DigestValue() - - reference = xmldsig.Reference() - reference.uri = '#' + self.assertion_id - reference.digest_method = digest_method - reference.digest_value = digest_value - reference.transforms = transforms - - signed_info = xmldsig.SignedInfo() - signed_info.canonicalization_method = canonicalization_method - signed_info.signature_method = signature_method - signed_info.reference = reference - - key_info = xmldsig.KeyInfo() - key_info.x509_data = xmldsig.X509Data() - - signature = xmldsig.Signature() - signature.signed_info = signed_info - signature.signature_value = xmldsig.SignatureValue() - signature.key_info = key_info - - return signature - - -def _sign_assertion(assertion): - """Sign a SAML assertion. - - This method utilizes ``xmlsec1`` binary and signs SAML assertions in a - separate process. ``xmlsec1`` cannot read input data from stdin so the - prepared assertion needs to be serialized and stored in a temporary - file. This file will be deleted immediately after ``xmlsec1`` returns. - The signed assertion is redirected to a standard output and read using - subprocess.PIPE redirection. A ``saml.Assertion`` class is created - from the signed string again and returned. 
- - Parameters that are required in the CONF:: - * xmlsec_binary - * private key file path - * public key file path - :returns: XML object - - """ - xmlsec_binary = CONF.saml.xmlsec1_binary - idp_private_key = CONF.saml.keyfile - idp_public_key = CONF.saml.certfile - - # xmlsec1 --sign --privkey-pem privkey,cert --id-attr:ID - certificates = '%(idp_private_key)s,%(idp_public_key)s' % { - 'idp_public_key': idp_public_key, - 'idp_private_key': idp_private_key - } - - command_list = [xmlsec_binary, '--sign', '--privkey-pem', certificates, - '--id-attr:ID', 'Assertion'] - - file_path = None - try: - # NOTE(gyee): need to make the namespace prefixes explicit so - # they won't get reassigned when we wrap the assertion into - # SAML2 response - file_path = fileutils.write_to_tempfile(assertion.to_string( - nspair={'saml': saml2.NAMESPACE, - 'xmldsig': xmldsig.NAMESPACE})) - command_list.append(file_path) - subprocess = environment.subprocess - stdout = subprocess.check_output(command_list, # nosec : The contents - # of the command list are coming from - # a trusted source because the - # executable and arguments all either - # come from the config file or are - # hardcoded. The command list is - # initialized earlier in this function - # to a list and it's still a list at - # this point in the function. There is - # no opportunity for an attacker to - # attempt command injection via string - # parsing. - stderr=subprocess.STDOUT) - except Exception as e: - msg = _LE('Error when signing assertion, reason: %(reason)s%(output)s') - LOG.error(msg, - {'reason': e, - 'output': ' ' + e.output if hasattr(e, 'output') else ''}) - raise exception.SAMLSigningError(reason=e) - finally: - try: - if file_path: - os.remove(file_path) - except OSError: # nosec - # The file is already gone, good. 
- pass - - return saml2.create_class_from_xml_string(saml.Assertion, stdout) - - -class MetadataGenerator(object): - """A class for generating SAML IdP Metadata.""" - - def generate_metadata(self): - """Generate Identity Provider Metadata. - - Generate and format metadata into XML that can be exposed and - consumed by a federated Service Provider. - - :returns: XML object. - :raises keystone.exception.ValidationError: If the required - config options aren't set. - """ - self._ensure_required_values_present() - entity_descriptor = self._create_entity_descriptor() - entity_descriptor.idpsso_descriptor = ( - self._create_idp_sso_descriptor()) - return entity_descriptor - - def _create_entity_descriptor(self): - ed = md.EntityDescriptor() - ed.entity_id = CONF.saml.idp_entity_id - return ed - - def _create_idp_sso_descriptor(self): - - def get_cert(): - try: - return sigver.read_cert_from_file(CONF.saml.certfile, 'pem') - except (IOError, sigver.CertificateError) as e: - msg = _('Cannot open certificate %(cert_file)s. 
' - 'Reason: %(reason)s') - msg = msg % {'cert_file': CONF.saml.certfile, 'reason': e} - LOG.error(msg) - raise IOError(msg) - - def key_descriptor(): - cert = get_cert() - return md.KeyDescriptor( - key_info=xmldsig.KeyInfo( - x509_data=xmldsig.X509Data( - x509_certificate=xmldsig.X509Certificate(text=cert) - ) - ), use='signing' - ) - - def single_sign_on_service(): - idp_sso_endpoint = CONF.saml.idp_sso_endpoint - return md.SingleSignOnService( - binding=saml2.BINDING_URI, - location=idp_sso_endpoint) - - def organization(): - name = md.OrganizationName(lang=CONF.saml.idp_lang, - text=CONF.saml.idp_organization_name) - display_name = md.OrganizationDisplayName( - lang=CONF.saml.idp_lang, - text=CONF.saml.idp_organization_display_name) - url = md.OrganizationURL(lang=CONF.saml.idp_lang, - text=CONF.saml.idp_organization_url) - - return md.Organization( - organization_display_name=display_name, - organization_url=url, organization_name=name) - - def contact_person(): - company = md.Company(text=CONF.saml.idp_contact_company) - given_name = md.GivenName(text=CONF.saml.idp_contact_name) - surname = md.SurName(text=CONF.saml.idp_contact_surname) - email = md.EmailAddress(text=CONF.saml.idp_contact_email) - telephone = md.TelephoneNumber( - text=CONF.saml.idp_contact_telephone) - contact_type = CONF.saml.idp_contact_type - - return md.ContactPerson( - company=company, given_name=given_name, sur_name=surname, - email_address=email, telephone_number=telephone, - contact_type=contact_type) - - def name_id_format(): - return md.NameIDFormat(text=saml.NAMEID_FORMAT_TRANSIENT) - - idpsso = md.IDPSSODescriptor() - idpsso.protocol_support_enumeration = samlp.NAMESPACE - idpsso.key_descriptor = key_descriptor() - idpsso.single_sign_on_service = single_sign_on_service() - idpsso.name_id_format = name_id_format() - if self._check_organization_values(): - idpsso.organization = organization() - if self._check_contact_person_values(): - idpsso.contact_person = contact_person() - 
return idpsso - - def _ensure_required_values_present(self): - """Ensure idp_sso_endpoint and idp_entity_id have values.""" - if CONF.saml.idp_entity_id is None: - msg = _('Ensure configuration option idp_entity_id is set.') - raise exception.ValidationError(msg) - if CONF.saml.idp_sso_endpoint is None: - msg = _('Ensure configuration option idp_sso_endpoint is set.') - raise exception.ValidationError(msg) - - def _check_contact_person_values(self): - """Determine if contact information is included in metadata.""" - # Check if we should include contact information - params = [CONF.saml.idp_contact_company, - CONF.saml.idp_contact_name, - CONF.saml.idp_contact_surname, - CONF.saml.idp_contact_email, - CONF.saml.idp_contact_telephone] - for value in params: - if value is None: - return False - - # Check if contact type is an invalid value - valid_type_values = ['technical', 'other', 'support', 'administrative', - 'billing'] - if CONF.saml.idp_contact_type not in valid_type_values: - msg = _('idp_contact_type must be one of: [technical, other, ' - 'support, administrative or billing.') - raise exception.ValidationError(msg) - return True - - def _check_organization_values(self): - """Determine if organization information is included in metadata.""" - params = [CONF.saml.idp_organization_name, - CONF.saml.idp_organization_display_name, - CONF.saml.idp_organization_url] - for value in params: - if value is None: - return False - return True - - -class ECPGenerator(object): - """A class for generating an ECP assertion.""" - - @staticmethod - def generate_ecp(saml_assertion, relay_state_prefix): - ecp_generator = ECPGenerator() - header = ecp_generator._create_header(relay_state_prefix) - body = ecp_generator._create_body(saml_assertion) - envelope = soapenv.Envelope(header=header, body=body) - return envelope - - def _create_header(self, relay_state_prefix): - relay_state_text = relay_state_prefix + uuid.uuid4().hex - relay_state = ecp.RelayState(actor=client_base.ACTOR, 
- must_understand='1', - text=relay_state_text) - header = soapenv.Header() - header.extension_elements = ( - [saml2.element_to_extension_element(relay_state)]) - return header - - def _create_body(self, saml_assertion): - body = soapenv.Body() - body.extension_elements = ( - [saml2.element_to_extension_element(saml_assertion)]) - return body diff --git a/keystone-moon/keystone/federation/routers.py b/keystone-moon/keystone/federation/routers.py deleted file mode 100644 index a463ca63..00000000 --- a/keystone-moon/keystone/federation/routers.py +++ /dev/null @@ -1,252 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import functools - -from keystone.common import json_home -from keystone.common import wsgi -from keystone.federation import controllers - - -build_resource_relation = functools.partial( - json_home.build_v3_extension_resource_relation, - extension_name='OS-FEDERATION', extension_version='1.0') - -build_parameter_relation = functools.partial( - json_home.build_v3_extension_parameter_relation, - extension_name='OS-FEDERATION', extension_version='1.0') - -IDP_ID_PARAMETER_RELATION = build_parameter_relation(parameter_name='idp_id') -PROTOCOL_ID_PARAMETER_RELATION = build_parameter_relation( - parameter_name='protocol_id') -SP_ID_PARAMETER_RELATION = build_parameter_relation(parameter_name='sp_id') - - -class Routers(wsgi.RoutersBase): - """API Endpoints for the Federation extension. 
- - The API looks like:: - - PUT /OS-FEDERATION/identity_providers/{idp_id} - GET /OS-FEDERATION/identity_providers - GET /OS-FEDERATION/identity_providers/{idp_id} - DELETE /OS-FEDERATION/identity_providers/{idp_id} - PATCH /OS-FEDERATION/identity_providers/{idp_id} - - PUT /OS-FEDERATION/identity_providers/ - {idp_id}/protocols/{protocol_id} - GET /OS-FEDERATION/identity_providers/ - {idp_id}/protocols - GET /OS-FEDERATION/identity_providers/ - {idp_id}/protocols/{protocol_id} - PATCH /OS-FEDERATION/identity_providers/ - {idp_id}/protocols/{protocol_id} - DELETE /OS-FEDERATION/identity_providers/ - {idp_id}/protocols/{protocol_id} - - PUT /OS-FEDERATION/mappings - GET /OS-FEDERATION/mappings - PATCH /OS-FEDERATION/mappings/{mapping_id} - GET /OS-FEDERATION/mappings/{mapping_id} - DELETE /OS-FEDERATION/mappings/{mapping_id} - - GET /OS-FEDERATION/projects - GET /OS-FEDERATION/domains - - PUT /OS-FEDERATION/service_providers/{sp_id} - GET /OS-FEDERATION/service_providers - GET /OS-FEDERATION/service_providers/{sp_id} - DELETE /OS-FEDERATION/service_providers/{sp_id} - PATCH /OS-FEDERATION/service_providers/{sp_id} - - GET /OS-FEDERATION/identity_providers/{idp_id}/ - protocols/{protocol_id}/auth - POST /OS-FEDERATION/identity_providers/{idp_id}/ - protocols/{protocol_id}/auth - GET /auth/OS-FEDERATION/identity_providers/ - {idp_id}/protocols/{protocol_id}/websso - ?origin=https%3A//horizon.example.com - POST /auth/OS-FEDERATION/identity_providers/ - {idp_id}/protocols/{protocol_id}/websso - ?origin=https%3A//horizon.example.com - - - POST /auth/OS-FEDERATION/saml2 - POST /auth/OS-FEDERATION/saml2/ecp - GET /OS-FEDERATION/saml2/metadata - - GET /auth/OS-FEDERATION/websso/{protocol_id} - ?origin=https%3A//horizon.example.com - - POST /auth/OS-FEDERATION/websso/{protocol_id} - ?origin=https%3A//horizon.example.com - - """ - - def _construct_url(self, suffix): - return "/OS-FEDERATION/%s" % suffix - - def append_v3_routers(self, mapper, routers): - auth_controller = 
controllers.Auth() - idp_controller = controllers.IdentityProvider() - protocol_controller = controllers.FederationProtocol() - mapping_controller = controllers.MappingController() - project_controller = controllers.ProjectAssignmentV3() - domain_controller = controllers.DomainV3() - saml_metadata_controller = controllers.SAMLMetadataV3() - sp_controller = controllers.ServiceProvider() - - # Identity Provider CRUD operations - - self._add_resource( - mapper, idp_controller, - path=self._construct_url('identity_providers/{idp_id}'), - get_action='get_identity_provider', - put_action='create_identity_provider', - patch_action='update_identity_provider', - delete_action='delete_identity_provider', - rel=build_resource_relation(resource_name='identity_provider'), - path_vars={ - 'idp_id': IDP_ID_PARAMETER_RELATION, - }) - self._add_resource( - mapper, idp_controller, - path=self._construct_url('identity_providers'), - get_action='list_identity_providers', - rel=build_resource_relation(resource_name='identity_providers')) - - # Protocol CRUD operations - - self._add_resource( - mapper, protocol_controller, - path=self._construct_url('identity_providers/{idp_id}/protocols/' - '{protocol_id}'), - get_action='get_protocol', - put_action='create_protocol', - patch_action='update_protocol', - delete_action='delete_protocol', - rel=build_resource_relation( - resource_name='identity_provider_protocol'), - path_vars={ - 'idp_id': IDP_ID_PARAMETER_RELATION, - 'protocol_id': PROTOCOL_ID_PARAMETER_RELATION, - }) - self._add_resource( - mapper, protocol_controller, - path=self._construct_url('identity_providers/{idp_id}/protocols'), - get_action='list_protocols', - rel=build_resource_relation( - resource_name='identity_provider_protocols'), - path_vars={ - 'idp_id': IDP_ID_PARAMETER_RELATION, - }) - - # Mapping CRUD operations - - self._add_resource( - mapper, mapping_controller, - path=self._construct_url('mappings/{mapping_id}'), - get_action='get_mapping', - 
put_action='create_mapping', - patch_action='update_mapping', - delete_action='delete_mapping', - rel=build_resource_relation(resource_name='mapping'), - path_vars={ - 'mapping_id': build_parameter_relation( - parameter_name='mapping_id'), - }) - self._add_resource( - mapper, mapping_controller, - path=self._construct_url('mappings'), - get_action='list_mappings', - rel=build_resource_relation(resource_name='mappings')) - - # Service Providers CRUD operations - - self._add_resource( - mapper, sp_controller, - path=self._construct_url('service_providers/{sp_id}'), - get_action='get_service_provider', - put_action='create_service_provider', - patch_action='update_service_provider', - delete_action='delete_service_provider', - rel=build_resource_relation(resource_name='service_provider'), - path_vars={ - 'sp_id': SP_ID_PARAMETER_RELATION, - }) - - self._add_resource( - mapper, sp_controller, - path=self._construct_url('service_providers'), - get_action='list_service_providers', - rel=build_resource_relation(resource_name='service_providers')) - - self._add_resource( - mapper, domain_controller, - path=self._construct_url('domains'), - new_path='/auth/domains', - get_action='list_domains_for_groups', - rel=build_resource_relation(resource_name='domains')) - self._add_resource( - mapper, project_controller, - path=self._construct_url('projects'), - new_path='/auth/projects', - get_action='list_projects_for_groups', - rel=build_resource_relation(resource_name='projects')) - - # Auth operations - self._add_resource( - mapper, auth_controller, - path=self._construct_url('identity_providers/{idp_id}/' - 'protocols/{protocol_id}/auth'), - get_post_action='federated_authentication', - rel=build_resource_relation( - resource_name='identity_provider_protocol_auth'), - path_vars={ - 'idp_id': IDP_ID_PARAMETER_RELATION, - 'protocol_id': PROTOCOL_ID_PARAMETER_RELATION, - }) - self._add_resource( - mapper, auth_controller, - path='/auth' + self._construct_url('saml2'), - 
post_action='create_saml_assertion', - rel=build_resource_relation(resource_name='saml2')) - self._add_resource( - mapper, auth_controller, - path='/auth' + self._construct_url('saml2/ecp'), - post_action='create_ecp_assertion', - rel=build_resource_relation(resource_name='ecp')) - self._add_resource( - mapper, auth_controller, - path='/auth' + self._construct_url('websso/{protocol_id}'), - get_post_action='federated_sso_auth', - rel=build_resource_relation(resource_name='websso'), - path_vars={ - 'protocol_id': PROTOCOL_ID_PARAMETER_RELATION, - }) - self._add_resource( - mapper, auth_controller, - path='/auth' + self._construct_url( - 'identity_providers/{idp_id}/protocols/{protocol_id}/websso'), - get_post_action='federated_idp_specific_sso_auth', - rel=build_resource_relation(resource_name='identity_providers'), - path_vars={ - 'idp_id': IDP_ID_PARAMETER_RELATION, - 'protocol_id': PROTOCOL_ID_PARAMETER_RELATION, - }) - - # Keystone-Identity-Provider metadata endpoint - self._add_resource( - mapper, saml_metadata_controller, - path=self._construct_url('saml2/metadata'), - get_action='get_metadata', - rel=build_resource_relation(resource_name='metadata')) diff --git a/keystone-moon/keystone/federation/schema.py b/keystone-moon/keystone/federation/schema.py deleted file mode 100644 index 6cdfd1f5..00000000 --- a/keystone-moon/keystone/federation/schema.py +++ /dev/null @@ -1,115 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone.common import validation -from keystone.common.validation import parameter_types - - -basic_property_id = { - 'type': 'object', - 'properties': { - 'id': { - 'type': 'string' - } - }, - 'required': ['id'], - 'additionalProperties': False -} - -saml_create = { - 'type': 'object', - 'properties': { - 'identity': { - 'type': 'object', - 'properties': { - 'token': basic_property_id, - 'methods': { - 'type': 'array' - } - }, - 'required': ['token'], - 'additionalProperties': False - }, - 'scope': { - 'type': 'object', - 'properties': { - 'service_provider': basic_property_id - }, - 'required': ['service_provider'], - 'additionalProperties': False - }, - }, - 'required': ['identity', 'scope'], - 'additionalProperties': False -} - -_service_provider_properties = { - # NOTE(rodrigods): The database accepts URLs with 256 as max length, - # but parameter_types.url uses 225 as max length. - 'auth_url': parameter_types.url, - 'sp_url': parameter_types.url, - 'description': validation.nullable(parameter_types.description), - 'enabled': parameter_types.boolean, - 'relay_state_prefix': validation.nullable(parameter_types.description) -} - -service_provider_create = { - 'type': 'object', - 'properties': _service_provider_properties, - # NOTE(rodrigods): 'id' is not required since it is passed in the URL - 'required': ['auth_url', 'sp_url'], - 'additionalProperties': False -} - -service_provider_update = { - 'type': 'object', - 'properties': _service_provider_properties, - # Make sure at least one property is being updated - 'minProperties': 1, - 'additionalProperties': False -} - -_identity_provider_properties = { - 'enabled': parameter_types.boolean, - 'description': validation.nullable(parameter_types.description), - 'remote_ids': { - 'type': ['array', 'null'], - 'items': { - 'type': 'string' - }, - 'uniqueItems': True - } -} - -identity_provider_create = { - 'type': 'object', - 'properties': _identity_provider_properties, - 'additionalProperties': False -} - 
-identity_provider_update = { - 'type': 'object', - 'properties': _identity_provider_properties, - # Make sure at least one property is being updated - 'minProperties': 1, - 'additionalProperties': False -} - -federation_protocol_schema = { - 'type': 'object', - 'properties': { - 'mapping_id': parameter_types.mapping_id_string - }, - # `mapping_id` is the property that cannot be ignored - 'minProperties': 1, - 'additionalProperties': False -} diff --git a/keystone-moon/keystone/federation/utils.py b/keystone-moon/keystone/federation/utils.py deleted file mode 100644 index 1d215a68..00000000 --- a/keystone-moon/keystone/federation/utils.py +++ /dev/null @@ -1,872 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Utilities for Federation Extension.""" - -import ast -import re - -import jsonschema -from oslo_config import cfg -from oslo_log import log -from oslo_utils import timeutils -import six - -from keystone import exception -from keystone.i18n import _, _LW - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -class UserType(object): - """User mapping type.""" - - EPHEMERAL = 'ephemeral' - LOCAL = 'local' - - -MAPPING_SCHEMA = { - "type": "object", - "required": ['rules'], - "properties": { - "rules": { - "minItems": 1, - "type": "array", - "items": { - "type": "object", - "required": ['local', 'remote'], - "additionalProperties": False, - "properties": { - "local": { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "user": { - "type": "object", - "properties": { - "id": {"type": "string"}, - "name": {"type": "string"}, - "email": {"type": "string"}, - "domain": { - "type": "object", - "properties": { - "id": {"type": "string"}, - "name": {"type": "string"} - }, - "additionalProperties": False, - }, - "type": { - "type": "string", - "enum": [UserType.EPHEMERAL, - UserType.LOCAL] - } - }, - "additionalProperties": False - }, - "group": { - "type": "object", - "properties": { - "id": {"type": "string"}, - "name": {"type": "string"}, - "domain": { - "type": "object", - "properties": { - "id": {"type": "string"}, - "name": {"type": "string"} - }, - "additionalProperties": False, - }, - }, - "additionalProperties": False, - }, - "groups": { - "type": "string" - }, - "group_ids": { - "type": "string" - }, - "domain": { - "type": "object", - "properties": { - "id": {"type": "string"}, - "name": {"type": "string"} - }, - "additionalProperties": False - } - } - } - }, - "remote": { - "minItems": 1, - "type": "array", - "items": { - "type": "object", - "oneOf": [ - {"$ref": "#/definitions/empty"}, - {"$ref": "#/definitions/any_one_of"}, - {"$ref": "#/definitions/not_any_of"}, - {"$ref": "#/definitions/blacklist"}, 
- {"$ref": "#/definitions/whitelist"} - ], - } - } - } - } - } - }, - "definitions": { - "empty": { - "type": "object", - "required": ['type'], - "properties": { - "type": { - "type": "string" - }, - }, - "additionalProperties": False, - }, - "any_one_of": { - "type": "object", - "additionalProperties": False, - "required": ['type', 'any_one_of'], - "properties": { - "type": { - "type": "string" - }, - "any_one_of": { - "type": "array" - }, - "regex": { - "type": "boolean" - } - } - }, - "not_any_of": { - "type": "object", - "additionalProperties": False, - "required": ['type', 'not_any_of'], - "properties": { - "type": { - "type": "string" - }, - "not_any_of": { - "type": "array" - }, - "regex": { - "type": "boolean" - } - } - }, - "blacklist": { - "type": "object", - "additionalProperties": False, - "required": ['type', 'blacklist'], - "properties": { - "type": { - "type": "string" - }, - "blacklist": { - "type": "array" - } - } - }, - "whitelist": { - "type": "object", - "additionalProperties": False, - "required": ['type', 'whitelist'], - "properties": { - "type": { - "type": "string" - }, - "whitelist": { - "type": "array" - } - } - } - } -} - - -class DirectMaps(object): - """An abstraction around the remote matches. - - Each match is treated internally as a list. - """ - - def __init__(self): - self._matches = [] - - def add(self, values): - """Adds a matched value to the list of matches. 
- - :param list value: the match to save - - """ - self._matches.append(values) - - def __getitem__(self, idx): - """Used by Python when executing ``''.format(*DirectMaps())``.""" - value = self._matches[idx] - if isinstance(value, list) and len(value) == 1: - return value[0] - else: - return value - - -def validate_mapping_structure(ref): - v = jsonschema.Draft4Validator(MAPPING_SCHEMA) - - messages = '' - for error in sorted(v.iter_errors(ref), key=str): - messages = messages + error.message + "\n" - - if messages: - raise exception.ValidationError(messages) - - -def validate_expiration(token_ref): - if timeutils.utcnow() > token_ref.expires: - raise exception.Unauthorized(_('Federation token is expired')) - - -def validate_groups_cardinality(group_ids, mapping_id): - """Check if groups list is non-empty. - - :param group_ids: list of group ids - :type group_ids: list of str - - :raises keystone.exception.MissingGroups: if ``group_ids`` cardinality is 0 - - """ - if not group_ids: - raise exception.MissingGroups(mapping_id=mapping_id) - - -def get_remote_id_parameter(protocol): - # NOTE(marco-fargetta): Since we support any protocol ID, we attempt to - # retrieve the remote_id_attribute of the protocol ID. If it's not - # registered in the config, then register the option and try again. - # This allows the user to register protocols other than oidc and saml2. - remote_id_parameter = None - try: - remote_id_parameter = CONF[protocol]['remote_id_attribute'] - except AttributeError: - CONF.register_opt(cfg.StrOpt('remote_id_attribute'), - group=protocol) - try: - remote_id_parameter = CONF[protocol]['remote_id_attribute'] - except AttributeError: # nosec - # No remote ID attr, will be logged and use the default instead. - pass - if not remote_id_parameter: - LOG.debug('Cannot find "remote_id_attribute" in configuration ' - 'group %s. 
Trying default location in ' - 'group federation.', protocol) - remote_id_parameter = CONF.federation.remote_id_attribute - - return remote_id_parameter - - -def validate_idp(idp, protocol, assertion): - """The IdP providing the assertion should be registered for the mapping.""" - remote_id_parameter = get_remote_id_parameter(protocol) - if not remote_id_parameter or not idp['remote_ids']: - LOG.debug('Impossible to identify the IdP %s ', idp['id']) - # If nothing is defined, the administrator may want to - # allow the mapping of every IdP - return - try: - idp_remote_identifier = assertion[remote_id_parameter] - except KeyError: - msg = _('Could not find Identity Provider identifier in ' - 'environment') - raise exception.ValidationError(msg) - if idp_remote_identifier not in idp['remote_ids']: - msg = _('Incoming identity provider identifier not included ' - 'among the accepted identifiers.') - raise exception.Forbidden(msg) - - -def validate_groups_in_backend(group_ids, mapping_id, identity_api): - """Iterate over group ids and make sure they are present in the backend. - - This call is not transactional. - :param group_ids: IDs of the groups to be checked - :type group_ids: list of str - - :param mapping_id: id of the mapping used for this operation - :type mapping_id: str - - :param identity_api: Identity Manager object used for communication with - backend - :type identity_api: identity.Manager - - :raises keystone.exception.MappedGroupNotFound: If the group returned by - mapping was not found in the backend. - - """ - for group_id in group_ids: - try: - identity_api.get_group(group_id) - except exception.GroupNotFound: - raise exception.MappedGroupNotFound( - group_id=group_id, mapping_id=mapping_id) - - -def validate_groups(group_ids, mapping_id, identity_api): - """Check group ids cardinality and check their existence in the backend. - - This call is not transactional. 
- :param group_ids: IDs of the groups to be checked - :type group_ids: list of str - - :param mapping_id: id of the mapping used for this operation - :type mapping_id: str - - :param identity_api: Identity Manager object used for communication with - backend - :type identity_api: identity.Manager - - :raises keystone.exception.MappedGroupNotFound: If the group returned by - mapping was not found in the backend. - :raises keystone.exception.MissingGroups: If ``group_ids`` cardinality - is 0. - - """ - validate_groups_cardinality(group_ids, mapping_id) - validate_groups_in_backend(group_ids, mapping_id, identity_api) - - -# TODO(marek-denis): Optimize this function, so the number of calls to the -# backend are minimized. -def transform_to_group_ids(group_names, mapping_id, - identity_api, resource_api): - """Transform groups identified by name/domain to their ids - - Function accepts list of groups identified by a name and domain giving - a list of group ids in return. - - Example of group_names parameter:: - - [ - { - "name": "group_name", - "domain": { - "id": "domain_id" - }, - }, - { - "name": "group_name_2", - "domain": { - "name": "domain_name" - } - } - ] - - :param group_names: list of group identified by name and its domain. - :type group_names: list - - :param mapping_id: id of the mapping used for mapping assertion into - local credentials - :type mapping_id: str - - :param identity_api: identity_api object - :param resource_api: resource manager object - - :returns: generator object with group ids - - :raises keystone.exception.MappedGroupNotFound: in case asked group doesn't - exist in the backend. - - """ - def resolve_domain(domain): - """Return domain id. - - Input is a dictionary with a domain identified either by a ``id`` or a - ``name``. In the latter case system will attempt to fetch domain object - from the backend. 
- - :returns: domain's id - :rtype: str - - """ - domain_id = (domain.get('id') or - resource_api.get_domain_by_name( - domain.get('name')).get('id')) - return domain_id - - for group in group_names: - try: - group_dict = identity_api.get_group_by_name( - group['name'], resolve_domain(group['domain'])) - yield group_dict['id'] - except exception.GroupNotFound: - LOG.debug('Skip mapping group %s; has no entry in the backend', - group['name']) - - -def get_assertion_params_from_env(context): - LOG.debug('Environment variables: %s', context['environment']) - prefix = CONF.federation.assertion_prefix - for k, v in list(context['environment'].items()): - if not k.startswith(prefix): - continue - # These bytes may be decodable as ISO-8859-1 according to Section - # 3.2.4 of RFC 7230. Let's assume that our web server plugins are - # correctly encoding the data. - if not isinstance(v, six.text_type) and getattr(v, 'decode', False): - v = v.decode('ISO-8859-1') - yield (k, v) - - -class RuleProcessor(object): - """A class to process assertions and mapping rules.""" - - class _EvalType(object): - """Mapping rule evaluation types.""" - - ANY_ONE_OF = 'any_one_of' - NOT_ANY_OF = 'not_any_of' - BLACKLIST = 'blacklist' - WHITELIST = 'whitelist' - - def __init__(self, mapping_id, rules): - """Initialize RuleProcessor. - - Example rules can be found at: - :class:`keystone.tests.mapping_fixtures` - - :param mapping_id: id for the mapping - :type mapping_id: string - :param rules: rules from a mapping - :type rules: dict - - """ - self.mapping_id = mapping_id - self.rules = rules - - def process(self, assertion_data): - """Transform assertion to a dictionary. - - The dictionary contains mapping of user name and group ids - based on mapping rules. - - This function will iterate through the mapping rules to find - assertions that are valid. 
- - :param assertion_data: an assertion containing values from an IdP - :type assertion_data: dict - - Example assertion_data:: - - { - 'Email': 'testacct@example.com', - 'UserName': 'testacct', - 'FirstName': 'Test', - 'LastName': 'Account', - 'orgPersonType': 'Tester' - } - - :returns: dictionary with user and group_ids - - The expected return structure is:: - - { - 'name': 'foobar', - 'group_ids': ['abc123', 'def456'], - 'group_names': [ - { - 'name': 'group_name_1', - 'domain': { - 'name': 'domain1' - } - }, - { - 'name': 'group_name_1_1', - 'domain': { - 'name': 'domain1' - } - }, - { - 'name': 'group_name_2', - 'domain': { - 'id': 'xyz132' - } - } - ] - } - - """ - # Assertions will come in as string key-value pairs, and will use a - # semi-colon to indicate multiple values, i.e. groups. - # This will create a new dictionary where the values are arrays, and - # any multiple values are stored in the arrays. - LOG.debug('assertion data: %s', assertion_data) - assertion = {n: v.split(';') for n, v in assertion_data.items() - if isinstance(v, six.string_types)} - LOG.debug('assertion: %s', assertion) - identity_values = [] - - LOG.debug('rules: %s', self.rules) - for rule in self.rules: - direct_maps = self._verify_all_requirements(rule['remote'], - assertion) - - # If the compare comes back as None, then the rule did not apply - # to the assertion data, go on to the next rule - if direct_maps is None: - continue - - # If there are no direct mappings, then add the local mapping - # directly to the array of saved values. However, if there is - # a direct mapping, then perform variable replacement. 
- if not direct_maps: - identity_values += rule['local'] - else: - for local in rule['local']: - new_local = self._update_local_mapping(local, direct_maps) - identity_values.append(new_local) - - LOG.debug('identity_values: %s', identity_values) - mapped_properties = self._transform(identity_values) - LOG.debug('mapped_properties: %s', mapped_properties) - return mapped_properties - - def _transform(self, identity_values): - """Transform local mappings, to an easier to understand format. - - Transform the incoming array to generate the return value for - the process function. Generating content for Keystone tokens will - be easier if some pre-processing is done at this level. - - :param identity_values: local mapping from valid evaluations - :type identity_values: array of dict - - Example identity_values:: - - [ - { - 'group': {'id': '0cd5e9'}, - 'user': { - 'email': 'bob@example.com' - }, - }, - { - 'groups': ['member', 'admin', tester'], - 'domain': { - 'name': 'default_domain' - } - }, - { - 'group_ids': ['abc123', 'def456', '0cd5e9'] - } - ] - - :returns: dictionary with user name, group_ids and group_names. 
- :rtype: dict - - """ - def extract_groups(groups_by_domain): - for groups in list(groups_by_domain.values()): - for group in list({g['name']: g for g in groups}.values()): - yield group - - def normalize_user(user): - """Parse and validate user mapping.""" - user_type = user.get('type') - - if user_type and user_type not in (UserType.EPHEMERAL, - UserType.LOCAL): - msg = _("User type %s not supported") % user_type - raise exception.ValidationError(msg) - - if user_type is None: - user_type = user['type'] = UserType.EPHEMERAL - - if user_type == UserType.EPHEMERAL: - user['domain'] = { - 'id': CONF.federation.federated_domain_name - } - - # initialize the group_ids as a set to eliminate duplicates - user = {} - group_ids = set() - group_names = list() - groups_by_domain = dict() - - # if mapping yield no valid identity values, we should bail right away - # instead of continuing on with a normalized bogus user - if not identity_values: - msg = _("Could not map any federated user properties to identity " - "values. Check debug logs or the mapping used for " - "additional details.") - LOG.warning(msg) - raise exception.ValidationError(msg) - - for identity_value in identity_values: - if 'user' in identity_value: - # if a mapping outputs more than one user name, log it - if user: - LOG.warning(_LW('Ignoring user name')) - else: - user = identity_value.get('user') - if 'group' in identity_value: - group = identity_value['group'] - if 'id' in group: - group_ids.add(group['id']) - elif 'name' in group: - domain = (group['domain'].get('name') or - group['domain'].get('id')) - groups_by_domain.setdefault(domain, list()).append(group) - group_names.extend(extract_groups(groups_by_domain)) - if 'groups' in identity_value: - if 'domain' not in identity_value: - msg = _("Invalid rule: %(identity_value)s. 
Both 'groups' " - "and 'domain' keywords must be specified.") - msg = msg % {'identity_value': identity_value} - raise exception.ValidationError(msg) - # In this case, identity_value['groups'] is a string - # representation of a list, and we want a real list. This is - # due to the way we do direct mapping substitutions today (see - # function _update_local_mapping() ) - try: - group_names_list = ast.literal_eval( - identity_value['groups']) - except ValueError: - group_names_list = [identity_value['groups']] - domain = identity_value['domain'] - group_dicts = [{'name': name, 'domain': domain} for name in - group_names_list] - - group_names.extend(group_dicts) - if 'group_ids' in identity_value: - # If identity_values['group_ids'] is a string representation - # of a list, parse it to a real list. Also, if the provided - # group_ids parameter contains only one element, it will be - # parsed as a simple string, and not a list or the - # representation of a list. - try: - group_ids.update( - ast.literal_eval(identity_value['group_ids'])) - except (ValueError, SyntaxError): - group_ids.update([identity_value['group_ids']]) - - normalize_user(user) - - return {'user': user, - 'group_ids': list(group_ids), - 'group_names': group_names} - - def _update_local_mapping(self, local, direct_maps): - """Replace any {0}, {1} ... values with data from the assertion. - - :param local: local mapping reference that needs to be updated - :type local: dict - :param direct_maps: identity values used to update local - :type direct_maps: keystone.federation.utils.DirectMaps - - Example local:: - - {'user': {'name': '{0} {1}', 'email': '{2}'}} - - Example direct_maps:: - - ['Bob', 'Thompson', 'bob@example.com'] - - :returns: new local mapping reference with replaced values. 
- - The expected return structure is:: - - {'user': {'name': 'Bob Thompson', 'email': 'bob@example.org'}} - - :raises keystone.exception.DirectMappingError: when referring to a - remote match from a local section of a rule - - """ - LOG.debug('direct_maps: %s', direct_maps) - LOG.debug('local: %s', local) - new = {} - for k, v in local.items(): - if isinstance(v, dict): - new_value = self._update_local_mapping(v, direct_maps) - else: - try: - new_value = v.format(*direct_maps) - except IndexError: - raise exception.DirectMappingError( - mapping_id=self.mapping_id) - - new[k] = new_value - return new - - def _verify_all_requirements(self, requirements, assertion): - """Compare remote requirements of a rule against the assertion. - - If a value of ``None`` is returned, the rule with this assertion - doesn't apply. - If an array of zero length is returned, then there are no direct - mappings to be performed, but the rule is valid. - Otherwise, then it will first attempt to filter the values according - to blacklist or whitelist rules and finally return the values in - order, to be directly mapped. 
- - :param requirements: list of remote requirements from rules - :type requirements: list - - Example requirements:: - - [ - { - "type": "UserName" - }, - { - "type": "orgPersonType", - "any_one_of": [ - "Customer" - ] - }, - { - "type": "ADFS_GROUPS", - "whitelist": [ - "g1", "g2", "g3", "g4" - ] - } - ] - - :param assertion: dict of attributes from an IdP - :type assertion: dict - - Example assertion:: - - { - 'UserName': ['testacct'], - 'LastName': ['Account'], - 'orgPersonType': ['Tester'], - 'Email': ['testacct@example.com'], - 'FirstName': ['Test'], - 'ADFS_GROUPS': ['g1', 'g2'] - } - - :returns: identity values used to update local - :rtype: keystone.federation.utils.DirectMaps or None - - """ - direct_maps = DirectMaps() - - for requirement in requirements: - requirement_type = requirement['type'] - direct_map_values = assertion.get(requirement_type) - regex = requirement.get('regex', False) - - if not direct_map_values: - return None - - any_one_values = requirement.get(self._EvalType.ANY_ONE_OF) - if any_one_values is not None: - if self._evaluate_requirement(any_one_values, - direct_map_values, - self._EvalType.ANY_ONE_OF, - regex): - continue - else: - return None - - not_any_values = requirement.get(self._EvalType.NOT_ANY_OF) - if not_any_values is not None: - if self._evaluate_requirement(not_any_values, - direct_map_values, - self._EvalType.NOT_ANY_OF, - regex): - continue - else: - return None - - # If 'any_one_of' or 'not_any_of' are not found, then values are - # within 'type'. Attempt to find that 'type' within the assertion, - # and filter these values if 'whitelist' or 'blacklist' is set. - blacklisted_values = requirement.get(self._EvalType.BLACKLIST) - whitelisted_values = requirement.get(self._EvalType.WHITELIST) - - # If a blacklist or whitelist is used, we want to map to the - # whole list instead of just its values separately. 
- if blacklisted_values is not None: - direct_map_values = [v for v in direct_map_values - if v not in blacklisted_values] - elif whitelisted_values is not None: - direct_map_values = [v for v in direct_map_values - if v in whitelisted_values] - - direct_maps.add(direct_map_values) - - LOG.debug('updating a direct mapping: %s', direct_map_values) - - return direct_maps - - def _evaluate_values_by_regex(self, values, assertion_values): - for value in values: - for assertion_value in assertion_values: - if re.search(value, assertion_value): - return True - return False - - def _evaluate_requirement(self, values, assertion_values, - eval_type, regex): - """Evaluate the incoming requirement and assertion. - - If the requirement type does not exist in the assertion data, then - return False. If regex is specified, then compare the values and - assertion values. Otherwise, grab the intersection of the values - and use that to compare against the evaluation type. - - :param values: list of allowed values, defined in the requirement - :type values: list - :param assertion_values: The values from the assertion to evaluate - :type assertion_values: list/string - :param eval_type: determine how to evaluate requirements - :type eval_type: string - :param regex: perform evaluation with regex - :type regex: boolean - - :returns: boolean, whether requirement is valid or not. 
- - """ - if regex: - any_match = self._evaluate_values_by_regex(values, - assertion_values) - else: - any_match = bool(set(values).intersection(set(assertion_values))) - if any_match and eval_type == self._EvalType.ANY_ONE_OF: - return True - if not any_match and eval_type == self._EvalType.NOT_ANY_OF: - return True - - return False - - -def assert_enabled_identity_provider(federation_api, idp_id): - identity_provider = federation_api.get_idp(idp_id) - if identity_provider.get('enabled') is not True: - msg = _('Identity Provider %(idp)s is disabled') % {'idp': idp_id} - LOG.debug(msg) - raise exception.Forbidden(msg) - - -def assert_enabled_service_provider_object(service_provider): - if service_provider.get('enabled') is not True: - sp_id = service_provider['id'] - msg = _('Service Provider %(sp)s is disabled') % {'sp': sp_id} - LOG.debug(msg) - raise exception.Forbidden(msg) diff --git a/keystone-moon/keystone/hacking/__init__.py b/keystone-moon/keystone/hacking/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/hacking/checks.py b/keystone-moon/keystone/hacking/checks.py deleted file mode 100644 index 5d715d91..00000000 --- a/keystone-moon/keystone/hacking/checks.py +++ /dev/null @@ -1,446 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Keystone's pep8 extensions. - -In order to make the review process faster and easier for core devs we are -adding some Keystone specific pep8 checks. 
This will catch common errors -so that core devs don't have to. - -There are two types of pep8 extensions. One is a function that takes either -a physical or logical line. The physical or logical line is the first param -in the function definition and can be followed by other parameters supported -by pep8. The second type is a class that parses AST trees. For more info -please see pep8.py. -""" - -import ast -import re - -import six - - -class BaseASTChecker(ast.NodeVisitor): - """Provides a simple framework for writing AST-based checks. - - Subclasses should implement visit_* methods like any other AST visitor - implementation. When they detect an error for a particular node the - method should call ``self.add_error(offending_node)``. Details about - where in the code the error occurred will be pulled from the node - object. - - Subclasses should also provide a class variable named CHECK_DESC to - be used for the human readable error message. - - """ - - def __init__(self, tree, filename): - """This object is created automatically by pep8. - - :param tree: an AST tree - :param filename: name of the file being analyzed - (ignored by our checks) - """ - self._tree = tree - self._errors = [] - - def run(self): - """Called automatically by pep8.""" - self.visit(self._tree) - return self._errors - - def add_error(self, node, message=None): - """Add an error caused by a node to the list of errors for pep8.""" - message = message or self.CHECK_DESC - error = (node.lineno, node.col_offset, message, self.__class__) - self._errors.append(error) - - -class CheckForMutableDefaultArgs(BaseASTChecker): - """Checks for the use of mutable objects as function/method defaults. - - We are only checking for list and dict literals at this time. This means - that a developer could specify an instance of their own and cause a bug. - The fix for this is probably more work than it's worth because it will - get caught during code review. 
- - """ - - CHECK_DESC = 'K001 Using mutable as a function/method default' - MUTABLES = ( - ast.List, ast.ListComp, - ast.Dict, ast.DictComp, - ast.Set, ast.SetComp, - ast.Call) - - def visit_FunctionDef(self, node): - for arg in node.args.defaults: - if isinstance(arg, self.MUTABLES): - self.add_error(arg) - - super(CheckForMutableDefaultArgs, self).generic_visit(node) - - -def block_comments_begin_with_a_space(physical_line, line_number): - """There should be a space after the # of block comments. - - There is already a check in pep8 that enforces this rule for - inline comments. - - Okay: # this is a comment - Okay: #!/usr/bin/python - Okay: # this is a comment - K002: #this is a comment - - """ - MESSAGE = "K002 block comments should start with '# '" - - # shebangs are OK - if line_number == 1 and physical_line.startswith('#!'): - return - - text = physical_line.strip() - if text.startswith('#'): # look for block comments - if len(text) > 1 and not text[1].isspace(): - return physical_line.index('#'), MESSAGE - - -class CheckForAssertingNoneEquality(BaseASTChecker): - """Ensures that code does not use a None with assert(Not*)Equal.""" - - CHECK_DESC_IS = ('K003 Use self.assertIsNone(...) when comparing ' - 'against None') - CHECK_DESC_ISNOT = ('K004 Use assertIsNotNone(...) when comparing ' - ' against None') - - def visit_Call(self, node): - # NOTE(dstanek): I wrote this in a verbose way to make it easier to - # read for those that have little experience with Python's AST. 
- - if isinstance(node.func, ast.Attribute): - if node.func.attr == 'assertEqual': - for arg in node.args: - if isinstance(arg, ast.Name) and arg.id == 'None': - self.add_error(node, message=self.CHECK_DESC_IS) - elif node.func.attr == 'assertNotEqual': - for arg in node.args: - if isinstance(arg, ast.Name) and arg.id == 'None': - self.add_error(node, message=self.CHECK_DESC_ISNOT) - - super(CheckForAssertingNoneEquality, self).generic_visit(node) - - -class CheckForLoggingIssues(BaseASTChecker): - - DEBUG_CHECK_DESC = 'K005 Using translated string in debug logging' - NONDEBUG_CHECK_DESC = 'K006 Not using translating helper for logging' - EXCESS_HELPER_CHECK_DESC = 'K007 Using hints when _ is necessary' - LOG_MODULES = ('logging', 'keystone.openstack.common.log') - I18N_MODULES = ( - 'keystone.i18n._', - 'keystone.i18n._LI', - 'keystone.i18n._LW', - 'keystone.i18n._LE', - 'keystone.i18n._LC', - ) - TRANS_HELPER_MAP = { - 'debug': None, - 'info': '_LI', - 'warn': '_LW', - 'warning': '_LW', - 'error': '_LE', - 'exception': '_LE', - 'critical': '_LC', - } - - def __init__(self, tree, filename): - super(CheckForLoggingIssues, self).__init__(tree, filename) - - self.logger_names = [] - self.logger_module_names = [] - self.i18n_names = {} - - # NOTE(dstanek): this kinda accounts for scopes when talking - # about only leaf node in the graph - self.assignments = {} - - def generic_visit(self, node): - """Called if no explicit visitor function exists for a node.""" - for field, value in ast.iter_fields(node): - if isinstance(value, list): - for item in value: - if isinstance(item, ast.AST): - item._parent = node - self.visit(item) - elif isinstance(value, ast.AST): - value._parent = node - self.visit(value) - - def _filter_imports(self, module_name, alias): - """Keeps lists of logging and i18n imports - - """ - if module_name in self.LOG_MODULES: - self.logger_module_names.append(alias.asname or alias.name) - elif module_name in self.I18N_MODULES: - 
self.i18n_names[alias.asname or alias.name] = alias.name - - def visit_Import(self, node): - for alias in node.names: - self._filter_imports(alias.name, alias) - return super(CheckForLoggingIssues, self).generic_visit(node) - - def visit_ImportFrom(self, node): - for alias in node.names: - full_name = '%s.%s' % (node.module, alias.name) - self._filter_imports(full_name, alias) - return super(CheckForLoggingIssues, self).generic_visit(node) - - def _find_name(self, node): - """Return the fully qualified name or a Name or Attribute.""" - if isinstance(node, ast.Name): - return node.id - elif (isinstance(node, ast.Attribute) - and isinstance(node.value, (ast.Name, ast.Attribute))): - method_name = node.attr - obj_name = self._find_name(node.value) - if obj_name is None: - return None - return obj_name + '.' + method_name - elif isinstance(node, six.string_types): - return node - else: # could be Subscript, Call or many more - return None - - def visit_Assign(self, node): - """Look for 'LOG = logging.getLogger' - - This handles the simple case: - name = [logging_module].getLogger(...) - - - or - - - name = [i18n_name](...) - - And some much more comple ones: - name = [i18n_name](...) % X - - - or - - - self.name = [i18n_name](...) % X - - """ - attr_node_types = (ast.Name, ast.Attribute) - - if (len(node.targets) != 1 - or not isinstance(node.targets[0], attr_node_types)): - # say no to: "x, y = ..." 
- return super(CheckForLoggingIssues, self).generic_visit(node) - - target_name = self._find_name(node.targets[0]) - - if (isinstance(node.value, ast.BinOp) and - isinstance(node.value.op, ast.Mod)): - if (isinstance(node.value.left, ast.Call) and - isinstance(node.value.left.func, ast.Name) and - node.value.left.func.id in self.i18n_names): - # NOTE(dstanek): this is done to match cases like: - # `msg = _('something %s') % x` - node = ast.Assign(value=node.value.left) - - if not isinstance(node.value, ast.Call): - # node.value must be a call to getLogger - self.assignments.pop(target_name, None) - return super(CheckForLoggingIssues, self).generic_visit(node) - - # is this a call to an i18n function? - if (isinstance(node.value.func, ast.Name) - and node.value.func.id in self.i18n_names): - self.assignments[target_name] = node.value.func.id - return super(CheckForLoggingIssues, self).generic_visit(node) - - if (not isinstance(node.value.func, ast.Attribute) - or not isinstance(node.value.func.value, attr_node_types)): - # function must be an attribute on an object like - # logging.getLogger - return super(CheckForLoggingIssues, self).generic_visit(node) - - object_name = self._find_name(node.value.func.value) - func_name = node.value.func.attr - - if (object_name in self.logger_module_names - and func_name == 'getLogger'): - self.logger_names.append(target_name) - - return super(CheckForLoggingIssues, self).generic_visit(node) - - def visit_Call(self, node): - """Look for the 'LOG.*' calls. 
- - """ - - # obj.method - if isinstance(node.func, ast.Attribute): - obj_name = self._find_name(node.func.value) - if isinstance(node.func.value, ast.Name): - method_name = node.func.attr - elif isinstance(node.func.value, ast.Attribute): - obj_name = self._find_name(node.func.value) - method_name = node.func.attr - else: # could be Subscript, Call or many more - return super(CheckForLoggingIssues, self).generic_visit(node) - - # must be a logger instance and one of the support logging methods - if (obj_name not in self.logger_names - or method_name not in self.TRANS_HELPER_MAP): - return super(CheckForLoggingIssues, self).generic_visit(node) - - # the call must have arguments - if not len(node.args): - return super(CheckForLoggingIssues, self).generic_visit(node) - - if method_name == 'debug': - self._process_debug(node) - elif method_name in self.TRANS_HELPER_MAP: - self._process_non_debug(node, method_name) - - return super(CheckForLoggingIssues, self).generic_visit(node) - - def _process_debug(self, node): - msg = node.args[0] # first arg to a logging method is the msg - - # if first arg is a call to a i18n name - if (isinstance(msg, ast.Call) - and isinstance(msg.func, ast.Name) - and msg.func.id in self.i18n_names): - self.add_error(msg, message=self.DEBUG_CHECK_DESC) - - # if the first arg is a reference to a i18n call - elif (isinstance(msg, ast.Name) - and msg.id in self.assignments - and not self._is_raised_later(node, msg.id)): - self.add_error(msg, message=self.DEBUG_CHECK_DESC) - - def _process_non_debug(self, node, method_name): - msg = node.args[0] # first arg to a logging method is the msg - - # if first arg is a call to a i18n name - if isinstance(msg, ast.Call): - try: - func_name = msg.func.id - except AttributeError: - # in the case of logging only an exception, the msg function - # will not have an id associated with it, for instance: - # LOG.warning(six.text_type(e)) - return - - # the function name is the correct translation helper - # for 
the logging method - if func_name == self.TRANS_HELPER_MAP[method_name]: - return - - # the function name is an alias for the correct translation - # helper for the loggine method - if (self.i18n_names[func_name] == - self.TRANS_HELPER_MAP[method_name]): - return - - self.add_error(msg, message=self.NONDEBUG_CHECK_DESC) - - # if the first arg is not a reference to the correct i18n hint - elif isinstance(msg, ast.Name): - - # FIXME(dstanek): to make sure more robust we should be checking - # all names passed into a logging method. we can't right now - # because: - # 1. We have code like this that we'll fix when dealing with the %: - # msg = _('....') % {} - # LOG.warn(msg) - # 2. We also do LOG.exception(e) in several places. I'm not sure - # exactly what we should be doing about that. - if msg.id not in self.assignments: - return - - helper_method_name = self.TRANS_HELPER_MAP[method_name] - if (self.assignments[msg.id] != helper_method_name - and not self._is_raised_later(node, msg.id)): - self.add_error(msg, message=self.NONDEBUG_CHECK_DESC) - elif (self.assignments[msg.id] == helper_method_name - and self._is_raised_later(node, msg.id)): - self.add_error(msg, message=self.EXCESS_HELPER_CHECK_DESC) - - def _is_raised_later(self, node, name): - - def find_peers(node): - node_for_line = node._parent - for _field, value in ast.iter_fields(node._parent._parent): - if isinstance(value, list) and node_for_line in value: - return value[value.index(node_for_line) + 1:] - continue - return [] - - peers = find_peers(node) - for peer in peers: - if isinstance(peer, ast.Raise): - if (isinstance(peer.type, ast.Call) and - len(peer.type.args) > 0 and - isinstance(peer.type.args[0], ast.Name) and - name in (a.id for a in peer.type.args)): - return True - else: - return False - elif isinstance(peer, ast.Assign): - if name in (t.id for t in peer.targets): - return False - - -def check_oslo_namespace_imports(logical_line, blank_before, filename): - oslo_namespace_imports = 
re.compile( - r"(((from)|(import))\s+oslo\.)|(from\s+oslo\s+import\s+)") - - if re.match(oslo_namespace_imports, logical_line): - msg = ("K333: '%s' must be used instead of '%s'.") % ( - logical_line.replace('oslo.', 'oslo_'), - logical_line) - yield(0, msg) - - -def dict_constructor_with_sequence_copy(logical_line): - """Should use a dict comprehension instead of a dict constructor. - - PEP-0274 introduced dict comprehension with performance enhancement - and it also makes code more readable. - - Okay: lower_res = {k.lower(): v for k, v in six.iteritems(res[1])} - Okay: fool = dict(a='a', b='b') - K008: lower_res = dict((k.lower(), v) for k, v in six.iteritems(res[1])) - K008: attrs = dict([(k, _from_json(v)) - K008: dict([[i,i] for i in range(3)]) - - """ - MESSAGE = ("K008 Must use a dict comprehension instead of a dict" - " constructor with a sequence of key-value pairs.") - - dict_constructor_with_sequence_re = ( - re.compile(r".*\bdict\((\[)?(\(|\[)(?!\{)")) - - if dict_constructor_with_sequence_re.match(logical_line): - yield (0, MESSAGE) - - -def factory(register): - register(CheckForMutableDefaultArgs) - register(block_comments_begin_with_a_space) - register(CheckForAssertingNoneEquality) - register(CheckForLoggingIssues) - register(check_oslo_namespace_imports) - register(dict_constructor_with_sequence_copy) diff --git a/keystone-moon/keystone/i18n.py b/keystone-moon/keystone/i18n.py deleted file mode 100644 index 2eb42d3a..00000000 --- a/keystone-moon/keystone/i18n.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright 2014 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""oslo.i18n integration module. - -See http://docs.openstack.org/developer/oslo.i18n/usage.html . - -""" - -import oslo_i18n - - -_translators = oslo_i18n.TranslatorFactory(domain='keystone') - -# The primary translation function using the well-known name "_" -_ = _translators.primary - -# Translators for log levels. -# -# The abbreviated names are meant to reflect the usual use of a short -# name like '_'. The "L" is for "log" and the other letter comes from -# the level. -_LI = _translators.log_info -_LW = _translators.log_warning -_LE = _translators.log_error -_LC = _translators.log_critical diff --git a/keystone-moon/keystone/identity/__init__.py b/keystone-moon/keystone/identity/__init__.py deleted file mode 100644 index 96b3ee77..00000000 --- a/keystone-moon/keystone/identity/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone.identity import controllers # noqa -from keystone.identity.core import * # noqa -from keystone.identity import generator # noqa diff --git a/keystone-moon/keystone/identity/backends/__init__.py b/keystone-moon/keystone/identity/backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/identity/backends/ldap.py b/keystone-moon/keystone/identity/backends/ldap.py deleted file mode 100644 index fe8e8477..00000000 --- a/keystone-moon/keystone/identity/backends/ldap.py +++ /dev/null @@ -1,425 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from __future__ import absolute_import -import uuid - -import ldap.filter -from oslo_config import cfg -from oslo_log import log -from oslo_log import versionutils -import six - -from keystone.common import clean -from keystone.common import driver_hints -from keystone.common import ldap as common_ldap -from keystone.common import models -from keystone import exception -from keystone.i18n import _ -from keystone import identity - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - -_DEPRECATION_MSG = _('%s for the LDAP identity backend has been deprecated in ' - 'the Mitaka release in favor of read-only identity LDAP ' - 'access. 
It will be removed in the "O" release.') - - -class Identity(identity.IdentityDriverV8): - def __init__(self, conf=None): - super(Identity, self).__init__() - if conf is None: - self.conf = CONF - else: - self.conf = conf - self.user = UserApi(self.conf) - self.group = GroupApi(self.conf) - - def is_domain_aware(self): - return False - - def generates_uuids(self): - return False - - # Identity interface - - def authenticate(self, user_id, password): - try: - user_ref = self._get_user(user_id) - except exception.UserNotFound: - raise AssertionError(_('Invalid user / password')) - if not user_id or not password: - raise AssertionError(_('Invalid user / password')) - conn = None - try: - conn = self.user.get_connection(user_ref['dn'], - password, end_user_auth=True) - if not conn: - raise AssertionError(_('Invalid user / password')) - except Exception: - raise AssertionError(_('Invalid user / password')) - finally: - if conn: - conn.unbind_s() - return self.user.filter_attributes(user_ref) - - def _get_user(self, user_id): - return self.user.get(user_id) - - def get_user(self, user_id): - return self.user.get_filtered(user_id) - - def list_users(self, hints): - return self.user.get_all_filtered(hints) - - def get_user_by_name(self, user_name, domain_id): - # domain_id will already have been handled in the Manager layer, - # parameter left in so this matches the Driver specification - return self.user.filter_attributes(self.user.get_by_name(user_name)) - - # CRUD - def create_user(self, user_id, user): - msg = _DEPRECATION_MSG % "create_user" - versionutils.report_deprecated_feature(LOG, msg) - self.user.check_allow_create() - user_ref = self.user.create(user) - return self.user.filter_attributes(user_ref) - - def update_user(self, user_id, user): - msg = _DEPRECATION_MSG % "update_user" - versionutils.report_deprecated_feature(LOG, msg) - self.user.check_allow_update() - old_obj = self.user.get(user_id) - if 'name' in user and old_obj.get('name') != user['name']: - 
raise exception.Conflict(_('Cannot change user name')) - - if self.user.enabled_mask: - self.user.mask_enabled_attribute(user) - elif self.user.enabled_invert and not self.user.enabled_emulation: - # We need to invert the enabled value for the old model object - # to prevent the LDAP update code from thinking that the enabled - # values are already equal. - user['enabled'] = not user['enabled'] - old_obj['enabled'] = not old_obj['enabled'] - - self.user.update(user_id, user, old_obj) - return self.user.get_filtered(user_id) - - def delete_user(self, user_id): - msg = _DEPRECATION_MSG % "delete_user" - versionutils.report_deprecated_feature(LOG, msg) - self.user.check_allow_delete() - user = self.user.get(user_id) - user_dn = user['dn'] - groups = self.group.list_user_groups(user_dn) - for group in groups: - self.group.remove_user(user_dn, group['id'], user_id) - - if hasattr(user, 'tenant_id'): - self.project.remove_user(user.tenant_id, user_dn) - self.user.delete(user_id) - - def create_group(self, group_id, group): - msg = _DEPRECATION_MSG % "create_group" - versionutils.report_deprecated_feature(LOG, msg) - self.group.check_allow_create() - group['name'] = clean.group_name(group['name']) - return common_ldap.filter_entity(self.group.create(group)) - - def get_group(self, group_id): - return self.group.get_filtered(group_id) - - def get_group_by_name(self, group_name, domain_id): - # domain_id will already have been handled in the Manager layer, - # parameter left in so this matches the Driver specification - return self.group.get_filtered_by_name(group_name) - - def update_group(self, group_id, group): - msg = _DEPRECATION_MSG % "update_group" - versionutils.report_deprecated_feature(LOG, msg) - self.group.check_allow_update() - if 'name' in group: - group['name'] = clean.group_name(group['name']) - return common_ldap.filter_entity(self.group.update(group_id, group)) - - def delete_group(self, group_id): - msg = _DEPRECATION_MSG % "delete_group" - 
versionutils.report_deprecated_feature(LOG, msg) - self.group.check_allow_delete() - return self.group.delete(group_id) - - def add_user_to_group(self, user_id, group_id): - msg = _DEPRECATION_MSG % "add_user_to_group" - versionutils.report_deprecated_feature(LOG, msg) - user_ref = self._get_user(user_id) - user_dn = user_ref['dn'] - self.group.add_user(user_dn, group_id, user_id) - - def remove_user_from_group(self, user_id, group_id): - msg = _DEPRECATION_MSG % "remove_user_from_group" - versionutils.report_deprecated_feature(LOG, msg) - user_ref = self._get_user(user_id) - user_dn = user_ref['dn'] - self.group.remove_user(user_dn, group_id, user_id) - - def list_groups_for_user(self, user_id, hints): - user_ref = self._get_user(user_id) - if self.conf.ldap.group_members_are_ids: - user_dn = user_ref['id'] - else: - user_dn = user_ref['dn'] - return self.group.list_user_groups_filtered(user_dn, hints) - - def list_groups(self, hints): - return self.group.get_all_filtered(hints) - - def list_users_in_group(self, group_id, hints): - users = [] - for user_key in self.group.list_group_users(group_id): - if self.conf.ldap.group_members_are_ids: - user_id = user_key - else: - user_id = self.user._dn_to_id(user_key) - - try: - users.append(self.user.get_filtered(user_id)) - except exception.UserNotFound: - LOG.debug(("Group member '%(user_key)s' not found in" - " '%(group_id)s'. The user should be removed" - " from the group. The user will be ignored."), - dict(user_key=user_key, group_id=group_id)) - return users - - def check_user_in_group(self, user_id, group_id): - user_refs = self.list_users_in_group(group_id, driver_hints.Hints()) - for x in user_refs: - if x['id'] == user_id: - break - else: - # Try to fetch the user to see if it even exists. This - # will raise a more accurate exception. 
- self.get_user(user_id) - raise exception.NotFound(_("User '%(user_id)s' not found in" - " group '%(group_id)s'") % - {'user_id': user_id, - 'group_id': group_id}) - - -# TODO(termie): turn this into a data object and move logic to driver -class UserApi(common_ldap.EnabledEmuMixIn, common_ldap.BaseLdap): - DEFAULT_OU = 'ou=Users' - DEFAULT_STRUCTURAL_CLASSES = ['person'] - DEFAULT_ID_ATTR = 'cn' - DEFAULT_OBJECTCLASS = 'inetOrgPerson' - NotFound = exception.UserNotFound - options_name = 'user' - attribute_options_names = {'password': 'pass', - 'email': 'mail', - 'name': 'name', - 'description': 'description', - 'enabled': 'enabled', - 'default_project_id': 'default_project_id'} - immutable_attrs = ['id'] - - model = models.User - - def __init__(self, conf): - super(UserApi, self).__init__(conf) - self.enabled_mask = conf.ldap.user_enabled_mask - self.enabled_default = conf.ldap.user_enabled_default - self.enabled_invert = conf.ldap.user_enabled_invert - self.enabled_emulation = conf.ldap.user_enabled_emulation - - def _ldap_res_to_model(self, res): - obj = super(UserApi, self)._ldap_res_to_model(res) - if self.enabled_mask != 0: - enabled = int(obj.get('enabled', self.enabled_default)) - obj['enabled'] = ((enabled & self.enabled_mask) != - self.enabled_mask) - elif self.enabled_invert and not self.enabled_emulation: - # This could be a bool or a string. If it's a string, - # we need to convert it so we can invert it properly. 
- enabled = obj.get('enabled', self.enabled_default) - if isinstance(enabled, six.string_types): - if enabled.lower() == 'true': - enabled = True - else: - enabled = False - obj['enabled'] = not enabled - obj['dn'] = res[0] - - return obj - - def mask_enabled_attribute(self, values): - value = values['enabled'] - values.setdefault('enabled_nomask', int(self.enabled_default)) - if value != ((values['enabled_nomask'] & self.enabled_mask) != - self.enabled_mask): - values['enabled_nomask'] ^= self.enabled_mask - values['enabled'] = values['enabled_nomask'] - del values['enabled_nomask'] - - def create(self, values): - if self.enabled_mask: - orig_enabled = values['enabled'] - self.mask_enabled_attribute(values) - elif self.enabled_invert and not self.enabled_emulation: - orig_enabled = values['enabled'] - if orig_enabled is not None: - values['enabled'] = not orig_enabled - else: - values['enabled'] = self.enabled_default - values = super(UserApi, self).create(values) - if self.enabled_mask or (self.enabled_invert and - not self.enabled_emulation): - values['enabled'] = orig_enabled - return values - - def get_filtered(self, user_id): - user = self.get(user_id) - return self.filter_attributes(user) - - def get_all_filtered(self, hints): - query = self.filter_query(hints, self.ldap_filter) - return [self.filter_attributes(user) - for user in self.get_all(query, hints)] - - def filter_attributes(self, user): - return identity.filter_user(common_ldap.filter_entity(user)) - - def is_user(self, dn): - """Returns True if the entry is a user.""" - # NOTE(blk-u): It's easy to check if the DN is under the User tree, - # but may not be accurate. A more accurate test would be to fetch the - # entry to see if it's got the user objectclass, but this could be - # really expensive considering how this is used. 
- - return common_ldap.dn_startswith(dn, self.tree_dn) - - -class GroupApi(common_ldap.BaseLdap): - DEFAULT_OU = 'ou=UserGroups' - DEFAULT_STRUCTURAL_CLASSES = [] - DEFAULT_OBJECTCLASS = 'groupOfNames' - DEFAULT_ID_ATTR = 'cn' - DEFAULT_MEMBER_ATTRIBUTE = 'member' - NotFound = exception.GroupNotFound - options_name = 'group' - attribute_options_names = {'description': 'desc', - 'name': 'name'} - immutable_attrs = ['name'] - model = models.Group - - def _ldap_res_to_model(self, res): - model = super(GroupApi, self)._ldap_res_to_model(res) - model['dn'] = res[0] - return model - - def __init__(self, conf): - super(GroupApi, self).__init__(conf) - self.member_attribute = (conf.ldap.group_member_attribute - or self.DEFAULT_MEMBER_ATTRIBUTE) - - def create(self, values): - data = values.copy() - if data.get('id') is None: - data['id'] = uuid.uuid4().hex - if 'description' in data and data['description'] in ['', None]: - data.pop('description') - return super(GroupApi, self).create(data) - - def delete(self, group_id): - if self.subtree_delete_enabled: - super(GroupApi, self).delete_tree(group_id) - else: - # TODO(spzala): this is only placeholder for group and domain - # role support which will be added under bug 1101287 - - group_ref = self.get(group_id) - group_dn = group_ref['dn'] - if group_dn: - self._delete_tree_nodes(group_dn, ldap.SCOPE_ONELEVEL) - super(GroupApi, self).delete(group_id) - - def update(self, group_id, values): - old_obj = self.get(group_id) - return super(GroupApi, self).update(group_id, values, old_obj) - - def add_user(self, user_dn, group_id, user_id): - group_ref = self.get(group_id) - group_dn = group_ref['dn'] - try: - super(GroupApi, self).add_member(user_dn, group_dn) - except exception.Conflict: - raise exception.Conflict(_( - 'User %(user_id)s is already a member of group %(group_id)s') % - {'user_id': user_id, 'group_id': group_id}) - - def remove_user(self, user_dn, group_id, user_id): - group_ref = self.get(group_id) - group_dn = 
group_ref['dn'] - try: - super(GroupApi, self).remove_member(user_dn, group_dn) - except ldap.NO_SUCH_ATTRIBUTE: - raise exception.UserNotFound(user_id=user_id) - - def list_user_groups(self, user_dn): - """Return a list of groups for which the user is a member.""" - user_dn_esc = ldap.filter.escape_filter_chars(user_dn) - query = '(%s=%s)%s' % (self.member_attribute, - user_dn_esc, - self.ldap_filter or '') - return self.get_all(query) - - def list_user_groups_filtered(self, user_dn, hints): - """Return a filtered list of groups for which the user is a member.""" - user_dn_esc = ldap.filter.escape_filter_chars(user_dn) - query = '(%s=%s)%s' % (self.member_attribute, - user_dn_esc, - self.ldap_filter or '') - return self.get_all_filtered(hints, query) - - def list_group_users(self, group_id): - """Return a list of user dns which are members of a group.""" - group_ref = self.get(group_id) - group_dn = group_ref['dn'] - - try: - attrs = self._ldap_get_list(group_dn, ldap.SCOPE_BASE, - attrlist=[self.member_attribute]) - except ldap.NO_SUCH_OBJECT: - raise self.NotFound(group_id=group_id) - - users = [] - for dn, member in attrs: - user_dns = member.get(self.member_attribute, []) - for user_dn in user_dns: - if self._is_dumb_member(user_dn): - continue - users.append(user_dn) - return users - - def get_filtered(self, group_id): - group = self.get(group_id) - return common_ldap.filter_entity(group) - - def get_filtered_by_name(self, group_name): - group = self.get_by_name(group_name) - return common_ldap.filter_entity(group) - - def get_all_filtered(self, hints, query=None): - query = self.filter_query(hints, query) - return [common_ldap.filter_entity(group) - for group in self.get_all(query, hints)] diff --git a/keystone-moon/keystone/identity/backends/sql.py b/keystone-moon/keystone/identity/backends/sql.py deleted file mode 100644 index 5680a8a2..00000000 --- a/keystone-moon/keystone/identity/backends/sql.py +++ /dev/null @@ -1,402 +0,0 @@ -# Copyright 2012 
OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import sqlalchemy -from sqlalchemy.ext.hybrid import hybrid_property -from sqlalchemy import orm - -from keystone.common import driver_hints -from keystone.common import sql -from keystone.common import utils -from keystone import exception -from keystone.i18n import _ -from keystone import identity - - -class User(sql.ModelBase, sql.DictBase): - __tablename__ = 'user' - attributes = ['id', 'name', 'domain_id', 'password', 'enabled', - 'default_project_id'] - id = sql.Column(sql.String(64), primary_key=True) - enabled = sql.Column(sql.Boolean) - extra = sql.Column(sql.JsonBlob()) - default_project_id = sql.Column(sql.String(64)) - local_user = orm.relationship('LocalUser', uselist=False, - single_parent=True, lazy='subquery', - cascade='all,delete-orphan', backref='user') - federated_users = orm.relationship('FederatedUser', - single_parent=True, - lazy='subquery', - cascade='all,delete-orphan', - backref='user') - - # name property - @hybrid_property - def name(self): - if self.local_user: - return self.local_user.name - elif self.federated_users: - return self.federated_users[0].display_name - else: - return None - - @name.setter - def name(self, value): - if not self.local_user: - self.local_user = LocalUser() - self.local_user.name = value - - @name.expression - def name(cls): - return LocalUser.name - - # password property - @hybrid_property - def password(self): - if self.local_user and 
self.local_user.passwords: - return self.local_user.passwords[0].password - else: - return None - - @password.setter - def password(self, value): - if not value: - if self.local_user and self.local_user.passwords: - self.local_user.passwords = [] - else: - if not self.local_user: - self.local_user = LocalUser() - if not self.local_user.passwords: - self.local_user.passwords.append(Password()) - self.local_user.passwords[0].password = value - - @password.expression - def password(cls): - return Password.password - - # domain_id property - @hybrid_property - def domain_id(self): - if self.local_user: - return self.local_user.domain_id - else: - return None - - @domain_id.setter - def domain_id(self, value): - if not self.local_user: - self.local_user = LocalUser() - self.local_user.domain_id = value - - @domain_id.expression - def domain_id(cls): - return LocalUser.domain_id - - def to_dict(self, include_extra_dict=False): - d = super(User, self).to_dict(include_extra_dict=include_extra_dict) - if 'default_project_id' in d and d['default_project_id'] is None: - del d['default_project_id'] - return d - - -class LocalUser(sql.ModelBase, sql.DictBase): - __tablename__ = 'local_user' - attributes = ['id', 'user_id', 'domain_id', 'name'] - id = sql.Column(sql.Integer, primary_key=True) - user_id = sql.Column(sql.String(64), sql.ForeignKey('user.id', - ondelete='CASCADE'), unique=True) - domain_id = sql.Column(sql.String(64), nullable=False) - name = sql.Column(sql.String(255), nullable=False) - passwords = orm.relationship('Password', single_parent=True, - cascade='all,delete-orphan', - backref='local_user') - __table_args__ = (sql.UniqueConstraint('domain_id', 'name'), {}) - - -class Password(sql.ModelBase, sql.DictBase): - __tablename__ = 'password' - attributes = ['id', 'local_user_id', 'password'] - id = sql.Column(sql.Integer, primary_key=True) - local_user_id = sql.Column(sql.Integer, sql.ForeignKey('local_user.id', - ondelete='CASCADE')) - password = 
sql.Column(sql.String(128)) - - -class FederatedUser(sql.ModelBase, sql.ModelDictMixin): - __tablename__ = 'federated_user' - attributes = ['id', 'user_id', 'idp_id', 'protocol_id', 'unique_id', - 'display_name'] - id = sql.Column(sql.Integer, primary_key=True) - user_id = sql.Column(sql.String(64), sql.ForeignKey('user.id', - ondelete='CASCADE')) - idp_id = sql.Column(sql.String(64), sql.ForeignKey('identity_provider.id', - ondelete='CASCADE')) - protocol_id = sql.Column(sql.String(64), nullable=False) - unique_id = sql.Column(sql.String(255), nullable=False) - display_name = sql.Column(sql.String(255), nullable=True) - __table_args__ = ( - sql.UniqueConstraint('idp_id', 'protocol_id', 'unique_id'), - sqlalchemy.ForeignKeyConstraint(['protocol_id', 'idp_id'], - ['federation_protocol.id', - 'federation_protocol.idp_id']) - ) - - -class Group(sql.ModelBase, sql.DictBase): - __tablename__ = 'group' - attributes = ['id', 'name', 'domain_id', 'description'] - id = sql.Column(sql.String(64), primary_key=True) - name = sql.Column(sql.String(64), nullable=False) - domain_id = sql.Column(sql.String(64), nullable=False) - description = sql.Column(sql.Text()) - extra = sql.Column(sql.JsonBlob()) - # Unique constraint across two columns to create the separation - # rather than just only 'name' being unique - __table_args__ = (sql.UniqueConstraint('domain_id', 'name'),) - - -class UserGroupMembership(sql.ModelBase, sql.DictBase): - """Group membership join table.""" - - __tablename__ = 'user_group_membership' - user_id = sql.Column(sql.String(64), - sql.ForeignKey('user.id'), - primary_key=True) - group_id = sql.Column(sql.String(64), - sql.ForeignKey('group.id'), - primary_key=True) - - -class Identity(identity.IdentityDriverV8): - # NOTE(henry-nash): Override the __init__() method so as to take a - # config parameter to enable sql to be used as a domain-specific driver. 
- def __init__(self, conf=None): - self.conf = conf - super(Identity, self).__init__() - - @property - def is_sql(self): - return True - - def _check_password(self, password, user_ref): - """Check the specified password against the data store. - - Note that we'll pass in the entire user_ref in case the subclass - needs things like user_ref.get('name') - For further justification, please see the follow up suggestion at - https://blueprints.launchpad.net/keystone/+spec/sql-identiy-pam - - """ - return utils.check_password(password, user_ref.password) - - # Identity interface - def authenticate(self, user_id, password): - with sql.session_for_read() as session: - user_ref = None - try: - user_ref = self._get_user(session, user_id) - except exception.UserNotFound: - raise AssertionError(_('Invalid user / password')) - if not self._check_password(password, user_ref): - raise AssertionError(_('Invalid user / password')) - return identity.filter_user(user_ref.to_dict()) - - # user crud - - @sql.handle_conflicts(conflict_type='user') - def create_user(self, user_id, user): - user = utils.hash_user_password(user) - with sql.session_for_write() as session: - user_ref = User.from_dict(user) - session.add(user_ref) - return identity.filter_user(user_ref.to_dict()) - - @driver_hints.truncated - def list_users(self, hints): - with sql.session_for_read() as session: - query = session.query(User).outerjoin(LocalUser) - user_refs = sql.filter_limit_query(User, query, hints) - return [identity.filter_user(x.to_dict()) for x in user_refs] - - def _get_user(self, session, user_id): - user_ref = session.query(User).get(user_id) - if not user_ref: - raise exception.UserNotFound(user_id=user_id) - return user_ref - - def get_user(self, user_id): - with sql.session_for_read() as session: - return identity.filter_user( - self._get_user(session, user_id).to_dict()) - - def get_user_by_name(self, user_name, domain_id): - with sql.session_for_read() as session: - query = 
session.query(User).join(LocalUser) - query = query.filter(sqlalchemy.and_(LocalUser.name == user_name, - LocalUser.domain_id == domain_id)) - try: - user_ref = query.one() - except sql.NotFound: - raise exception.UserNotFound(user_id=user_name) - return identity.filter_user(user_ref.to_dict()) - - @sql.handle_conflicts(conflict_type='user') - def update_user(self, user_id, user): - with sql.session_for_write() as session: - user_ref = self._get_user(session, user_id) - old_user_dict = user_ref.to_dict() - user = utils.hash_user_password(user) - for k in user: - old_user_dict[k] = user[k] - new_user = User.from_dict(old_user_dict) - for attr in User.attributes: - if attr != 'id': - setattr(user_ref, attr, getattr(new_user, attr)) - user_ref.extra = new_user.extra - return identity.filter_user( - user_ref.to_dict(include_extra_dict=True)) - - def add_user_to_group(self, user_id, group_id): - with sql.session_for_write() as session: - self.get_group(group_id) - self.get_user(user_id) - query = session.query(UserGroupMembership) - query = query.filter_by(user_id=user_id) - query = query.filter_by(group_id=group_id) - rv = query.first() - if rv: - return - - session.add(UserGroupMembership(user_id=user_id, - group_id=group_id)) - - def check_user_in_group(self, user_id, group_id): - with sql.session_for_read() as session: - self.get_group(group_id) - self.get_user(user_id) - query = session.query(UserGroupMembership) - query = query.filter_by(user_id=user_id) - query = query.filter_by(group_id=group_id) - if not query.first(): - raise exception.NotFound(_("User '%(user_id)s' not found in" - " group '%(group_id)s'") % - {'user_id': user_id, - 'group_id': group_id}) - - def remove_user_from_group(self, user_id, group_id): - # We don't check if user or group are still valid and let the remove - # be tried anyway - in case this is some kind of clean-up operation - with sql.session_for_write() as session: - query = session.query(UserGroupMembership) - query = 
query.filter_by(user_id=user_id) - query = query.filter_by(group_id=group_id) - membership_ref = query.first() - if membership_ref is None: - # Check if the group and user exist to return descriptive - # exceptions. - self.get_group(group_id) - self.get_user(user_id) - raise exception.NotFound(_("User '%(user_id)s' not found in" - " group '%(group_id)s'") % - {'user_id': user_id, - 'group_id': group_id}) - session.delete(membership_ref) - - def list_groups_for_user(self, user_id, hints): - with sql.session_for_read() as session: - self.get_user(user_id) - query = session.query(Group).join(UserGroupMembership) - query = query.filter(UserGroupMembership.user_id == user_id) - query = sql.filter_limit_query(Group, query, hints) - return [g.to_dict() for g in query] - - def list_users_in_group(self, group_id, hints): - with sql.session_for_read() as session: - self.get_group(group_id) - query = session.query(User).outerjoin(LocalUser) - query = query.join(UserGroupMembership) - query = query.filter(UserGroupMembership.group_id == group_id) - query = sql.filter_limit_query(User, query, hints) - return [identity.filter_user(u.to_dict()) for u in query] - - def delete_user(self, user_id): - with sql.session_for_write() as session: - ref = self._get_user(session, user_id) - - q = session.query(UserGroupMembership) - q = q.filter_by(user_id=user_id) - q.delete(False) - - session.delete(ref) - - # group crud - - @sql.handle_conflicts(conflict_type='group') - def create_group(self, group_id, group): - with sql.session_for_write() as session: - ref = Group.from_dict(group) - session.add(ref) - return ref.to_dict() - - @driver_hints.truncated - def list_groups(self, hints): - with sql.session_for_read() as session: - query = session.query(Group) - refs = sql.filter_limit_query(Group, query, hints) - return [ref.to_dict() for ref in refs] - - def _get_group(self, session, group_id): - ref = session.query(Group).get(group_id) - if not ref: - raise 
exception.GroupNotFound(group_id=group_id) - return ref - - def get_group(self, group_id): - with sql.session_for_read() as session: - return self._get_group(session, group_id).to_dict() - - def get_group_by_name(self, group_name, domain_id): - with sql.session_for_read() as session: - query = session.query(Group) - query = query.filter_by(name=group_name) - query = query.filter_by(domain_id=domain_id) - try: - group_ref = query.one() - except sql.NotFound: - raise exception.GroupNotFound(group_id=group_name) - return group_ref.to_dict() - - @sql.handle_conflicts(conflict_type='group') - def update_group(self, group_id, group): - with sql.session_for_write() as session: - ref = self._get_group(session, group_id) - old_dict = ref.to_dict() - for k in group: - old_dict[k] = group[k] - new_group = Group.from_dict(old_dict) - for attr in Group.attributes: - if attr != 'id': - setattr(ref, attr, getattr(new_group, attr)) - ref.extra = new_group.extra - return ref.to_dict() - - def delete_group(self, group_id): - with sql.session_for_write() as session: - ref = self._get_group(session, group_id) - - q = session.query(UserGroupMembership) - q = q.filter_by(group_id=group_id) - q.delete(False) - - session.delete(ref) diff --git a/keystone-moon/keystone/identity/controllers.py b/keystone-moon/keystone/identity/controllers.py deleted file mode 100644 index 9e8ba6fc..00000000 --- a/keystone-moon/keystone/identity/controllers.py +++ /dev/null @@ -1,344 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Workflow Logic the Identity service.""" - -from oslo_config import cfg -from oslo_log import log - -from keystone.common import controller -from keystone.common import dependency -from keystone.common import validation -from keystone import exception -from keystone.i18n import _, _LW -from keystone.identity import schema -from keystone import notifications - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -@dependency.requires('assignment_api', 'identity_api', 'resource_api') -class User(controller.V2Controller): - - @controller.v2_deprecated - def get_user(self, context, user_id): - self.assert_admin(context) - ref = self.identity_api.get_user(user_id) - return {'user': self.v3_to_v2_user(ref)} - - @controller.v2_deprecated - def get_users(self, context): - # NOTE(termie): i can't imagine that this really wants all the data - # about every single user in the system... - if 'name' in context['query_string']: - return self.get_user_by_name( - context, context['query_string'].get('name')) - - self.assert_admin(context) - user_list = self.identity_api.list_users( - CONF.identity.default_domain_id) - return {'users': self.v3_to_v2_user(user_list)} - - @controller.v2_deprecated - def get_user_by_name(self, context, user_name): - self.assert_admin(context) - ref = self.identity_api.get_user_by_name( - user_name, CONF.identity.default_domain_id) - return {'user': self.v3_to_v2_user(ref)} - - # CRUD extension - @controller.v2_deprecated - def create_user(self, context, user): - user = self._normalize_OSKSADM_password_on_request(user) - user = self.normalize_username_in_request(user) - user = self._normalize_dict(user) - self.assert_admin(context) - - if 'name' not in user or not user['name']: - msg = _('Name field is required and cannot be empty') - raise exception.ValidationError(message=msg) - if 'enabled' in user and not isinstance(user['enabled'], bool): 
- msg = _('Enabled field must be a boolean') - raise exception.ValidationError(message=msg) - - default_project_id = user.pop('tenantId', None) - if default_project_id is not None: - # Check to see if the project is valid before moving on. - self.resource_api.get_project(default_project_id) - user['default_project_id'] = default_project_id - - self.resource_api.ensure_default_domain_exists() - - # The manager layer will generate the unique ID for users - user_ref = self._normalize_domain_id(context, user.copy()) - initiator = notifications._get_request_audit_info(context) - new_user_ref = self.v3_to_v2_user( - self.identity_api.create_user(user_ref, initiator)) - - if default_project_id is not None: - self.assignment_api.add_user_to_project(default_project_id, - new_user_ref['id']) - return {'user': new_user_ref} - - @controller.v2_deprecated - def update_user(self, context, user_id, user): - # NOTE(termie): this is really more of a patch than a put - user = self.normalize_username_in_request(user) - self.assert_admin(context) - - if 'enabled' in user and not isinstance(user['enabled'], bool): - msg = _('Enabled field should be a boolean') - raise exception.ValidationError(message=msg) - - default_project_id = user.pop('tenantId', None) - if default_project_id is not None: - user['default_project_id'] = default_project_id - - old_user_ref = self.v3_to_v2_user( - self.identity_api.get_user(user_id)) - - # Check whether a tenant is being added or changed for the user. - # Catch the case where the tenant is being changed for a user and also - # where a user previously had no tenant but a tenant is now being - # added for the user. - if (('tenantId' in old_user_ref and - old_user_ref['tenantId'] != default_project_id and - default_project_id is not None) or - ('tenantId' not in old_user_ref and - default_project_id is not None)): - # Make sure the new project actually exists before we perform the - # user update. 
- self.resource_api.get_project(default_project_id) - - initiator = notifications._get_request_audit_info(context) - user_ref = self.v3_to_v2_user( - self.identity_api.update_user(user_id, user, initiator)) - - # If 'tenantId' is in either ref, we might need to add or remove the - # user from a project. - if 'tenantId' in user_ref or 'tenantId' in old_user_ref: - if user_ref['tenantId'] != old_user_ref.get('tenantId'): - if old_user_ref.get('tenantId'): - try: - member_role_id = CONF.member_role_id - self.assignment_api.remove_role_from_user_and_project( - user_id, old_user_ref['tenantId'], member_role_id) - except exception.NotFound: - # NOTE(morganfainberg): This is not a critical error it - # just means that the user cannot be removed from the - # old tenant. This could occur if roles aren't found - # or if the project is invalid or if there are no roles - # for the user on that project. - msg = _LW('Unable to remove user %(user)s from ' - '%(tenant)s.') - LOG.warning(msg, {'user': user_id, - 'tenant': old_user_ref['tenantId']}) - - if user_ref['tenantId']: - try: - self.assignment_api.add_user_to_project( - user_ref['tenantId'], user_id) - except exception.Conflict: # nosec - # We are already a member of that tenant - pass - except exception.NotFound: - # NOTE(morganfainberg): Log this and move on. This is - # not the end of the world if we can't add the user to - # the appropriate tenant. Most of the time this means - # that the project is invalid or roles are some how - # incorrect. This shouldn't prevent the return of the - # new ref. 
- msg = _LW('Unable to add user %(user)s to %(tenant)s.') - LOG.warning(msg, {'user': user_id, - 'tenant': user_ref['tenantId']}) - - return {'user': user_ref} - - @controller.v2_deprecated - def delete_user(self, context, user_id): - self.assert_admin(context) - initiator = notifications._get_request_audit_info(context) - self.identity_api.delete_user(user_id, initiator) - - @controller.v2_deprecated - def set_user_enabled(self, context, user_id, user): - return self.update_user(context, user_id, user) - - @controller.v2_deprecated - def set_user_password(self, context, user_id, user): - user = self._normalize_OSKSADM_password_on_request(user) - return self.update_user(context, user_id, user) - - @staticmethod - def _normalize_OSKSADM_password_on_request(ref): - """Sets the password from the OS-KSADM Admin Extension. - - The OS-KSADM Admin Extension documentation says that - `OS-KSADM:password` can be used in place of `password`. - - """ - if 'OS-KSADM:password' in ref: - ref['password'] = ref.pop('OS-KSADM:password') - return ref - - -@dependency.requires('identity_api') -class UserV3(controller.V3Controller): - collection_name = 'users' - member_name = 'user' - - def __init__(self): - super(UserV3, self).__init__() - self.get_member_from_driver = self.identity_api.get_user - - def _check_user_and_group_protection(self, context, prep_info, - user_id, group_id): - ref = {} - ref['user'] = self.identity_api.get_user(user_id) - ref['group'] = self.identity_api.get_group(group_id) - self.check_protection(context, prep_info, ref) - - @controller.protected() - @validation.validated(schema.user_create, 'user') - def create_user(self, context, user): - # The manager layer will generate the unique ID for users - ref = self._normalize_dict(user) - ref = self._normalize_domain_id(context, ref) - initiator = notifications._get_request_audit_info(context) - ref = self.identity_api.create_user(ref, initiator) - return UserV3.wrap_member(context, ref) - - 
@controller.filterprotected('domain_id', 'enabled', 'name') - def list_users(self, context, filters): - hints = UserV3.build_driver_hints(context, filters) - refs = self.identity_api.list_users( - domain_scope=self._get_domain_id_for_list_request(context), - hints=hints) - return UserV3.wrap_collection(context, refs, hints=hints) - - @controller.filterprotected('domain_id', 'enabled', 'name') - def list_users_in_group(self, context, filters, group_id): - hints = UserV3.build_driver_hints(context, filters) - refs = self.identity_api.list_users_in_group(group_id, hints=hints) - return UserV3.wrap_collection(context, refs, hints=hints) - - @controller.protected() - def get_user(self, context, user_id): - ref = self.identity_api.get_user(user_id) - return UserV3.wrap_member(context, ref) - - def _update_user(self, context, user_id, user): - self._require_matching_id(user_id, user) - self._require_matching_domain_id( - user_id, user, self.identity_api.get_user) - initiator = notifications._get_request_audit_info(context) - ref = self.identity_api.update_user(user_id, user, initiator) - return UserV3.wrap_member(context, ref) - - @controller.protected() - @validation.validated(schema.user_update, 'user') - def update_user(self, context, user_id, user): - return self._update_user(context, user_id, user) - - @controller.protected(callback=_check_user_and_group_protection) - def add_user_to_group(self, context, user_id, group_id): - initiator = notifications._get_request_audit_info(context) - self.identity_api.add_user_to_group(user_id, group_id, initiator) - - @controller.protected(callback=_check_user_and_group_protection) - def check_user_in_group(self, context, user_id, group_id): - return self.identity_api.check_user_in_group(user_id, group_id) - - @controller.protected(callback=_check_user_and_group_protection) - def remove_user_from_group(self, context, user_id, group_id): - initiator = notifications._get_request_audit_info(context) - 
self.identity_api.remove_user_from_group(user_id, group_id, initiator) - - @controller.protected() - def delete_user(self, context, user_id): - initiator = notifications._get_request_audit_info(context) - return self.identity_api.delete_user(user_id, initiator) - - @controller.protected() - def change_password(self, context, user_id, user): - original_password = user.get('original_password') - if original_password is None: - raise exception.ValidationError(target='user', - attribute='original_password') - - password = user.get('password') - if password is None: - raise exception.ValidationError(target='user', - attribute='password') - try: - self.identity_api.change_password( - context, user_id, original_password, password) - except AssertionError: - raise exception.Unauthorized() - - -@dependency.requires('identity_api') -class GroupV3(controller.V3Controller): - collection_name = 'groups' - member_name = 'group' - - def __init__(self): - super(GroupV3, self).__init__() - self.get_member_from_driver = self.identity_api.get_group - - @controller.protected() - @validation.validated(schema.group_create, 'group') - def create_group(self, context, group): - # The manager layer will generate the unique ID for groups - ref = self._normalize_dict(group) - ref = self._normalize_domain_id(context, ref) - initiator = notifications._get_request_audit_info(context) - ref = self.identity_api.create_group(ref, initiator) - return GroupV3.wrap_member(context, ref) - - @controller.filterprotected('domain_id', 'name') - def list_groups(self, context, filters): - hints = GroupV3.build_driver_hints(context, filters) - refs = self.identity_api.list_groups( - domain_scope=self._get_domain_id_for_list_request(context), - hints=hints) - return GroupV3.wrap_collection(context, refs, hints=hints) - - @controller.filterprotected('name') - def list_groups_for_user(self, context, filters, user_id): - hints = GroupV3.build_driver_hints(context, filters) - refs = 
self.identity_api.list_groups_for_user(user_id, hints=hints) - return GroupV3.wrap_collection(context, refs, hints=hints) - - @controller.protected() - def get_group(self, context, group_id): - ref = self.identity_api.get_group(group_id) - return GroupV3.wrap_member(context, ref) - - @controller.protected() - @validation.validated(schema.group_update, 'group') - def update_group(self, context, group_id, group): - self._require_matching_id(group_id, group) - self._require_matching_domain_id( - group_id, group, self.identity_api.get_group) - initiator = notifications._get_request_audit_info(context) - ref = self.identity_api.update_group(group_id, group, initiator) - return GroupV3.wrap_member(context, ref) - - @controller.protected() - def delete_group(self, context, group_id): - initiator = notifications._get_request_audit_info(context) - self.identity_api.delete_group(group_id, initiator) diff --git a/keystone-moon/keystone/identity/core.py b/keystone-moon/keystone/identity/core.py deleted file mode 100644 index 2f52a358..00000000 --- a/keystone-moon/keystone/identity/core.py +++ /dev/null @@ -1,1613 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Main entry point into the Identity service.""" - -import abc -import functools -import os -import threading -import uuid - -from oslo_config import cfg -from oslo_log import log -from oslo_log import versionutils -import six - -from keystone import assignment # TODO(lbragstad): Decouple this dependency -from keystone.common import cache -from keystone.common import clean -from keystone.common import config -from keystone.common import dependency -from keystone.common import driver_hints -from keystone.common import manager -from keystone import exception -from keystone.i18n import _, _LW -from keystone.identity.mapping_backends import mapping -from keystone import notifications - - -CONF = cfg.CONF - -LOG = log.getLogger(__name__) - -MEMOIZE = cache.get_memoization_decorator(group='identity') - -DOMAIN_CONF_FHEAD = 'keystone.' -DOMAIN_CONF_FTAIL = '.conf' - -# The number of times we will attempt to register a domain to use the SQL -# driver, if we find that another process is in the middle of registering or -# releasing at the same time as us. -REGISTRATION_ATTEMPTS = 10 - -# Config Registration Types -SQL_DRIVER = 'SQL' - - -def filter_user(user_ref): - """Filter out private items in a user dict. - - 'password', 'tenants' and 'groups' are never returned. - - :returns: user_ref - - """ - if user_ref: - user_ref = user_ref.copy() - user_ref.pop('password', None) - user_ref.pop('tenants', None) - user_ref.pop('groups', None) - user_ref.pop('domains', None) - try: - user_ref['extra'].pop('password', None) - user_ref['extra'].pop('tenants', None) - except KeyError: # nosec - # ok to not have extra in the user_ref. - pass - return user_ref - - -@dependency.requires('domain_config_api', 'resource_api') -class DomainConfigs(dict): - """Discover, store and provide access to domain specific configs. - - The setup_domain_drivers() call will be made via the wrapper from - the first call to any driver function handled by this manager. 
- - Domain specific configurations are only supported for the identity backend - and the individual configurations are either specified in the resource - database or in individual domain configuration files, depending on the - setting of the 'domain_configurations_from_database' config option. - - The result will be that for each domain with a specific configuration, - this class will hold a reference to a ConfigOpts and driver object that - the identity manager and driver can use. - - """ - - configured = False - driver = None - _any_sql = False - lock = threading.Lock() - - def _load_driver(self, domain_config): - return manager.load_driver(Manager.driver_namespace, - domain_config['cfg'].identity.driver, - domain_config['cfg']) - - def _load_config_from_file(self, resource_api, file_list, domain_name): - - def _assert_no_more_than_one_sql_driver(domain_id, new_config, - config_file): - """Ensure there is no more than one sql driver. - - Check to see if the addition of the driver in this new config - would cause there to be more than one sql driver. - - """ - if (new_config['driver'].is_sql and - (self.driver.is_sql or self._any_sql)): - # The addition of this driver would cause us to have more than - # one sql driver, so raise an exception. - raise exception.MultipleSQLDriversInConfig(source=config_file) - self._any_sql = self._any_sql or new_config['driver'].is_sql - - try: - domain_ref = resource_api.get_domain_by_name(domain_name) - except exception.DomainNotFound: - LOG.warning( - _LW('Invalid domain name (%s) found in config file name'), - domain_name) - return - - # Create a new entry in the domain config dict, which contains - # a new instance of both the conf environment and driver using - # options defined in this set of config files. 
Later, when we - # service calls via this Manager, we'll index via this domain - # config dict to make sure we call the right driver - domain_config = {} - domain_config['cfg'] = cfg.ConfigOpts() - config.configure(conf=domain_config['cfg']) - domain_config['cfg'](args=[], project='keystone', - default_config_files=file_list) - domain_config['driver'] = self._load_driver(domain_config) - _assert_no_more_than_one_sql_driver(domain_ref['id'], - domain_config, - file_list) - self[domain_ref['id']] = domain_config - - def _setup_domain_drivers_from_files(self, standard_driver, resource_api): - """Read the domain specific configuration files and load the drivers. - - Domain configuration files are stored in the domain config directory, - and must be named of the form: - - keystone..conf - - For each file, call the load config method where the domain_name - will be turned into a domain_id and then: - - - Create a new config structure, adding in the specific additional - options defined in this config file - - Initialise a new instance of the required driver with this new config - - """ - conf_dir = CONF.identity.domain_config_dir - if not os.path.exists(conf_dir): - LOG.warning(_LW('Unable to locate domain config directory: %s'), - conf_dir) - return - - for r, d, f in os.walk(conf_dir): - for fname in f: - if (fname.startswith(DOMAIN_CONF_FHEAD) and - fname.endswith(DOMAIN_CONF_FTAIL)): - if fname.count('.') >= 2: - self._load_config_from_file( - resource_api, [os.path.join(r, fname)], - fname[len(DOMAIN_CONF_FHEAD): - -len(DOMAIN_CONF_FTAIL)]) - else: - LOG.debug(('Ignoring file (%s) while scanning domain ' - 'config directory'), - fname) - - def _load_config_from_database(self, domain_id, specific_config): - - def _assert_no_more_than_one_sql_driver(domain_id, new_config): - """Ensure adding driver doesn't push us over the limit of 1 - - The checks we make in this method need to take into account that - we may be in a multiple process configuration and ensure that - 
any race conditions are avoided. - - """ - if not new_config['driver'].is_sql: - self.domain_config_api.release_registration(domain_id) - return - - # To ensure the current domain is the only SQL driver, we attempt - # to register our use of SQL. If we get it we know we are good, - # if we fail to register it then we should: - # - # - First check if another process has registered for SQL for our - # domain, in which case we are fine - # - If a different domain has it, we should check that this domain - # is still valid, in case, for example, domain deletion somehow - # failed to remove its registration (i.e. we self heal for these - # kinds of issues). - - domain_registered = 'Unknown' - for attempt in range(REGISTRATION_ATTEMPTS): - if self.domain_config_api.obtain_registration( - domain_id, SQL_DRIVER): - LOG.debug('Domain %s successfully registered to use the ' - 'SQL driver.', domain_id) - return - - # We failed to register our use, let's find out who is using it - try: - domain_registered = ( - self.domain_config_api.read_registration( - SQL_DRIVER)) - except exception.ConfigRegistrationNotFound: - msg = ('While attempting to register domain %(domain)s to ' - 'use the SQL driver, another process released it, ' - 'retrying (attempt %(attempt)s).') - LOG.debug(msg, {'domain': domain_id, - 'attempt': attempt + 1}) - continue - - if domain_registered == domain_id: - # Another process already registered it for us, so we are - # fine. In the race condition when another process is - # in the middle of deleting this domain, we know the domain - # is already disabled and hence telling the caller that we - # are registered is benign. - LOG.debug('While attempting to register domain %s to use ' - 'the SQL driver, found that another process had ' - 'already registered this domain. 
This is normal ' - 'in multi-process configurations.', domain_id) - return - - # So we don't have it, but someone else does...let's check that - # this domain is still valid - try: - self.resource_api.get_domain(domain_registered) - except exception.DomainNotFound: - msg = ('While attempting to register domain %(domain)s to ' - 'use the SQL driver, found that it was already ' - 'registered to a domain that no longer exists ' - '(%(old_domain)s). Removing this stale ' - 'registration and retrying (attempt %(attempt)s).') - LOG.debug(msg, {'domain': domain_id, - 'old_domain': domain_registered, - 'attempt': attempt + 1}) - self.domain_config_api.release_registration( - domain_registered, type=SQL_DRIVER) - continue - - # The domain is valid, so we really do have an attempt at more - # than one SQL driver. - details = ( - _('Config API entity at /domains/%s/config') % domain_id) - raise exception.MultipleSQLDriversInConfig(source=details) - - # We fell out of the loop without either registering our domain or - # being able to find who has it...either we were very very very - # unlucky or something is awry. - msg = _('Exceeded attempts to register domain %(domain)s to use ' - 'the SQL driver, the last domain that appears to have ' - 'had it is %(last_domain)s, giving up') % { - 'domain': domain_id, 'last_domain': domain_registered} - raise exception.UnexpectedError(msg) - - domain_config = {} - domain_config['cfg'] = cfg.ConfigOpts() - config.configure(conf=domain_config['cfg']) - domain_config['cfg'](args=[], project='keystone', - default_config_files=[]) - - # Override any options that have been passed in as specified in the - # database. 
- for group in specific_config: - for option in specific_config[group]: - domain_config['cfg'].set_override( - option, specific_config[group][option], - group, enforce_type=True) - - domain_config['cfg_overrides'] = specific_config - domain_config['driver'] = self._load_driver(domain_config) - _assert_no_more_than_one_sql_driver(domain_id, domain_config) - self[domain_id] = domain_config - - def _setup_domain_drivers_from_database(self, standard_driver, - resource_api): - """Read domain specific configuration from database and load drivers. - - Domain configurations are stored in the domain-config backend, - so we go through each domain to find those that have a specific config - defined, and for those that do we: - - - Create a new config structure, overriding any specific options - defined in the resource backend - - Initialise a new instance of the required driver with this new config - - """ - for domain in resource_api.list_domains(): - domain_config_options = ( - self.domain_config_api. 
- get_config_with_sensitive_info(domain['id'])) - if domain_config_options: - self._load_config_from_database(domain['id'], - domain_config_options) - - def setup_domain_drivers(self, standard_driver, resource_api): - # This is called by the api call wrapper - self.driver = standard_driver - - if CONF.identity.domain_configurations_from_database: - self._setup_domain_drivers_from_database(standard_driver, - resource_api) - else: - self._setup_domain_drivers_from_files(standard_driver, - resource_api) - self.configured = True - - def get_domain_driver(self, domain_id): - self.check_config_and_reload_domain_driver_if_required(domain_id) - if domain_id in self: - return self[domain_id]['driver'] - - def get_domain_conf(self, domain_id): - self.check_config_and_reload_domain_driver_if_required(domain_id) - if domain_id in self: - return self[domain_id]['cfg'] - else: - return CONF - - def reload_domain_driver(self, domain_id): - # Only used to support unit tests that want to set - # new config values. This should only be called once - # the domains have been configured, since it relies on - # the fact that the configuration files/database have already been - # read. - if self.configured: - if domain_id in self: - self[domain_id]['driver'] = ( - self._load_driver(self[domain_id])) - else: - # The standard driver - self.driver = self.driver() - - def check_config_and_reload_domain_driver_if_required(self, domain_id): - """Check for, and load, any new domain specific config for this domain. - - This is only supported for the database-stored domain specific - configuration. - - When the domain specific drivers were set up, we stored away the - specific config for this domain that was available at that time. So we - now read the current version and compare. While this might seem - somewhat inefficient, the sensitive config call is cached, so should be - light weight. 
More importantly, when the cache timeout is reached, we - will get any config that has been updated from any other keystone - process. - - This cache-timeout approach works for both multi-process and - multi-threaded keystone configurations. In multi-threaded - configurations, even though we might remove a driver object (that - could be in use by another thread), this won't actually be thrown away - until all references to it have been broken. When that other - thread is released back and is restarted with another command to - process, next time it accesses the driver it will pickup the new one. - - """ - if (not CONF.identity.domain_specific_drivers_enabled or - not CONF.identity.domain_configurations_from_database): - # If specific drivers are not enabled, then there is nothing to do. - # If we are not storing the configurations in the database, then - # we'll only re-read the domain specific config files on startup - # of keystone. - return - - latest_domain_config = ( - self.domain_config_api. - get_config_with_sensitive_info(domain_id)) - domain_config_in_use = domain_id in self - - if latest_domain_config: - if (not domain_config_in_use or - latest_domain_config != self[domain_id]['cfg_overrides']): - self._load_config_from_database(domain_id, - latest_domain_config) - elif domain_config_in_use: - # The domain specific config has been deleted, so should remove the - # specific driver for this domain. - try: - del self[domain_id] - except KeyError: # nosec - # Allow this error in case we are unlucky and in a - # multi-threaded situation, two threads happen to be running - # in lock step. - pass - # If we fall into the else condition, this means there is no domain - # config set, and there is none in use either, so we have nothing - # to do. - - -def domains_configured(f): - """Wraps API calls to lazy load domain configs after init. 
- - This is required since the assignment manager needs to be initialized - before this manager, and yet this manager's init wants to be - able to make assignment calls (to build the domain configs). So - instead, we check if the domains have been initialized on entry - to each call, and if requires load them, - - """ - @functools.wraps(f) - def wrapper(self, *args, **kwargs): - if (not self.domain_configs.configured and - CONF.identity.domain_specific_drivers_enabled): - # If domain specific driver has not been configured, acquire the - # lock and proceed with loading the driver. - with self.domain_configs.lock: - # Check again just in case some other thread has already - # completed domain config. - if not self.domain_configs.configured: - self.domain_configs.setup_domain_drivers( - self.driver, self.resource_api) - return f(self, *args, **kwargs) - return wrapper - - -def exception_translated(exception_type): - """Wraps API calls to map to correct exception.""" - def _exception_translated(f): - @functools.wraps(f) - def wrapper(self, *args, **kwargs): - try: - return f(self, *args, **kwargs) - except exception.PublicIDNotFound as e: - if exception_type == 'user': - raise exception.UserNotFound(user_id=str(e)) - elif exception_type == 'group': - raise exception.GroupNotFound(group_id=str(e)) - elif exception_type == 'assertion': - raise AssertionError(_('Invalid user / password')) - else: - raise - return wrapper - return _exception_translated - - -@notifications.listener -@dependency.provider('identity_api') -@dependency.requires('assignment_api', 'credential_api', 'id_mapping_api', - 'resource_api', 'revoke_api', 'shadow_users_api') -class Manager(manager.Manager): - """Default pivot point for the Identity backend. - - See :mod:`keystone.common.manager.Manager` for more details on how this - dynamically calls the backend. - - This class also handles the support of domain specific backends, by using - the DomainConfigs class. 
The setup call for DomainConfigs is called - from with the @domains_configured wrapper in a lazy loading fashion - to get around the fact that we can't satisfy the assignment api it needs - from within our __init__() function since the assignment driver is not - itself yet initialized. - - Each of the identity calls are pre-processed here to choose, based on - domain, which of the drivers should be called. The non-domain-specific - driver is still in place, and is used if there is no specific driver for - the domain in question (or we are not using multiple domain drivers). - - Starting with Juno, in order to be able to obtain the domain from - just an ID being presented as part of an API call, a public ID to domain - and local ID mapping is maintained. This mapping also allows for the local - ID of drivers that do not provide simple UUIDs (such as LDAP) to be - referenced via a public facing ID. The mapping itself is automatically - generated as entities are accessed via the driver. - - This mapping is only used when: - - the entity is being handled by anything other than the default driver, or - - the entity is being handled by the default LDAP driver and backward - compatible IDs are not required. - - This means that in the standard case of a single SQL backend or the default - settings of a single LDAP backend (since backward compatible IDs is set to - True by default), no mapping is used. An alternative approach would be to - always use the mapping table, but in the cases where we don't need it to - make the public and local IDs the same. It is felt that not using the - mapping by default is a more prudent way to introduce this functionality. 
- - """ - - driver_namespace = 'keystone.identity' - - _USER = 'user' - _GROUP = 'group' - - def __init__(self): - super(Manager, self).__init__(CONF.identity.driver) - self.domain_configs = DomainConfigs() - - self.event_callbacks = { - notifications.ACTIONS.deleted: { - 'domain': [self._domain_deleted], - }, - } - - def _domain_deleted(self, service, resource_type, operation, - payload): - domain_id = payload['resource_info'] - - user_refs = self.list_users(domain_scope=domain_id) - group_refs = self.list_groups(domain_scope=domain_id) - - for group in group_refs: - # Cleanup any existing groups. - try: - self.delete_group(group['id']) - except exception.GroupNotFound: - LOG.debug(('Group %(groupid)s not found when deleting domain ' - 'contents for %(domainid)s, continuing with ' - 'cleanup.'), - {'groupid': group['id'], 'domainid': domain_id}) - - # And finally, delete the users themselves - for user in user_refs: - try: - self.delete_user(user['id']) - except exception.UserNotFound: - LOG.debug(('User %(userid)s not found when deleting domain ' - 'contents for %(domainid)s, continuing with ' - 'cleanup.'), - {'userid': user['id'], 'domainid': domain_id}) - - # Domain ID normalization methods - - def _set_domain_id_and_mapping(self, ref, domain_id, driver, - entity_type): - """Patch the domain_id/public_id into the resulting entity(ies). - - :param ref: the entity or list of entities to post process - :param domain_id: the domain scope used for the call - :param driver: the driver used to execute the call - :param entity_type: whether this is a user or group - - :returns: post processed entity or list or entities - - Called to post-process the entity being returned, using a mapping - to substitute a public facing ID as necessary. This method must - take into account: - - - If the driver is not domain aware, then we must set the domain - attribute of all entities irrespective of mapping. 
- - If the driver does not support UUIDs, then we always want to provide - a mapping, except for the special case of this being the default - driver and backward_compatible_ids is set to True. This is to ensure - that entity IDs do not change for an existing LDAP installation (only - single domain/driver LDAP configurations were previously supported). - - If the driver does support UUIDs, then we always create a mapping - entry, but use the local UUID as the public ID. The exception to - - this is that if we just have single driver (i.e. not using specific - multi-domain configs), then we don't both with the mapping at all. - - """ - conf = CONF.identity - - if not self._needs_post_processing(driver): - # a classic case would be when running with a single SQL driver - return ref - - LOG.debug('ID Mapping - Domain ID: %(domain)s, ' - 'Default Driver: %(driver)s, ' - 'Domains: %(aware)s, UUIDs: %(generate)s, ' - 'Compatible IDs: %(compat)s', - {'domain': domain_id, - 'driver': (driver == self.driver), - 'aware': driver.is_domain_aware(), - 'generate': driver.generates_uuids(), - 'compat': CONF.identity_mapping.backward_compatible_ids}) - - if isinstance(ref, dict): - return self._set_domain_id_and_mapping_for_single_ref( - ref, domain_id, driver, entity_type, conf) - elif isinstance(ref, list): - return [self._set_domain_id_and_mapping( - x, domain_id, driver, entity_type) for x in ref] - else: - raise ValueError(_('Expected dict or list: %s') % type(ref)) - - def _needs_post_processing(self, driver): - """Returns whether entity from driver needs domain added or mapping.""" - return (driver is not self.driver or not driver.generates_uuids() or - not driver.is_domain_aware()) - - def _set_domain_id_and_mapping_for_single_ref(self, ref, domain_id, - driver, entity_type, conf): - LOG.debug('Local ID: %s', ref['id']) - ref = ref.copy() - - self._insert_domain_id_if_needed(ref, driver, domain_id, conf) - - if self._is_mapping_needed(driver): - local_entity = {'domain_id': 
ref['domain_id'], - 'local_id': ref['id'], - 'entity_type': entity_type} - public_id = self.id_mapping_api.get_public_id(local_entity) - if public_id: - ref['id'] = public_id - LOG.debug('Found existing mapping to public ID: %s', - ref['id']) - else: - # Need to create a mapping. If the driver generates UUIDs - # then pass the local UUID in as the public ID to use. - if driver.generates_uuids(): - public_id = ref['id'] - ref['id'] = self.id_mapping_api.create_id_mapping( - local_entity, public_id) - LOG.debug('Created new mapping to public ID: %s', - ref['id']) - return ref - - def _insert_domain_id_if_needed(self, ref, driver, domain_id, conf): - """Inserts the domain ID into the ref, if required. - - If the driver can't handle domains, then we need to insert the - domain_id into the entity being returned. If the domain_id is - None that means we are running in a single backend mode, so to - remain backwardly compatible, we put in the default domain ID. - """ - if not driver.is_domain_aware(): - if domain_id is None: - domain_id = conf.default_domain_id - ref['domain_id'] = domain_id - - def _is_mapping_needed(self, driver): - """Returns whether mapping is needed. - - There are two situations where we must use the mapping: - - this isn't the default driver (i.e. multiple backends), or - - we have a single backend that doesn't use UUIDs - The exception to the above is that we must honor backward - compatibility if this is the default driver (e.g. 
to support - current LDAP) - """ - is_not_default_driver = driver is not self.driver - return (is_not_default_driver or ( - not driver.generates_uuids() and - not CONF.identity_mapping.backward_compatible_ids)) - - def _clear_domain_id_if_domain_unaware(self, driver, ref): - """Clear domain_id details if driver is not domain aware.""" - if not driver.is_domain_aware() and 'domain_id' in ref: - ref = ref.copy() - ref.pop('domain_id') - return ref - - def _select_identity_driver(self, domain_id): - """Choose a backend driver for the given domain_id. - - :param domain_id: The domain_id for which we want to find a driver. If - the domain_id is specified as None, then this means - we need a driver that handles multiple domains. - - :returns: chosen backend driver - - If there is a specific driver defined for this domain then choose it. - If the domain is None, or there no specific backend for the given - domain is found, then we chose the default driver. - - """ - if domain_id is None: - driver = self.driver - else: - driver = (self.domain_configs.get_domain_driver(domain_id) or - self.driver) - - # If the driver is not domain aware (e.g. LDAP) then check to - # ensure we are not mapping multiple domains onto it - the only way - # that would happen is that the default driver is LDAP and the - # domain is anything other than None or the default domain. - if (not driver.is_domain_aware() and driver == self.driver and - domain_id != CONF.identity.default_domain_id and - domain_id is not None): - LOG.warning(_LW('Found multiple domains being mapped to a ' - 'driver that does not support that (e.g. ' - 'LDAP) - Domain ID: %(domain)s, ' - 'Default Driver: %(driver)s'), - {'domain': domain_id, - 'driver': (driver == self.driver)}) - raise exception.DomainNotFound(domain_id=domain_id) - return driver - - def _get_domain_driver_and_entity_id(self, public_id): - """Look up details using the public ID. 
- - :param public_id: the ID provided in the call - - :returns: domain_id, which can be None to indicate that the driver - in question supports multiple domains - driver selected based on this domain - entity_id which will is understood by the driver. - - Use the mapping table to look up the domain, driver and local entity - that is represented by the provided public ID. Handle the situations - where we do not use the mapping (e.g. single driver that understands - UUIDs etc.) - - """ - conf = CONF.identity - # First, since we don't know anything about the entity yet, we must - # assume it needs mapping, so long as we are using domain specific - # drivers. - if conf.domain_specific_drivers_enabled: - local_id_ref = self.id_mapping_api.get_id_mapping(public_id) - if local_id_ref: - return ( - local_id_ref['domain_id'], - self._select_identity_driver(local_id_ref['domain_id']), - local_id_ref['local_id']) - - # So either we are using multiple drivers but the public ID is invalid - # (and hence was not found in the mapping table), or the public ID is - # being handled by the default driver. Either way, the only place left - # to look is in that standard driver. However, we don't yet know if - # this driver also needs mapping (e.g. LDAP in non backward - # compatibility mode). - driver = self.driver - if driver.generates_uuids(): - if driver.is_domain_aware: - # No mapping required, and the driver can handle the domain - # information itself. The classic case of this is the - # current SQL driver. - return (None, driver, public_id) - else: - # Although we don't have any drivers of this type, i.e. that - # understand UUIDs but not domains, conceptually you could. - return (conf.default_domain_id, driver, public_id) - - # So the only place left to find the ID is in the default driver which - # we now know doesn't generate UUIDs - if not CONF.identity_mapping.backward_compatible_ids: - # We are not running in backward compatibility mode, so we - # must use a mapping. 
- local_id_ref = self.id_mapping_api.get_id_mapping(public_id) - if local_id_ref: - return ( - local_id_ref['domain_id'], - driver, - local_id_ref['local_id']) - else: - raise exception.PublicIDNotFound(id=public_id) - - # If we reach here, this means that the default driver - # requires no mapping - but also doesn't understand domains - # (e.g. the classic single LDAP driver situation). Hence we pass - # back the public_ID unmodified and use the default domain (to - # keep backwards compatibility with existing installations). - # - # It is still possible that the public ID is just invalid in - # which case we leave this to the caller to check. - return (conf.default_domain_id, driver, public_id) - - def _assert_user_and_group_in_same_backend( - self, user_entity_id, user_driver, group_entity_id, group_driver): - """Ensures that user and group IDs are backed by the same backend. - - Raise a CrossBackendNotAllowed exception if they are not from the same - backend, otherwise return None. - - """ - if user_driver is not group_driver: - # Determine first if either IDs don't exist by calling - # the driver.get methods (which will raise a NotFound - # exception). - user_driver.get_user(user_entity_id) - group_driver.get_group(group_entity_id) - # If we get here, then someone is attempting to create a cross - # backend membership, which is not allowed. 
- raise exception.CrossBackendNotAllowed(group_id=group_entity_id, - user_id=user_entity_id) - - def _mark_domain_id_filter_satisfied(self, hints): - if hints: - for filter in hints.filters: - if (filter['name'] == 'domain_id' and - filter['comparator'] == 'equals'): - hints.filters.remove(filter) - - def _ensure_domain_id_in_hints(self, hints, domain_id): - if (domain_id is not None and - not hints.get_exact_filter_by_name('domain_id')): - hints.add_filter('domain_id', domain_id) - - def _set_list_limit_in_hints(self, hints, driver): - """Set list limit in hints from driver - - If a hints list is provided, the wrapper will insert the relevant - limit into the hints so that the underlying driver call can try and - honor it. If the driver does truncate the response, it will update the - 'truncated' attribute in the 'limit' entry in the hints list, which - enables the caller of this function to know if truncation has taken - place. If, however, the driver layer is unable to perform truncation, - the 'limit' entry is simply left in the hints list for the caller to - handle. - - A _get_list_limit() method is required to be present in the object - class hierarchy, which returns the limit for this backend to which - we will truncate. - - If a hints list is not provided in the arguments of the wrapped call - then any limits set in the config file are ignored. This allows - internal use of such wrapped methods where the entire data set is - needed as input for the calculations of some other API (e.g. get role - assignments for a given project). - - This method, specific to identity manager, is used instead of more - general response_truncated, because the limit for identity entities - can be overriden in domain-specific config files. The driver to use - is determined during processing of the passed parameters and - response_truncated is designed to set the limit before any processing. 
- """ - if hints is None: - return - - list_limit = driver._get_list_limit() - if list_limit: - hints.set_limit(list_limit) - - # The actual driver calls - these are pre/post processed here as - # part of the Manager layer to make sure we: - # - # - select the right driver for this domain - # - clear/set domain_ids for drivers that do not support domains - # - create any ID mapping that might be required - - @notifications.emit_event('authenticate') - @domains_configured - @exception_translated('assertion') - def authenticate(self, context, user_id, password): - domain_id, driver, entity_id = ( - self._get_domain_driver_and_entity_id(user_id)) - ref = driver.authenticate(entity_id, password) - return self._set_domain_id_and_mapping( - ref, domain_id, driver, mapping.EntityType.USER) - - @domains_configured - @exception_translated('user') - def create_user(self, user_ref, initiator=None): - user = user_ref.copy() - user['name'] = clean.user_name(user['name']) - user.setdefault('enabled', True) - user['enabled'] = clean.user_enabled(user['enabled']) - domain_id = user['domain_id'] - self.resource_api.get_domain(domain_id) - - # For creating a user, the domain is in the object itself - domain_id = user_ref['domain_id'] - driver = self._select_identity_driver(domain_id) - user = self._clear_domain_id_if_domain_unaware(driver, user) - # Generate a local ID - in the future this might become a function of - # the underlying driver so that it could conform to rules set down by - # that particular driver type. 
- user['id'] = uuid.uuid4().hex - ref = driver.create_user(user['id'], user) - notifications.Audit.created(self._USER, user['id'], initiator) - return self._set_domain_id_and_mapping( - ref, domain_id, driver, mapping.EntityType.USER) - - @domains_configured - @exception_translated('user') - @MEMOIZE - def get_user(self, user_id): - domain_id, driver, entity_id = ( - self._get_domain_driver_and_entity_id(user_id)) - ref = driver.get_user(entity_id) - return self._set_domain_id_and_mapping( - ref, domain_id, driver, mapping.EntityType.USER) - - def assert_user_enabled(self, user_id, user=None): - """Assert the user and the user's domain are enabled. - - :raise AssertionError if the user or the user's domain is disabled. - """ - if user is None: - user = self.get_user(user_id) - self.resource_api.assert_domain_enabled(user['domain_id']) - if not user.get('enabled', True): - raise AssertionError(_('User is disabled: %s') % user_id) - - @domains_configured - @exception_translated('user') - @MEMOIZE - def get_user_by_name(self, user_name, domain_id): - driver = self._select_identity_driver(domain_id) - ref = driver.get_user_by_name(user_name, domain_id) - return self._set_domain_id_and_mapping( - ref, domain_id, driver, mapping.EntityType.USER) - - @domains_configured - @exception_translated('user') - def list_users(self, domain_scope=None, hints=None): - driver = self._select_identity_driver(domain_scope) - self._set_list_limit_in_hints(hints, driver) - hints = hints or driver_hints.Hints() - if driver.is_domain_aware(): - # Force the domain_scope into the hint to ensure that we only get - # back domains for that scope. - self._ensure_domain_id_in_hints(hints, domain_scope) - else: - # We are effectively satisfying any domain_id filter by the above - # driver selection, so remove any such filter. 
- self._mark_domain_id_filter_satisfied(hints) - ref_list = driver.list_users(hints) - return self._set_domain_id_and_mapping( - ref_list, domain_scope, driver, mapping.EntityType.USER) - - def _check_update_of_domain_id(self, new_domain, old_domain): - if new_domain != old_domain: - versionutils.report_deprecated_feature( - LOG, - _('update of domain_id is deprecated as of Mitaka ' - 'and will be removed in O.') - ) - - @domains_configured - @exception_translated('user') - def update_user(self, user_id, user_ref, initiator=None): - old_user_ref = self.get_user(user_id) - user = user_ref.copy() - if 'name' in user: - user['name'] = clean.user_name(user['name']) - if 'enabled' in user: - user['enabled'] = clean.user_enabled(user['enabled']) - if 'domain_id' in user: - self._check_update_of_domain_id(user['domain_id'], - old_user_ref['domain_id']) - self.resource_api.get_domain(user['domain_id']) - if 'id' in user: - if user_id != user['id']: - raise exception.ValidationError(_('Cannot change user ID')) - # Since any ID in the user dict is now irrelevant, remove its so as - # the driver layer won't be confused by the fact the this is the - # public ID not the local ID - user.pop('id') - - domain_id, driver, entity_id = ( - self._get_domain_driver_and_entity_id(user_id)) - user = self._clear_domain_id_if_domain_unaware(driver, user) - self.get_user.invalidate(self, old_user_ref['id']) - self.get_user_by_name.invalidate(self, old_user_ref['name'], - old_user_ref['domain_id']) - - ref = driver.update_user(entity_id, user) - - notifications.Audit.updated(self._USER, user_id, initiator) - - enabled_change = ((user.get('enabled') is False) and - user['enabled'] != old_user_ref.get('enabled')) - if enabled_change or user.get('password') is not None: - self.emit_invalidate_user_token_persistence(user_id) - - return self._set_domain_id_and_mapping( - ref, domain_id, driver, mapping.EntityType.USER) - - @domains_configured - @exception_translated('user') - def 
delete_user(self, user_id, initiator=None): - domain_id, driver, entity_id = ( - self._get_domain_driver_and_entity_id(user_id)) - # Get user details to invalidate the cache. - user_old = self.get_user(user_id) - driver.delete_user(entity_id) - self.assignment_api.delete_user_assignments(user_id) - self.get_user.invalidate(self, user_id) - self.get_user_by_name.invalidate(self, user_old['name'], - user_old['domain_id']) - self.credential_api.delete_credentials_for_user(user_id) - self.id_mapping_api.delete_id_mapping(user_id) - notifications.Audit.deleted(self._USER, user_id, initiator) - - # Invalidate user role assignments cache region, as it may be caching - # role assignments where the actor is the specified user - assignment.COMPUTED_ASSIGNMENTS_REGION.invalidate() - - @domains_configured - @exception_translated('group') - def create_group(self, group_ref, initiator=None): - group = group_ref.copy() - group.setdefault('description', '') - domain_id = group['domain_id'] - self.resource_api.get_domain(domain_id) - - # For creating a group, the domain is in the object itself - domain_id = group_ref['domain_id'] - driver = self._select_identity_driver(domain_id) - group = self._clear_domain_id_if_domain_unaware(driver, group) - # Generate a local ID - in the future this might become a function of - # the underlying driver so that it could conform to rules set down by - # that particular driver type. 
- group['id'] = uuid.uuid4().hex - ref = driver.create_group(group['id'], group) - - notifications.Audit.created(self._GROUP, group['id'], initiator) - - return self._set_domain_id_and_mapping( - ref, domain_id, driver, mapping.EntityType.GROUP) - - @domains_configured - @exception_translated('group') - @MEMOIZE - def get_group(self, group_id): - domain_id, driver, entity_id = ( - self._get_domain_driver_and_entity_id(group_id)) - ref = driver.get_group(entity_id) - return self._set_domain_id_and_mapping( - ref, domain_id, driver, mapping.EntityType.GROUP) - - @domains_configured - @exception_translated('group') - def get_group_by_name(self, group_name, domain_id): - driver = self._select_identity_driver(domain_id) - ref = driver.get_group_by_name(group_name, domain_id) - return self._set_domain_id_and_mapping( - ref, domain_id, driver, mapping.EntityType.GROUP) - - @domains_configured - @exception_translated('group') - def update_group(self, group_id, group, initiator=None): - if 'domain_id' in group: - old_group_ref = self.get_group(group_id) - self._check_update_of_domain_id(group['domain_id'], - old_group_ref['domain_id']) - self.resource_api.get_domain(group['domain_id']) - domain_id, driver, entity_id = ( - self._get_domain_driver_and_entity_id(group_id)) - group = self._clear_domain_id_if_domain_unaware(driver, group) - ref = driver.update_group(entity_id, group) - self.get_group.invalidate(self, group_id) - notifications.Audit.updated(self._GROUP, group_id, initiator) - return self._set_domain_id_and_mapping( - ref, domain_id, driver, mapping.EntityType.GROUP) - - @domains_configured - @exception_translated('group') - def delete_group(self, group_id, initiator=None): - domain_id, driver, entity_id = ( - self._get_domain_driver_and_entity_id(group_id)) - user_ids = (u['id'] for u in self.list_users_in_group(group_id)) - driver.delete_group(entity_id) - self.get_group.invalidate(self, group_id) - self.id_mapping_api.delete_id_mapping(group_id) - 
self.assignment_api.delete_group_assignments(group_id) - - notifications.Audit.deleted(self._GROUP, group_id, initiator) - - for uid in user_ids: - self.emit_invalidate_user_token_persistence(uid) - - # Invalidate user role assignments cache region, as it may be caching - # role assignments expanded from the specified group to its users - assignment.COMPUTED_ASSIGNMENTS_REGION.invalidate() - - @domains_configured - @exception_translated('group') - def add_user_to_group(self, user_id, group_id, initiator=None): - @exception_translated('user') - def get_entity_info_for_user(public_id): - return self._get_domain_driver_and_entity_id(public_id) - - _domain_id, group_driver, group_entity_id = ( - self._get_domain_driver_and_entity_id(group_id)) - # Get the same info for the user_id, taking care to map any - # exceptions correctly - _domain_id, user_driver, user_entity_id = ( - get_entity_info_for_user(user_id)) - - self._assert_user_and_group_in_same_backend( - user_entity_id, user_driver, group_entity_id, group_driver) - - group_driver.add_user_to_group(user_entity_id, group_entity_id) - - # Invalidate user role assignments cache region, as it may now need to - # include role assignments from the specified group to its users - assignment.COMPUTED_ASSIGNMENTS_REGION.invalidate() - notifications.Audit.added_to(self._GROUP, group_id, self._USER, - user_id, initiator) - - @domains_configured - @exception_translated('group') - def remove_user_from_group(self, user_id, group_id, initiator=None): - @exception_translated('user') - def get_entity_info_for_user(public_id): - return self._get_domain_driver_and_entity_id(public_id) - - _domain_id, group_driver, group_entity_id = ( - self._get_domain_driver_and_entity_id(group_id)) - # Get the same info for the user_id, taking care to map any - # exceptions correctly - _domain_id, user_driver, user_entity_id = ( - get_entity_info_for_user(user_id)) - - self._assert_user_and_group_in_same_backend( - user_entity_id, user_driver, 
group_entity_id, group_driver) - - group_driver.remove_user_from_group(user_entity_id, group_entity_id) - self.emit_invalidate_user_token_persistence(user_id) - - # Invalidate user role assignments cache region, as it may be caching - # role assignments expanded from this group to this user - assignment.COMPUTED_ASSIGNMENTS_REGION.invalidate() - notifications.Audit.removed_from(self._GROUP, group_id, self._USER, - user_id, initiator) - - def emit_invalidate_user_token_persistence(self, user_id): - """Emit a notification to the callback system to revoke user tokens. - - This method and associated callback listener removes the need for - making a direct call to another manager to delete and revoke tokens. - - :param user_id: user identifier - :type user_id: string - """ - notifications.Audit.internal( - notifications.INVALIDATE_USER_TOKEN_PERSISTENCE, user_id - ) - - def emit_invalidate_grant_token_persistence(self, user_project): - """Emit a notification to the callback system to revoke grant tokens. - - This method and associated callback listener removes the need for - making a direct call to another manager to delete and revoke tokens. 
- - :param user_project: {'user_id': user_id, 'project_id': project_id} - :type user_project: dict - """ - notifications.Audit.internal( - notifications.INVALIDATE_USER_PROJECT_TOKEN_PERSISTENCE, - user_project - ) - - @domains_configured - @exception_translated('user') - def list_groups_for_user(self, user_id, hints=None): - domain_id, driver, entity_id = ( - self._get_domain_driver_and_entity_id(user_id)) - self._set_list_limit_in_hints(hints, driver) - hints = hints or driver_hints.Hints() - if not driver.is_domain_aware(): - # We are effectively satisfying any domain_id filter by the above - # driver selection, so remove any such filter - self._mark_domain_id_filter_satisfied(hints) - ref_list = driver.list_groups_for_user(entity_id, hints) - return self._set_domain_id_and_mapping( - ref_list, domain_id, driver, mapping.EntityType.GROUP) - - @domains_configured - @exception_translated('group') - def list_groups(self, domain_scope=None, hints=None): - driver = self._select_identity_driver(domain_scope) - self._set_list_limit_in_hints(hints, driver) - hints = hints or driver_hints.Hints() - if driver.is_domain_aware(): - # Force the domain_scope into the hint to ensure that we only get - # back domains for that scope. - self._ensure_domain_id_in_hints(hints, domain_scope) - else: - # We are effectively satisfying any domain_id filter by the above - # driver selection, so remove any such filter. 
- self._mark_domain_id_filter_satisfied(hints) - ref_list = driver.list_groups(hints) - return self._set_domain_id_and_mapping( - ref_list, domain_scope, driver, mapping.EntityType.GROUP) - - @domains_configured - @exception_translated('group') - def list_users_in_group(self, group_id, hints=None): - domain_id, driver, entity_id = ( - self._get_domain_driver_and_entity_id(group_id)) - self._set_list_limit_in_hints(hints, driver) - hints = hints or driver_hints.Hints() - if not driver.is_domain_aware(): - # We are effectively satisfying any domain_id filter by the above - # driver selection, so remove any such filter - self._mark_domain_id_filter_satisfied(hints) - ref_list = driver.list_users_in_group(entity_id, hints) - return self._set_domain_id_and_mapping( - ref_list, domain_id, driver, mapping.EntityType.USER) - - @domains_configured - @exception_translated('group') - def check_user_in_group(self, user_id, group_id): - @exception_translated('user') - def get_entity_info_for_user(public_id): - return self._get_domain_driver_and_entity_id(public_id) - - _domain_id, group_driver, group_entity_id = ( - self._get_domain_driver_and_entity_id(group_id)) - # Get the same info for the user_id, taking care to map any - # exceptions correctly - _domain_id, user_driver, user_entity_id = ( - get_entity_info_for_user(user_id)) - - self._assert_user_and_group_in_same_backend( - user_entity_id, user_driver, group_entity_id, group_driver) - - return group_driver.check_user_in_group(user_entity_id, - group_entity_id) - - @domains_configured - def change_password(self, context, user_id, original_password, - new_password): - - # authenticate() will raise an AssertionError if authentication fails - self.authenticate(context, user_id, original_password) - - update_dict = {'password': new_password} - self.update_user(user_id, update_dict) - - @MEMOIZE - def shadow_federated_user(self, idp_id, protocol_id, unique_id, - display_name): - """Shadows a federated user by mapping to a 
user. - - :param idp_id: identity provider id - :param protocol_id: protocol id - :param unique_id: unique id for the user within the IdP - :param display_name: user's display name - - :returns: dictionary of the mapped User entity - """ - user_dict = {} - try: - self.shadow_users_api.update_federated_user_display_name( - idp_id, protocol_id, unique_id, display_name) - user_dict = self.shadow_users_api.get_federated_user( - idp_id, protocol_id, unique_id) - except exception.UserNotFound: - federated_dict = { - 'idp_id': idp_id, - 'protocol_id': protocol_id, - 'unique_id': unique_id, - 'display_name': display_name - } - user_dict = self.shadow_users_api.create_federated_user( - federated_dict) - return user_dict - - -@six.add_metaclass(abc.ABCMeta) -class IdentityDriverV8(object): - """Interface description for an Identity driver.""" - - def _get_conf(self): - try: - return self.conf or CONF - except AttributeError: - return CONF - - def _get_list_limit(self): - conf = self._get_conf() - # use list_limit from domain-specific config. If list_limit in - # domain-specific config is not set, look it up in the default config - return (conf.identity.list_limit or conf.list_limit or - CONF.identity.list_limit or CONF.list_limit) - - def is_domain_aware(self): - """Indicates if Driver supports domains.""" - return True - - def default_assignment_driver(self): - # TODO(morganfainberg): To be removed when assignment driver based - # upon [identity]/driver option is removed in the "O" release. - return 'sql' - - @property - def is_sql(self): - """Indicates if this Driver uses SQL.""" - return False - - @property - def multiple_domains_supported(self): - return (self.is_domain_aware() or - CONF.identity.domain_specific_drivers_enabled) - - def generates_uuids(self): - """Indicates if Driver generates UUIDs as the local entity ID.""" - return True - - @abc.abstractmethod - def authenticate(self, user_id, password): - """Authenticate a given user and password. 
- - :returns: user_ref - :raises AssertionError: If user or password is invalid. - """ - raise exception.NotImplemented() # pragma: no cover - - # user crud - - @abc.abstractmethod - def create_user(self, user_id, user): - """Creates a new user. - - :raises keystone.exception.Conflict: If a duplicate user exists. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_users(self, hints): - """List users in the system. - - :param hints: filter hints which the driver should - implement if at all possible. - - :returns: a list of user_refs or an empty list. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_users_in_group(self, group_id, hints): - """List users in a group. - - :param group_id: the group in question - :param hints: filter hints which the driver should - implement if at all possible. - - :returns: a list of user_refs or an empty list. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_user(self, user_id): - """Get a user by ID. - - :returns: user_ref - :raises keystone.exception.UserNotFound: If the user doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def update_user(self, user_id, user): - """Updates an existing user. - - :raises keystone.exception.UserNotFound: If the user doesn't exist. - :raises keystone.exception.Conflict: If a duplicate user exists. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def add_user_to_group(self, user_id, group_id): - """Adds a user to a group. - - :raises keystone.exception.UserNotFound: If the user doesn't exist. - :raises keystone.exception.GroupNotFound: If the group doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def check_user_in_group(self, user_id, group_id): - """Checks if a user is a member of a group. 
- - :raises keystone.exception.UserNotFound: If the user doesn't exist. - :raises keystone.exception.GroupNotFound: If the group doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def remove_user_from_group(self, user_id, group_id): - """Removes a user from a group. - - :raises keystone.exception.NotFound: If the entity not found. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_user(self, user_id): - """Deletes an existing user. - - :raises keystone.exception.UserNotFound: If the user doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_user_by_name(self, user_name, domain_id): - """Get a user by name. - - :returns: user_ref - :raises keystone.exception.UserNotFound: If the user doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - # group crud - - @abc.abstractmethod - def create_group(self, group_id, group): - """Creates a new group. - - :raises keystone.exception.Conflict: If a duplicate group exists. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_groups(self, hints): - """List groups in the system. - - :param hints: filter hints which the driver should - implement if at all possible. - - :returns: a list of group_refs or an empty list. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_groups_for_user(self, user_id, hints): - """List groups a user is in - - :param user_id: the user in question - :param hints: filter hints which the driver should - implement if at all possible. - - :returns: a list of group_refs or an empty list. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_group(self, group_id): - """Get a group by ID. - - :returns: group_ref - :raises keystone.exception.GroupNotFound: If the group doesn't exist. 
- - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_group_by_name(self, group_name, domain_id): - """Get a group by name. - - :returns: group_ref - :raises keystone.exception.GroupNotFound: If the group doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def update_group(self, group_id, group): - """Updates an existing group. - - :raises keystone.exception.GroupNotFound: If the group doesn't exist. - :raises keystone.exception.Conflict: If a duplicate group exists. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_group(self, group_id): - """Deletes an existing group. - - :raises keystone.exception.GroupNotFound: If the group doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - # end of identity - - -Driver = manager.create_legacy_driver(IdentityDriverV8) - - -@dependency.provider('id_mapping_api') -class MappingManager(manager.Manager): - """Default pivot point for the ID Mapping backend.""" - - driver_namespace = 'keystone.identity.id_mapping' - - def __init__(self): - super(MappingManager, self).__init__(CONF.identity_mapping.driver) - - -@six.add_metaclass(abc.ABCMeta) -class MappingDriverV8(object): - """Interface description for an ID Mapping driver.""" - - @abc.abstractmethod - def get_public_id(self, local_entity): - """Returns the public ID for the given local entity. - - :param dict local_entity: Containing the entity domain, local ID and - type ('user' or 'group'). - :returns: public ID, or None if no mapping is found. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_id_mapping(self, public_id): - """Returns the local mapping. - - :param public_id: The public ID for the mapping required. - :returns dict: Containing the entity domain, local ID and type. If no - mapping is found, it returns None. 
- - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def create_id_mapping(self, local_entity, public_id=None): - """Create and store a mapping to a public_id. - - :param dict local_entity: Containing the entity domain, local ID and - type ('user' or 'group'). - :param public_id: If specified, this will be the public ID. If this - is not specified, a public ID will be generated. - :returns: public ID - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_id_mapping(self, public_id): - """Deletes an entry for the given public_id. - - :param public_id: The public ID for the mapping to be deleted. - - The method is silent if no mapping is found. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def purge_mappings(self, purge_filter): - """Purge selected identity mappings. - - :param dict purge_filter: Containing the attributes of the filter that - defines which entries to purge. An empty - filter means purge all mappings. 
- - """ - raise exception.NotImplemented() # pragma: no cover - - -MappingDriver = manager.create_legacy_driver(MappingDriverV8) - - -@dependency.provider('shadow_users_api') -class ShadowUsersManager(manager.Manager): - """Default pivot point for the Shadow Users backend.""" - - driver_namespace = 'keystone.identity.shadow_users' - - def __init__(self): - super(ShadowUsersManager, self).__init__(CONF.shadow_users.driver) - - -@six.add_metaclass(abc.ABCMeta) -class ShadowUsersDriverV9(object): - """Interface description for an Shadow Users driver.""" - - @abc.abstractmethod - def create_federated_user(self, federated_dict): - """Create a new user with the federated identity - - :param dict federated_dict: Reference to the federated user - :param user_id: user ID for linking to the federated identity - :returns dict: Containing the user reference - - """ - raise exception.NotImplemented() - - @abc.abstractmethod - def get_federated_user(self, idp_id, protocol_id, unique_id): - """Returns the found user for the federated identity - - :param idp_id: The identity provider ID - :param protocol_id: The federation protocol ID - :param unique_id: The unique ID for the user - :returns dict: Containing the user reference - - """ - raise exception.NotImplemented() - - @abc.abstractmethod - def update_federated_user_display_name(self, idp_id, protocol_id, - unique_id, display_name): - """Updates federated user's display name if changed - - :param idp_id: The identity provider ID - :param protocol_id: The federation protocol ID - :param unique_id: The unique ID for the user - :param display_name: The user's display name - - """ - raise exception.NotImplemented() diff --git a/keystone-moon/keystone/identity/generator.py b/keystone-moon/keystone/identity/generator.py deleted file mode 100644 index 05ad2df5..00000000 --- a/keystone-moon/keystone/identity/generator.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright 2014 IBM Corp. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""ID Generator provider interface.""" - -import abc - -from oslo_config import cfg -import six - -from keystone.common import dependency -from keystone.common import manager -from keystone import exception - - -CONF = cfg.CONF - - -@dependency.provider('id_generator_api') -class Manager(manager.Manager): - """Default pivot point for the identifier generator backend.""" - - driver_namespace = 'keystone.identity.id_generator' - - def __init__(self): - super(Manager, self).__init__(CONF.identity_mapping.generator) - - -@six.add_metaclass(abc.ABCMeta) -class IDGenerator(object): - """Interface description for an ID Generator provider.""" - - @abc.abstractmethod - def generate_public_ID(self, mapping): - """Return a Public ID for the given mapping dict. - - :param dict mapping: The items to be hashed. - - The ID must be reproducible and no more than 64 chars in length. - The ID generated should be independent of the order of the items - in the mapping dict. 
- - """ - raise exception.NotImplemented() # pragma: no cover diff --git a/keystone-moon/keystone/identity/id_generators/__init__.py b/keystone-moon/keystone/identity/id_generators/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/identity/id_generators/sha256.py b/keystone-moon/keystone/identity/id_generators/sha256.py deleted file mode 100644 index e3a8b416..00000000 --- a/keystone-moon/keystone/identity/id_generators/sha256.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright 2014 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import hashlib - -import six - -from keystone.identity import generator - - -class Generator(generator.IDGenerator): - - def generate_public_ID(self, mapping): - m = hashlib.sha256() - for key in sorted(six.iterkeys(mapping)): - m.update(mapping[key].encode('utf-8')) - return m.hexdigest() diff --git a/keystone-moon/keystone/identity/mapping_backends/__init__.py b/keystone-moon/keystone/identity/mapping_backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/identity/mapping_backends/mapping.py b/keystone-moon/keystone/identity/mapping_backends/mapping.py deleted file mode 100644 index dddf36c1..00000000 --- a/keystone-moon/keystone/identity/mapping_backends/mapping.py +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright 2014 IBM Corp. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -class EntityType(object): - USER = 'user' - GROUP = 'group' diff --git a/keystone-moon/keystone/identity/mapping_backends/sql.py b/keystone-moon/keystone/identity/mapping_backends/sql.py deleted file mode 100644 index 91b33dd7..00000000 --- a/keystone-moon/keystone/identity/mapping_backends/sql.py +++ /dev/null @@ -1,98 +0,0 @@ -# Copyright 2014 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone.common import dependency -from keystone.common import sql -from keystone import identity -from keystone.identity.mapping_backends import mapping as identity_mapping - - -class IDMapping(sql.ModelBase, sql.ModelDictMixin): - __tablename__ = 'id_mapping' - public_id = sql.Column(sql.String(64), primary_key=True) - domain_id = sql.Column(sql.String(64), nullable=False) - local_id = sql.Column(sql.String(64), nullable=False) - # NOTE(henry-nash): Postgres requires a name to be defined for an Enum - entity_type = sql.Column( - sql.Enum(identity_mapping.EntityType.USER, - identity_mapping.EntityType.GROUP, - name='entity_type'), - nullable=False) - # Unique constraint to ensure you can't store more than one mapping to the - # same underlying values - __table_args__ = ( - sql.UniqueConstraint('domain_id', 'local_id', 'entity_type'),) - - -@dependency.requires('id_generator_api') -class Mapping(identity.MappingDriverV8): - - def get_public_id(self, local_entity): - # NOTE(henry-nash): Since the Public ID is regeneratable, rather - # than search for the entry using the local entity values, we - # could create the hash and do a PK lookup. However this would only - # work if we hashed all the entries, even those that already generate - # UUIDs, like SQL. Further, this would only work if the generation - # algorithm was immutable (e.g. it had always been sha256). 
- with sql.session_for_read() as session: - query = session.query(IDMapping.public_id) - query = query.filter_by(domain_id=local_entity['domain_id']) - query = query.filter_by(local_id=local_entity['local_id']) - query = query.filter_by(entity_type=local_entity['entity_type']) - try: - public_ref = query.one() - public_id = public_ref.public_id - return public_id - except sql.NotFound: - return None - - def get_id_mapping(self, public_id): - with sql.session_for_read() as session: - mapping_ref = session.query(IDMapping).get(public_id) - if mapping_ref: - return mapping_ref.to_dict() - - def create_id_mapping(self, local_entity, public_id=None): - entity = local_entity.copy() - with sql.session_for_write() as session: - if public_id is None: - public_id = self.id_generator_api.generate_public_ID(entity) - entity['public_id'] = public_id - mapping_ref = IDMapping.from_dict(entity) - session.add(mapping_ref) - return public_id - - def delete_id_mapping(self, public_id): - with sql.session_for_write() as session: - try: - session.query(IDMapping).filter( - IDMapping.public_id == public_id).delete() - except sql.NotFound: # nosec - # NOTE(morganfainberg): There is nothing to delete and nothing - # to do. 
- pass - - def purge_mappings(self, purge_filter): - with sql.session_for_write() as session: - query = session.query(IDMapping) - if 'domain_id' in purge_filter: - query = query.filter_by(domain_id=purge_filter['domain_id']) - if 'public_id' in purge_filter: - query = query.filter_by(public_id=purge_filter['public_id']) - if 'local_id' in purge_filter: - query = query.filter_by(local_id=purge_filter['local_id']) - if 'entity_type' in purge_filter: - query = query.filter_by( - entity_type=purge_filter['entity_type']) - query.delete() diff --git a/keystone-moon/keystone/identity/routers.py b/keystone-moon/keystone/identity/routers.py deleted file mode 100644 index e274d6f4..00000000 --- a/keystone-moon/keystone/identity/routers.py +++ /dev/null @@ -1,84 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
-"""WSGI Routers for the Identity service.""" - -from keystone.common import json_home -from keystone.common import router -from keystone.common import wsgi -from keystone.identity import controllers - - -class Admin(wsgi.ComposableRouter): - def add_routes(self, mapper): - # User Operations - user_controller = controllers.User() - mapper.connect('/users/{user_id}', - controller=user_controller, - action='get_user', - conditions=dict(method=['GET'])) - - -class Routers(wsgi.RoutersBase): - - def append_v3_routers(self, mapper, routers): - user_controller = controllers.UserV3() - routers.append( - router.Router(user_controller, - 'users', 'user', - resource_descriptions=self.v3_resources)) - - self._add_resource( - mapper, user_controller, - path='/users/{user_id}/password', - post_action='change_password', - rel=json_home.build_v3_resource_relation('user_change_password'), - path_vars={ - 'user_id': json_home.Parameters.USER_ID, - }) - - self._add_resource( - mapper, user_controller, - path='/groups/{group_id}/users', - get_action='list_users_in_group', - rel=json_home.build_v3_resource_relation('group_users'), - path_vars={ - 'group_id': json_home.Parameters.GROUP_ID, - }) - - self._add_resource( - mapper, user_controller, - path='/groups/{group_id}/users/{user_id}', - put_action='add_user_to_group', - get_head_action='check_user_in_group', - delete_action='remove_user_from_group', - rel=json_home.build_v3_resource_relation('group_user'), - path_vars={ - 'group_id': json_home.Parameters.GROUP_ID, - 'user_id': json_home.Parameters.USER_ID, - }) - - group_controller = controllers.GroupV3() - routers.append( - router.Router(group_controller, - 'groups', 'group', - resource_descriptions=self.v3_resources)) - - self._add_resource( - mapper, group_controller, - path='/users/{user_id}/groups', - get_action='list_groups_for_user', - rel=json_home.build_v3_resource_relation('user_groups'), - path_vars={ - 'user_id': json_home.Parameters.USER_ID, - }) diff --git 
a/keystone-moon/keystone/identity/schema.py b/keystone-moon/keystone/identity/schema.py deleted file mode 100644 index 047fcf02..00000000 --- a/keystone-moon/keystone/identity/schema.py +++ /dev/null @@ -1,67 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.common import validation -from keystone.common.validation import parameter_types - - -# NOTE(lhcheng): the max length is not applicable since it is specific -# to the SQL backend, LDAP does not have length limitation. 
-_identity_name = { - 'type': 'string', - 'minLength': 1 -} - -_user_properties = { - 'default_project_id': validation.nullable(parameter_types.id_string), - 'description': validation.nullable(parameter_types.description), - 'domain_id': parameter_types.id_string, - 'enabled': parameter_types.boolean, - 'name': _identity_name, - 'password': { - 'type': ['string', 'null'] - } -} - -user_create = { - 'type': 'object', - 'properties': _user_properties, - 'required': ['name'], - 'additionalProperties': True -} - -user_update = { - 'type': 'object', - 'properties': _user_properties, - 'minProperties': 1, - 'additionalProperties': True -} - -_group_properties = { - 'description': validation.nullable(parameter_types.description), - 'domain_id': parameter_types.id_string, - 'name': _identity_name -} - -group_create = { - 'type': 'object', - 'properties': _group_properties, - 'required': ['name'], - 'additionalProperties': True -} - -group_update = { - 'type': 'object', - 'properties': _group_properties, - 'minProperties': 1, - 'additionalProperties': True -} diff --git a/keystone-moon/keystone/identity/shadow_backends/__init__.py b/keystone-moon/keystone/identity/shadow_backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/identity/shadow_backends/sql.py b/keystone-moon/keystone/identity/shadow_backends/sql.py deleted file mode 100644 index af5a995b..00000000 --- a/keystone-moon/keystone/identity/shadow_backends/sql.py +++ /dev/null @@ -1,73 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -import uuid - -from keystone.common import sql -from keystone import exception -from keystone import identity -from keystone.identity.backends import sql as model - - -class ShadowUsers(identity.ShadowUsersDriverV9): - @sql.handle_conflicts(conflict_type='federated_user') - def create_federated_user(self, federated_dict): - user = { - 'id': uuid.uuid4().hex, - 'enabled': True - } - with sql.session_for_write() as session: - federated_ref = model.FederatedUser.from_dict(federated_dict) - user_ref = model.User.from_dict(user) - user_ref.federated_users.append(federated_ref) - session.add(user_ref) - return identity.filter_user(user_ref.to_dict()) - - def get_federated_user(self, idp_id, protocol_id, unique_id): - user_ref = self._get_federated_user(idp_id, protocol_id, unique_id) - return identity.filter_user(user_ref.to_dict()) - - def _get_federated_user(self, idp_id, protocol_id, unique_id): - """Returns the found user for the federated identity - - :param idp_id: The identity provider ID - :param protocol_id: The federation protocol ID - :param unique_id: The user's unique ID (unique within the IdP) - :returns User: Returns a reference to the User - - """ - with sql.session_for_read() as session: - query = session.query(model.User).outerjoin(model.LocalUser) - query = query.join(model.FederatedUser) - query = query.filter(model.FederatedUser.idp_id == idp_id) - query = query.filter(model.FederatedUser.protocol_id == - protocol_id) - query = query.filter(model.FederatedUser.unique_id == unique_id) - try: - user_ref = query.one() - except sql.NotFound: - raise exception.UserNotFound(user_id=unique_id) - return user_ref - - @sql.handle_conflicts(conflict_type='federated_user') - def update_federated_user_display_name(self, idp_id, protocol_id, - unique_id, display_name): - with sql.session_for_write() as session: - query = session.query(model.FederatedUser) - 
query = query.filter(model.FederatedUser.idp_id == idp_id) - query = query.filter(model.FederatedUser.protocol_id == - protocol_id) - query = query.filter(model.FederatedUser.unique_id == unique_id) - query = query.filter(model.FederatedUser.display_name != - display_name) - query.update({'display_name': display_name}) - return diff --git a/keystone-moon/keystone/locale/de/LC_MESSAGES/keystone-log-critical.po b/keystone-moon/keystone/locale/de/LC_MESSAGES/keystone-log-critical.po deleted file mode 100644 index 9f77b841..00000000 --- a/keystone-moon/keystone/locale/de/LC_MESSAGES/keystone-log-critical.po +++ /dev/null @@ -1,25 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# OpenStack Infra , 2015. #zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2014-08-31 03:19+0000\n" -"Last-Translator: openstackjenkins \n" -"Language: de\n" -"Plural-Forms: nplurals=2; plural=(n != 1);\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: German\n" - -#, python-format -msgid "Unable to open template file %s" -msgstr "Vorlagendatei %s kann nicht geöffnet werden" diff --git a/keystone-moon/keystone/locale/de/LC_MESSAGES/keystone-log-info.po b/keystone-moon/keystone/locale/de/LC_MESSAGES/keystone-log-info.po deleted file mode 100644 index fdf84ad9..00000000 --- a/keystone-moon/keystone/locale/de/LC_MESSAGES/keystone-log-info.po +++ /dev/null @@ -1,212 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. 
-# -# Translators: -msgid "" -msgstr "" -"Project-Id-Version: Keystone\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/keystone\n" -"POT-Creation-Date: 2015-03-09 06:03+0000\n" -"PO-Revision-Date: 2015-03-07 04:31+0000\n" -"Last-Translator: openstackjenkins \n" -"Language-Team: German (http://www.transifex.com/projects/p/keystone/language/" -"de/)\n" -"Language: de\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 1.3\n" -"Plural-Forms: nplurals=2; plural=(n != 1);\n" - -#: keystone/assignment/core.py:250 -#, python-format -msgid "Creating the default role %s because it does not exist." -msgstr "" - -#: keystone/assignment/core.py:258 -#, python-format -msgid "Creating the default role %s failed because it was already created" -msgstr "" - -#: keystone/auth/controllers.py:64 -msgid "Loading auth-plugins by class-name is deprecated." -msgstr "" - -#: keystone/auth/controllers.py:106 -#, python-format -msgid "" -"\"expires_at\" has conflicting values %(existing)s and %(new)s. Will use " -"the earliest value." 
-msgstr "" - -#: keystone/common/openssl.py:81 -#, python-format -msgid "Running command - %s" -msgstr "" - -#: keystone/common/wsgi.py:79 -msgid "No bind information present in token" -msgstr "" - -#: keystone/common/wsgi.py:83 -#, python-format -msgid "Named bind mode %s not in bind information" -msgstr "" - -#: keystone/common/wsgi.py:90 -msgid "Kerberos credentials required and not present" -msgstr "" - -#: keystone/common/wsgi.py:94 -msgid "Kerberos credentials do not match those in bind" -msgstr "" - -#: keystone/common/wsgi.py:98 -msgid "Kerberos bind authentication successful" -msgstr "" - -#: keystone/common/wsgi.py:105 -#, python-format -msgid "Couldn't verify unknown bind: {%(bind_type)s: %(identifier)s}" -msgstr "" - -#: keystone/common/environment/eventlet_server.py:103 -#, python-format -msgid "Starting %(arg0)s on %(host)s:%(port)s" -msgstr "Starten von %(arg0)s auf %(host)s:%(port)s" - -#: keystone/common/kvs/core.py:138 -#, python-format -msgid "Adding proxy '%(proxy)s' to KVS %(name)s." -msgstr "" - -#: keystone/common/kvs/core.py:188 -#, python-format -msgid "Using %(func)s as KVS region %(name)s key_mangler" -msgstr "" - -#: keystone/common/kvs/core.py:200 -#, python-format -msgid "Using default dogpile sha1_mangle_key as KVS region %s key_mangler" -msgstr "" - -#: keystone/common/kvs/core.py:210 -#, python-format -msgid "KVS region %s key_mangler disabled." -msgstr "" - -#: keystone/contrib/example/core.py:64 keystone/contrib/example/core.py:73 -#, python-format -msgid "" -"Received the following notification: service %(service)s, resource_type: " -"%(resource_type)s, operation %(operation)s payload %(payload)s" -msgstr "" - -#: keystone/openstack/common/eventlet_backdoor.py:146 -#, python-format -msgid "Eventlet backdoor listening on %(port)s for process %(pid)d" -msgstr "Eventlet backdoor hört auf %(port)s für Prozess %(pid)d" - -#: keystone/openstack/common/service.py:173 -#, python-format -msgid "Caught %s, exiting" -msgstr "%s abgefangen. 
Vorgang wird beendet" - -#: keystone/openstack/common/service.py:231 -msgid "Parent process has died unexpectedly, exiting" -msgstr "" -"Übergeordneter Prozess wurde unerwartet abgebrochen. Vorgang wird beendet" - -#: keystone/openstack/common/service.py:262 -#, python-format -msgid "Child caught %s, exiting" -msgstr "Untergeordnetes Element %s abgefangen; Vorgang wird beendet" - -#: keystone/openstack/common/service.py:301 -msgid "Forking too fast, sleeping" -msgstr "Verzweigung zu schnell; im Ruhemodus" - -#: keystone/openstack/common/service.py:320 -#, python-format -msgid "Started child %d" -msgstr "Untergeordnetes Element %d gestartet" - -#: keystone/openstack/common/service.py:330 -#, python-format -msgid "Starting %d workers" -msgstr "Starten von %d Workers" - -#: keystone/openstack/common/service.py:347 -#, python-format -msgid "Child %(pid)d killed by signal %(sig)d" -msgstr "Untergeordnetes Element %(pid)d durch Signal %(sig)d abgebrochen" - -#: keystone/openstack/common/service.py:351 -#, python-format -msgid "Child %(pid)s exited with status %(code)d" -msgstr "Untergeordnete %(pid)s mit Status %(code)d beendet" - -#: keystone/openstack/common/service.py:390 -#, python-format -msgid "Caught %s, stopping children" -msgstr "%s abgefangen, untergeordnete Elemente werden gestoppt" - -#: keystone/openstack/common/service.py:399 -msgid "Wait called after thread killed. Cleaning up." 
-msgstr "" - -#: keystone/openstack/common/service.py:415 -#, python-format -msgid "Waiting on %d children to exit" -msgstr "Warten auf Beenden von %d untergeordneten Elementen" - -#: keystone/token/persistence/backends/sql.py:279 -#, python-format -msgid "Total expired tokens removed: %d" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:72 -msgid "" -"[fernet_tokens] key_repository does not appear to exist; attempting to " -"create it" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:130 -#, python-format -msgid "Created a new key: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:143 -msgid "Key repository is already initialized; aborting." -msgstr "" - -#: keystone/token/providers/fernet/utils.py:179 -#, python-format -msgid "Starting key rotation with %(count)s key files: %(list)s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:185 -#, python-format -msgid "Current primary key is: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:187 -#, python-format -msgid "Next primary key will be: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:197 -#, python-format -msgid "Promoted key 0 to be the primary: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:213 -#, python-format -msgid "Excess keys to purge: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:237 -#, python-format -msgid "Loaded %(count)s encryption keys from: %(dir)s" -msgstr "" diff --git a/keystone-moon/keystone/locale/de/LC_MESSAGES/keystone.po b/keystone-moon/keystone/locale/de/LC_MESSAGES/keystone.po deleted file mode 100644 index 71503a36..00000000 --- a/keystone-moon/keystone/locale/de/LC_MESSAGES/keystone.po +++ /dev/null @@ -1,1657 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# Ettore Atalan , 2014 -# Robert Simai, 2014 -# Reik Keutterling , 2015 -# Frank Kloeker , 2016. 
#zanata -# Monika Wolf , 2016. #zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2016-03-24 03:13+0000\n" -"Last-Translator: Monika Wolf \n" -"Language: de\n" -"Plural-Forms: nplurals=2; plural=(n != 1);\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: German\n" - -#, python-format -msgid "%(detail)s" -msgstr "%(detail)s" - -#, python-format -msgid "%(driver)s is not supported driver version" -msgstr "%(driver)s ist keine unterstützte Treiberversion." - -#, python-format -msgid "" -"%(entity)s name cannot contain the following reserved characters: %(chars)s" -msgstr "" -"Der %(entity)s-Name darf nicht die folgenden reservierten Zeichen enthalten: " -"%(chars)s" - -#, python-format -msgid "" -"%(event)s is not a valid notification event, must be one of: %(actions)s" -msgstr "" -"%(event)s ist kein gültiges Benachrichtigungsereignis; erforderlich ist " -"%(actions)s" - -#, python-format -msgid "%(host)s is not a trusted dashboard host" -msgstr "%(host)s ist kein vertrauenswürdiger Dashboard-Host" - -#, python-format -msgid "%(message)s %(amendment)s" -msgstr "%(message)s %(amendment)s" - -#, python-format -msgid "" -"%(mod_name)s doesn't provide database migrations. The migration repository " -"path at %(path)s doesn't exist or isn't a directory." -msgstr "" -"%(mod_name)s bietet keine Datenbankmigrationen. Der Migrations-Repository-" -"Pfad unter %(path)s ist nicht vorhanden oder ist kein Verzeichnis." - -#, python-format -msgid "%(prior_role_id)s does not imply %(implied_role_id)s" -msgstr "%(prior_role_id)s impliziert nicht %(implied_role_id)s" - -#, python-format -msgid "%(property_name)s cannot be less than %(min_length)s characters." 
-msgstr "%(property_name)s darf nicht kleiner als %(min_length)s Zeichen sein." - -#, python-format -msgid "%(property_name)s is not a %(display_expected_type)s" -msgstr "%(property_name)s ist nicht %(display_expected_type)s" - -#, python-format -msgid "%(property_name)s should not be greater than %(max_length)s characters." -msgstr "%(property_name)s sollte nicht größer als %(max_length)s Zeichen sein." - -#, python-format -msgid "%(role_id)s cannot be an implied roles" -msgstr "%(role_id)s darf keine implizierte Rolle sein" - -#, python-format -msgid "%s cannot be empty." -msgstr "%s darf nicht leer sein." - -#, python-format -msgid "%s extension does not exist." -msgstr "Erweiterung %s ist nicht vorhanden." - -#, python-format -msgid "%s field is required and cannot be empty" -msgstr "%s-Feld ist erforderlich und darf nicht leer sein" - -#, python-format -msgid "%s field(s) cannot be empty" -msgstr "%s-Felder können nicht leer sein" - -#, python-format -msgid "" -"%s for the LDAP identity backend has been deprecated in the Mitaka release " -"in favor of read-only identity LDAP access. It will be removed in the \"O\" " -"release." -msgstr "" -"%s für das LDAP-ID-Back-End wurde in Mitaka zugunsten des schreibgeschützten " -"ID-LDAP-Zugriffs eingestellt und wird im \"O\"-Release entfernt." - -msgid "(Disable insecure_debug mode to suppress these details.)" -msgstr "(Modus insecure_debug inaktivieren, um diese Details zu unterdrücken.)" - -msgid "--all option cannot be mixed with other options" -msgstr "--all-Option kann nicht zusammen mit anderen Optionen verwendet werden" - -msgid "A project-scoped token is required to produce a service catalog." -msgstr "" -"Ein projektorientiertes Token ist zum Produzieren eines Dienstekatalogs " -"erforderlich." - -msgid "Access token is expired" -msgstr "Zugriffstoken ist abgelaufen" - -msgid "Access token not found" -msgstr "Zugriffstoken nicht gefunden" - -msgid "Additional authentications steps required." 
-msgstr "Zusätzliche Authentifizierungsschritte sind notwendig." - -msgid "An unexpected error occurred when retrieving domain configs" -msgstr "" -"Beim Abrufen der Domänenkonfigurationen ist ein unerwarteter Fehler " -"aufgetreten" - -#, python-format -msgid "An unexpected error occurred when trying to store %s" -msgstr "Beim Versuch, %s zu speichern, ist ein unerwarteter Fehler aufgetreten" - -msgid "An unexpected error prevented the server from fulfilling your request." -msgstr "" -"Wegen eines unerwarteten Fehlers konnte der Server Ihre Anforderung nicht " -"ausführen." - -#, python-format -msgid "" -"An unexpected error prevented the server from fulfilling your request: " -"%(exception)s" -msgstr "" -"Wegen eines unerwarteten Fehlers konnte der Server Ihre Anforderung nicht " -"ausführen: %(exception)s" - -msgid "An unhandled exception has occurred: Could not find metadata." -msgstr "" -"Eine nicht behandelte Ausnahme ist aufgetreten: Metadaten konnten nicht " -"gefunden werden." - -msgid "At least one option must be provided" -msgstr "Mindestens eine Option muss angegeben werden" - -msgid "At least one option must be provided, use either --all or --domain-name" -msgstr "" -"Mindestens eine Option muss angegeben werden. Verwenden Sie entweder --all " -"oder --domain-name" - -msgid "At least one role should be specified." -msgstr "Mindestens eine Rolle sollte angegeben werden." - -#, python-format -msgid "" -"Attempted automatic driver selection for assignment based upon " -"[identity]\\driver option failed since driver %s is not found. Set " -"[assignment]/driver to a valid driver in keystone config." -msgstr "" -"Der Versuch, für die Zuordnung den Treiber basierend auf der Option " -"[identity]\\driver automatisch auszuwählen, ist fehlgeschlagen, da der " -"Treiber %s nicht gefunden wurde. Setzen Sie die Option [assignment]/driver " -"in der Keystone-Konfiguration auf einen gültigen Treiber." - -msgid "Attempted to authenticate with an unsupported method." 
-msgstr "Versuch einer Authentifizierung mit einer nicht unterstützten Methode." - -msgid "" -"Attempting to use OS-FEDERATION token with V2 Identity Service, use V3 " -"Authentication" -msgstr "" -"Versuch, OS-FEDERATION-Token mit V2 Identity Service zu verwenden, verwenden " -"Sie v3- Authentifizierung" - -msgid "Authentication plugin error." -msgstr "Authentifizierung-Plugin-Fehler" - -#, python-format -msgid "" -"Backend `%(backend)s` is not a valid memcached backend. Valid backends: " -"%(backend_list)s" -msgstr "" -"Back-End '%(backend)s' ist kein gültiges memcached Back-End. Gültige Back-" -"Ends: %(backend_list)s" - -msgid "Cannot authorize a request token with a token issued via delegation." -msgstr "" -"Anforderungstoken kann mit einem per Delegierung ausgegebenen Token nicht " -"autorisiert werden." - -#, python-format -msgid "Cannot change %(option_name)s %(attr)s" -msgstr "%(option_name)s %(attr)s kann nicht geändert werden" - -msgid "Cannot change Domain ID" -msgstr "Die Domänen-ID kann nicht geändert werden" - -msgid "Cannot change user ID" -msgstr "Benutzer-ID kann nicht geändert werden" - -msgid "Cannot change user name" -msgstr "Benutzername kann nicht geändert werden" - -#, python-format -msgid "Cannot create an endpoint with an invalid URL: %(url)s" -msgstr "" -"Es kann kein Endpunkt mit einer ungültigen URL erstellt werden: %(url)s" - -#, python-format -msgid "Cannot create project with parent: %(project_id)s" -msgstr "" -"Projekt kann nicht mit dem übergeordneten Element %(project_id)s erstellt " -"werden" - -#, python-format -msgid "" -"Cannot create project, since it specifies its owner as domain %(domain_id)s, " -"but specifies a parent in a different domain (%(parent_domain_id)s)." -msgstr "" -"Das Projekt kann nicht erstellt werden, da es den zugehörigen Eigner als " -"Domäne %(domain_id)s angibt, jedoch ein übergeordnetes Projekt in einer " -"anderen Domäne (%(parent_domain_id)s) angibt." 
- -#, python-format -msgid "" -"Cannot create project, since its parent (%(domain_id)s) is acting as a " -"domain, but project's specified parent_id (%(parent_id)s) does not match " -"this domain_id." -msgstr "" -"Das Projekt kann nicht erstellt werden, da das zugehörige übergeordnete " -"Projekt (%(domain_id)s) als Domäne fungiert, aber die für das Projekt " -"angegebene 'parent_id' (%(parent_id)s) nicht mit dieser 'domain_id' " -"übereinstimmt." - -msgid "Cannot delete a domain that is enabled, please disable it first." -msgstr "" -"Eine aktivierte Domäne kann nicht gelöscht werden. Deaktivieren Sie sie " -"zuerst." - -#, python-format -msgid "" -"Cannot delete project %(project_id)s since its subtree contains enabled " -"projects." -msgstr "" -"Kann Projekt %(project_id)s nicht löschen, da die zugehörige untergeordnete " -"Baumstruktur aktivierte Projekte enthält." - -#, python-format -msgid "" -"Cannot delete the project %s since it is not a leaf in the hierarchy. Use " -"the cascade option if you want to delete a whole subtree." -msgstr "" -"Das Projekt %s kann nicht gelöscht werden, da es kein Blattelement in der " -"Hierarchie darstellt. Verwenden Sie die Option 'cascade', wenn Sie eine " -"vollständige, untergeordnete Baumstruktur löschen möchten. " - -#, python-format -msgid "" -"Cannot disable project %(project_id)s since its subtree contains enabled " -"projects." -msgstr "" -"Kann Projekt %(project_id)s nicht deaktivieren, da die zugehörige " -"untergeordnete Baumstruktur aktivierte Projekte enthält." - -#, python-format -msgid "Cannot enable project %s since it has disabled parents" -msgstr "" -"Kann Projekt %s nicht aktivieren, da es über inaktivierte übergeordnete " -"Projekte verfügt" - -msgid "Cannot list assignments sourced from groups and filtered by user ID." -msgstr "" -"Aus Gruppen erstellte und nach Benutzer-ID gefilterte Zuordnungen können " -"nicht aufgelistet werden." 
- -msgid "Cannot list request tokens with a token issued via delegation." -msgstr "" -"Anforderungstokens können mit einem per Delegierung ausgegebenen Token nicht " -"aufgelistet werden." - -#, python-format -msgid "Cannot open certificate %(cert_file)s. Reason: %(reason)s" -msgstr "" -"Zertifikat %(cert_file)s kann nicht geöffnet werden. Ursache: %(reason)s" - -#, python-format -msgid "Cannot remove role that has not been granted, %s" -msgstr "Nicht gewährte Rolle kann nicht entfernt werden, %s" - -msgid "" -"Cannot truncate a driver call without hints list as first parameter after " -"self " -msgstr "" -"Abschneiden eines Treiberaufrufs ohne Hinweisliste als erstem Parameter nach " -"dem Treiber nicht möglich " - -msgid "Cannot update domain_id of a project that has children." -msgstr "" -"Die Aktualisierung von 'domain_id' eines Projekts mit untergeordneten " -"Projekten ist nicht möglich." - -msgid "" -"Cannot use parents_as_list and parents_as_ids query params at the same time." -msgstr "" -"Die Abfrageparameter parents_as_list und parents_as_ids können nicht " -"gleichzeitig verwendet werden." - -msgid "" -"Cannot use subtree_as_list and subtree_as_ids query params at the same time." -msgstr "" -"Die Abfrageparameter subtree_as_list und subtree_as_ids können nicht " -"gleichzeitig verwendet werden." - -msgid "Cascade update is only allowed for enabled attribute." -msgstr "" -"Die Aktualisierungsweitergabe ist nur für aktivierte Attribute zulässig." - -msgid "" -"Combining effective and group filter will always result in an empty list." -msgstr "" -"Die Kombination von effektivem Filter und Gruppenfilter führt immer zu einer " -"leeren Liste." - -msgid "" -"Combining effective, domain and inherited filters will always result in an " -"empty list." -msgstr "" -"Die Kombination von effektivem Filter, Domänenfilter und vererbten Filtern " -"führt immer zu einer leeren Liste." 
- -#, python-format -msgid "Config API entity at /domains/%s/config" -msgstr "Konfigurations-API-Entität unter /domains/%s/config" - -#, python-format -msgid "Conflict occurred attempting to store %(type)s - %(details)s" -msgstr "Konflikt beim Versuch, %(type)s zu speichern - %(details)s" - -#, python-format -msgid "Conflicting region IDs specified: \"%(url_id)s\" != \"%(ref_id)s\"" -msgstr "" -"Angabe von Regions-IDs, die miteinander im Konflikt stehen: \"%(url_id)s\" !" -"= \"%(ref_id)s\"" - -msgid "Consumer not found" -msgstr "Kunde nicht gefunden" - -#, python-format -msgid "" -"Could not change immutable attribute(s) '%(attributes)s' in target %(target)s" -msgstr "" -"Unveränderliche Attribute '%(attributes)s' konnten nicht geändert werden in " -"Ziel %(target)s" - -#, python-format -msgid "" -"Could not determine Identity Provider ID. The configuration option " -"%(issuer_attribute)s was not found in the request environment." -msgstr "" -"Identitätsprovider-ID nicht gefunden. Die Konfigurationsoption " -"%(issuer_attribute)s wurde in der Anforderungsumgebung nicht gefunden." 
- -#, python-format -msgid "" -"Could not find %(group_or_option)s in domain configuration for domain " -"%(domain_id)s" -msgstr "" -"%(group_or_option)s konnte in der Domänenkonfiguration für Domäne " -"%(domain_id)s nicht gefunden werden" - -#, python-format -msgid "Could not find Endpoint Group: %(endpoint_group_id)s" -msgstr "Endpunktgruppe konnte nicht gefunden werden: %(endpoint_group_id)s" - -msgid "Could not find Identity Provider identifier in environment" -msgstr "Identitätsprovider-ID konnte in der Umgebung nicht gefunden werden" - -#, python-format -msgid "Could not find Identity Provider: %(idp_id)s" -msgstr "Identitätsprovider %(idp_id)s konnte nicht gefunden werden" - -#, python-format -msgid "Could not find Service Provider: %(sp_id)s" -msgstr "Service-Provider %(sp_id)s konnte nicht gefunden werden" - -#, python-format -msgid "Could not find credential: %(credential_id)s" -msgstr "Berechtigungsnachweis %(credential_id)s konnte nicht gefunden werden" - -#, python-format -msgid "Could not find domain: %(domain_id)s" -msgstr "Domäne %(domain_id)s konnte nicht gefunden werden" - -#, python-format -msgid "Could not find endpoint: %(endpoint_id)s" -msgstr "Endpunkt %(endpoint_id)s konnte nicht gefunden werden" - -#, python-format -msgid "" -"Could not find federated protocol %(protocol_id)s for Identity Provider: " -"%(idp_id)s" -msgstr "" -"Föderiertes Protokoll %(protocol_id)s konnte nicht gefunden werden für " -"Identitätsprovider: %(idp_id)s" - -#, python-format -msgid "Could not find group: %(group_id)s" -msgstr "Gruppe %(group_id)s konnte nicht gefunden werden" - -#, python-format -msgid "Could not find mapping: %(mapping_id)s" -msgstr "Zuordnung %(mapping_id)s konnte nicht gefunden werden" - -msgid "Could not find policy association" -msgstr "Richtlinienzuordnung konnte nicht gefunden werden" - -#, python-format -msgid "Could not find policy: %(policy_id)s" -msgstr "Richtlinie %(policy_id)s konnte nicht gefunden werden" - -#, python-format -msgid 
"Could not find project: %(project_id)s" -msgstr "Projekt %(project_id)s konnte nicht gefunden werden" - -#, python-format -msgid "Could not find region: %(region_id)s" -msgstr "Region %(region_id)s konnte nicht gefunden werden" - -#, python-format -msgid "" -"Could not find role assignment with role: %(role_id)s, user or group: " -"%(actor_id)s, project or domain: %(target_id)s" -msgstr "" -"Rollenzuordnung mit Rolle: %(role_id)s, Benutzer oder Gruppe: %(actor_id)s, " -"Projekt oder Domäne: %(target_id)s, konnte nicht gefunden werden" - -#, python-format -msgid "Could not find role: %(role_id)s" -msgstr "Rolle %(role_id)s konnte nicht gefunden werden" - -#, python-format -msgid "Could not find service: %(service_id)s" -msgstr "Dienst %(service_id)s konnte nicht gefunden werden" - -#, python-format -msgid "Could not find token: %(token_id)s" -msgstr "Token %(token_id)s konnte nicht gefunden werden" - -#, python-format -msgid "Could not find trust: %(trust_id)s" -msgstr "Vertrauensbeziehung %(trust_id)s konnte nicht gefunden werden" - -#, python-format -msgid "Could not find user: %(user_id)s" -msgstr "Benutzer %(user_id)s konnte nicht gefunden werden" - -#, python-format -msgid "Could not find version: %(version)s" -msgstr "Version %(version)s konnte nicht gefunden werden" - -#, python-format -msgid "Could not find: %(target)s" -msgstr "Konnte nicht gefunden werden: %(target)s" - -msgid "" -"Could not map any federated user properties to identity values. Check debug " -"logs or the mapping used for additional details." -msgstr "" -"Es konnten keine eingebundenen Benutzereigenschaften Identitätswerten " -"zugeordnet werden. Überprüfen Sie die Debugprotokolle oder die verwendete " -"Zuordnung, um weitere Details zu erhalten." - -msgid "" -"Could not map user while setting ephemeral user identity. Either mapping " -"rules must specify user id/name or REMOTE_USER environment variable must be " -"set." 
-msgstr "" -"Benutzer konnte beim Festlegen der ephemeren Benutzeridentität nicht " -"zugeordnet werden. Entweder muss in Zuordnungsregeln Benutzer-ID/Name " -"angegeben werden oder Umgebungsvariable REMOTE_USER muss festgelegt werden." - -msgid "Could not validate the access token" -msgstr "Das Zugriffstoken konnte nicht geprüft werden" - -msgid "Credential belongs to another user" -msgstr "Berechtigungsnachweis gehört einem anderen Benutzer" - -msgid "Credential signature mismatch" -msgstr "Übereinstimmungsfehler bei Berechtigungssignatur" - -#, python-format -msgid "" -"Direct import of auth plugin %(name)r is deprecated as of Liberty in favor " -"of its entrypoint from %(namespace)r and may be removed in N." -msgstr "" -"Der direkte Import des Authentifizierungsplugins %(name)r wird zugunsten des " -"zugehörigen Einstiegspunkts aus %(namespace)r seit Liberty nicht mehr " -"unterstützt und wird möglicherweise im N-Release entfernt." - -#, python-format -msgid "" -"Direct import of driver %(name)r is deprecated as of Liberty in favor of its " -"entrypoint from %(namespace)r and may be removed in N." -msgstr "" -"Der direkte Import des Treibers %(name)r wird zugunsten des zugehörigen " -"Einstiegspunkts aus %(namespace)r seit Liberty nicht mehr unterstützt und " -"wird möglicherweise im N-Release entfernt." - -msgid "" -"Disabling an entity where the 'enable' attribute is ignored by configuration." -msgstr "" -"Eine Entität inaktivieren, in der das Attribut 'enable' ignoriert wird von " - -#, python-format -msgid "Domain (%s)" -msgstr "Domain (%s)" - -#, python-format -msgid "Domain cannot be named %s" -msgstr "Domäne kann nicht mit %s benannt werden" - -#, python-format -msgid "Domain cannot have ID %s" -msgstr "Domäne kann nicht die ID %s haben" - -#, python-format -msgid "Domain is disabled: %s" -msgstr "Domäne ist inaktiviert: %s" - -msgid "Domain name cannot contain reserved characters." -msgstr "Der Domänenname darf keine reservierten Zeichen enthalten." 
- -msgid "Domain scoped token is not supported" -msgstr "Bereichsorientiertes Token der Domäne wird nicht unterstützt" - -msgid "Domain specific roles are not supported in the V8 role driver" -msgstr "" -"Domänenspezifische rollen werden im V8-Rollentreiber nicht unterstützt." - -#, python-format -msgid "" -"Domain: %(domain)s already has a configuration defined - ignoring file: " -"%(file)s." -msgstr "" -"Domäne: für %(domain)s ist bereits eine Konfiguration definiert - Datei wird " -"ignoriert: %(file)s." - -msgid "Duplicate Entry" -msgstr "Doppelter Eintrag" - -#, python-format -msgid "Duplicate ID, %s." -msgstr "Doppelte ID, %s." - -#, python-format -msgid "Duplicate entry: %s" -msgstr "Doppelter Eintrag: %s" - -#, python-format -msgid "Duplicate name, %s." -msgstr "Doppelter Name, %s." - -#, python-format -msgid "Duplicate remote ID: %s" -msgstr "Doppelte ferne ID: %s" - -msgid "EC2 access key not found." -msgstr "EC2 Zugriffsschlüssel nicht gefunden." - -msgid "EC2 signature not supplied." -msgstr "EC2-Signatur nicht angegeben." - -msgid "" -"Either --bootstrap-password argument or OS_BOOTSTRAP_PASSWORD must be set." -msgstr "" -"Es muss entweder das Argument --bootstrap-password oder " -"OS_BOOTSTRAP_PASSWORD gesetzt werden." - -msgid "Enabled field must be a boolean" -msgstr "Das Feld 'Aktiviert' muss ein boolescher Wert sein" - -msgid "Enabled field should be a boolean" -msgstr "Das Feld 'Aktiviert' sollte ein boolescher Wert sein" - -#, python-format -msgid "Endpoint %(endpoint_id)s not found in project %(project_id)s" -msgstr "Endpunkt %(endpoint_id)s nicht gefunden in Projekt %(project_id)s" - -msgid "Endpoint Group Project Association not found" -msgstr "Projektzuordnung für Endpunktgruppe nicht gefunden" - -msgid "Ensure configuration option idp_entity_id is set." -msgstr "" -"Stellen Sie sicher, dass die Konfigurationsoption idp_entity_id gesetzt ist. " - -msgid "Ensure configuration option idp_sso_endpoint is set." 
-msgstr "" -"Stellen Sie sicher, dass die Konfigurationsoption idp_sso_endpoint gesetzt " -"ist. " - -#, python-format -msgid "" -"Error parsing configuration file for domain: %(domain)s, file: %(file)s." -msgstr "" -"Fehler bei der Auswertung der Konfigurationsdatei für Domäne: %(domain)s, " -"Datei: %(file)s." - -#, python-format -msgid "Error while opening file %(path)s: %(err)s" -msgstr "Fehler beim Öffnen der Datei %(path)s: %(err)s" - -#, python-format -msgid "Error while parsing line: '%(line)s': %(err)s" -msgstr "Fehler beim Parsing der Zeile '%(line)s': %(err)s" - -#, python-format -msgid "Error while parsing rules %(path)s: %(err)s" -msgstr "Fehler beim Parsing der Regeln %(path)s: %(err)s" - -#, python-format -msgid "Error while reading metadata file, %(reason)s" -msgstr "Fehler beim Lesen der Metadatendatei, %(reason)s" - -#, python-format -msgid "" -"Exceeded attempts to register domain %(domain)s to use the SQL driver, the " -"last domain that appears to have had it is %(last_domain)s, giving up" -msgstr "" -"Die maximal zulässige Anzahl an Versuchen, die Domäne %(domain)s für die " -"Verwendung des SQL-Treibers zu registrieren, wurde überschritten. Die letzte " -"Domäne, bei der die Registrierung erfolgreich gewesen zu sein scheint, war " -"%(last_domain)s. Abbruch." - -#, python-format -msgid "Expected dict or list: %s" -msgstr "Verzeichnis oder Liste erwartet: %s" - -msgid "" -"Expected signing certificates are not available on the server. Please check " -"Keystone configuration." -msgstr "" -"Erwartete Signierzertifikate sind auf dem Server nicht verfügbar. Überprüfen " -"Sie die Keystone-Konfiguration." - -#, python-format -msgid "" -"Expecting to find %(attribute)s in %(target)s - the server could not comply " -"with the request since it is either malformed or otherwise incorrect. The " -"client is assumed to be in error." -msgstr "" -"Es wurde erwartet, %(attribute)s in %(target)s zu finden. 
Der Server konnte " -"die Anforderung nicht erfüllen, da ein fehlerhaftes Format oder ein anderer " -"Fehler vorliegt. Es wird angenommen, dass der Fehler beim Client liegt." - -#, python-format -msgid "Failed to start the %(name)s server" -msgstr "Fehler beim Starten des %(name)s-Servers" - -msgid "Failed to validate token" -msgstr "Token konnte nicht geprüft werden" - -msgid "Federation token is expired" -msgstr "Föderationstoken ist abgelaufen" - -#, python-format -msgid "" -"Field \"remaining_uses\" is set to %(value)s while it must not be set in " -"order to redelegate a trust" -msgstr "" -"Feld \"remaining_uses\" ist auf %(value)s festgelegt, es darf jedoch nicht " -"festgelegt werden, um eine Vertrauensbeziehung zu übertragen" - -msgid "Found invalid token: scoped to both project and domain." -msgstr "" -"Ungültiges Token gefunden. Es ist sowohl projekt- als auch domänenorientiert." - -#, python-format -msgid "Group %s not found in config" -msgstr "Die Gruppe %s wurde nicht in der Konfiguration gefunden." - -#, python-format -msgid "Group %(group)s is not supported for domain specific configurations" -msgstr "" -"Gruppe %(group)s wird für domänenspezifische Konfigurationen nicht " -"unterstützt" - -#, python-format -msgid "" -"Group %(group_id)s returned by mapping %(mapping_id)s was not found in the " -"backend." -msgstr "" -"Die von der Zuordnung %(mapping_id)s zurückgegebene Gruppe %(group_id)s " -"konnte im Back-End nicht gefunden werden." 
- -#, python-format -msgid "" -"Group membership across backend boundaries is not allowed, group in question " -"is %(group_id)s, user is %(user_id)s" -msgstr "" -"Back-End-übergreifende Gruppenmitgliedschaft ist nicht zulässig, betroffene " -"Gruppe ist %(group_id)s, Benutzer ist %(user_id)s" - -#, python-format -msgid "ID attribute %(id_attr)s not found in LDAP object %(dn)s" -msgstr "ID-Attribut %(id_attr)s wurde in LDAP-Objekt %(dn)s nicht gefunden" - -#, python-format -msgid "Identity Provider %(idp)s is disabled" -msgstr "Identitätsprovider %(idp)s ist inaktiviert" - -msgid "" -"Incoming identity provider identifier not included among the accepted " -"identifiers." -msgstr "" -"Eingehende Identitätsprovider-ID ist nicht in den akzeptierten IDs enthalten." - -msgid "Invalid EC2 signature." -msgstr "Ungültige EC2-Signatur." - -#, python-format -msgid "Invalid LDAP TLS certs option: %(option)s. Choose one of: %(options)s" -msgstr "" -"Ungültige LDAP-TLS-Zertifikatsoption: %(option)s. Wählen Sie aus: %(options)s" - -#, python-format -msgid "Invalid LDAP TLS_AVAIL option: %s. TLS not available" -msgstr "Ungültige LDAP TLS_AVAIL Option: %s. TLS nicht verfügbar" - -#, python-format -msgid "Invalid LDAP deref option: %(option)s. Choose one of: %(options)s" -msgstr "" -"Ungültige LDAP-TLS-deref-Option: %(option)s. Wählen Sie aus: %(options)s" - -#, python-format -msgid "Invalid LDAP scope: %(scope)s. Choose one of: %(options)s" -msgstr "Ungültiger LDAP Bereich: %(scope)s. Wählen Sie aus: %(options)s" - -msgid "Invalid TLS / LDAPS combination" -msgstr "Ungültige TLS /LDAPS Kombination" - -#, python-format -msgid "Invalid audit info data type: %(data)s (%(type)s)" -msgstr "Ungültiger Datentyp für Prüfungsinformationen: %(data)s (%(type)s)" - -msgid "Invalid blob in credential" -msgstr "Ungültiges Blob-Objekt im Berechtigungsnachweis" - -#, python-format -msgid "" -"Invalid domain name: %(domain)s found in config file name: %(file)s - " -"ignoring this file." 
-msgstr "" -"Ungültiger Domänenname: %(domain)s im Konfigurationsdateinamen gefunden: " -"%(file)s - diese Datei wird ignoriert." - -#, python-format -msgid "Invalid domain specific configuration: %(reason)s" -msgstr "Ungültige domänenspezifische Konfiguration: %(reason)s" - -#, python-format -msgid "Invalid input for field '%(path)s'. The value is '%(value)s'." -msgstr "Ungültige Eingabe für Feld '%(path)s'. Der Wert lautet '%(value)s'." - -msgid "Invalid limit value" -msgstr "Ungültiger Grenzwert" - -#, python-format -msgid "" -"Invalid mix of entities for policy association - only Endpoint, Service or " -"Region+Service allowed. Request was - Endpoint: %(endpoint_id)s, Service: " -"%(service_id)s, Region: %(region_id)s" -msgstr "" -"Ungültige Mischung von Entitäten für Richtlinienzuordnung - nur Endpunkt, " -"Dienst oder Region+Dienst zulässig. Anforderung war - Endpunkt: " -"%(endpoint_id)s, Service: %(service_id)s, Region: %(region_id)s" - -#, python-format -msgid "" -"Invalid rule: %(identity_value)s. Both 'groups' and 'domain' keywords must " -"be specified." -msgstr "" -"Ungültige Regel: %(identity_value)s. Die Suchbegriffe 'groups' und 'domain' " -"müssen angegeben sein." - -msgid "Invalid signature" -msgstr "Ungültige Signatur" - -msgid "Invalid user / password" -msgstr "Ungültiger Benutzer / Passwort" - -msgid "Invalid username or TOTP passcode" -msgstr "Ungültiger Benutzername oder TOTP-Kenncode" - -msgid "Invalid username or password" -msgstr "Ungültiger Benutzername oder ungültiges Passwort." - -#, python-format -msgid "KVS region %s is already configured. Cannot reconfigure." -msgstr "KVS-Region %s ist bereits konfiguriert. Rekonfiguration nicht möglich." 
- -#, python-format -msgid "Key Value Store not configured: %s" -msgstr "Schlüsselwertspeicher nicht konfiguriert: %s" - -#, python-format -msgid "LDAP %s create" -msgstr "LDAP %s erstellen" - -#, python-format -msgid "LDAP %s delete" -msgstr "LDAP %s löschen" - -#, python-format -msgid "LDAP %s update" -msgstr "LDAP %s aktualisieren" - -msgid "" -"Length of transformable resource id > 64, which is max allowed characters" -msgstr "" -"Länge der transformierbaren Ressourcen-ID liegt über der maximal zulässigen " -"Anzahl von 64 Zeichen. " - -#, python-format -msgid "" -"Local section in mapping %(mapping_id)s refers to a remote match that " -"doesn't exist (e.g. {0} in a local section)." -msgstr "" -"Der lokale Abschnitt in der Zuordnung %(mapping_id)s bezieht sich auf eine " -"ferne Übereinstimmung, die nicht vorhanden ist (z. B. '{0}' in einem lokalen " -"Abschnitt)." - -#, python-format -msgid "Lock Timeout occurred for key, %(target)s" -msgstr "Überschreitung der Sperrzeit aufgetreten für Schlüssel %(target)s" - -#, python-format -msgid "Lock key must match target key: %(lock)s != %(target)s" -msgstr "" -"Sperrschlüssel muss mit Zielschlüssel übereinstimmen: %(lock)s != %(target)s" - -#, python-format -msgid "Malformed endpoint URL (%(endpoint)s), see ERROR log for details." -msgstr "" -"Fehlerhafte Endpunkt-URL (%(endpoint)s), siehe Details im FEHLER-Protokoll. " - -msgid "Marker could not be found" -msgstr "Marker konnte nicht gefunden werden" - -#, python-format -msgid "Max hierarchy depth reached for %s branch." -msgstr "Die maximale Hierarchietiefe für den %s-Branch wurde erreicht." - -#, python-format -msgid "Maximum lock attempts on %s occurred." -msgstr "Maximale Anzahl an Sperrversuchen auf %s erfolgt." 
- -#, python-format -msgid "Member %(member)s is already a member of group %(group)s" -msgstr "Mitglied %(member)s ist bereits Mitglied der Gruppe %(group)s" - -#, python-format -msgid "Method not callable: %s" -msgstr "Methode kann nicht aufgerufen werden: %s" - -msgid "Missing entity ID from environment" -msgstr "Fehlende Entitäts-ID von Umgebung" - -msgid "" -"Modifying \"redelegation_count\" upon redelegation is forbidden. Omitting " -"this parameter is advised." -msgstr "" -"Das Ändern von \"redelegation_count\" ist bei der Redelegation nicht " -"zulässig. Es wird empfohlen, diesen Parameter auszulassen." - -msgid "Multiple domains are not supported" -msgstr "Mehrere Domänen werden nicht unterstützt" - -msgid "Must be called within an active lock context." -msgstr "Aufruf innerhalb des Kontexts einer aktiven Sperre erforderlich." - -msgid "Must specify either domain or project" -msgstr "Entweder Domäne oder Projekt muss angegeben werden" - -msgid "Name field is required and cannot be empty" -msgstr "Namensfeld ist erforderlich und darf nicht leer sein" - -msgid "Neither Project Domain ID nor Project Domain Name was provided." -msgstr "Weder Projektdomänen-ID noch Projektdomänenname wurde angegeben." - -msgid "" -"No Authorization headers found, cannot proceed with OAuth related calls, if " -"running under HTTPd or Apache, ensure WSGIPassAuthorization is set to On." -msgstr "" -"Keine Authorisierungskopfzeilen gefunden, zu OAuth zugehörige Aufrufe können " -"nicht fortgesetzt werden. Stellen Sie bei Ausführung unter HTTPd oder Apache " -"sicher, dass WSGIPassAuthorization auf 'On' gesetzt ist." - -msgid "No authenticated user" -msgstr "Kein authentifizierter Benutzer" - -msgid "" -"No encryption keys found; run keystone-manage fernet_setup to bootstrap one." -msgstr "" -"Keine Chiffrierschlüssel gefunden; Führen Sie keystone-manage fernet_setup " -"aus, um über Bootstrapping einen Schlüssel zu erhalten." 
- -msgid "No options specified" -msgstr "Keine Optionen angegeben" - -#, python-format -msgid "No policy is associated with endpoint %(endpoint_id)s." -msgstr "Endpunkt %(endpoint_id)s ist keine Richtlinie zugeordnet. " - -#, python-format -msgid "No remaining uses for trust: %(trust_id)s" -msgstr "Keine verbleibende Verwendung für Vertrauensbeziehung %(trust_id)s" - -msgid "No token in the request" -msgstr "Kein Token in der Anforderung" - -msgid "Non-default domain is not supported" -msgstr "Nicht-Standard-Domäne wird nicht unterstützt" - -msgid "One of the trust agents is disabled or deleted" -msgstr "Einer der Vertrauensagenten wurde deaktiviert oder gelöscht" - -#, python-format -msgid "" -"Option %(option)s found with no group specified while checking domain " -"configuration request" -msgstr "" -"Option %(option)s ohne angegebene Gruppe gefunden, während die Domänen- " -"Konfigurationsanforderung geprüft wurde" - -#, python-format -msgid "" -"Option %(option)s in group %(group)s is not supported for domain specific " -"configurations" -msgstr "" -"Option %(option)s in Gruppe %(group)s wird für domänenspezifische " -"Konfigurationen nicht unterstützt" - -#, python-format -msgid "Project (%s)" -msgstr "Projekt (%s)" - -#, python-format -msgid "Project ID not found: %(t_id)s" -msgstr "Projekt-ID nicht gefunden: %(t_id)s" - -msgid "Project field is required and cannot be empty." -msgstr "Projektfeld ist erforderlich und darf nicht leer sein." - -#, python-format -msgid "Project is disabled: %s" -msgstr "Projekt ist inaktiviert: %s" - -msgid "Project name cannot contain reserved characters." -msgstr "Der Projektname darf keine reservierten Zeichen enthalten." 
- -msgid "Query string is not UTF-8 encoded" -msgstr "Abfragezeichenfolge ist nicht UTF-8-codiert" - -#, python-format -msgid "" -"Reading the default for option %(option)s in group %(group)s is not supported" -msgstr "" -"Lesen des Standardwerts für die Option %(option)s in der Gruppe %(group)s " -"wird nicht unterstützt." - -msgid "Redelegation allowed for delegated by trust only" -msgstr "Redelegation nur zulässig für im Vertrauen redelegierte" - -#, python-format -msgid "" -"Remaining redelegation depth of %(redelegation_depth)d out of allowed range " -"of [0..%(max_count)d]" -msgstr "" -"Verbleibende Redelegationstiefe von %(redelegation_depth)d aus dem " -"zulässigen Bereich von [0..%(max_count)d]" - -msgid "" -"Remove admin_crud_extension from the paste pipeline, the admin_crud " -"extension is now always available. Updatethe [pipeline:admin_api] section in " -"keystone-paste.ini accordingly, as it will be removed in the O release." -msgstr "" -"Entfernen Sie 'admin_crud_extension' aus der Einfügepipeline. " -"'admin_crud_extension' ist jetzt immer verfügbar. Aktualisieren Sie den " -"Abschnitt [pipeline:admin_api] in der Datei 'keystone-paste.ini' " -"entsprechend, da er im 'O'-Release entfernt wird. " - -msgid "" -"Remove endpoint_filter_extension from the paste pipeline, the endpoint " -"filter extension is now always available. Update the [pipeline:api_v3] " -"section in keystone-paste.ini accordingly as it will be removed in the O " -"release." -msgstr "" -"Entfernen Sie 'endpoint_filter_extension' aus der Einfügepipeline. Die " -"Endpunktfiltererweiterung ist jetzt immer verfügbar. Aktualisieren Sie den " -"Abschnitt [pipeline:api_v3] in der Datei 'keystone-paste.ini' entsprechend, " -"da er im 'O'-Release entfernt wird." - -msgid "" -"Remove federation_extension from the paste pipeline, the federation " -"extension is now always available. 
Update the [pipeline:api_v3] section in " -"keystone-paste.ini accordingly, as it will be removed in the O release." -msgstr "" -"Entfernen Sie 'federation_extension' aus der Einfügepipeline. Sie ist jetzt " -"immer verfügbar. Aktualisieren Sie den Abschnitt [pipeline:api_v3] in der " -"Datei 'keystone-paste.ini' entsprechend, da er im 'O'-Release entfernt wird." - -msgid "" -"Remove oauth1_extension from the paste pipeline, the oauth1 extension is now " -"always available. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"Entfernen Sie 'oauth1_extension' aus der Einfügepipeline. Die oauth1-" -"Erweiterung ist jetzt immer verfügbar. Aktualisieren Sie den Abschnitt " -"[pipeline:api_v3] in der Datei 'keystone-paste.ini' entsprechend, da er im " -"'O'-Release entfernt wird." - -msgid "" -"Remove revoke_extension from the paste pipeline, the revoke extension is now " -"always available. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"Entfernen Sie 'revoke_extension' aus der Einfügepipeline. Die revoke-" -"Erweiterung ist jetzt immer verfügbar. Aktualisieren Sie den Abschnitt " -"[pipeline:api_v3] in der Datei 'keystone-paste.ini' entsprechend, da er im " -"'O'-Release entfernt wird. " - -msgid "" -"Remove simple_cert from the paste pipeline, the PKI and PKIz token providers " -"are now deprecated and simple_cert was only used insupport of these token " -"providers. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"Entfernen Sie 'simple_cert' aus der Einfügepipeline. Die PKI- und PKIz-Token-" -"Provider sind jetzt veraltet und 'simple_cert' wurde nur zur Unterstützung " -"dieser Token-Provider verwendet. 
Aktualisieren Sie den Abschnitt [pipeline:" -"api_v3] in der Datei 'keystone-paste.ini' entsprechend, da er im 'O'-Release " -"entfernt wird." - -msgid "" -"Remove user_crud_extension from the paste pipeline, the user_crud extension " -"is now always available. Updatethe [pipeline:public_api] section in keystone-" -"paste.ini accordingly, as it will be removed in the O release." -msgstr "" -"Entfernen Sie 'user_crud_extension' aus der Einfügepipeline. 'user_crud " -"extension' ist jetzt immer verfügbar. Aktualisieren Sie den Abschnitt " -"[pipeline:public_api] in der Datei 'keystone-paste.ini' entsprechend, da er " -"im 'O'-Release entfernt wird." - -msgid "Request Token does not have an authorizing user id" -msgstr "Anforderungstoken weist keine autorisierte Benutzer-ID auf" - -#, python-format -msgid "" -"Request attribute %(attribute)s must be less than or equal to %(size)i. The " -"server could not comply with the request because the attribute size is " -"invalid (too large). The client is assumed to be in error." -msgstr "" -"Anforderungsattribut %(attribute)s muss kleiner-gleich %(size)i sein. Der " -"Server konnte die Anforderung nicht erfüllen, da die Attributgröße ungültig " -"ist (zu groß). Es wird angenommen, dass der Fehler beim Client liegt." 
- -msgid "Request must have an origin query parameter" -msgstr "Anforderung muss über einen ursprünglichen Abfrageparameter verfügen" - -msgid "Request token is expired" -msgstr "Anforderungstoken ist abgelaufen" - -msgid "Request token not found" -msgstr "Anforderungstoken nicht gefunden" - -msgid "Requested expiration time is more than redelegated trust can provide" -msgstr "" -"Angeforderte Ablaufzeit übersteigt die, die von der redelegierten " -"Vertrauensbeziehung bereitgestellt werden kann" - -#, python-format -msgid "" -"Requested redelegation depth of %(requested_count)d is greater than allowed " -"%(max_count)d" -msgstr "" -"Die angeforderte Redelegationstiefe von %(requested_count)d übersteigt den " -"zulässigen Wert von %(max_count)d" - -msgid "" -"Running keystone via eventlet is deprecated as of Kilo in favor of running " -"in a WSGI server (e.g. mod_wsgi). Support for keystone under eventlet will " -"be removed in the \"M\"-Release." -msgstr "" -"Die Ausführung von Keystone über eventlet ist seit Kilo veraltet. " -"Stattdessen wird ein WSGI-Server (z. B. mod_wsgi) für die Ausführung " -"verwendet. Unterstützung für Keystone unter eventlet wird im \"M\"-Release " -"entfernt." 
- -msgid "Scoping to both domain and project is not allowed" -msgstr "Scoping sowohl auf 'domain' als auch auf 'project' ist nicht zulässig" - -msgid "Scoping to both domain and trust is not allowed" -msgstr "Scoping sowohl auf 'domain' als auch auf 'trust' ist nicht zulässig" - -msgid "Scoping to both project and trust is not allowed" -msgstr "Scoping sowohl auf 'project' als auch auf 'trust' ist nicht zulässig" - -#, python-format -msgid "Service Provider %(sp)s is disabled" -msgstr "Service-Provider %(sp)s ist inaktiviert" - -msgid "Some of requested roles are not in redelegated trust" -msgstr "" -"Einige angeforderte Rollen befinden sich nicht in einer redelegierten " -"Vertrauensbeziehung" - -msgid "Specify a domain or project, not both" -msgstr "Geben Sie eine Domäne oder ein Projekt an, nicht beides" - -msgid "Specify a user or group, not both" -msgstr "Geben Sie einen Benutzer oder eine Gruppe an, nicht beides" - -msgid "Specify one of domain or project" -msgstr "Entweder eine Domäne oder ein Projekt muss angegeben werden" - -msgid "Specify one of user or group" -msgstr "Entweder ein Benutzer oder eine Gruppe muss angegeben werden" - -#, python-format -msgid "" -"String length exceeded.The length of string '%(string)s' exceeded the limit " -"of column %(type)s(CHAR(%(length)d))." -msgstr "" -"Zeichenfolgelänge überschritten. Die Länge der Zeichenfolge '%(string)s' hat " -"den Grenzwert von Spalte %(type)s(CHAR(%(length)d)) überschritten." - -msgid "Tenant name cannot contain reserved characters." -msgstr "Der Name des Mandanten darf keine reservierten Zeichen enthalten." - -#, python-format -msgid "" -"The %s extension has been moved into keystone core and as such its " -"migrations are maintained by the main keystone database control. Use the " -"command: keystone-manage db_sync" -msgstr "" -"Die Erweiterung %s wurde in den Keystone-Kern verschoben. Daher werden die " -"zugehörigen Migrationen über die Keystone-Hauptdatenbanksteuerung verwaltet. 
" -"Verwenden Sie den Befehl keystone-manage db_sync" - -msgid "" -"The 'expires_at' must not be before now. The server could not comply with " -"the request since it is either malformed or otherwise incorrect. The client " -"is assumed to be in error." -msgstr "" -"Die Zeitangabe in 'expires_at' darf nicht vor dem jetzigen Zeitpunkt liegen. " -"Der Server konnte der Anforderung nicht nachkommen, da ein fehlerhaftes " -"Format oder ein anderer Fehler vorliegt. Es wird angenommen, dass der Fehler " -"beim Client liegt." - -msgid "The --all option cannot be used with the --domain-name option" -msgstr "" -"Die Option --all kann nicht zusammen mit der Option --domain-name verwendet " -"werden" - -#, python-format -msgid "The Keystone configuration file %(config_file)s could not be found." -msgstr "" -"Die Keystone-Konfigurationsdatei %(config_file)s konnte nicht gefunden " -"werden." - -#, python-format -msgid "" -"The Keystone domain-specific configuration has specified more than one SQL " -"driver (only one is permitted): %(source)s." -msgstr "" -"Die domänenspezifische Keystone-Konfiguration hat mehrere SQL-Treiber " -"angegeben (nur einer ist zulässig): %(source)s." - -msgid "The action you have requested has not been implemented." -msgstr "Die von Ihnen angeforderte Aktion wurde nicht implementiert." - -msgid "The authenticated user should match the trustor." -msgstr "Der authentifizierte Benutzer sollte dem Trustor entsprechen." - -msgid "" -"The certificates you requested are not available. It is likely that this " -"server does not use PKI tokens otherwise this is the result of " -"misconfiguration." -msgstr "" -"Die Zertifikate, die Sie angefordert haben, sind nicht verfügbar. Es ist " -"wahrscheinlich, dass dieser Server keine PKI-Tokens verwendet; andernfalls " -"ist dies die Folge einer fehlerhaften Konfiguration." - -msgid "The configured token provider does not support bind authentication." 
-msgstr "" -"Der konfigurierte Token-Anbieter unterstützt die Bindungsauthentifizierung " -"nicht." - -msgid "The creation of projects acting as domains is not allowed in v2." -msgstr "" -"Die Erstellung von Projekten die als Domänen agieren, ist in v2 nicht " -"zulässig." - -#, python-format -msgid "" -"The password length must be less than or equal to %(size)i. The server could " -"not comply with the request because the password is invalid." -msgstr "" -"Die Kennwortlänge muss kleiner-gleich %(size)i sein. Der Server konnte die " -"Anforderung nicht erfüllen, da das Kennwort ungültig ist." - -msgid "The request you have made requires authentication." -msgstr "Die von Ihnen gestellte Anfrage erfoderdert eine Authentifizierung." - -msgid "The resource could not be found." -msgstr "Die Ressource konnte nicht gefunden werden." - -msgid "" -"The revoke call must not have both domain_id and project_id. This is a bug " -"in the Keystone server. The current request is aborted." -msgstr "" -"Der Aufruf zum Entziehen darf nicht sowohl domain_id als auch project_id " -"aufweisen. Dies ist ein Fehler im Keystone-Server. Die aktuelle Anforderung " -"wird abgebrochen. " - -msgid "The service you have requested is no longer available on this server." -msgstr "" -"Den Dienst, den Sie angefordert haben, ist auf diesem Server nicht mehr " -"verfügbar." - -#, python-format -msgid "" -"The specified parent region %(parent_region_id)s would create a circular " -"region hierarchy." -msgstr "" -"Die angegebene übergeordnete Region %(parent_region_id)s würde eine " -"zirkuläre Regionshierarchie erstellen." 
- -#, python-format -msgid "" -"The value of group %(group)s specified in the config should be a dictionary " -"of options" -msgstr "" -"Der Wert der Gruppe %(group)s, der in der Konfiguration angegeben ist, muss " -"ein Verzeichnis mit Optionen sein" - -msgid "There should not be any non-oauth parameters" -msgstr "Es sollten keine non-oauth-Parameter vorhanden sein" - -#, python-format -msgid "This is not a recognized Fernet payload version: %s" -msgstr "Dies ist keine anerkannte Fernet-Nutzdatenversion: %s" - -#, python-format -msgid "This is not a recognized Fernet token %s" -msgstr "Dies ist kein bekanntes Fernet-Token %s" - -msgid "" -"Timestamp not in expected format. The server could not comply with the " -"request since it is either malformed or otherwise incorrect. The client is " -"assumed to be in error." -msgstr "" -"Zeitstempel nicht im erwarteten Format. Der Server konnte der Anforderung " -"nicht nachkommen, da ein fehlerhaftes Format oder ein anderer Fehler " -"vorliegt. Es wird angenommen, dass der Fehler beim Client liegt." - -#, python-format -msgid "" -"To get a more detailed information on this error, re-run this command for " -"the specific domain, i.e.: keystone-manage domain_config_upload --domain-" -"name %s" -msgstr "" -"Um ausführliche Informationen zu diesem Fehler zu erhalten, führen Sie " -"diesen Befehl für die angegebene Domäne erneut durch: keystone-manage " -"domain_config_upload --domain-name %s" - -msgid "Token belongs to another user" -msgstr "Token gehört einem anderen Benutzer" - -msgid "Token does not belong to specified tenant." -msgstr "Token gehört nicht zu angegebenem Nutzer." - -msgid "Token version is unrecognizable or unsupported." -msgstr "Tokenversion ist nicht erkennbar oder wird nicht unterstützt." - -msgid "Trustee has no delegated roles." -msgstr "Trustee hat keine beauftragten Rollen." - -msgid "Trustor is disabled." -msgstr "Trustor ist deaktiviert." 
- -#, python-format -msgid "" -"Trying to update group %(group)s, so that, and only that, group must be " -"specified in the config" -msgstr "" -"Es wird versucht, Gruppe %(group)s zu aktualisieren, damit nur diese Gruppe " -"in der Konfiguration angegeben werden muss" - -#, python-format -msgid "" -"Trying to update option %(option)s in group %(group)s, but config provided " -"contains option %(option_other)s instead" -msgstr "" -"Es wird versucht, Option %(option)s in Gruppe %(group)s zu aktualisieren, " -"die angegebene Konfiguration enthält jedoch stattdessen Option " -"%(option_other)s" - -#, python-format -msgid "" -"Trying to update option %(option)s in group %(group)s, so that, and only " -"that, option must be specified in the config" -msgstr "" -"Es wird versucht, Option %(option)s in Gruppe %(group)s zu aktualisieren, " -"damit nur diese Option in der Konfiguration angegeben werden muss" - -msgid "" -"Unable to access the keystone database, please check it is configured " -"correctly." -msgstr "" -"Auf die Keystone-Datenbank kann nicht zugegriffen werden, überprüfen Sie, ob " -"sie ordnungsgemäß konfiguriert ist. " - -#, python-format -msgid "Unable to consume trust %(trust_id)s, unable to acquire lock." -msgstr "" -"Vertrauensbeziehung %(trust_id)s kann nicht verarbeitet werden, Sperre kann " -"nicht angefordert werden." - -#, python-format -msgid "" -"Unable to delete region %(region_id)s because it or its child regions have " -"associated endpoints." -msgstr "" -"Region %(region_id)s kann nicht gelöscht werden, da sie oder ihr " -"untergeordnete Regionen über zugeordnete Endpunkte verfügen. " - -msgid "Unable to downgrade schema" -msgstr "Das Schema konnte nicht herabgestuft werden." 
- -#, python-format -msgid "Unable to find valid groups while using mapping %(mapping_id)s" -msgstr "" -"Beim Verwenden der Zuordnung %(mapping_id)s können keine gültigen Gruppen " -"gefunden werden" - -#, python-format -msgid "Unable to locate domain config directory: %s" -msgstr "Domänenkonfigurationsverzeichnis wurde nicht gefunden: %s" - -#, python-format -msgid "Unable to lookup user %s" -msgstr "Suche nach Benutzer %s nicht möglich" - -#, python-format -msgid "" -"Unable to reconcile identity attribute %(attribute)s as it has conflicting " -"values %(new)s and %(old)s" -msgstr "" -"Identitätsattribut %(attribute)s kann nicht abgeglichen werden, da es die " -"kollidierenden Werte %(new)s und %(old)s aufweist" - -#, python-format -msgid "" -"Unable to sign SAML assertion. It is likely that this server does not have " -"xmlsec1 installed, or this is the result of misconfiguration. Reason " -"%(reason)s" -msgstr "" -"SAML-Zusicherung kann nicht signiert werden. Wahrscheinlich ist auf dem " -"Server xmlsec1 nicht installiert oder dies liegt an einer fehlerhaften " -"Konfiguration. Ursache: %(reason)s" - -msgid "Unable to sign token." -msgstr "Token kann nicht unterzeichnet werden." 
- -#, python-format -msgid "Unexpected assignment type encountered, %s" -msgstr "Unerwarteter Zuordnungstyp: %s" - -#, python-format -msgid "" -"Unexpected combination of grant attributes - User: %(user_id)s, Group: " -"%(group_id)s, Project: %(project_id)s, Domain: %(domain_id)s" -msgstr "" -"Unerwartete Kombination von Grant-Attributen - Benutzer: %(user_id)s, " -"Gruppe: %(group_id)s, Projekt: %(project_id)s, Domäne: %(domain_id)s" - -#, python-format -msgid "Unexpected status requested for JSON Home response, %s" -msgstr "Unerwarteter Status für JSON-Home-Antwort angefordert, %s" - -msgid "Unknown Target" -msgstr "Unbekanntes Ziel" - -#, python-format -msgid "Unknown domain '%(name)s' specified by --domain-name" -msgstr "Unbekannte Domäne '%(name)s' angegeben durch --domain-name" - -#, python-format -msgid "Unknown token version %s" -msgstr "Unbekannte Tokenversion %s" - -#, python-format -msgid "Unregistered dependency: %(name)s for %(targets)s" -msgstr "Nicht registrierte Abhängigkeit: %(name)s für %(targets)s" - -msgid "Update of `domain_id` is not allowed." -msgstr "Das Aktualisieren von `domain_id` ist nicht zulässig. " - -msgid "Update of `is_domain` is not allowed." -msgstr "Das Aktualisieren von 'is_domain' ist nicht zulässig." - -msgid "Update of `parent_id` is not allowed." -msgstr "Das Aktualisieren von 'parent_id' ist nicht zulässig." - -msgid "Update of domain_id is only allowed for root projects." -msgstr "Die Aktualisierung von 'domain_id' ist nur für Rootprojekte zulässig." - -msgid "Update of domain_id of projects acting as domains is not allowed." -msgstr "" -"Es ist nicht zulässig, die 'domain_id' von Projekten zu aktualisieren, die " -"als Domänen agieren." 
- -msgid "Use a project scoped token when attempting to create a SAML assertion" -msgstr "" -"Verwenden Sie ein Projektumfangstoken, wenn Sie versuchen, eine SAML-" -"Zusicherung zu erstellen" - -msgid "" -"Use of the identity driver config to automatically configure the same " -"assignment driver has been deprecated, in the \"O\" release, the assignment " -"driver will need to be expicitly configured if different than the default " -"(SQL)." -msgstr "" -"Die Verwendung der Identitätstreiberkonfiguration für die automatische " -"Konfiguration desselben Zuordnungstreibers ist veraltet. Der " -"Zuordnungstreiber muss im \"O\"-Release explizit konfiguriert werden, wenn " -"er sich vom Standardtreiber (SQL) unterscheidet." - -#, python-format -msgid "User %(u_id)s is unauthorized for tenant %(t_id)s" -msgstr "Benutzer %(u_id)s ist nicht berechtigt für Nutzer %(t_id)s" - -#, python-format -msgid "User %(user_id)s has no access to domain %(domain_id)s" -msgstr "Benutzer %(user_id)s hat keinen Zugriff auf Domäne %(domain_id)s" - -#, python-format -msgid "User %(user_id)s has no access to project %(project_id)s" -msgstr "Benutzer %(user_id)s hat keinen Zugriff auf Projekt %(project_id)s" - -#, python-format -msgid "User %(user_id)s is already a member of group %(group_id)s" -msgstr "Benutzer %(user_id)s ist bereits Mitglied der Gruppe %(group_id)s." - -#, python-format -msgid "User '%(user_id)s' not found in group '%(group_id)s'" -msgstr "Benutzer '%(user_id)s' nicht gefunden in Gruppe '%(group_id)s'" - -msgid "User IDs do not match" -msgstr "Benutzerkennungen stimmen nicht überein" - -msgid "" -"User auth cannot be built due to missing either user id, or user name with " -"domain id, or user name with domain name." -msgstr "" -"Benutzerauthentifizierung kann nicht erstellt werden, da entweder Benutzer-" -"ID oder Benutzername mit Domänen-ID oder Benutzername mit Domänenname fehlt." 
- -#, python-format -msgid "User is disabled: %s" -msgstr "Benutzer ist deaktiviert: %s" - -msgid "User is not a member of the requested project" -msgstr "Benutzer ist kein Mitglied des angeforderten Projekts" - -msgid "User is not a trustee." -msgstr "Benutzer ist kein Trustee." - -msgid "User not found" -msgstr "Benutzer nicht gefunden" - -msgid "User not valid for tenant." -msgstr "Benutzer nicht gültig für Mandant." - -msgid "User roles not supported: tenant_id required" -msgstr "Benutzerrollen nicht unterstützt: tenant_id erforderlich" - -#, python-format -msgid "User type %s not supported" -msgstr "Benutzertyp %s nicht unterstützt" - -msgid "You are not authorized to perform the requested action." -msgstr "" -"Sie sind nicht dazu authorisiert, die angeforderte Aktion durchzuführen." - -#, python-format -msgid "You are not authorized to perform the requested action: %(action)s" -msgstr "" -"Sie sind nicht berechtigt, die angeforderte Aktion %(action)s auszuführen" - -msgid "" -"You have tried to create a resource using the admin token. As this token is " -"not within a domain you must explicitly include a domain for this resource " -"to belong to." -msgstr "" -"Sie haben versucht, eine Ressourcen mit dem Admin-Token zu erstellen. Da " -"sich dieses Token nicht innerhalb einer Domäne befindet, müssen Sie explizit " -"eine Domäne angeben, zu der diese Ressource gehört. " - -msgid "`key_mangler` functions must be callable." -msgstr "`key_mangler`-Funktionen müssen aufrufbar sein." 
- -msgid "`key_mangler` option must be a function reference" -msgstr "Option `key_mangler` muss eine Funktionsreferenz sein" - -msgid "any options" -msgstr "beliebige Optionen" - -msgid "auth_type is not Negotiate" -msgstr "auth_type ist nicht 'Negotiate'" - -msgid "authorizing user does not have role required" -msgstr "Der autorisierte Benutzer verfügt nicht über die erforderliche Rolle" - -#, python-format -msgid "cannot create a project in a branch containing a disabled project: %s" -msgstr "" -"kann kein Projekt in einer Niederlassung erstellen, die ein inaktiviertes " -"Projekt enthält: %s" - -#, python-format -msgid "" -"cannot delete an enabled project acting as a domain. Please disable the " -"project %s first." -msgstr "" -"Ein aktiviertes Projekt, das als Domäne agiert, kann nicht gelöscht werden. " -"Inaktivieren Sie zuerst das Projekt %s." - -#, python-format -msgid "group %(group)s" -msgstr "Gruppe %(group)s" - -msgid "" -"idp_contact_type must be one of: [technical, other, support, administrative " -"or billing." -msgstr "" -"idp_contact_type muss einer der folgenden Werte sein: technical, other, " -"support, administrative oder billing." - -#, python-format -msgid "invalid date format %s" -msgstr "ungültiges Datumsformat %s" - -#, python-format -msgid "" -"it is not permitted to have two projects acting as domains with the same " -"name: %s" -msgstr "" -"Es ist nicht zulässig, zwei Projekte zu haben, die als Domänen mit demselben " -"Namen agieren: %s" - -#, python-format -msgid "" -"it is not permitted to have two projects within a domain with the same " -"name : %s" -msgstr "" -"Es ist nicht zulässig, zwei Projekte mit demselben Namen innerhalb einer " -"Domäne zu haben: %s" - -msgid "only root projects are allowed to act as domains." -msgstr "Nur Rootprojekte dürfen als Domänen agieren." 
- -#, python-format -msgid "option %(option)s in group %(group)s" -msgstr "Option %(option)s in Gruppe %(group)s" - -msgid "provided consumer key does not match stored consumer key" -msgstr "" -"bereitgestellter Konsumentenschlüssel stimmt nicht mit dem gespeicherten " -"Konsumentenschlüssel überein" - -msgid "provided request key does not match stored request key" -msgstr "" -"bereitgestellter Anforderungsschlüssel stimmt nicht mit dem gespeicherten " -"Anforderungsschlüssel überein" - -msgid "provided verifier does not match stored verifier" -msgstr "" -"bereitgestellte Prüffunktion stimmt nicht mit gespeicherter Prüffunktion " -"überein" - -msgid "remaining_uses must be a positive integer or null." -msgstr "remaining_uses muss eine positive Ganzzahl oder null sein." - -msgid "remaining_uses must not be set if redelegation is allowed" -msgstr "" -"remaining_uses darf nicht festgelegt werden, wenn eine Redelegation zulässig " -"ist" - -#, python-format -msgid "" -"request to update group %(group)s, but config provided contains group " -"%(group_other)s instead" -msgstr "" -"Anforderung zur Aktualisierung von Gruppe %(group)s, die angegebene " -"Konfiguration enthält jedoch stattdessen Gruppe %(group_other)s" - -msgid "rescope a scoped token" -msgstr "Bereich für bereichsorientierten Token ändern" - -#, python-format -msgid "role %s is not defined" -msgstr "Die Rolle %s ist nicht definiert." - -msgid "scope.project.id must be specified if include_subtree is also specified" -msgstr "" -"scope.project.id muss angegeben werden, wenn include_subtree angegeben wurde." 
- -#, python-format -msgid "tls_cacertdir %s not found or is not a directory" -msgstr "tls_cacertdir %s nicht gefunden oder ist kein Verzeichnis" - -#, python-format -msgid "tls_cacertfile %s not found or is not a file" -msgstr "tls_cacertfile %s wurde nicht gefunden oder ist keine Datei" - -#, python-format -msgid "token reference must be a KeystoneToken type, got: %s" -msgstr "Tokenreferenz muss vom Typ 'KeystoneToken' sein. Abgerufen wurde: %s" - -msgid "" -"update of domain_id is deprecated as of Mitaka and will be removed in O." -msgstr "" -"Die Aktualisierung von 'domain_id' wurde in Mitaka eingestellt und wird im " -"\"O\"-Release entfernt. " - -#, python-format -msgid "" -"validated expected to find %(param_name)r in function signature for " -"%(func_name)r." -msgstr "" -"Validierung erwartete %(param_name)r in Funktionssignatur für %(func_name)r." diff --git a/keystone-moon/keystone/locale/el/LC_MESSAGES/keystone-log-critical.po b/keystone-moon/keystone/locale/el/LC_MESSAGES/keystone-log-critical.po deleted file mode 100644 index 72c931a3..00000000 --- a/keystone-moon/keystone/locale/el/LC_MESSAGES/keystone-log-critical.po +++ /dev/null @@ -1,26 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# Efstathios Iosifidis , 2015 -# OpenStack Infra , 2015. 
#zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2015-09-05 01:09+0000\n" -"Last-Translator: Efstathios Iosifidis \n" -"Language: el\n" -"Plural-Forms: nplurals=2; plural=(n != 1);\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Greek\n" - -#, python-format -msgid "Unable to open template file %s" -msgstr "Αδυναμία ανοίγματος αρχείου προτύπου %s" diff --git a/keystone-moon/keystone/locale/en_AU/LC_MESSAGES/keystone-log-critical.po b/keystone-moon/keystone/locale/en_AU/LC_MESSAGES/keystone-log-critical.po deleted file mode 100644 index ab001a72..00000000 --- a/keystone-moon/keystone/locale/en_AU/LC_MESSAGES/keystone-log-critical.po +++ /dev/null @@ -1,25 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# OpenStack Infra , 2015. 
#zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2014-08-31 03:19+0000\n" -"Last-Translator: openstackjenkins \n" -"Language: en-AU\n" -"Plural-Forms: nplurals=2; plural=(n != 1);\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: English (Australia)\n" - -#, python-format -msgid "Unable to open template file %s" -msgstr "Unable to open template file %s" diff --git a/keystone-moon/keystone/locale/en_AU/LC_MESSAGES/keystone-log-error.po b/keystone-moon/keystone/locale/en_AU/LC_MESSAGES/keystone-log-error.po deleted file mode 100644 index 141e7ec1..00000000 --- a/keystone-moon/keystone/locale/en_AU/LC_MESSAGES/keystone-log-error.po +++ /dev/null @@ -1,65 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# OpenStack Infra , 2015. #zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 8.0.1.dev11\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/keystone\n" -"POT-Creation-Date: 2015-11-05 06:13+0000\n" -"PO-Revision-Date: 2015-06-26 05:13+0000\n" -"Last-Translator: openstackjenkins \n" -"Language-Team: English (Australia)\n" -"Language: en-AU\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Plural-Forms: nplurals=2; plural=(n != 1);\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.1\n" - -msgid "" -"Error setting up the debug environment. Verify that the option --debug-url " -"has the format : and that a debugger processes is listening on " -"that port." -msgstr "" -"Error setting up the debug environment. 
Verify that the option --debug-url " -"has the format : and that a debugger processes is listening on " -"that port." - -#, python-format -msgid "Failed to send %(res_id)s %(event_type)s notification" -msgstr "Failed to send %(res_id)s %(event_type)s notification" - -msgid "Failed to validate token" -msgstr "Failed to validate token" - -#, python-format -msgid "Malformed endpoint %(url)s - unknown key %(keyerror)s" -msgstr "Malformed endpoint %(url)s - unknown key %(keyerror)s" - -#, python-format -msgid "" -"Malformed endpoint %s - incomplete format (are you missing a type notifier ?)" -msgstr "" -"Malformed endpoint %s - incomplete format (are you missing a type notifier ?)" - -msgid "Server error" -msgstr "Server error" - -#, python-format -msgid "" -"Unable to build cache config-key. Expected format \":\". " -"Skipping unknown format: %s" -msgstr "" -"Unable to build cache config-key. Expected format \":\". " -"Skipping unknown format: %s" - -msgid "Unable to sign token" -msgstr "Unable to sign token" - -#, python-format -msgid "Unexpected error or malformed token determining token expiry: %s" -msgstr "Unexpected error or malformed token determining token expiry: %s" diff --git a/keystone-moon/keystone/locale/en_AU/LC_MESSAGES/keystone.po b/keystone-moon/keystone/locale/en_AU/LC_MESSAGES/keystone.po deleted file mode 100644 index f290a110..00000000 --- a/keystone-moon/keystone/locale/en_AU/LC_MESSAGES/keystone.po +++ /dev/null @@ -1,348 +0,0 @@ -# English (Australia) translations for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# Tom Fifield , 2013 -# OpenStack Infra , 2015. 
#zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 8.0.1.dev11\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/keystone\n" -"POT-Creation-Date: 2015-11-05 06:13+0000\n" -"PO-Revision-Date: 2015-09-03 12:54+0000\n" -"Last-Translator: openstackjenkins \n" -"Language: en_AU\n" -"Language-Team: English (Australia)\n" -"Plural-Forms: nplurals=2; plural=(n != 1)\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=utf-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 2.1.1\n" - -#, python-format -msgid "%(property_name)s cannot be less than %(min_length)s characters." -msgstr "%(property_name)s cannot be less than %(min_length)s characters." - -#, python-format -msgid "%(property_name)s is not a %(display_expected_type)s" -msgstr "%(property_name)s is not a %(display_expected_type)s" - -#, python-format -msgid "%(property_name)s should not be greater than %(max_length)s characters." -msgstr "" -"%(property_name)s should not be greater than %(max_length)s characters." - -#, python-format -msgid "%s cannot be empty." -msgstr "%s cannot be empty." - -msgid "Access token is expired" -msgstr "Access token is expired" - -msgid "Access token not found" -msgstr "Access token not found" - -msgid "Additional authentications steps required." -msgstr "Additional authentications steps required." - -msgid "An unhandled exception has occurred: Could not find metadata." -msgstr "An unhandled exception has occurred: Could not find metadata." - -msgid "Attempted to authenticate with an unsupported method." -msgstr "Attempted to authenticate with an unsupported method." - -msgid "Authentication plugin error." -msgstr "Authentication plugin error." 
- -#, python-format -msgid "Cannot change %(option_name)s %(attr)s" -msgstr "Cannot change %(option_name)s %(attr)s" - -msgid "Cannot change consumer secret" -msgstr "Cannot change consumer secret" - -#, python-format -msgid "Cannot remove role that has not been granted, %s" -msgstr "Cannot remove role that has not been granted, %s" - -msgid "Consumer not found" -msgstr "Consumer not found" - -msgid "Could not find role" -msgstr "Could not find role" - -msgid "Credential belongs to another user" -msgstr "Credential belongs to another user" - -#, python-format -msgid "Domain (%s)" -msgstr "Domain (%s)" - -#, python-format -msgid "Domain is disabled: %s" -msgstr "Domain is disabled: %s" - -msgid "Domain scoped token is not supported" -msgstr "Domain scoped token is not supported" - -#, python-format -msgid "Duplicate ID, %s." -msgstr "Duplicate ID, %s." - -#, python-format -msgid "Duplicate name, %s." -msgstr "Duplicate name, %s." - -msgid "Enabled field must be a boolean" -msgstr "Enabled field must be a boolean" - -msgid "Enabled field should be a boolean" -msgstr "Enabled field should be a boolean" - -#, python-format -msgid "Endpoint %(endpoint_id)s not found in project %(project_id)s" -msgstr "Endpoint %(endpoint_id)s not found in project %(project_id)s" - -#, python-format -msgid "Expected dict or list: %s" -msgstr "Expected dict or list: %s" - -msgid "Failed to validate token" -msgstr "Failed to validate token" - -#, python-format -msgid "Invalid LDAP TLS certs option: %(option)s. Choose one of: %(options)s" -msgstr "Invalid LDAP TLS certs option: %(option)s. Choose one of: %(options)s" - -#, python-format -msgid "Invalid LDAP TLS_AVAIL option: %s. TLS not available" -msgstr "Invalid LDAP TLS_AVAIL option: %s. TLS not available" - -#, python-format -msgid "Invalid LDAP scope: %(scope)s. Choose one of: %(options)s" -msgstr "Invalid LDAP scope: %(scope)s. 
Choose one of: %(options)s" - -msgid "Invalid TLS / LDAPS combination" -msgstr "Invalid TLS / LDAPS combination" - -msgid "Invalid blob in credential" -msgstr "Invalid blob in credential" - -msgid "Invalid limit value" -msgstr "Invalid limit value" - -msgid "Invalid username or password" -msgstr "Invalid username or password" - -#, python-format -msgid "LDAP %s create" -msgstr "LDAP %s create" - -#, python-format -msgid "LDAP %s delete" -msgstr "LDAP %s delete" - -#, python-format -msgid "LDAP %s update" -msgstr "LDAP %s update" - -#, python-format -msgid "Malformed endpoint URL (%(endpoint)s), see ERROR log for details." -msgstr "Malformed endpoint URL (%(endpoint)s), see ERROR log for details." - -msgid "Marker could not be found" -msgstr "Marker could not be found" - -msgid "Name field is required and cannot be empty" -msgstr "Name field is required and cannot be empty" - -msgid "No authenticated user" -msgstr "No authenticated user" - -msgid "No options specified" -msgstr "No options specified" - -msgid "Non-default domain is not supported" -msgstr "Non-default domain is not supported" - -#, python-format -msgid "Project (%s)" -msgstr "Project (%s)" - -#, python-format -msgid "Project is disabled: %s" -msgstr "Project is disabled: %s" - -msgid "Request Token does not have an authorizing user id" -msgstr "Request Token does not have an authorizing user id" - -#, python-format -msgid "" -"Request attribute %(attribute)s must be less than or equal to %(size)i. The " -"server could not comply with the request because the attribute size is " -"invalid (too large). The client is assumed to be in error." -msgstr "" -"Request attribute %(attribute)s must be less than or equal to %(size)i. The " -"server could not comply with the request because the attribute size is " -"invalid (too large). The client is assumed to be in error." 
- -msgid "Request token is expired" -msgstr "Request token is expired" - -msgid "Request token not found" -msgstr "Request token not found" - -#, python-format -msgid "Role %s not found" -msgstr "Role %s not found" - -msgid "Scoping to both domain and project is not allowed" -msgstr "Scoping to both domain and project is not allowed" - -msgid "Scoping to both domain and trust is not allowed" -msgstr "Scoping to both domain and trust is not allowed" - -msgid "Scoping to both project and trust is not allowed" -msgstr "Scoping to both project and trust is not allowed" - -msgid "Specify a domain or project, not both" -msgstr "Specify a domain or project, not both" - -msgid "Specify a user or group, not both" -msgstr "Specify a user or group, not both" - -#, python-format -msgid "" -"String length exceeded.The length of string '%(string)s' exceeded the limit " -"of column %(type)s(CHAR(%(length)d))." -msgstr "" -"String length exceeded.The length of string '%(string)s' exceeded the limit " -"of column %(type)s(CHAR(%(length)d))." - -#, python-format -msgid "The Keystone configuration file %(config_file)s could not be found." -msgstr "The Keystone configuration file %(config_file)s could not be found." - -msgid "The action you have requested has not been implemented." -msgstr "The action you have requested has not been implemented." - -msgid "The request you have made requires authentication." -msgstr "The request you have made requires authentication." - -msgid "The resource could not be found." -msgstr "The resource could not be found." - -msgid "There should not be any non-oauth parameters" -msgstr "There should not be any non-oauth parameters" - -msgid "" -"Timestamp not in expected format. The server could not comply with the " -"request since it is either malformed or otherwise incorrect. The client is " -"assumed to be in error." -msgstr "" -"Timestamp not in expected format. 
The server could not comply with the " -"request since it is either malformed or otherwise incorrect. The client is " -"assumed to be in error." - -msgid "Token belongs to another user" -msgstr "Token belongs to another user" - -msgid "Token does not belong to specified tenant." -msgstr "Token does not belong to specified tenant." - -msgid "Trustee has no delegated roles." -msgstr "Trustee has no delegated roles." - -msgid "Trustor is disabled." -msgstr "Trustor is disabled." - -#, python-format -msgid "Unable to locate domain config directory: %s" -msgstr "Unable to locate domain config directory: %s" - -#, python-format -msgid "Unable to lookup user %s" -msgstr "Unable to lookup user %s" - -msgid "Unable to sign token." -msgstr "Unable to sign token." - -msgid "Unknown Target" -msgstr "Unknown Target" - -#, python-format -msgid "User %(u_id)s is unauthorized for tenant %(t_id)s" -msgstr "User %(u_id)s is unauthorized for tenant %(t_id)s" - -#, python-format -msgid "User %(user_id)s has no access to domain %(domain_id)s" -msgstr "User %(user_id)s has no access to domain %(domain_id)s" - -#, python-format -msgid "User %(user_id)s has no access to project %(project_id)s" -msgstr "User %(user_id)s has no access to project %(project_id)s" - -#, python-format -msgid "User %(user_id)s is already a member of group %(group_id)s" -msgstr "User %(user_id)s is already a member of group %(group_id)s" - -msgid "User IDs do not match" -msgstr "User IDs do not match" - -#, python-format -msgid "User is disabled: %s" -msgstr "User is disabled: %s" - -msgid "User is not a member of the requested project" -msgstr "User is not a member of the requested project" - -msgid "User is not a trustee." -msgstr "User is not a trustee." - -msgid "User not found" -msgstr "User not found" - -msgid "You are not authorized to perform the requested action." -msgstr "You are not authorized to perform the requested action." 
- -msgid "authorizing user does not have role required" -msgstr "authorizing user does not have role required" - -msgid "pad must be single character" -msgstr "pad must be single character" - -msgid "padded base64url text must be multiple of 4 characters" -msgstr "padded base64url text must be multiple of 4 characters" - -msgid "provided consumer key does not match stored consumer key" -msgstr "provided consumer key does not match stored consumer key" - -msgid "provided request key does not match stored request key" -msgstr "provided request key does not match stored request key" - -msgid "provided verifier does not match stored verifier" -msgstr "provided verifier does not match stored verifier" - -msgid "region not type dogpile.cache.CacheRegion" -msgstr "region not type dogpile.cache.CacheRegion" - -#, python-format -msgid "text is multiple of 4, but pad \"%s\" occurs before 2nd to last char" -msgstr "text is multiple of 4, but pad \"%s\" occurs before 2nd to last char" - -#, python-format -msgid "text is multiple of 4, but pad \"%s\" occurs before non-pad last char" -msgstr "text is multiple of 4, but pad \"%s\" occurs before non-pad last char" - -#, python-format -msgid "text is not a multiple of 4, but contains pad \"%s\"" -msgstr "text is not a multiple of 4, but contains pad \"%s\"" - -#, python-format -msgid "tls_cacertdir %s not found or is not a directory" -msgstr "tls_cacertdir %s not found or is not a directory" - -#, python-format -msgid "tls_cacertfile %s not found or is not a file" -msgstr "tls_cacertfile %s not found or is not a file" diff --git a/keystone-moon/keystone/locale/en_GB/LC_MESSAGES/keystone-log-info.po b/keystone-moon/keystone/locale/en_GB/LC_MESSAGES/keystone-log-info.po deleted file mode 100644 index a0da5eed..00000000 --- a/keystone-moon/keystone/locale/en_GB/LC_MESSAGES/keystone-log-info.po +++ /dev/null @@ -1,214 +0,0 @@ -# Translations template for keystone. 
-# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# Andi Chandler , 2014 -msgid "" -msgstr "" -"Project-Id-Version: Keystone\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/keystone\n" -"POT-Creation-Date: 2015-03-09 06:03+0000\n" -"PO-Revision-Date: 2015-03-07 04:31+0000\n" -"Last-Translator: openstackjenkins \n" -"Language-Team: English (United Kingdom) (http://www.transifex.com/projects/p/" -"keystone/language/en_GB/)\n" -"Language: en_GB\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 1.3\n" -"Plural-Forms: nplurals=2; plural=(n != 1);\n" - -#: keystone/assignment/core.py:250 -#, python-format -msgid "Creating the default role %s because it does not exist." -msgstr "" - -#: keystone/assignment/core.py:258 -#, python-format -msgid "Creating the default role %s failed because it was already created" -msgstr "" - -#: keystone/auth/controllers.py:64 -msgid "Loading auth-plugins by class-name is deprecated." -msgstr "" - -#: keystone/auth/controllers.py:106 -#, python-format -msgid "" -"\"expires_at\" has conflicting values %(existing)s and %(new)s. Will use " -"the earliest value." -msgstr "" -"\"expires_at\" has conflicting values %(existing)s and %(new)s. Will use " -"the earliest value." 
- -#: keystone/common/openssl.py:81 -#, python-format -msgid "Running command - %s" -msgstr "" - -#: keystone/common/wsgi.py:79 -msgid "No bind information present in token" -msgstr "" - -#: keystone/common/wsgi.py:83 -#, python-format -msgid "Named bind mode %s not in bind information" -msgstr "" - -#: keystone/common/wsgi.py:90 -msgid "Kerberos credentials required and not present" -msgstr "" - -#: keystone/common/wsgi.py:94 -msgid "Kerberos credentials do not match those in bind" -msgstr "" - -#: keystone/common/wsgi.py:98 -msgid "Kerberos bind authentication successful" -msgstr "" - -#: keystone/common/wsgi.py:105 -#, python-format -msgid "Couldn't verify unknown bind: {%(bind_type)s: %(identifier)s}" -msgstr "" - -#: keystone/common/environment/eventlet_server.py:103 -#, python-format -msgid "Starting %(arg0)s on %(host)s:%(port)s" -msgstr "" - -#: keystone/common/kvs/core.py:138 -#, python-format -msgid "Adding proxy '%(proxy)s' to KVS %(name)s." -msgstr "" - -#: keystone/common/kvs/core.py:188 -#, python-format -msgid "Using %(func)s as KVS region %(name)s key_mangler" -msgstr "" - -#: keystone/common/kvs/core.py:200 -#, python-format -msgid "Using default dogpile sha1_mangle_key as KVS region %s key_mangler" -msgstr "" - -#: keystone/common/kvs/core.py:210 -#, python-format -msgid "KVS region %s key_mangler disabled." 
-msgstr "" - -#: keystone/contrib/example/core.py:64 keystone/contrib/example/core.py:73 -#, python-format -msgid "" -"Received the following notification: service %(service)s, resource_type: " -"%(resource_type)s, operation %(operation)s payload %(payload)s" -msgstr "" - -#: keystone/openstack/common/eventlet_backdoor.py:146 -#, python-format -msgid "Eventlet backdoor listening on %(port)s for process %(pid)d" -msgstr "Eventlet backdoor listening on %(port)s for process %(pid)d" - -#: keystone/openstack/common/service.py:173 -#, python-format -msgid "Caught %s, exiting" -msgstr "Caught %s, exiting" - -#: keystone/openstack/common/service.py:231 -msgid "Parent process has died unexpectedly, exiting" -msgstr "Parent process has died unexpectedly, exiting" - -#: keystone/openstack/common/service.py:262 -#, python-format -msgid "Child caught %s, exiting" -msgstr "Child caught %s, exiting" - -#: keystone/openstack/common/service.py:301 -msgid "Forking too fast, sleeping" -msgstr "Forking too fast, sleeping" - -#: keystone/openstack/common/service.py:320 -#, python-format -msgid "Started child %d" -msgstr "Started child %d" - -#: keystone/openstack/common/service.py:330 -#, python-format -msgid "Starting %d workers" -msgstr "Starting %d workers" - -#: keystone/openstack/common/service.py:347 -#, python-format -msgid "Child %(pid)d killed by signal %(sig)d" -msgstr "Child %(pid)d killed by signal %(sig)d" - -#: keystone/openstack/common/service.py:351 -#, python-format -msgid "Child %(pid)s exited with status %(code)d" -msgstr "Child %(pid)s exited with status %(code)d" - -#: keystone/openstack/common/service.py:390 -#, python-format -msgid "Caught %s, stopping children" -msgstr "Caught %s, stopping children" - -#: keystone/openstack/common/service.py:399 -msgid "Wait called after thread killed. Cleaning up." 
-msgstr "" - -#: keystone/openstack/common/service.py:415 -#, python-format -msgid "Waiting on %d children to exit" -msgstr "Waiting on %d children to exit" - -#: keystone/token/persistence/backends/sql.py:279 -#, python-format -msgid "Total expired tokens removed: %d" -msgstr "Total expired tokens removed: %d" - -#: keystone/token/providers/fernet/utils.py:72 -msgid "" -"[fernet_tokens] key_repository does not appear to exist; attempting to " -"create it" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:130 -#, python-format -msgid "Created a new key: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:143 -msgid "Key repository is already initialized; aborting." -msgstr "" - -#: keystone/token/providers/fernet/utils.py:179 -#, python-format -msgid "Starting key rotation with %(count)s key files: %(list)s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:185 -#, python-format -msgid "Current primary key is: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:187 -#, python-format -msgid "Next primary key will be: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:197 -#, python-format -msgid "Promoted key 0 to be the primary: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:213 -#, python-format -msgid "Excess keys to purge: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:237 -#, python-format -msgid "Loaded %(count)s encryption keys from: %(dir)s" -msgstr "" diff --git a/keystone-moon/keystone/locale/es/LC_MESSAGES/keystone-log-critical.po b/keystone-moon/keystone/locale/es/LC_MESSAGES/keystone-log-critical.po deleted file mode 100644 index 565b8ee0..00000000 --- a/keystone-moon/keystone/locale/es/LC_MESSAGES/keystone-log-critical.po +++ /dev/null @@ -1,25 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# OpenStack Infra , 2015. 
#zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2014-08-31 03:19+0000\n" -"Last-Translator: openstackjenkins \n" -"Language: es\n" -"Plural-Forms: nplurals=2; plural=(n != 1);\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Spanish\n" - -#, python-format -msgid "Unable to open template file %s" -msgstr "No se puede abrir el archivo de plantilla %s" diff --git a/keystone-moon/keystone/locale/es/LC_MESSAGES/keystone-log-error.po b/keystone-moon/keystone/locale/es/LC_MESSAGES/keystone-log-error.po deleted file mode 100644 index d1c2eaa6..00000000 --- a/keystone-moon/keystone/locale/es/LC_MESSAGES/keystone-log-error.po +++ /dev/null @@ -1,177 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. 
-# -# Translators: -msgid "" -msgstr "" -"Project-Id-Version: Keystone\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/keystone\n" -"POT-Creation-Date: 2015-03-09 06:03+0000\n" -"PO-Revision-Date: 2015-03-07 04:31+0000\n" -"Last-Translator: openstackjenkins \n" -"Language-Team: Spanish (http://www.transifex.com/projects/p/keystone/" -"language/es/)\n" -"Language: es\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 1.3\n" -"Plural-Forms: nplurals=2; plural=(n != 1);\n" - -#: keystone/notifications.py:304 -msgid "Failed to construct notifier" -msgstr "" - -#: keystone/notifications.py:389 -#, python-format -msgid "Failed to send %(res_id)s %(event_type)s notification" -msgstr "" - -#: keystone/notifications.py:606 -#, python-format -msgid "Failed to send %(action)s %(event_type)s notification" -msgstr "" - -#: keystone/catalog/core.py:62 -#, python-format -msgid "Malformed endpoint - %(url)r is not a string" -msgstr "" - -#: keystone/catalog/core.py:66 -#, python-format -msgid "Malformed endpoint %(url)s - unknown key %(keyerror)s" -msgstr "" -"Punto final formado incorrectamente %(url)s - clave desconocida %(keyerror)s" - -#: keystone/catalog/core.py:71 -#, python-format -msgid "" -"Malformed endpoint '%(url)s'. The following type error occurred during " -"string substitution: %(typeerror)s" -msgstr "" - -#: keystone/catalog/core.py:77 -#, python-format -msgid "" -"Malformed endpoint %s - incomplete format (are you missing a type notifier ?)" -msgstr "" - -#: keystone/common/openssl.py:93 -#, python-format -msgid "Command %(to_exec)s exited with %(retcode)s- %(output)s" -msgstr "" - -#: keystone/common/openssl.py:121 -#, python-format -msgid "Failed to remove file %(file_path)r: %(error)s" -msgstr "" - -#: keystone/common/utils.py:239 -msgid "" -"Error setting up the debug environment. 
Verify that the option --debug-url " -"has the format : and that a debugger processes is listening on " -"that port." -msgstr "" -"Error configurando el entorno de depuración. Verifique que la opción --debug-" -"url tiene el formato : y que un proceso de depuración está " -"publicado en ese host y puerto" - -#: keystone/common/cache/core.py:100 -#, python-format -msgid "" -"Unable to build cache config-key. Expected format \":\". " -"Skipping unknown format: %s" -msgstr "" - -#: keystone/common/environment/eventlet_server.py:99 -#, python-format -msgid "Could not bind to %(host)s:%(port)s" -msgstr "No se puede asociar a %(host)s:%(port)s" - -#: keystone/common/environment/eventlet_server.py:185 -msgid "Server error" -msgstr "Error del servidor" - -#: keystone/contrib/endpoint_policy/core.py:129 -#: keystone/contrib/endpoint_policy/core.py:228 -#, python-format -msgid "" -"Circular reference or a repeated entry found in region tree - %(region_id)s." -msgstr "" - -#: keystone/contrib/federation/idp.py:410 -#, python-format -msgid "Error when signing assertion, reason: %(reason)s" -msgstr "" - -#: keystone/contrib/oauth1/core.py:136 -msgid "Cannot retrieve Authorization headers" -msgstr "" - -#: keystone/openstack/common/loopingcall.py:95 -msgid "in fixed duration looping call" -msgstr "en llamada en bucle de duración fija" - -#: keystone/openstack/common/loopingcall.py:138 -msgid "in dynamic looping call" -msgstr "en llamada en bucle dinámica" - -#: keystone/openstack/common/service.py:268 -msgid "Unhandled exception" -msgstr "Excepción no controlada" - -#: keystone/resource/core.py:477 -#, python-format -msgid "" -"Circular reference or a repeated entry found projects hierarchy - " -"%(project_id)s." 
-msgstr "" - -#: keystone/resource/core.py:939 -#, python-format -msgid "" -"Unexpected results in response for domain config - %(count)s responses, " -"first option is %(option)s, expected option %(expected)s" -msgstr "" - -#: keystone/resource/backends/sql.py:102 keystone/resource/backends/sql.py:121 -#, python-format -msgid "" -"Circular reference or a repeated entry found in projects hierarchy - " -"%(project_id)s." -msgstr "" - -#: keystone/token/provider.py:292 -#, python-format -msgid "Unexpected error or malformed token determining token expiry: %s" -msgstr "" - -#: keystone/token/persistence/backends/kvs.py:226 -#, python-format -msgid "" -"Reinitializing revocation list due to error in loading revocation list from " -"backend. Expected `list` type got `%(type)s`. Old revocation list data: " -"%(list)r" -msgstr "" - -#: keystone/token/providers/common.py:611 -msgid "Failed to validate token" -msgstr "Ha fallado la validación del token" - -#: keystone/token/providers/pki.py:47 -msgid "Unable to sign token" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:38 -#, python-format -msgid "" -"Either [fernet_tokens] key_repository does not exist or Keystone does not " -"have sufficient permission to access it: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:79 -msgid "" -"Failed to create [fernet_tokens] key_repository: either it already exists or " -"you don't have sufficient permissions to create it" -msgstr "" diff --git a/keystone-moon/keystone/locale/es/LC_MESSAGES/keystone.po b/keystone-moon/keystone/locale/es/LC_MESSAGES/keystone.po deleted file mode 100644 index f2336cc3..00000000 --- a/keystone-moon/keystone/locale/es/LC_MESSAGES/keystone.po +++ /dev/null @@ -1,1653 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. 
-# -# Translators: -# Alberto Molina Coballes , 2014 -# dario hereñu , 2015 -# Guillermo Vitas Gil , 2014 -# Jose Enrique Ruiz Navarro , 2014 -# Jose Ramirez Garcia , 2014 -# Pablo Sanchez , 2015 -# Eugènia Torrella , 2016. #zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2016-04-28 11:25+0000\n" -"Last-Translator: Eugènia Torrella \n" -"Language: es\n" -"Plural-Forms: nplurals=2; plural=(n != 1);\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Spanish\n" - -#, python-format -msgid "%(detail)s" -msgstr "%(detail)s" - -#, python-format -msgid "%(driver)s is not supported driver version" -msgstr "%(driver)s es una versión de controlador no soportada" - -#, python-format -msgid "" -"%(entity)s name cannot contain the following reserved characters: %(chars)s" -msgstr "" -"El nombre %(entity)s no puede contener los siguientes caracteres " -"reservados: %(chars)s" - -#, python-format -msgid "" -"%(event)s is not a valid notification event, must be one of: %(actions)s" -msgstr "" -"%(event)s no es un suceso de notificación válido, debe ser uno de: " -"%(actions)s" - -#, python-format -msgid "%(host)s is not a trusted dashboard host" -msgstr "%(host)s no es un host de panel de control de confianza" - -#, python-format -msgid "%(message)s %(amendment)s" -msgstr "%(message)s %(amendment)s" - -#, python-format -msgid "" -"%(mod_name)s doesn't provide database migrations. The migration repository " -"path at %(path)s doesn't exist or isn't a directory." -msgstr "" -"%(mod_name)s no proporciona migración de base de datos. La vía de acceso de " -"repositorio de migración en %(path)s no existe o no es un directorio." 
- -#, python-format -msgid "%(prior_role_id)s does not imply %(implied_role_id)s" -msgstr "%(prior_role_id)s no implica %(implied_role_id)s" - -#, python-format -msgid "%(property_name)s cannot be less than %(min_length)s characters." -msgstr "%(property_name)s no puede tener menos de %(min_length)s caracteres." - -#, python-format -msgid "%(property_name)s is not a %(display_expected_type)s" -msgstr "%(property_name)s no es %(display_expected_type)s" - -#, python-format -msgid "%(property_name)s should not be greater than %(max_length)s characters." -msgstr "%(property_name)s no debe tener más de %(max_length)s caracteres." - -#, python-format -msgid "%(role_id)s cannot be an implied roles" -msgstr "%(role_id)s no puede ser un rol implicado" - -#, python-format -msgid "%s cannot be empty." -msgstr "%s no puede estar vacío." - -#, python-format -msgid "%s extension does not exist." -msgstr "La extensión %s no existe." - -#, python-format -msgid "%s field is required and cannot be empty" -msgstr "el campo %s es obligatorio y no puede estar vacío" - -#, python-format -msgid "%s field(s) cannot be empty" -msgstr "el campo %s no puede estar vacío" - -#, python-format -msgid "" -"%s for the LDAP identity backend has been deprecated in the Mitaka release " -"in favor of read-only identity LDAP access. It will be removed in the \"O\" " -"release." -msgstr "" -"El programa de fondo de identidad LDAP %s se ha dejado en desuso en el " -"release de Mitaka, sustituyéndolo por un acceso LDAP de identidad de solo " -"lectura. Se eliminará en el release \"O\"." - -msgid "(Disable insecure_debug mode to suppress these details.)" -msgstr "(Inhabilite la modalidad insecure_debug para suprimir estos detalles.)" - -msgid "--all option cannot be mixed with other options" -msgstr "La opción --all no puede mezclarse con otras opciones" - -msgid "A project-scoped token is required to produce a service catalog." 
-msgstr "" -"Se necesita un token con ámbito de proyecto para producir un catálogo de " -"servicio." - -msgid "Access token is expired" -msgstr "El token de acceso ha caducado" - -msgid "Access token not found" -msgstr "No se ha encontrado el token de acceso" - -msgid "Additional authentications steps required." -msgstr "Se precisan pasos adicionales de autenticación." - -msgid "An unexpected error occurred when retrieving domain configs" -msgstr "" -"Se ha producido un error inesperado al recuperar las configuraciones de " -"dominio" - -#, python-format -msgid "An unexpected error occurred when trying to store %s" -msgstr "Se ha producido un error inesperado al intentar almacenar %s" - -msgid "An unexpected error prevented the server from fulfilling your request." -msgstr "" -"El servidor no ha podido completar su solicitud debido a un error inesperado." - -#, python-format -msgid "" -"An unexpected error prevented the server from fulfilling your request: " -"%(exception)s" -msgstr "" -"Un error inesperado a impedido que el servidor complete su solicitud: " -"%(exception)s" - -msgid "An unhandled exception has occurred: Could not find metadata." -msgstr "" -"Se ha producido una excepción no controlada: no se han podido encontrar los " -"metadatos." - -msgid "At least one option must be provided" -msgstr "Se debe especificar al menos una opción" - -msgid "At least one option must be provided, use either --all or --domain-name" -msgstr "Debe proporcionarse al menos una opción, utilice --all o --domain-name" - -msgid "At least one role should be specified." -msgstr "Se debe especificar al menos un rol" - -#, python-format -msgid "" -"Attempted automatic driver selection for assignment based upon " -"[identity]\\driver option failed since driver %s is not found. Set " -"[assignment]/driver to a valid driver in keystone config." 
-msgstr "" -"Se ha intentado la seleción automática de controlador para la asignación en " -"base a la opción [identity]\\driver, pero ha fallado porque no se encuentra " -"el controlador %s. Defina [assignment]/driver con un controlador válido en " -"la configuración de keystone." - -msgid "Attempted to authenticate with an unsupported method." -msgstr "Se ha intentado autenticar con un método no compatible." - -msgid "" -"Attempting to use OS-FEDERATION token with V2 Identity Service, use V3 " -"Authentication" -msgstr "" -"Intentando utilizar el token OS-FEDERATION con el servicio de identidad V2, " -"utilice la autenticación V3 ." - -msgid "Authentication plugin error." -msgstr "Error en el plugin de autenticación " - -#, python-format -msgid "" -"Backend `%(backend)s` is not a valid memcached backend. Valid backends: " -"%(backend_list)s" -msgstr "" -"El programa de fondo `%(backend)s` no es un programa de fondo almacenado en " -"caché válido. Programas de fondo válidos: %(backend_list)s" - -msgid "Cannot authorize a request token with a token issued via delegation." -msgstr "" -"No se puede autorizar una señal de solicitud con una señal emitida mediante " -"delegación." 
- -#, python-format -msgid "Cannot change %(option_name)s %(attr)s" -msgstr "No se puede cambiar %(option_name)s %(attr)s" - -msgid "Cannot change Domain ID" -msgstr "No se puede cambiar el ID del Dominio" - -msgid "Cannot change user ID" -msgstr "No se puede cambiar el ID de usuario" - -msgid "Cannot change user name" -msgstr "No se puede cambiar el nombre de usuario" - -#, python-format -msgid "Cannot create an endpoint with an invalid URL: %(url)s" -msgstr "No se puede crear un punto final con un URL no válido: %(url)s" - -#, python-format -msgid "Cannot create project with parent: %(project_id)s" -msgstr "No se puede crear un proyecto con el padre: %(project_id)s" - -#, python-format -msgid "" -"Cannot create project, since it specifies its owner as domain %(domain_id)s, " -"but specifies a parent in a different domain (%(parent_domain_id)s)." -msgstr "" -"No se puede crear el proyecto porque especifica que su propietario es el " -"dominio %(domain_id)s, pero especifica un padre en otro dominio distinto " -"(%(parent_domain_id)s)." - -#, python-format -msgid "" -"Cannot create project, since its parent (%(domain_id)s) is acting as a " -"domain, but project's specified parent_id (%(parent_id)s) does not match " -"this domain_id." -msgstr "" -"No se puede crear el proyecto porque su padre (%(domain_id)s) actúa como " -"dominio, pero el parent_id especificado en el proyecto, (%(parent_id)s), no " -"coincide con este domain_id." - -msgid "Cannot delete a domain that is enabled, please disable it first." -msgstr "" -"No se puede suprimir un dominio que está habilitado, antes debe " -"inhabilitarlo." - -#, python-format -msgid "" -"Cannot delete project %(project_id)s since its subtree contains enabled " -"projects." -msgstr "" -"No se puede suprimir el proyecto %(project_id)s porque su subárbol contiene " -"proyectos habilitados." - -#, python-format -msgid "" -"Cannot delete the project %s since it is not a leaf in the hierarchy. 
Use " -"the cascade option if you want to delete a whole subtree." -msgstr "" -"No se puede suprimir el proyecto %s porque no es una hoja en la jerarquía. " -"Utilice la opción de cascada si desea suprimir un subárbol entero." - -#, python-format -msgid "" -"Cannot disable project %(project_id)s since its subtree contains enabled " -"projects." -msgstr "" -"No se puede inhabilitar el proyecto %(project_id)s porque su subárbol " -"contiene proyectos habilitados." - -#, python-format -msgid "Cannot enable project %s since it has disabled parents" -msgstr "" -"No se puede habilitar el proyecto %s, ya que tiene padres inhabilitados" - -msgid "Cannot list assignments sourced from groups and filtered by user ID." -msgstr "" -"No se pueden enumerar las asignaciones obtenidas de grupos y filtradas por " -"ID de usuario." - -msgid "Cannot list request tokens with a token issued via delegation." -msgstr "" -"No se pueden listar los tokens de solicitud con un token emitido por " -"delegación." - -#, python-format -msgid "Cannot open certificate %(cert_file)s. Reason: %(reason)s" -msgstr "No se puede abrir el certificado %(cert_file)s. Motivo: %(reason)s" - -#, python-format -msgid "Cannot remove role that has not been granted, %s" -msgstr "No se puede eliminar un rol que no se ha otorgado, %s" - -msgid "" -"Cannot truncate a driver call without hints list as first parameter after " -"self " -msgstr "" -"No se puede truncar una llamada de controlador sin la lista de sugerencias " -"como primer parámetro después de self " - -msgid "Cannot update domain_id of a project that has children." -msgstr "No se puede actualizar el domain_id de un proyecto que tenga hijos." - -msgid "" -"Cannot use parents_as_list and parents_as_ids query params at the same time." -msgstr "" -"No se pueden utilizar los parámetros de consulta parents_as_list y " -"parents_as_ids al mismo tiempo." - -msgid "" -"Cannot use subtree_as_list and subtree_as_ids query params at the same time." 
-msgstr "" -"No se pueden utilizar los parámetros de consulta subtree_as_list y " -"subtree_as_ids al mismo tiempo." - -msgid "Cascade update is only allowed for enabled attribute." -msgstr "" -"Solo se permite la actualización en cascada de los atributos habilitados." - -msgid "" -"Combining effective and group filter will always result in an empty list." -msgstr "" -"La combinación de filtro de grupo y filtro efectivo dará siempre como " -"resultado una lista vacía." - -msgid "" -"Combining effective, domain and inherited filters will always result in an " -"empty list." -msgstr "" -"La combinación de un filtro heredado, un filtro de dominio y un filtro " -"efectivo dará siempre como resultado una lista vacía." - -#, python-format -msgid "Config API entity at /domains/%s/config" -msgstr "Entidad de API de config en /domains/%s/config" - -#, python-format -msgid "Conflict occurred attempting to store %(type)s - %(details)s" -msgstr "" -"Se ha producido un conflicto al intentar almacenar %(type)s - %(details)s" - -#, python-format -msgid "Conflicting region IDs specified: \"%(url_id)s\" != \"%(ref_id)s\"" -msgstr "" -"Se han especificado ID de región conflictivos: \"%(url_id)s\" != \"%(ref_id)s" -"\"" - -msgid "Consumer not found" -msgstr "No se ha encontrado el consumidor" - -#, python-format -msgid "" -"Could not change immutable attribute(s) '%(attributes)s' in target %(target)s" -msgstr "" -"No se pueden cambiar atributos inalterables '%(attributes)s' en el destino " -"%(target)s" - -#, python-format -msgid "" -"Could not determine Identity Provider ID. The configuration option " -"%(issuer_attribute)s was not found in the request environment." -msgstr "" -"No se ha podido determinar el ID del proveedor de identidades. No se ha " -"encontrado la opción de configuración %(issuer_attribute)s en el entorno de " -"la solicitud." 
- -#, python-format -msgid "" -"Could not find %(group_or_option)s in domain configuration for domain " -"%(domain_id)s" -msgstr "" -"No se ha podido encontrar %(group_or_option)s en la configuración de dominio " -"para el dominio %(domain_id)s" - -#, python-format -msgid "Could not find Endpoint Group: %(endpoint_group_id)s" -msgstr "No se ha encontrado el grupo de puntos finales: %(endpoint_group_id)s" - -msgid "Could not find Identity Provider identifier in environment" -msgstr "" -"No se ha podido encontrar el identificador del proveedor de identidad en el " -"entorno" - -#, python-format -msgid "Could not find Identity Provider: %(idp_id)s" -msgstr "No se ha podido encontrar el proveedor de identidad: %(idp_id)s" - -#, python-format -msgid "Could not find Service Provider: %(sp_id)s" -msgstr "No se ha podido encontrar el proveedor de servicios: %(sp_id)s" - -#, python-format -msgid "Could not find credential: %(credential_id)s" -msgstr "No se ha podido encontrar la credencial: %(credential_id)s" - -#, python-format -msgid "Could not find domain: %(domain_id)s" -msgstr "No se ha podido encontrar el dominio: %(domain_id)s" - -#, python-format -msgid "Could not find endpoint: %(endpoint_id)s" -msgstr "No se ha podido encontrar : %(endpoint_id)s" - -#, python-format -msgid "" -"Could not find federated protocol %(protocol_id)s for Identity Provider: " -"%(idp_id)s" -msgstr "" -"No se ha podido encontrar el protocolo federado %(protocol_id)s para el " -"proveedor de identidad: %(idp_id)s" - -#, python-format -msgid "Could not find group: %(group_id)s" -msgstr "No se ha podido encontrar el grupo: %(group_id)s" - -#, python-format -msgid "Could not find mapping: %(mapping_id)s" -msgstr "No se ha podido encontrar la correlación: %(mapping_id)s" - -msgid "Could not find policy association" -msgstr "No se ha encontrado la asociación de política" - -#, python-format -msgid "Could not find policy: %(policy_id)s" -msgstr "No se ha podido encontrar : %(policy_id)s" - -#, 
python-format -msgid "Could not find project: %(project_id)s" -msgstr "No se ha podido encontrar el proyecto: %(project_id)s" - -#, python-format -msgid "Could not find region: %(region_id)s" -msgstr "No se ha podido encontrar la región: %(region_id)s" - -#, python-format -msgid "" -"Could not find role assignment with role: %(role_id)s, user or group: " -"%(actor_id)s, project or domain: %(target_id)s" -msgstr "" -"No se ha podido encontrar la asignación de roles con el rol: %(role_id)s, " -"usuario o grupo: %(actor_id)s, proyecto o dominio: %(target_id)s" - -#, python-format -msgid "Could not find role: %(role_id)s" -msgstr "No se ha podido encontrar el rol: %(role_id)s" - -#, python-format -msgid "Could not find service: %(service_id)s" -msgstr "No se ha podido encontrar el servicio: %(service_id)s" - -#, python-format -msgid "Could not find token: %(token_id)s" -msgstr "No se ha podido encontrar el token: %(token_id)s" - -#, python-format -msgid "Could not find trust: %(trust_id)s" -msgstr "No se ha podido encontrar la confianza: %(trust_id)s" - -#, python-format -msgid "Could not find user: %(user_id)s" -msgstr "No se ha podido encontrar el usuario: %(user_id)s" - -#, python-format -msgid "Could not find version: %(version)s" -msgstr "No se ha podido encontrar la versión: %(version)s" - -#, python-format -msgid "Could not find: %(target)s" -msgstr "No se ha podido encontrar : %(target)s" - -msgid "" -"Could not map any federated user properties to identity values. Check debug " -"logs or the mapping used for additional details." -msgstr "" -"No se ha podido correlacionar ninguna propiedad de usuario federado a valor " -"de identidad. Compruebe los registros de depuración o la correlación " -"utilizada para obtener información más detallada." - -msgid "" -"Could not map user while setting ephemeral user identity. Either mapping " -"rules must specify user id/name or REMOTE_USER environment variable must be " -"set." 
-msgstr "" -"No se ha podido correlacionar el usuario al establecer la identidad de " -"usuario efímera. Las reglas de correlación deben especificar ID/nombre de " -"usuario o se debe establecer la variable de entorno REMOTE_USER." - -msgid "Could not validate the access token" -msgstr "No se ha podido validar el token de acceso" - -msgid "Credential belongs to another user" -msgstr "La credencial pertenece a otro usuario" - -msgid "Credential signature mismatch" -msgstr "Discrepancia en la firma de credencial" - -#, python-format -msgid "" -"Direct import of auth plugin %(name)r is deprecated as of Liberty in favor " -"of its entrypoint from %(namespace)r and may be removed in N." -msgstr "" -"La importación directa del plugin de autorización %(name)r está en desuso a " -"partir de Liberty, sustituyéndose por su punto de entrada desde " -"%(namespace)r y puede que se elimine en N." - -#, python-format -msgid "" -"Direct import of driver %(name)r is deprecated as of Liberty in favor of its " -"entrypoint from %(namespace)r and may be removed in N." -msgstr "" -"La importación directa del controlador %(name)r está en desuso a partir de " -"Liberty, sustituyéndose por su punto de entrada desde %(namespace)r y puede " -"que se elimine en N." - -msgid "" -"Disabling an entity where the 'enable' attribute is ignored by configuration." -msgstr "" -"Inhabilitando una entidad donde el atributo 'enable' se omite en la " -"configuración." - -#, python-format -msgid "Domain (%s)" -msgstr "Dominio (%s)" - -#, python-format -msgid "Domain cannot be named %s" -msgstr "No se puede invocar al dominio %s" - -#, python-format -msgid "Domain cannot have ID %s" -msgstr "El dominio no puede tener el ID %s" - -#, python-format -msgid "Domain is disabled: %s" -msgstr "El dominio %s está inhabilitado" - -msgid "Domain name cannot contain reserved characters." -msgstr "El nombre de dominio no puede contener caracteres reservados." 
- -msgid "Domain scoped token is not supported" -msgstr "No se da soporte a tokens con ámbito de dominio" - -msgid "Domain specific roles are not supported in the V8 role driver" -msgstr "El controlador de roles V8 no admite roles específicos de dominio." - -#, python-format -msgid "" -"Domain: %(domain)s already has a configuration defined - ignoring file: " -"%(file)s." -msgstr "" -"El dominio: %(domain)s ya tiene definida una configuración - se ignorará el " -"archivo: %(file)s." - -msgid "Duplicate Entry" -msgstr "Entrada duplicada " - -#, python-format -msgid "Duplicate ID, %s." -msgstr "ID duplicado, %s." - -#, python-format -msgid "Duplicate entry: %s" -msgstr "Entrada duplicada: %s" - -#, python-format -msgid "Duplicate name, %s." -msgstr "Nombre duplicado, %s." - -#, python-format -msgid "Duplicate remote ID: %s" -msgstr "ID remoto duplicado: %s" - -msgid "EC2 access key not found." -msgstr "No se ha encontrado la clave de acceso de EC2." - -msgid "EC2 signature not supplied." -msgstr "No se ha proporcionado la firma de EC2." - -msgid "" -"Either --bootstrap-password argument or OS_BOOTSTRAP_PASSWORD must be set." -msgstr "" -"Se debe definir el argumento bootstrap-password o bien OS_BOOTSTRAP_PASSWORD." - -msgid "Enabled field must be a boolean" -msgstr "El campo habilitado debe ser un booleano" - -msgid "Enabled field should be a boolean" -msgstr "El campo habilitado debe ser un booleano" - -#, python-format -msgid "Endpoint %(endpoint_id)s not found in project %(project_id)s" -msgstr "" -"No se ha encontrado el punto final %(endpoint_id)s en el proyecto " -"%(project_id)s" - -msgid "Endpoint Group Project Association not found" -msgstr "" -"No se ha encontrado la asociación de proyecto del grupo de puntos finales" - -msgid "Ensure configuration option idp_entity_id is set." -msgstr "" -"Compruebe que se haya establecido la opción de configuración idp_entity_id." - -msgid "Ensure configuration option idp_sso_endpoint is set." 
-msgstr "" -"Compruebe que se haya establecido la opción de configuración " -"idp_sso_endpoint." - -#, python-format -msgid "" -"Error parsing configuration file for domain: %(domain)s, file: %(file)s." -msgstr "" -"Error al analizar el archivo de configuración para el dominio: %(domain)s, " -"archivo: %(file)s." - -#, python-format -msgid "Error while opening file %(path)s: %(err)s" -msgstr "Error al abrir el archivo %(path)s: %(err)s" - -#, python-format -msgid "Error while parsing line: '%(line)s': %(err)s" -msgstr "Error al analizar la línea: '%(line)s': %(err)s" - -#, python-format -msgid "Error while parsing rules %(path)s: %(err)s" -msgstr "Error al analizar las reglas %(path)s: %(err)s" - -#, python-format -msgid "Error while reading metadata file, %(reason)s" -msgstr "Error al leer el archivo de metadatos, %(reason)s" - -#, python-format -msgid "" -"Exceeded attempts to register domain %(domain)s to use the SQL driver, the " -"last domain that appears to have had it is %(last_domain)s, giving up" -msgstr "" -"Se ha superado el número máximo de intentos de registrar un dominio " -"%(domain)s para utilizar el controlador SQL, el último dominio que parece " -"haberlo tenido es %(last_domain)s, abandonando" - -#, python-format -msgid "Expected dict or list: %s" -msgstr "Se espera un diccionario o una lista: %s" - -msgid "" -"Expected signing certificates are not available on the server. Please check " -"Keystone configuration." -msgstr "" -"No hay los certificados para firmas esperados disponibles en el servidor. " -"Compruebe la configuración de Keystone." - -#, python-format -msgid "" -"Expecting to find %(attribute)s in %(target)s - the server could not comply " -"with the request since it is either malformed or otherwise incorrect. The " -"client is assumed to be in error." -msgstr "" -"Se esperaba encontrar %(attribute)s en %(target)s - el servidor no pudo " -"satisfacer la solicitud porque está mal formada o es incorrecta por algún " -"otro motivo. 
Se entiende que el cliente da error." - -#, python-format -msgid "Failed to start the %(name)s server" -msgstr "No se ha podido iniciar el servidor %(name)s" - -msgid "Failed to validate token" -msgstr "Ha fallado la validación del token" - -msgid "Federation token is expired" -msgstr "El token de federación ha caducado" - -#, python-format -msgid "" -"Field \"remaining_uses\" is set to %(value)s while it must not be set in " -"order to redelegate a trust" -msgstr "" -"El campo \"remaining_uses\" está establecido en %(value)s, pero no puede " -"estar establecido para poder redelegar una confianza" - -msgid "Found invalid token: scoped to both project and domain." -msgstr "" -"Se ha encontrado un token no válido: se ha definido el ámbito a proyecto y " -"dominio a la vez." - -#, python-format -msgid "Group %s not found in config" -msgstr "No se ha encontrado el grupo %s en la configuración" - -#, python-format -msgid "Group %(group)s is not supported for domain specific configurations" -msgstr "" -"No se admite el grupo %(group)s para configuraciones específicas de dominio" - -#, python-format -msgid "" -"Group %(group_id)s returned by mapping %(mapping_id)s was not found in the " -"backend." -msgstr "" -"El grupo %(group_id)s devuelto por la correlación %(mapping_id)s no se ha " -"encontrado en el programa de fondo." 
- -#, python-format -msgid "" -"Group membership across backend boundaries is not allowed, group in question " -"is %(group_id)s, user is %(user_id)s" -msgstr "" -"No se permite la pertenencia a grupos traspasando los límites del programa " -"de fondo, el grupo en cuestión es %(group_id)s, el usuario es %(user_id)s" - -#, python-format -msgid "ID attribute %(id_attr)s not found in LDAP object %(dn)s" -msgstr "" -"No se ha encontrado el ID de atributo %(id_attr)s en el objeto LDAP %(dn)s" - -#, python-format -msgid "Identity Provider %(idp)s is disabled" -msgstr "El proveedor de identidad %(idp)s está inhabilitado" - -msgid "" -"Incoming identity provider identifier not included among the accepted " -"identifiers." -msgstr "" -"No se ha incluido el identificador del proveedor de identidad de entrada " -"entre los identificadores aceptados." - -msgid "Invalid EC2 signature." -msgstr "Firma de EC2 no válida." - -#, python-format -msgid "Invalid LDAP TLS certs option: %(option)s. Choose one of: %(options)s" -msgstr "" -"Opción de LDAP TLS no válida: %(option)s. Elegir una de las siguientes: " -"%(options)s" - -#, python-format -msgid "Invalid LDAP TLS_AVAIL option: %s. TLS not available" -msgstr "Opción LDAP TLS_AVAIL no válida: %s. TLS no disponible" - -#, python-format -msgid "Invalid LDAP deref option: %(option)s. Choose one of: %(options)s" -msgstr "" -"Opción deref LDAP no válida: %(option)s. Elija una de las siguientes: " -"%(options)s" - -#, python-format -msgid "Invalid LDAP scope: %(scope)s. Choose one of: %(options)s" -msgstr "" -"Ámbito LDAP incorrecto: %(scope)s. 
Seleccione una de las siguientes " -"opciones: %(options)s" - -msgid "Invalid TLS / LDAPS combination" -msgstr "Combinación de TLS/LDAPS no válida" - -#, python-format -msgid "Invalid audit info data type: %(data)s (%(type)s)" -msgstr "" -"Tipo de datos de información de auditoría no válido: %(data)s (%(type)s)" - -msgid "Invalid blob in credential" -msgstr "Blob no válido en la credencial" - -#, python-format -msgid "" -"Invalid domain name: %(domain)s found in config file name: %(file)s - " -"ignoring this file." -msgstr "" -"Se ha encontrado un nombre de dominio no válido: %(domain)s en el nombre del " -"archivo de configuración: %(file)s - se ignorará este archivo." - -#, python-format -msgid "Invalid domain specific configuration: %(reason)s" -msgstr "Configuración específica de dominio no válida: %(reason)s" - -#, python-format -msgid "Invalid input for field '%(path)s'. The value is '%(value)s'." -msgstr "Entrada no válida para el campo '%(path)s'. El valor es '%(value)s'." - -msgid "Invalid limit value" -msgstr "Valor de límite no válido" - -#, python-format -msgid "" -"Invalid mix of entities for policy association - only Endpoint, Service or " -"Region+Service allowed. Request was - Endpoint: %(endpoint_id)s, Service: " -"%(service_id)s, Region: %(region_id)s" -msgstr "" -"Combinación no válida de entidades para la asociación de políticas: solo se " -"permite Punto final, Servicio o Región + Servicio. La solicitud era: Punto " -"final: %(endpoint_id)s, Servicio: %(service_id)s, Región: %(region_id)s" - -#, python-format -msgid "" -"Invalid rule: %(identity_value)s. Both 'groups' and 'domain' keywords must " -"be specified." -msgstr "" -"Regla no válida: %(identity_value)s. Se deben especificar las palabras clave " -"'groups' y 'domain'." 
- -msgid "Invalid signature" -msgstr "Firma no válida" - -msgid "Invalid user / password" -msgstr "Usuario / contraseña no válidos" - -msgid "Invalid username or TOTP passcode" -msgstr "Nombre de usuario o código de acceso TOTP no válidos" - -msgid "Invalid username or password" -msgstr "Usuario o contraseña no válidos" - -#, python-format -msgid "KVS region %s is already configured. Cannot reconfigure." -msgstr "" -"La región KVS %s ya se ha configurado. No se puede volver a configurar." - -#, python-format -msgid "Key Value Store not configured: %s" -msgstr "Almacén de valor de clave no configurado: %s" - -#, python-format -msgid "LDAP %s create" -msgstr "Creación de LDAP %s" - -#, python-format -msgid "LDAP %s delete" -msgstr "Supresión de LDAP %s" - -#, python-format -msgid "LDAP %s update" -msgstr "Actualización de LDAP %s" - -msgid "" -"Length of transformable resource id > 64, which is max allowed characters" -msgstr "" -"Longitud del ID de recurso transformable > 64, que es el número máximo de " -"caracteres permitidos" - -#, python-format -msgid "" -"Local section in mapping %(mapping_id)s refers to a remote match that " -"doesn't exist (e.g. {0} in a local section)." -msgstr "" -"La sección local de la correlación %(mapping_id)s hace referencia a una " -"coincidencia remota que no existe (p.e. {0} en una sección local)." - -#, python-format -msgid "Lock Timeout occurred for key, %(target)s" -msgstr "" -"Se ha excedido el tiempo de espera de bloqueo para la clave, %(target)s" - -#, python-format -msgid "Lock key must match target key: %(lock)s != %(target)s" -msgstr "" -"La clave de bloqueo debe coincidir con la clave de destino: %(lock)s != " -"%(target)s" - -#, python-format -msgid "Malformed endpoint URL (%(endpoint)s), see ERROR log for details." -msgstr "" -"URL de punto final formado incorrectamente (%(endpoint)s), vea el registro " -"de ERROR para obtener detalles." 
- -msgid "Marker could not be found" -msgstr "No se ha podido encontrar el marcador" - -#, python-format -msgid "Max hierarchy depth reached for %s branch." -msgstr "Se ha alcanzado la profundidad máxima de jerarquía en la rama %s." - -#, python-format -msgid "Maximum lock attempts on %s occurred." -msgstr "Se han producido el máximo de intentos de bloqueo en %s." - -#, python-format -msgid "Member %(member)s is already a member of group %(group)s" -msgstr "El miembro %(member)s ya es miembro del grupo %(group)s" - -#, python-format -msgid "Method not callable: %s" -msgstr "Método no invocable: %s" - -msgid "Missing entity ID from environment" -msgstr "Falta el ID de entidad del entorno" - -msgid "" -"Modifying \"redelegation_count\" upon redelegation is forbidden. Omitting " -"this parameter is advised." -msgstr "" -"La modificación de \"redelegation_count\" tras la redelegación está " -"prohibida. Se recomienda omitir este parámetro." - -msgid "Multiple domains are not supported" -msgstr "No se admiten varios dominios" - -msgid "Must be called within an active lock context." -msgstr "Se debe invocar dentro de un contexto de bloqueo activo." - -msgid "Must specify either domain or project" -msgstr "Debe especificar dominio o proyecto, pero no ambas cosas a la vez" - -msgid "Name field is required and cannot be empty" -msgstr "El nombre de campo es obligatorio y no puede estar vacío" - -msgid "Neither Project Domain ID nor Project Domain Name was provided." -msgstr "" -"No se ha proporcionado el ID de dominio de proyecto ni el nombre de dominio " -"de proyecto." - -msgid "" -"No Authorization headers found, cannot proceed with OAuth related calls, if " -"running under HTTPd or Apache, ensure WSGIPassAuthorization is set to On." -msgstr "" -"No se han encontrado cabeceras de autorización, no se puede continuar con " -"las llamadas relacionadas OAuth, si se están ejecutando bajo HTTPd o Apache, " -"asegúrese de que WSGIPassAuthorization se establece en activada." 
- -msgid "No authenticated user" -msgstr "No hay ningún usuario autenticado " - -msgid "" -"No encryption keys found; run keystone-manage fernet_setup to bootstrap one." -msgstr "" -"No se han encontrado claves de cifrado; ejecute keystone-manage fernet_setup " -"en el programa de arranque uno." - -msgid "No options specified" -msgstr "No se han especificado opciones" - -#, python-format -msgid "No policy is associated with endpoint %(endpoint_id)s." -msgstr "No hay ninguna política asociada con el punto final %(endpoint_id)s." - -#, python-format -msgid "No remaining uses for trust: %(trust_id)s" -msgstr "No quedan usos para la confianza: %(trust_id)s" - -msgid "No token in the request" -msgstr "No hay ningún token en la solicitud" - -msgid "Non-default domain is not supported" -msgstr "El dominio no predeterminado no está soportado" - -msgid "One of the trust agents is disabled or deleted" -msgstr "Uno de los agentes de confianza está inhabilitado o se ha suprimido" - -#, python-format -msgid "" -"Option %(option)s found with no group specified while checking domain " -"configuration request" -msgstr "" -"Se ha encontrado la opción %(option)s sin un grupo especificado al comprobar " -"la solicitud de configuración del dominio" - -#, python-format -msgid "" -"Option %(option)s in group %(group)s is not supported for domain specific " -"configurations" -msgstr "" -"No se admite la opción %(option)s del grupo %(group)s para configuraciones " -"específicas de dominio" - -#, python-format -msgid "Project (%s)" -msgstr "Proyecto (%s)" - -#, python-format -msgid "Project ID not found: %(t_id)s" -msgstr "No se ha encontrado el ID de proyecto : %(t_id)s" - -msgid "Project field is required and cannot be empty." -msgstr "El campo de proyecto es obligatorio y no puede estar vacío." - -#, python-format -msgid "Project is disabled: %s" -msgstr "El proyecto %s está inhabilitado" - -msgid "Project name cannot contain reserved characters." 
-msgstr "El nombre de proyecto no puede contener caracteres reservados." - -msgid "Query string is not UTF-8 encoded" -msgstr "La cadena de consulta no está en UTF-8" - -#, python-format -msgid "" -"Reading the default for option %(option)s in group %(group)s is not supported" -msgstr "" -"No se da soporte para leer el valor predeterminado para la opción %(option)s " -"del grupo %(group)s" - -msgid "Redelegation allowed for delegated by trust only" -msgstr "Solo se permite volver a delegar un delegado por confianza" - -#, python-format -msgid "" -"Remaining redelegation depth of %(redelegation_depth)d out of allowed range " -"of [0..%(max_count)d]" -msgstr "" -"La profundidad de redelegación restante de %(redelegation_depth)d está fuera " -"del rango permitido de [0..%(max_count)d]" - -msgid "" -"Remove admin_crud_extension from the paste pipeline, the admin_crud " -"extension is now always available. Updatethe [pipeline:admin_api] section in " -"keystone-paste.ini accordingly, as it will be removed in the O release." -msgstr "" -"Elimine admin_crud_extension de la interconexión de pegar, la extensión " -"admin_crud ahora está siempre disponible. Actualice la sección [pipeline:" -"admin_api] en keystone-paste.ini consecuentemente, ya que se eliminará en el " -"release O." - -msgid "" -"Remove endpoint_filter_extension from the paste pipeline, the endpoint " -"filter extension is now always available. Update the [pipeline:api_v3] " -"section in keystone-paste.ini accordingly as it will be removed in the O " -"release." -msgstr "" -"Elimine endpoint_filter_extension de la interconexión de pegar, la extensión " -"de filtro de punto final ahora está siempre disponible. Actualice la " -"sección [pipeline:api_v3] en keystone-paste.ini consecuentemente, ya que se " -"eliminará en el release O." - -msgid "" -"Remove federation_extension from the paste pipeline, the federation " -"extension is now always available. 
Update the [pipeline:api_v3] section in " -"keystone-paste.ini accordingly, as it will be removed in the O release." -msgstr "" -"Elimine federation_extension de la interconexión de pegar, la extensión de " -"federación ahora está siempre disponible. Actualice la sección [pipeline:" -"api_v3] en keystone-paste.ini consecuentemente, ya que se eliminará en el " -"release O." - -msgid "" -"Remove oauth1_extension from the paste pipeline, the oauth1 extension is now " -"always available. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"Elimine oauth1_extension de la interconexión de pegar, la extensión oauth1 " -"ahora está siempre disponible. Actualice la sección [pipeline:api_v3] en " -"keystone-paste.ini consecuentemente, ya que se eliminará en el release O." - -msgid "" -"Remove revoke_extension from the paste pipeline, the revoke extension is now " -"always available. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"Elimine revoke_extension de la interconexión de pegar, la extensión de " -"revocación ahora está siempre disponible. Actualice la sección [pipeline:" -"api_v3] en keystone-paste.ini consecuentemente, ya que se eliminará en el " -"release O." - -msgid "" -"Remove simple_cert from the paste pipeline, the PKI and PKIz token providers " -"are now deprecated and simple_cert was only used insupport of these token " -"providers. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"Elimine simple_cert de la interconexión de pegar, los proveedores de token " -"PKI y PKIz están ahora en desuso y simple_cert se utilizaba únicamente para " -"dar soporte a estos proveedores de token. Actualice la sección [pipeline:" -"api_v3] en keystone-paste.ini consecuentemente, ya que se eliminará en el " -"release O." 
- -msgid "" -"Remove user_crud_extension from the paste pipeline, the user_crud extension " -"is now always available. Updatethe [pipeline:public_api] section in keystone-" -"paste.ini accordingly, as it will be removed in the O release." -msgstr "" -"Elimine user_crud_extension de la interconexión de pegar, la extensión " -"user_crud ahora está siempre disponible. Actualice la sección [pipeline:" -"public_api] en keystone-paste.ini consecuentemente, ya que se eliminará en " -"el release O." - -msgid "Request Token does not have an authorizing user id" -msgstr "El token de solicitud no tiene un id de usuario de autorización" - -#, python-format -msgid "" -"Request attribute %(attribute)s must be less than or equal to %(size)i. The " -"server could not comply with the request because the attribute size is " -"invalid (too large). The client is assumed to be in error." -msgstr "" -"El atributo de solicitud %(attribute)s debe ser menor que o igual a " -"%(size)i. El servidor no pudo satisfacer la solicitud porque el tamaño del " -"atributo no es válido (demasiado grande). Se entiende que el cliente da " -"error." - -msgid "Request must have an origin query parameter" -msgstr "La solicitud debe tener un parámetro de consulta de origen" - -msgid "Request token is expired" -msgstr "El token solicitado ha caducado" - -msgid "Request token not found" -msgstr "No se ha encontrado el token solicitado" - -msgid "Requested expiration time is more than redelegated trust can provide" -msgstr "" -"El tiempo de caducidad solicitado es mayor que el que puede proporcionar la " -"confianza redelegada" - -#, python-format -msgid "" -"Requested redelegation depth of %(requested_count)d is greater than allowed " -"%(max_count)d" -msgstr "" -"La profundidad de redelegación solicitada de %(requested_count)d es mayor " -"que la permitida %(max_count)d" - -msgid "" -"Running keystone via eventlet is deprecated as of Kilo in favor of running " -"in a WSGI server (e.g. mod_wsgi). 
Support for keystone under eventlet will " -"be removed in the \"M\"-Release." -msgstr "" -"La ejecución de keystone a través de eventlet está en desuso a partir de " -"Kilo sustituyéndose por la ejecución en un servidor WSGI (por ejemplo, " -"mod_wsgi). El soporte para keystone en eventlet se eliminará en el release " -"\"M\"." - -msgid "Scoping to both domain and project is not allowed" -msgstr "No se permite definir el ámbito tanto a dominio como a proyecto" - -msgid "Scoping to both domain and trust is not allowed" -msgstr "No se permite definir el ámbito tanto a dominio como a confianza" - -msgid "Scoping to both project and trust is not allowed" -msgstr "No se permite definir el ámbito tanto a proyecto como a confianza" - -#, python-format -msgid "Service Provider %(sp)s is disabled" -msgstr "El proveedor de servicios %(sp)s está inhabilitado" - -msgid "Some of requested roles are not in redelegated trust" -msgstr "Algunos roles solicitados no están en la confianza redelegada" - -msgid "Specify a domain or project, not both" -msgstr "Especifique un dominio o un proyecto, no ambas cosas a la vez" - -msgid "Specify a user or group, not both" -msgstr "Especifique un usuario o un grupo, no ambas cosas a la vez" - -msgid "Specify one of domain or project" -msgstr "Especifique un dominio o un proyecto" - -msgid "Specify one of user or group" -msgstr "Especifique un usuario o un grupo" - -#, python-format -msgid "" -"String length exceeded.The length of string '%(string)s' exceeded the limit " -"of column %(type)s(CHAR(%(length)d))." -msgstr "" -"Se ha superado la longitud de la cadena. La longitud de la cadena " -"'%(string)s' ha excedido el límite de la columna %(type)s(CHAR(%(length)d))." - -msgid "Tenant name cannot contain reserved characters." -msgstr "El nombre de arrendatario no puede contener caracteres reservados." 
- -#, python-format -msgid "" -"The %s extension has been moved into keystone core and as such its " -"migrations are maintained by the main keystone database control. Use the " -"command: keystone-manage db_sync" -msgstr "" -"La extensión %s se ha trasladado al núcleo de keystone y, como tal, el " -"mantenimiento de sus migraciones se hace desde el control de bases de datos " -"principal de keystone. Utilice el comando: keystone-manage db_sync" - -msgid "" -"The 'expires_at' must not be before now. The server could not comply with " -"the request since it is either malformed or otherwise incorrect. The client " -"is assumed to be in error." -msgstr "" -"El valor de 'expires_at' no debe ser antes que el momento actual. El " -"servidor no ha podido satisfacer la solicitud porque está mal formada o es " -"incorrecta por algún otro motivo. Se entiende que el cliente da error." - -msgid "The --all option cannot be used with the --domain-name option" -msgstr "La opción --all no se puede utilizar con la opción --domain-name" - -#, python-format -msgid "The Keystone configuration file %(config_file)s could not be found." -msgstr "" -"No se ha podido encontrar el archivo de configuración de Keystone " -"%(config_file)s." - -#, python-format -msgid "" -"The Keystone domain-specific configuration has specified more than one SQL " -"driver (only one is permitted): %(source)s." -msgstr "" -"La configuración específica del dominio Keystone ha especificado más de un " -"controlador SQL (sólo se permite uno): %(source)s." - -msgid "The action you have requested has not been implemented." -msgstr "La acción que ha solicitado no está implementada" - -msgid "The authenticated user should match the trustor." -msgstr "El usuario autenticado debe coincidir con el fideicomitente." - -msgid "" -"The certificates you requested are not available. It is likely that this " -"server does not use PKI tokens otherwise this is the result of " -"misconfiguration." 
-msgstr "" -"Los certificados que ha solicitado no están disponibles. Es probable que " -"este servidor no utilice tokens PKI o que se haya hecho una configuración " -"incorrecta." - -msgid "The configured token provider does not support bind authentication." -msgstr "" -"El proveedor de señales configurado no da soporte a la autenticación de " -"enlaces." - -msgid "The creation of projects acting as domains is not allowed in v2." -msgstr "En la v2, no se permite crear proyectos que actúen como dominios." - -#, python-format -msgid "" -"The password length must be less than or equal to %(size)i. The server could " -"not comply with the request because the password is invalid." -msgstr "" -"La longitud de la contraseña debe ser menor o igual que %(size)i. El " -"servidor no ha podido satisfacer la solicitud porque la contraseña no es " -"válida." - -msgid "The request you have made requires authentication." -msgstr "La solicitud que ha hecho requiere autenticación." - -msgid "The resource could not be found." -msgstr "No se ha podido encontrar el recurso." - -msgid "" -"The revoke call must not have both domain_id and project_id. This is a bug " -"in the Keystone server. The current request is aborted." -msgstr "" -"La llamada de revocación debe tener un id_dominio y un id_proyecto. Esto es " -"un error del servidor de Keystone. La solicitud actual ha terminado " -"anormalmente." - -msgid "The service you have requested is no longer available on this server." -msgstr "El servicio que ha solicitado ya no está disponible en este servidor." - -#, python-format -msgid "" -"The specified parent region %(parent_region_id)s would create a circular " -"region hierarchy." -msgstr "" -"La región padre %(parent_region_id)s especificada crearía una jerarquía de " -"regiones circular." 
- -#, python-format -msgid "" -"The value of group %(group)s specified in the config should be a dictionary " -"of options" -msgstr "" -"El valor de grupo %(group)s especificado en la configuración debe ser un " -"diccionario de opciones" - -msgid "There should not be any non-oauth parameters" -msgstr "Solo puede haber parámetros de oauth" - -#, python-format -msgid "This is not a recognized Fernet payload version: %s" -msgstr "Esta no es una versión de carga útil Fernet reconocida: %s" - -#, python-format -msgid "This is not a recognized Fernet token %s" -msgstr "Este no es un token Fernet reconocido %s" - -msgid "" -"Timestamp not in expected format. The server could not comply with the " -"request since it is either malformed or otherwise incorrect. The client is " -"assumed to be in error." -msgstr "" -"La indicación de fecha y hora no está en el formato esperado. El servidor no " -"ha podido satisfacer la solicitud porque está mal formada o es incorrecta " -"por algún otro motivo. Se entiende que el cliente da error." - -#, python-format -msgid "" -"To get a more detailed information on this error, re-run this command for " -"the specific domain, i.e.: keystone-manage domain_config_upload --domain-" -"name %s" -msgstr "" -"Para obtener información más detallada sobre este error, vuelva a ejecutar " -"este mandato para el dominio especificado, por ejemplo: keystone-manage " -"domain_config_upload --domain-name %s" - -msgid "Token belongs to another user" -msgstr "El token pertenece a otro usuario" - -msgid "Token does not belong to specified tenant." -msgstr "El token no pertenece al arrendatario especificado." - -msgid "Token version is unrecognizable or unsupported." -msgstr "Versión de token no reconocida o no soportada." - -msgid "Trustee has no delegated roles." -msgstr "La entidad de confianza no tiene roles delegados." - -msgid "Trustor is disabled." -msgstr "El fideicomitente está deshabilitado." 
- -#, python-format -msgid "" -"Trying to update group %(group)s, so that, and only that, group must be " -"specified in the config" -msgstr "" -"Intentando actualizar el grupo %(group)s, de forma que se pueda especificar " -"ese grupo, y solo ese, enla configuración" - -#, python-format -msgid "" -"Trying to update option %(option)s in group %(group)s, but config provided " -"contains option %(option_other)s instead" -msgstr "" -"Intentando actualizar la opción %(option)s en el grupo %(group)s, pero la " -"configuración proporcionada contiene la opción %(option_other)s en su lugar" - -#, python-format -msgid "" -"Trying to update option %(option)s in group %(group)s, so that, and only " -"that, option must be specified in the config" -msgstr "" -"Intentando actualizar la opción %(option)s en el grupo %(group)s, de forma " -"que se pueda especificar esa opción, y solo esa, en la configuración" - -msgid "" -"Unable to access the keystone database, please check it is configured " -"correctly." -msgstr "" -"No se puede acceder a la base de datos de keystone, compruebe que está " -"configurada correctamente." - -#, python-format -msgid "Unable to consume trust %(trust_id)s, unable to acquire lock." -msgstr "" -"No se puede consumir la confianza %(trust_id)s, no se puede adquirir el " -"bloqueo." - -#, python-format -msgid "" -"Unable to delete region %(region_id)s because it or its child regions have " -"associated endpoints." -msgstr "" -"No se puede suprimir la región %(region_id)s porque sus regiones secundarias " -"tienen puntos finales asociados." 
- -msgid "Unable to downgrade schema" -msgstr "No se ha podido degradar el esquema" - -#, python-format -msgid "Unable to find valid groups while using mapping %(mapping_id)s" -msgstr "" -"No se han podido encontrar grupos válidos utilizando la correlación " -"%(mapping_id)s" - -#, python-format -msgid "Unable to locate domain config directory: %s" -msgstr "No se ha podido localizar el directorio config de dominio: %s" - -#, python-format -msgid "Unable to lookup user %s" -msgstr "No se ha podido buscar el usuario %s" - -#, python-format -msgid "" -"Unable to reconcile identity attribute %(attribute)s as it has conflicting " -"values %(new)s and %(old)s" -msgstr "" -"No se puede reconciliar el atributo de identidad %(attribute)s porque tiene " -"los siguientes valores en conflicto: %(new)s y %(old)s" - -#, python-format -msgid "" -"Unable to sign SAML assertion. It is likely that this server does not have " -"xmlsec1 installed, or this is the result of misconfiguration. Reason " -"%(reason)s" -msgstr "" -"No se puede firmar la aserción SAML. Es probable que este servidor no tenga " -"xmlsec1 instalado o que se haya hecho una configuración incorrecta. Motivo: " -"%(reason)s" - -msgid "Unable to sign token." -msgstr "No se ha podido firmar el token." 
- -#, python-format -msgid "Unexpected assignment type encountered, %s" -msgstr "Se ha encontrado un tipo de asignación inesperado, %s" - -#, python-format -msgid "" -"Unexpected combination of grant attributes - User: %(user_id)s, Group: " -"%(group_id)s, Project: %(project_id)s, Domain: %(domain_id)s" -msgstr "" -"Combinación no esperada de atributos de otorgamiento - Usuario: %(user_id)s, " -"Grupo: %(group_id)s, Proyecto: %(project_id)s, Dominio: %(domain_id)s" - -#, python-format -msgid "Unexpected status requested for JSON Home response, %s" -msgstr "Estado inesperado solicitado para la respuesta de JSON Home, %s" - -msgid "Unknown Target" -msgstr "Destino desconocido" - -#, python-format -msgid "Unknown domain '%(name)s' specified by --domain-name" -msgstr "Dominio desconocido '%(name)s' especificado por --domain-name" - -#, python-format -msgid "Unknown token version %s" -msgstr "Versión de token desconocida %s" - -#, python-format -msgid "Unregistered dependency: %(name)s for %(targets)s" -msgstr "Dependencia no registrada: %(name)s para %(targets)s" - -msgid "Update of `domain_id` is not allowed." -msgstr "No se permite la actualización de `domain_id`." - -msgid "Update of `is_domain` is not allowed." -msgstr "No se permite la actualización de `is_domain`." - -msgid "Update of `parent_id` is not allowed." -msgstr "No se permite la actualización de `parent_id`." - -msgid "Update of domain_id is only allowed for root projects." -msgstr "Solo se permite actualizar el domain_id de los proyectos raíz." - -msgid "Update of domain_id of projects acting as domains is not allowed." -msgstr "" -"No se permite actualizar el domain_id de los proyectos que actúen como " -"dominios." 
- -msgid "Use a project scoped token when attempting to create a SAML assertion" -msgstr "" -"Utilice un token de ámbito de proyecto cuando intente crear una aserción SAML" - -msgid "" -"Use of the identity driver config to automatically configure the same " -"assignment driver has been deprecated, in the \"O\" release, the assignment " -"driver will need to be expicitly configured if different than the default " -"(SQL)." -msgstr "" -"El uso de la configuración del controlador de identidad para configurar " -"automáticamente el mismo controlador de asignación está en desuso. En el " -"release \"O\", el controlador de asignación se deberá configurar " -"explícitamente si es distinto que el valor predeterminado (SQL)." - -#, python-format -msgid "User %(u_id)s is unauthorized for tenant %(t_id)s" -msgstr "El usuario %(u_id)s no está autorizado en el proyecto %(t_id)s" - -#, python-format -msgid "User %(user_id)s has no access to domain %(domain_id)s" -msgstr "El usuario %(user_id)s no tiene acceso al dominio %(domain_id)s" - -#, python-format -msgid "User %(user_id)s has no access to project %(project_id)s" -msgstr "El usuario %(user_id)s no tiene acceso al proyecto %(project_id)s" - -#, python-format -msgid "User %(user_id)s is already a member of group %(group_id)s" -msgstr "El usuario %(user_id)s ya es miembro del grupo %(group_id)s" - -#, python-format -msgid "User '%(user_id)s' not found in group '%(group_id)s'" -msgstr "" -"No se ha encontrado el usuario '%(user_id)s' en el grupo '%(group_id)s'" - -msgid "User IDs do not match" -msgstr "Los ID de usuario no coinciden" - -msgid "" -"User auth cannot be built due to missing either user id, or user name with " -"domain id, or user name with domain name." -msgstr "" -"No se puede crear la autorización de usuario porque falta el ID de usuario o " -"el nombre de usuario con el ID de dominio, o el nombre de usuario con el " -"nombre de dominio." 
- -#, python-format -msgid "User is disabled: %s" -msgstr "El usuario está inhabilitado: %s" - -msgid "User is not a member of the requested project" -msgstr "El usuario no es miembro del proyecto solicitado" - -msgid "User is not a trustee." -msgstr "El usuario no es de confianza." - -msgid "User not found" -msgstr "Usuario no encontrado" - -msgid "User not valid for tenant." -msgstr "Usuario no válido para este arrendatario." - -msgid "User roles not supported: tenant_id required" -msgstr "Roles de usuario no admitidos: tenant_id obligatorio" - -#, python-format -msgid "User type %s not supported" -msgstr "El tipo de usuario %s no está soportado" - -msgid "You are not authorized to perform the requested action." -msgstr "No está autorizado para realizar la acción solicitada." - -#, python-format -msgid "You are not authorized to perform the requested action: %(action)s" -msgstr "No está autorizado para realizar la acción solicitada: %(action)s" - -msgid "" -"You have tried to create a resource using the admin token. As this token is " -"not within a domain you must explicitly include a domain for this resource " -"to belong to." -msgstr "" -"Ha intentado crear un recurso utilizando el token de administración. Dado " -"que este token no se encuentra dentro de un dominio, debe incluir " -"explícitamente un dominio al que pertenecerá este recurso." - -msgid "`key_mangler` functions must be callable." -msgstr "Las funciones `key_mangler` deben ser invocables." 
- -msgid "`key_mangler` option must be a function reference" -msgstr "La opción `key_mangler` debe ser una referencia de función" - -msgid "any options" -msgstr "cualesquiera opciones" - -msgid "auth_type is not Negotiate" -msgstr "auth_type no es Negotiate" - -msgid "authorizing user does not have role required" -msgstr "el usuario de autorización no tiene el rol necesario" - -#, python-format -msgid "cannot create a project in a branch containing a disabled project: %s" -msgstr "" -"No se puede crear un proyecto en una rama que contiene un proyecto " -"inhabilitado: %s" - -#, python-format -msgid "" -"cannot delete an enabled project acting as a domain. Please disable the " -"project %s first." -msgstr "" -"no se puede suprimir un proyecto habilitado que actúe como dominio. " -"Inhabilite el proyecto %s." - -#, python-format -msgid "group %(group)s" -msgstr "grupo %(group)s" - -msgid "" -"idp_contact_type must be one of: [technical, other, support, administrative " -"or billing." -msgstr "" -"idp_contact_type debe ser una de estas opciones: [técnico, otros, soporte, " -"administrativo o facturación." - -#, python-format -msgid "invalid date format %s" -msgstr "formato de fecha no válido %s" - -#, python-format -msgid "" -"it is not permitted to have two projects acting as domains with the same " -"name: %s" -msgstr "" -"no se permite tener dos proyectos actuando como dominios con el mismo " -"nombre: %s" - -#, python-format -msgid "" -"it is not permitted to have two projects within a domain with the same " -"name : %s" -msgstr "" -"no se permite tener dos proyectos dentro de un mismo dominio con el mismo " -"nombre: %s" - -msgid "only root projects are allowed to act as domains." -msgstr "Sólo los proyectos raíz pueden actuar como dominios." 
- -#, python-format -msgid "option %(option)s in group %(group)s" -msgstr "opción %(option)s en el grupo %(group)s" - -msgid "provided consumer key does not match stored consumer key" -msgstr "" -"la clave de consumidor proporcionada no coincide con la clave de consumidor " -"almacenada" - -msgid "provided request key does not match stored request key" -msgstr "" -"la clave de solicitud proporcionada no coincide con la clave de solicitud " -"almacenada" - -msgid "provided verifier does not match stored verifier" -msgstr "el verificador proporcionado no coincide con el verificador almacenado" - -msgid "remaining_uses must be a positive integer or null." -msgstr "remaining_uses debe ser un entero positivo o nulo." - -msgid "remaining_uses must not be set if redelegation is allowed" -msgstr "remaining_uses no se debe establecer si se permite la redelegación" - -#, python-format -msgid "" -"request to update group %(group)s, but config provided contains group " -"%(group_other)s instead" -msgstr "" -"solicitud para actualizar el grupo %(group)s, pero la configuración " -"proporcionada contiene el grupo %(group_other)s en su lugar" - -msgid "rescope a scoped token" -msgstr "Volver a establecer el ámbito de un token con ámbito" - -#, python-format -msgid "role %s is not defined" -msgstr "el rol %s no está definido" - -msgid "scope.project.id must be specified if include_subtree is also specified" -msgstr "" -"Se debe especificar scope.project.id si se especifica también include_subtree" - -#, python-format -msgid "tls_cacertdir %s not found or is not a directory" -msgstr "No se ha encontrado tls_cacertdir %s o no es un directorio" - -#, python-format -msgid "tls_cacertfile %s not found or is not a file" -msgstr "No se ha encontrado tls_cacertfile %s o no es un archivo" - -#, python-format -msgid "token reference must be a KeystoneToken type, got: %s" -msgstr "" -"la referencia de señal debe ser un tipo KeystoneToken, se ha obtenido: %s" - -msgid "" -"update of domain_id 
is deprecated as of Mitaka and will be removed in O." -msgstr "" -"La actualización de domain_id está en desuso en Mitaka y se eliminará en O." - -#, python-format -msgid "" -"validated expected to find %(param_name)r in function signature for " -"%(func_name)r." -msgstr "" -"En la validación se esperaba encontrar %(param_name)r en la firma de función " -"para %(func_name)r." diff --git a/keystone-moon/keystone/locale/fr/LC_MESSAGES/keystone-log-critical.po b/keystone-moon/keystone/locale/fr/LC_MESSAGES/keystone-log-critical.po deleted file mode 100644 index c7e55ed2..00000000 --- a/keystone-moon/keystone/locale/fr/LC_MESSAGES/keystone-log-critical.po +++ /dev/null @@ -1,25 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# OpenStack Infra , 2015. #zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2014-08-31 03:19+0000\n" -"Last-Translator: openstackjenkins \n" -"Language: fr\n" -"Plural-Forms: nplurals=2; plural=(n > 1);\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: French\n" - -#, python-format -msgid "Unable to open template file %s" -msgstr "Impossible d'ouvrir le fichier modèle %s" diff --git a/keystone-moon/keystone/locale/fr/LC_MESSAGES/keystone-log-error.po b/keystone-moon/keystone/locale/fr/LC_MESSAGES/keystone-log-error.po deleted file mode 100644 index 0339cacd..00000000 --- a/keystone-moon/keystone/locale/fr/LC_MESSAGES/keystone-log-error.po +++ /dev/null @@ -1,94 +0,0 @@ -# Translations template for keystone. 
-# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# Bruno Cornec , 2014 -# Maxime COQUEREL , 2014 -# OpenStack Infra , 2015. #zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 8.0.1.dev11\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/keystone\n" -"POT-Creation-Date: 2015-11-05 06:13+0000\n" -"PO-Revision-Date: 2015-06-26 05:13+0000\n" -"Last-Translator: openstackjenkins \n" -"Language-Team: French\n" -"Language: fr\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Plural-Forms: nplurals=2; plural=(n > 1);\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.1\n" - -#, python-format -msgid "" -"Circular reference or a repeated entry found in region tree - %(region_id)s." -msgstr "" -"Référence circulaire ou entrée dupliquée trouvée dans l'arbre de la région - " -"%(region_id)s." - -#, python-format -msgid "Command %(to_exec)s exited with %(retcode)s- %(output)s" -msgstr "La commande %(to_exec)s a retourné %(retcode)s- %(output)s" - -#, python-format -msgid "Could not bind to %(host)s:%(port)s" -msgstr "Impossible de s'attacher à %(host)s:%(port)s" - -msgid "" -"Error setting up the debug environment. Verify that the option --debug-url " -"has the format : and that a debugger processes is listening on " -"that port." -msgstr "" -"Erreur de configuration de l'environnement de débogage. Vérifiez que " -"l'option --debug-url a le format : et que le processus de " -"débogage écoute sur ce port." 
- -msgid "Failed to construct notifier" -msgstr "Échec de construction de la notification" - -#, python-format -msgid "Failed to remove file %(file_path)r: %(error)s" -msgstr "Échec de la suppression du fichier %(file_path)r: %(error)s" - -#, python-format -msgid "Failed to send %(action)s %(event_type)s notification" -msgstr "Échec de l'envoi de la notification %(action)s %(event_type)s " - -#, python-format -msgid "Failed to send %(res_id)s %(event_type)s notification" -msgstr "Échec de l'envoi de la notification %(res_id)s %(event_type)s" - -msgid "Failed to validate token" -msgstr "Echec de validation du token" - -#, python-format -msgid "Malformed endpoint %(url)s - unknown key %(keyerror)s" -msgstr "Noeud final incorrect %(url)s - clé inconnue %(keyerror)s" - -#, python-format -msgid "" -"Malformed endpoint %s - incomplete format (are you missing a type notifier ?)" -msgstr "" -"Noeud final incorrect '%s - Format incomplet (un type de notification manque-" -"t-il ?)" - -#, python-format -msgid "" -"Malformed endpoint '%(url)s'. The following type error occurred during " -"string substitution: %(typeerror)s" -msgstr "" -"Noeud final incorrect '%(url)s'. L'erreur suivante est survenue pendant la " -"substitution de chaine : %(typeerror)s" - -#, python-format -msgid "Malformed endpoint - %(url)r is not a string" -msgstr "Critère mal formé - %(url)r n'est pas une chaine de caractère" - -msgid "Server error" -msgstr "Erreur serveur" - -msgid "Unable to sign token" -msgstr "Impossible de signer le jeton" diff --git a/keystone-moon/keystone/locale/fr/LC_MESSAGES/keystone-log-info.po b/keystone-moon/keystone/locale/fr/LC_MESSAGES/keystone-log-info.po deleted file mode 100644 index 37ef89ea..00000000 --- a/keystone-moon/keystone/locale/fr/LC_MESSAGES/keystone-log-info.po +++ /dev/null @@ -1,97 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. 
-# -# Translators: -# Bruno Cornec , 2014 -# Maxime COQUEREL , 2014 -# Andrew Melim , 2014 -# OpenStack Infra , 2015. #zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 8.0.1.dev11\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/keystone\n" -"POT-Creation-Date: 2015-11-05 06:13+0000\n" -"PO-Revision-Date: 2015-08-01 06:26+0000\n" -"Last-Translator: openstackjenkins \n" -"Language-Team: French\n" -"Language: fr\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Plural-Forms: nplurals=2; plural=(n > 1);\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.1\n" - -#, python-format -msgid "" -"\"expires_at\" has conflicting values %(existing)s and %(new)s. Will use " -"the earliest value." -msgstr "" -"\"expires_at\" a des valeurs conflictuelles %(existing)s et %(new)s. " -"Utilsation de la première valeur." - -#, python-format -msgid "Adding proxy '%(proxy)s' to KVS %(name)s." -msgstr "Ahour du mandataire '%(proxy)s' au KVS %(name)s." - -#, python-format -msgid "Couldn't verify unknown bind: {%(bind_type)s: %(identifier)s}" -msgstr "" -"Impossible de vérifier l'attachement inconnu: {%(bind_type)s: " -"%(identifier)s}" - -#, python-format -msgid "Creating the default role %s because it does not exist." -msgstr "Création du rôle par défaut %s, car il n'existe pas" - -#, python-format -msgid "KVS region %s key_mangler disabled." 
-msgstr "Région KVS %s key_mangler désactivée" - -msgid "Kerberos bind authentication successful" -msgstr "Attachement Kerberos identifié correctement" - -msgid "Kerberos credentials do not match those in bind" -msgstr "L'identification Kerberos ne correspond pas à celle de l'attachement" - -msgid "Kerberos credentials required and not present" -msgstr "L'identitification Kerberos est requise mais non présente" - -#, python-format -msgid "Named bind mode %s not in bind information" -msgstr "" -"Le mode d'attachement nommé %s n'est pas dans l'information d'attachement" - -msgid "No bind information present in token" -msgstr "Aucune information d'attachement n'est présente dans le jeton" - -#, python-format -msgid "" -"Received the following notification: service %(service)s, resource_type: " -"%(resource_type)s, operation %(operation)s payload %(payload)s" -msgstr "" -"Réception de la notification suivante: service %(service)s, resource_type: " -"%(resource_type)s, operation %(operation)s payload %(payload)s" - -#, python-format -msgid "Running command - %s" -msgstr "Exécution de la commande %s" - -#, python-format -msgid "Starting %(arg0)s on %(host)s:%(port)s" -msgstr "Démarrage de %(arg0)s sur %(host)s:%(port)s" - -#, python-format -msgid "Total expired tokens removed: %d" -msgstr "Total des jetons expirés effacés: %d" - -#, python-format -msgid "Using %(func)s as KVS region %(name)s key_mangler" -msgstr "Utilise %(func)s comme région KVS %(name)s key_mangler" - -#, python-format -msgid "Using default dogpile sha1_mangle_key as KVS region %s key_mangler" -msgstr "" -"Utilisation du dogpile sha1_mangle_key par défaut comme région KVS %s " -"key_mangler" diff --git a/keystone-moon/keystone/locale/fr/LC_MESSAGES/keystone-log-warning.po b/keystone-moon/keystone/locale/fr/LC_MESSAGES/keystone-log-warning.po deleted file mode 100644 index 6eb07830..00000000 --- a/keystone-moon/keystone/locale/fr/LC_MESSAGES/keystone-log-warning.po +++ /dev/null @@ -1,102 +0,0 @@ -# 
Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# Bruno Cornec , 2014 -# Maxime COQUEREL , 2014 -# OpenStack Infra , 2015. #zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 8.0.1.dev11\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/keystone\n" -"POT-Creation-Date: 2015-11-05 06:13+0000\n" -"PO-Revision-Date: 2015-07-29 06:04+0000\n" -"Last-Translator: openstackjenkins \n" -"Language-Team: French\n" -"Language: fr\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Plural-Forms: nplurals=2; plural=(n > 1);\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.1\n" - -#, python-format -msgid "%s is not a dogpile.proxy.ProxyBackend" -msgstr "%s n'est pas un dogpile.proxy.ProxyBackend" - -#, python-format -msgid "Authorization failed. %(exception)s from %(remote_addr)s" -msgstr "Echec d'autorisation. %(exception)s depuis %(remote_addr)s" - -#, python-format -msgid "" -"Endpoint %(endpoint_id)s referenced in association for policy %(policy_id)s " -"not found." -msgstr "" -"Le point d'entrée %(endpoint_id)s référencé en association avec la politique " -"%(policy_id)s est introuvable." - -#, python-format -msgid "" -"ID attribute %(id_attr)s for LDAP object %(dn)s has multiple values and " -"therefore cannot be used as an ID. Will get the ID from DN instead" -msgstr "" -"L'attribut ID %(id_attr)s pour l'objet LDAP %(dn)s a de multiples valeurs et " -"par conséquent ne peut être utilisé comme un ID. Obtention de l'ID depuis le " -"DN à la place." - -#, python-format -msgid "" -"Invalid additional attribute mapping: \"%s\". Format must be " -":" -msgstr "" -"Mauvais mappage d'attribut additionnel: \"%s\". 
Le format doit être " -":" - -#, python-format -msgid "Invalid domain name (%s) found in config file name" -msgstr "Non de domaine trouvé non valide (%s) dans le fichier de configuration" - -#, python-format -msgid "KVS lock released (timeout reached) for: %s" -msgstr "Verrou KVS relaché (temps limite atteint) pour : %s" - -msgid "" -"LDAP Server does not support paging. Disable paging in keystone.conf to " -"avoid this message." -msgstr "" -"Le serveur LDAP ne prend pas en charge la pagination. Désactivez la " -"pagination dans keystone.conf pour éviter de recevoir ce message." - -msgid "RBAC: Bypassing authorization" -msgstr "RBAC : Autorisation ignorée" - -msgid "RBAC: Invalid token" -msgstr "RBAC : Jeton non valide" - -msgid "RBAC: Invalid user data in token" -msgstr "RBAC: Donnée utilisation non valide dans le token" - -#, python-format -msgid "Unable to add user %(user)s to %(tenant)s." -msgstr "Impossible d'ajouter l'utilisateur %(user)s à %(tenant)s." - -#, python-format -msgid "Unable to locate domain config directory: %s" -msgstr "Impossible de localiser le répertoire de configuration domaine: %s" - -#, python-format -msgid "Unable to remove user %(user)s from %(tenant)s." -msgstr "Impossible de supprimer l'utilisateur %(user)s depuis %(tenant)s." - -msgid "keystone-manage pki_setup is not recommended for production use." -msgstr "" -"keystone-manage pki_setup n'est pas recommandé pour une utilisation en " -"production." - -msgid "keystone-manage ssl_setup is not recommended for production use." -msgstr "" -"keystone-manage ssl_setup n'est pas recommandé pour une utilisation en " -"production." diff --git a/keystone-moon/keystone/locale/fr/LC_MESSAGES/keystone.po b/keystone-moon/keystone/locale/fr/LC_MESSAGES/keystone.po deleted file mode 100644 index de00f697..00000000 --- a/keystone-moon/keystone/locale/fr/LC_MESSAGES/keystone.po +++ /dev/null @@ -1,1649 +0,0 @@ -# Translations template for keystone. 
-# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# Fries , 2014 -# Maxime COQUEREL , 2014 -# Andrew Melim , 2014 -# Olivier Perrin , 2013 -# Olivier Perrin , 2013 -# Rémi Le Trocquer , 2014 -# leroy , 2016. #zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2016-04-18 04:18+0000\n" -"Last-Translator: Martine Marin \n" -"Language: fr\n" -"Plural-Forms: nplurals=2; plural=(n > 1);\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: French\n" - -#, python-format -msgid "%(detail)s" -msgstr "%(detail)s" - -#, python-format -msgid "%(driver)s is not supported driver version" -msgstr "%(driver)s n'est pas une version de pilote prise en charge" - -#, python-format -msgid "" -"%(entity)s name cannot contain the following reserved characters: %(chars)s" -msgstr "" -"Le nom %(entity)s ne peut pas contenir les caractères réservés suivants : " -"%(chars)s" - -#, python-format -msgid "" -"%(event)s is not a valid notification event, must be one of: %(actions)s" -msgstr "" -"%(event)s n'est pas un événement de notification valide, ce doit être l'un " -"des suivants : %(actions)s" - -#, python-format -msgid "%(host)s is not a trusted dashboard host" -msgstr "%(host)s n'est pas un hôte de tableau de bord digne de confiance" - -#, python-format -msgid "%(message)s %(amendment)s" -msgstr "%(message)s %(amendment)s" - -#, python-format -msgid "" -"%(mod_name)s doesn't provide database migrations. The migration repository " -"path at %(path)s doesn't exist or isn't a directory." -msgstr "" -"%(mod_name)s ne permet pas les migrations de base de données. 
Le chemin du " -"référentiel de migration %(path)s n'existe pas ou n'est pas un répertoire." - -#, python-format -msgid "%(prior_role_id)s does not imply %(implied_role_id)s" -msgstr "%(prior_role_id)s n'implique pas %(implied_role_id)s" - -#, python-format -msgid "%(property_name)s cannot be less than %(min_length)s characters." -msgstr "" -"%(property_name)s ne peut pas contenir moins de %(min_length)s caractères." - -#, python-format -msgid "%(property_name)s is not a %(display_expected_type)s" -msgstr "%(property_name)s n'est pas un %(display_expected_type)s" - -#, python-format -msgid "%(property_name)s should not be greater than %(max_length)s characters." -msgstr "" -"%(property_name)s ne doit pas contenir plus de %(max_length)s caractères." - -#, python-format -msgid "%(role_id)s cannot be an implied roles" -msgstr "%(role_id)s ne peut pas être un rôle impliqué" - -#, python-format -msgid "%s cannot be empty." -msgstr "%s ne peut pas être vide." - -#, python-format -msgid "%s extension does not exist." -msgstr "L'extension %s n'existe pas." - -#, python-format -msgid "%s field is required and cannot be empty" -msgstr "La zone %s est obligatoire et ne peut pas être vide" - -#, python-format -msgid "%s field(s) cannot be empty" -msgstr "la ou les zones %s ne peuvent pas être vides" - -#, python-format -msgid "" -"%s for the LDAP identity backend has been deprecated in the Mitaka release " -"in favor of read-only identity LDAP access. It will be removed in the \"O\" " -"release." -msgstr "" -"%s pour le back-end d'identité LDAP est désormais obsolète dans l'édition " -"Mitaka en faveur de l'accès LDAP d'identité en lecture seule. Il sera " -"supprimé dans l'édition \"O\"." 
- -msgid "(Disable insecure_debug mode to suppress these details.)" -msgstr "(Désactivez le mode insecure_debug pour supprimer ces détails.)" - -msgid "--all option cannot be mixed with other options" -msgstr "L'option -all ne peut pas être associée à d'autres options" - -msgid "A project-scoped token is required to produce a service catalog." -msgstr "" -"Un jeton de niveau projet est requis pour produire un catalogue de service." - -msgid "Access token is expired" -msgstr "Le jeton d'accès a expiré" - -msgid "Access token not found" -msgstr "Jeton d'accès non trouvé" - -msgid "Additional authentications steps required." -msgstr "Des étapes d'authentifications supplémentaires sont nécessaires ." - -msgid "An unexpected error occurred when retrieving domain configs" -msgstr "" -"Une erreur inattendue s'est produite lors de l'extraction des configurations " -"de domaine" - -#, python-format -msgid "An unexpected error occurred when trying to store %s" -msgstr "" -"Une erreur inattendue s'est produite lors de la tentative de stockage de %s" - -msgid "An unexpected error prevented the server from fulfilling your request." -msgstr "Une erreur inattendue a empêché le serveur de traiter votre demande." - -#, python-format -msgid "" -"An unexpected error prevented the server from fulfilling your request: " -"%(exception)s" -msgstr "" -"Une erreur inattendue a empêché le serveur de traiter votre demande : " -"%(exception)s" - -msgid "An unhandled exception has occurred: Could not find metadata." -msgstr "Une exception non gérée s'est produite : métadonnées introuvables." - -msgid "At least one option must be provided" -msgstr "Au moins une option doit être fournie" - -msgid "At least one option must be provided, use either --all or --domain-name" -msgstr "" -"Au moins une option doit être indiquée ; utilisez --all ou --domain-name" - -msgid "At least one role should be specified." -msgstr "Au moins un rôle doit être indiqué." 
- -#, python-format -msgid "" -"Attempted automatic driver selection for assignment based upon " -"[identity]\\driver option failed since driver %s is not found. Set " -"[assignment]/driver to a valid driver in keystone config." -msgstr "" -"La tentative de sélection du pilote automatique pour l'affectation basée sur " -"l'option [identity]\\driver a échoué car le pilote %s est introuvable. " -"Définissez l'option [assignment]/driver sur un pilote valide dans la " -"configuration Keystone." - -msgid "Attempted to authenticate with an unsupported method." -msgstr "Tentative d'authentification avec une méthode non prise en charge ." - -msgid "" -"Attempting to use OS-FEDERATION token with V2 Identity Service, use V3 " -"Authentication" -msgstr "" -"Tentative d'utilisation du jeton OS-FEDERATION avec V2 Identity Service, " -"utilisez l'authentification V3" - -msgid "Authentication plugin error." -msgstr "Erreur du plug-in d'authentification." - -#, python-format -msgid "" -"Backend `%(backend)s` is not a valid memcached backend. Valid backends: " -"%(backend_list)s" -msgstr "" -"Le back-end `%(backend)s` n'est pas un back-end memcached valide. Back-ends " -"valides : %(backend_list)s" - -msgid "Cannot authorize a request token with a token issued via delegation." -msgstr "" -"Impossible d'autoriser un jeton de demande avec un jeton émis via une " -"délégation." 
- -#, python-format -msgid "Cannot change %(option_name)s %(attr)s" -msgstr "Impossible de modifier %(option_name)s %(attr)s" - -msgid "Cannot change Domain ID" -msgstr "Impossible de modifier l'ID du domaine" - -msgid "Cannot change user ID" -msgstr "Impossible de modifier l'ID utilisateur" - -msgid "Cannot change user name" -msgstr "Impossible de modifier le nom d'utilisateur" - -#, python-format -msgid "Cannot create an endpoint with an invalid URL: %(url)s" -msgstr "Impossible de créer un nœud final avec une URL non valide : %(url)s" - -#, python-format -msgid "Cannot create project with parent: %(project_id)s" -msgstr "Impossible de créer le projet %(project_id)s avec le parent" - -#, python-format -msgid "" -"Cannot create project, since it specifies its owner as domain %(domain_id)s, " -"but specifies a parent in a different domain (%(parent_domain_id)s)." -msgstr "" -"Impossible de créer le projet, car il indique son propriétaire comme domaine " -"%(domain_id)s, mais spécifie un parent figurant dans un autre domaine " -"(%(parent_domain_id)s)." - -#, python-format -msgid "" -"Cannot create project, since its parent (%(domain_id)s) is acting as a " -"domain, but project's specified parent_id (%(parent_id)s) does not match " -"this domain_id." -msgstr "" -"Impossible de créer le projet, car son parent (%(domain_id)s) fait office de " -"domaine, mais l'ID parent (%(parent_id)s) spécifié pour le projet ne " -"correspond pas à cet ID de domaine (domain_id)." - -msgid "Cannot delete a domain that is enabled, please disable it first." -msgstr "" -"Impossible de supprimer un domaine activé, veuillez d'abord le désactiver." - -#, python-format -msgid "" -"Cannot delete project %(project_id)s since its subtree contains enabled " -"projects." -msgstr "" -"Impossible de supprimer le projet %(project_id)s car son sous-arbre contient " -"des projets activés." - -#, python-format -msgid "" -"Cannot delete the project %s since it is not a leaf in the hierarchy. 
Use " -"the cascade option if you want to delete a whole subtree." -msgstr "" -"Impossible de supprimer le projet %s car il ne s'agit pas d'une feuille dans " -"la hiérarchie. Utilisez l'option cascade si vous voulez supprimer un sous-" -"arbre complet." - -#, python-format -msgid "" -"Cannot disable project %(project_id)s since its subtree contains enabled " -"projects." -msgstr "" -"Impossible de désactiver le projet %(project_id)s car son sous-arbre " -"contient des projets activés." - -#, python-format -msgid "Cannot enable project %s since it has disabled parents" -msgstr "Impossible d'activer le projet %s car ses parents sont désactivés" - -msgid "Cannot list assignments sourced from groups and filtered by user ID." -msgstr "" -"Impossible de répertorier les affectations en provenance de groupes et " -"filtrées par ID utilisateur." - -msgid "Cannot list request tokens with a token issued via delegation." -msgstr "" -"Impossible de répertorier des jetons de demande avec un jeton émis via une " -"délégation." - -#, python-format -msgid "Cannot open certificate %(cert_file)s. Reason: %(reason)s" -msgstr "Impossible d'ouvrir le certificat %(cert_file)s. Raison : %(reason)s" - -#, python-format -msgid "Cannot remove role that has not been granted, %s" -msgstr "Impossible de retirer le rôle qui n'est pas accordé, %s" - -msgid "" -"Cannot truncate a driver call without hints list as first parameter after " -"self " -msgstr "" -"Impossible de tronquer un appel de pilote sans hints list comme premier " -"paramètre après self " - -msgid "Cannot update domain_id of a project that has children." -msgstr "" -"Impossible de mettre à jour l'ID de domaine (domain_id) d'un projet " -"comportant des enfants." - -msgid "" -"Cannot use parents_as_list and parents_as_ids query params at the same time." -msgstr "" -"Impossible d'utiliser les paramètres d'interrogation parents_as_list et " -"parents_as_ids en même temps." 
- -msgid "" -"Cannot use subtree_as_list and subtree_as_ids query params at the same time." -msgstr "" -"Impossible d'utiliser les paramètres d'interrogation subtree_as_list et " -"subtree_as_ids en même temps." - -msgid "Cascade update is only allowed for enabled attribute." -msgstr "La mise à jour en cascade n'est autorisée que pour l'attribut activé." - -msgid "" -"Combining effective and group filter will always result in an empty list." -msgstr "" -"Le fait de combiner un filtre effectif et un filtre de groupes produira " -"toujours une liste vide." - -msgid "" -"Combining effective, domain and inherited filters will always result in an " -"empty list." -msgstr "" -"Le fait de combiner des filtres effectifs, de domaine et hérités produira " -"toujours une liste vide." - -#, python-format -msgid "Config API entity at /domains/%s/config" -msgstr "Entité Config API à /domains/%s/config" - -#, python-format -msgid "Conflict occurred attempting to store %(type)s - %(details)s" -msgstr "" -"Un conflit s'est produit lors de la tentative de stockage de %(type)s - " -"%(details)s" - -#, python-format -msgid "Conflicting region IDs specified: \"%(url_id)s\" != \"%(ref_id)s\"" -msgstr "ID de région indiqués en conflit : \"%(url_id)s\" != \"%(ref_id)s\"" - -msgid "Consumer not found" -msgstr "Consommateur non trouvé" - -#, python-format -msgid "" -"Could not change immutable attribute(s) '%(attributes)s' in target %(target)s" -msgstr "" -"Impossible de modifier le(s) attribut(s) non modifiable(s) '%(attributes)s' " -"dans la cible %(target)s" - -#, python-format -msgid "" -"Could not determine Identity Provider ID. The configuration option " -"%(issuer_attribute)s was not found in the request environment." -msgstr "" -"Impossible de déterminer l'ID du fournisseur d'identité. L'option de " -"configuration %(issuer_attribute)s est introuvable dans l'environnement de " -"demande." 
- -#, python-format -msgid "" -"Could not find %(group_or_option)s in domain configuration for domain " -"%(domain_id)s" -msgstr "" -"Impossible de trouver %(group_or_option)s dans la configuration de domaine " -"pour le domaine %(domain_id)s" - -#, python-format -msgid "Could not find Endpoint Group: %(endpoint_group_id)s" -msgstr "" -"Impossible de trouver le groupe de nœuds finaux : %(endpoint_group_id)s" - -msgid "Could not find Identity Provider identifier in environment" -msgstr "" -"Identificateur de fournisseur d'identité introuvable dans l'environnement" - -#, python-format -msgid "Could not find Identity Provider: %(idp_id)s" -msgstr "Impossible de trouver le fournisseur d'identité : %(idp_id)s" - -#, python-format -msgid "Could not find Service Provider: %(sp_id)s" -msgstr "Impossible de trouver le fournisseur de services : %(sp_id)s" - -#, python-format -msgid "Could not find credential: %(credential_id)s" -msgstr "Impossible de trouver les données d'identification : %(credential_id)s" - -#, python-format -msgid "Could not find domain: %(domain_id)s" -msgstr "Impossible de trouver le domaine : %(domain_id)s" - -#, python-format -msgid "Could not find endpoint: %(endpoint_id)s" -msgstr "Impossible de trouver le nœud final : %(endpoint_id)s" - -#, python-format -msgid "" -"Could not find federated protocol %(protocol_id)s for Identity Provider: " -"%(idp_id)s" -msgstr "" -"Impossible de trouver le protocole fédéré %(protocol_id)s pour le " -"fournisseur d'identité : %(idp_id)s" - -#, python-format -msgid "Could not find group: %(group_id)s" -msgstr "Impossible de trouver le groupe : %(group_id)s" - -#, python-format -msgid "Could not find mapping: %(mapping_id)s" -msgstr "Impossible de trouver le mappage : %(mapping_id)s" - -msgid "Could not find policy association" -msgstr "Association de stratégie introuvable" - -#, python-format -msgid "Could not find policy: %(policy_id)s" -msgstr "Impossible de trouver la stratégie : %(policy_id)s" - -#, 
python-format -msgid "Could not find project: %(project_id)s" -msgstr "Impossible de trouver le projet : %(project_id)s" - -#, python-format -msgid "Could not find region: %(region_id)s" -msgstr "Impossible de trouver la région : %(region_id)s" - -#, python-format -msgid "" -"Could not find role assignment with role: %(role_id)s, user or group: " -"%(actor_id)s, project or domain: %(target_id)s" -msgstr "" -"Impossible de trouver l'affectation de rôle avec le rôle : %(role_id)s, " -"utilisateur ou groupe : %(actor_id)s, projet ou domaine : %(target_id)s" - -#, python-format -msgid "Could not find role: %(role_id)s" -msgstr "Impossible de trouver le rôle : %(role_id)s" - -#, python-format -msgid "Could not find service: %(service_id)s" -msgstr "Impossible de trouver le service : %(service_id)s" - -#, python-format -msgid "Could not find token: %(token_id)s" -msgstr "Impossible de trouver le jeton : %(token_id)s" - -#, python-format -msgid "Could not find trust: %(trust_id)s" -msgstr "Impossible de trouver la confiance : %(trust_id)s" - -#, python-format -msgid "Could not find user: %(user_id)s" -msgstr "Impossible de trouver l'utilisateur : %(user_id)s" - -#, python-format -msgid "Could not find version: %(version)s" -msgstr "Impossible de trouver la version : %(version)s" - -#, python-format -msgid "Could not find: %(target)s" -msgstr "Impossible de trouver : %(target)s" - -msgid "" -"Could not map any federated user properties to identity values. Check debug " -"logs or the mapping used for additional details." -msgstr "" -"Impossible de mapper des propriétés d'utilisateur fédéré à des valeurs " -"d'identité. Pour plus d'informations, consultez les journaux de débogage ou " -"le mappage utilisé." - -msgid "" -"Could not map user while setting ephemeral user identity. Either mapping " -"rules must specify user id/name or REMOTE_USER environment variable must be " -"set." 
-msgstr "" -"Impossible de mapper l'utilisateur lors de la définition de l'identité " -"utilisateur éphémère. Des règles de mappage doivent spécifier l'ID " -"utilisateur/le nom ou la variable d'environnement REMOTE_USER doit être " -"définie." - -msgid "Could not validate the access token" -msgstr "Impossible de valider le jeton d'accès" - -msgid "Credential belongs to another user" -msgstr "Les données d'identification appartiennent à un autre utilisateur" - -msgid "Credential signature mismatch" -msgstr "Non concordance de signature des données d'identification" - -#, python-format -msgid "" -"Direct import of auth plugin %(name)r is deprecated as of Liberty in favor " -"of its entrypoint from %(namespace)r and may be removed in N." -msgstr "" -"L'importation directe du plug-in d'authentification %(name)r est obsolète " -"depuis Liberty en faveur de son point d'entrée depuis %(namespace)r et " -"susceptible d'être supprimée dans N." - -#, python-format -msgid "" -"Direct import of driver %(name)r is deprecated as of Liberty in favor of its " -"entrypoint from %(namespace)r and may be removed in N." -msgstr "" -"L'importation directe du pilote %(name)r est obsolète depuis Liberty en " -"faveur de son point d'entrée depuis %(namespace)r et susceptible d'être " -"supprimée dans N." - -msgid "" -"Disabling an entity where the 'enable' attribute is ignored by configuration." -msgstr "" -"Désactivation d'une entité dont l'attribut 'enable' est ignoré par la " -"configuration." - -#, python-format -msgid "Domain (%s)" -msgstr "Domaine (%s)" - -#, python-format -msgid "Domain cannot be named %s" -msgstr "Le domaine ne peut pas s'appeler %s" - -#, python-format -msgid "Domain cannot have ID %s" -msgstr "Le domaine ne peut pas avoir l'ID %s" - -#, python-format -msgid "Domain is disabled: %s" -msgstr "Domaine désactivé : %s" - -msgid "Domain name cannot contain reserved characters." -msgstr "Le nom de domaine ne peut pas contenir des caractères réservés." 
- -msgid "Domain scoped token is not supported" -msgstr "Le jeton configuré du domaine n'est pas pris en charge" - -msgid "Domain specific roles are not supported in the V8 role driver" -msgstr "" -"Les rôles spécifiques au domaine ne sont pas pris en charge dans le pilote " -"de rôle V8 " - -#, python-format -msgid "" -"Domain: %(domain)s already has a configuration defined - ignoring file: " -"%(file)s." -msgstr "" -"Une configuration est déjà définie pour le domaine %(domain)s - fichier " -"ignoré : %(file)s." - -msgid "Duplicate Entry" -msgstr "Entrée en double" - -#, python-format -msgid "Duplicate ID, %s." -msgstr "ID en double, %s." - -#, python-format -msgid "Duplicate entry: %s" -msgstr "Entrée en double : %s" - -#, python-format -msgid "Duplicate name, %s." -msgstr "Nom en double, %s." - -#, python-format -msgid "Duplicate remote ID: %s" -msgstr "ID distant en double : %s" - -msgid "EC2 access key not found." -msgstr "Clé d'accès EC2 non trouvée." - -msgid "EC2 signature not supplied." -msgstr "Signature EC2 non fournie." - -msgid "" -"Either --bootstrap-password argument or OS_BOOTSTRAP_PASSWORD must be set." -msgstr "" -"L'argument --bootstrap-password ou OS_BOOTSTRAP_PASSWORD doit être défini." - -msgid "Enabled field must be a boolean" -msgstr "La zone activée doit être de type booléen" - -msgid "Enabled field should be a boolean" -msgstr "La zone activée devrait être de type booléen" - -#, python-format -msgid "Endpoint %(endpoint_id)s not found in project %(project_id)s" -msgstr "Nœud final %(endpoint_id)s non trouvé dans le projet %(project_id)s" - -msgid "Endpoint Group Project Association not found" -msgstr "Association de projets du groupe de nœuds finaux non trouvée" - -msgid "Ensure configuration option idp_entity_id is set." -msgstr "Assurez-vous que l'option de configuration idp_entity_id est définie." - -msgid "Ensure configuration option idp_sso_endpoint is set." 
-msgstr "" -"Assurez-vous que l'option de configuration idp_sso_endpoint est définie." - -#, python-format -msgid "" -"Error parsing configuration file for domain: %(domain)s, file: %(file)s." -msgstr "" -"Erreur lors de l'analyse syntaxique du fichier de configuration pour le " -"domaine : %(domain)s, fichier : %(file)s." - -#, python-format -msgid "Error while opening file %(path)s: %(err)s" -msgstr "Erreur lors de l'ouverture du fichier %(path)s : %(err)s" - -#, python-format -msgid "Error while parsing line: '%(line)s': %(err)s" -msgstr "Erreur lors de l'analyse de la ligne : '%(line)s' : %(err)s" - -#, python-format -msgid "Error while parsing rules %(path)s: %(err)s" -msgstr "Erreur lors de l'analyse syntaxique des règles %(path)s : %(err)s" - -#, python-format -msgid "Error while reading metadata file, %(reason)s" -msgstr "" -"Erreur lors de la lecture du fichier de métadonnées du fichier, %(reason)s" - -#, python-format -msgid "" -"Exceeded attempts to register domain %(domain)s to use the SQL driver, the " -"last domain that appears to have had it is %(last_domain)s, giving up" -msgstr "" -"Nombre de tentatives d'enregistrement du domaine %(domain)s dépassé pour " -"utiliser le pilote SQL, le dernier domaine qui semble l'avoir contenu est " -"%(last_domain)s, abandon..." - -#, python-format -msgid "Expected dict or list: %s" -msgstr "Dictionnaire ou liste attendu : %s" - -msgid "" -"Expected signing certificates are not available on the server. Please check " -"Keystone configuration." -msgstr "" -"Les certificats signataires attendus ne sont pas disponibles sur le serveur. " -"Vérifiez la configuration de Keystone." - -#, python-format -msgid "" -"Expecting to find %(attribute)s in %(target)s - the server could not comply " -"with the request since it is either malformed or otherwise incorrect. The " -"client is assumed to be in error." 
-msgstr "" -"En attente de recherche de %(attribute)s dans %(target)s - le serveur n'a " -"pas pu se conformer à la demande car elle est incorrectement formée ou " -"incorrecte. Le client est considéré comme étant à l'état d'erreur." - -#, python-format -msgid "Failed to start the %(name)s server" -msgstr "Impossible de démarrer le serveur %(name)s" - -msgid "Failed to validate token" -msgstr "Échec de validation du jeton" - -msgid "Federation token is expired" -msgstr "Le jeton de fédération a expiré" - -#, python-format -msgid "" -"Field \"remaining_uses\" is set to %(value)s while it must not be set in " -"order to redelegate a trust" -msgstr "" -"La zone \"remaining_uses\" est définie sur %(value)s alors qu'elle ne doit " -"pas être définie pour redéléguer une fiducie" - -msgid "Found invalid token: scoped to both project and domain." -msgstr "Jeton non valide trouvé : configuré dans projet et domaine." - -#, python-format -msgid "Group %s not found in config" -msgstr "Groupe %s non trouvé dans la configuration" - -#, python-format -msgid "Group %(group)s is not supported for domain specific configurations" -msgstr "" -"Le groupe %(group)s n'est pas pris en charge pour les configurations " -"spécifiques à un domaine" - -#, python-format -msgid "" -"Group %(group_id)s returned by mapping %(mapping_id)s was not found in the " -"backend." -msgstr "" -"Le groupe %(group_id)s renvoyé par le mappage %(mapping_id)s introuvable " -"dans le back-end." 
- -#, python-format -msgid "" -"Group membership across backend boundaries is not allowed, group in question " -"is %(group_id)s, user is %(user_id)s" -msgstr "" -"L'appartenance de groupe entre frontières de back-end n'est pas autorisée, " -"le groupe en question est %(group_id)s, l'utilisateur est %(user_id)s" - -#, python-format -msgid "ID attribute %(id_attr)s not found in LDAP object %(dn)s" -msgstr "L'attribut ID %(id_attr)s non trouvé dans l'objet LDAP %(dn)s" - -#, python-format -msgid "Identity Provider %(idp)s is disabled" -msgstr "Le fournisseur d'identité %(idp)s est désactivé" - -msgid "" -"Incoming identity provider identifier not included among the accepted " -"identifiers." -msgstr "" -"L'identificateur de fournisseur d'identité entrant ne fait pas partie des " -"identificateurs acceptés." - -msgid "Invalid EC2 signature." -msgstr "Signature EC2 non valide." - -#, python-format -msgid "Invalid LDAP TLS certs option: %(option)s. Choose one of: %(options)s" -msgstr "" -"Option de certificat TLS LDAP non valide : %(option)s. Choisissez l'une des " -"options suivantes : %(options)s" - -#, python-format -msgid "Invalid LDAP TLS_AVAIL option: %s. TLS not available" -msgstr "Option TLS_AVAIL LDAP non valide : %s. TLS non disponible" - -#, python-format -msgid "Invalid LDAP deref option: %(option)s. Choose one of: %(options)s" -msgstr "" -"Option déréférencée LDAP non valide : %(option)s. Choisissez l'une des " -"options suivantes : %(options)s" - -#, python-format -msgid "Invalid LDAP scope: %(scope)s. Choose one of: %(options)s" -msgstr "" -"Portée LDAP non valide : %(scope)s. 
Choisissez l'une des portées suivantes : " -"%(options)s" - -msgid "Invalid TLS / LDAPS combination" -msgstr "Combinaison TLS / LDAPS non valide" - -#, python-format -msgid "Invalid audit info data type: %(data)s (%(type)s)" -msgstr "Type de données d'information d'audit non valide : %(data)s (%(type)s)" - -msgid "Invalid blob in credential" -msgstr "Objet LOB non valide dans les informations d'identification" - -#, python-format -msgid "" -"Invalid domain name: %(domain)s found in config file name: %(file)s - " -"ignoring this file." -msgstr "" -"Nom de domaine non valide : %(domain)s trouvé dans le nom du fichier de " -"configuration : %(file)s - fichier ignoré." - -#, python-format -msgid "Invalid domain specific configuration: %(reason)s" -msgstr "Configuration spécifique au domaine non valide : %(reason)s" - -#, python-format -msgid "Invalid input for field '%(path)s'. The value is '%(value)s'." -msgstr "Entrée non valide pour la zone '%(path)s'. La valeur est '%(value)s'." - -msgid "Invalid limit value" -msgstr "Valeur de limite non valide" - -#, python-format -msgid "" -"Invalid mix of entities for policy association - only Endpoint, Service or " -"Region+Service allowed. Request was - Endpoint: %(endpoint_id)s, Service: " -"%(service_id)s, Region: %(region_id)s" -msgstr "" -"Combinaison d'entités non valide pour l'association de stratégie. Seules les " -"entités Nœud final, Service ou Région+Service sont autorisées. La demande " -"était Point final : %(endpoint_id)s, Service : %(service_id)s, Région : " -"%(region_id)s" - -#, python-format -msgid "" -"Invalid rule: %(identity_value)s. Both 'groups' and 'domain' keywords must " -"be specified." -msgstr "" -"Règle non valide : %(identity_value)s. Les mots clés 'groups' et 'domain' " -"doivent être spécifiés." 
- -msgid "Invalid signature" -msgstr "Signature non valide" - -msgid "Invalid user / password" -msgstr "Utilisateur / Mot de passe non valide" - -msgid "Invalid username or TOTP passcode" -msgstr "Nom d'utilisateur ou code TOTP non valide" - -msgid "Invalid username or password" -msgstr "Nom d'utilisateur ou mot de passe non valide" - -#, python-format -msgid "KVS region %s is already configured. Cannot reconfigure." -msgstr "La région KVS %s est déjà configurée. Reconfiguration impossible." - -#, python-format -msgid "Key Value Store not configured: %s" -msgstr "Magasin de valeurs de clé non configuré : %s" - -#, python-format -msgid "LDAP %s create" -msgstr "Création LDAP %s" - -#, python-format -msgid "LDAP %s delete" -msgstr "Suppression LDAP %s" - -#, python-format -msgid "LDAP %s update" -msgstr "Mise à jour LDAP %s" - -msgid "" -"Length of transformable resource id > 64, which is max allowed characters" -msgstr "" -"Longueur de l'ID de ressource transformable > 64 (nombre maximal de " -"caractères autorisé)" - -#, python-format -msgid "" -"Local section in mapping %(mapping_id)s refers to a remote match that " -"doesn't exist (e.g. {0} in a local section)." -msgstr "" -"La section locale dans le mappage %(mapping_id)s fait référence à une " -"correspondance éloignée qui n'existe pas (par exemple, {0} dans une section " -"locale)." - -#, python-format -msgid "Lock Timeout occurred for key, %(target)s" -msgstr "" -"Un dépassement de délai de verrouillage s'est produit pour la clé, %(target)s" - -#, python-format -msgid "Lock key must match target key: %(lock)s != %(target)s" -msgstr "" -"La clé de verrouillage doit correspondre à la clé cible : %(lock)s != " -"%(target)s" - -#, python-format -msgid "Malformed endpoint URL (%(endpoint)s), see ERROR log for details." -msgstr "" -"URL de nœud final incorrectement formée (%(endpoint)s), consultez le journal " -"des ERREURS pour plus de détails." 
- -msgid "Marker could not be found" -msgstr "Le marqueur est introuvable" - -#, python-format -msgid "Max hierarchy depth reached for %s branch." -msgstr "La profondeur maximale de hiérarchie est atteinte pour la branche %s." - -#, python-format -msgid "Maximum lock attempts on %s occurred." -msgstr "Le nombre maximal de tentatives de verrouillage sur %s est atteint." - -#, python-format -msgid "Member %(member)s is already a member of group %(group)s" -msgstr "Le membre %(member)s est déjà membre du groupe %(group)s" - -#, python-format -msgid "Method not callable: %s" -msgstr "Impossible d'appeler la méthode : %s" - -msgid "Missing entity ID from environment" -msgstr "IP d'entité absent de l'environnement" - -msgid "" -"Modifying \"redelegation_count\" upon redelegation is forbidden. Omitting " -"this parameter is advised." -msgstr "" -"La modification de \"redelegation_count\" lors de la redélégation est " -"interdite. Il est conseillé d'omettre ce paramètre." - -msgid "Multiple domains are not supported" -msgstr "Les domaines multiples ne sont pas pris en charge" - -msgid "Must be called within an active lock context." -msgstr "Doit être appelé au sein d'un contexte de verrouillage actif." - -msgid "Must specify either domain or project" -msgstr "Indiquez un domaine ou un projet" - -msgid "Name field is required and cannot be empty" -msgstr "Le champ de nom est obligatoire et ne peut pas être vide" - -msgid "Neither Project Domain ID nor Project Domain Name was provided." -msgstr "Aucun ID ou nom de domaine de projet n'a été fourni." - -msgid "" -"No Authorization headers found, cannot proceed with OAuth related calls, if " -"running under HTTPd or Apache, ensure WSGIPassAuthorization is set to On." -msgstr "" -"Aucun en-tête d'autorisation trouvé, impossible de procéder aux appels liés " -"à OAuth, en cas d'exécution sous HTTPd ou Apache, vérifiez que " -"WSGIPassAuthorization est défini sur Activé." 
- -msgid "No authenticated user" -msgstr "Aucun utilisateur authentifié" - -msgid "" -"No encryption keys found; run keystone-manage fernet_setup to bootstrap one." -msgstr "" -"Aucune clé de chiffrement trouvée ; exécutez keystone-manage fernet_setup " -"pour en amorcer une." - -msgid "No options specified" -msgstr "Aucune option spécifiée" - -#, python-format -msgid "No policy is associated with endpoint %(endpoint_id)s." -msgstr "Aucune stratégie n'est associée au nœud final %(endpoint_id)s." - -#, python-format -msgid "No remaining uses for trust: %(trust_id)s" -msgstr "Aucune utilisation restante pour la confiance : %(trust_id)s" - -msgid "No token in the request" -msgstr "Aucun jeton dans la demande" - -msgid "Non-default domain is not supported" -msgstr "Le domaine non par défaut n'est pas pris en charge" - -msgid "One of the trust agents is disabled or deleted" -msgstr "L'un des agents de confiance est désactivé ou supprimé" - -#, python-format -msgid "" -"Option %(option)s found with no group specified while checking domain " -"configuration request" -msgstr "" -"Option %(option)s trouvée sans aucun groupe spécifié lors de la vérification " -"de la demande de configuration du domaine" - -#, python-format -msgid "" -"Option %(option)s in group %(group)s is not supported for domain specific " -"configurations" -msgstr "" -"L'option %(option)s dans le groupe %(group)s n'est pas prise en charge pour " -"les configurations spécifiques à un domaine" - -#, python-format -msgid "Project (%s)" -msgstr "Projet (%s)" - -#, python-format -msgid "Project ID not found: %(t_id)s" -msgstr "ID de projet non trouvé : %(t_id)s" - -msgid "Project field is required and cannot be empty." -msgstr "La zone Projet est requise et ne doit pas être vide." - -#, python-format -msgid "Project is disabled: %s" -msgstr "Le projet est désactivé : %s" - -msgid "Project name cannot contain reserved characters." -msgstr "Le nom de projet ne peut pas contenir des caractères réservés." 
- -msgid "Query string is not UTF-8 encoded" -msgstr "La chaine de requête n'est pas au format UTF-8. " - -#, python-format -msgid "" -"Reading the default for option %(option)s in group %(group)s is not supported" -msgstr "" -"La lecture de la valeur par défaut pour l'option %(option)s dans le groupe " -"%(group)s n'est pas prise en charge" - -msgid "Redelegation allowed for delegated by trust only" -msgstr "Redélégation autorisée pour une délégation par fiducie uniquement" - -#, python-format -msgid "" -"Remaining redelegation depth of %(redelegation_depth)d out of allowed range " -"of [0..%(max_count)d]" -msgstr "" -"Profondeur de redélégation restante %(redelegation_depth)d par rapport à la " -"plage admise [0..%(max_count)d]" - -msgid "" -"Remove admin_crud_extension from the paste pipeline, the admin_crud " -"extension is now always available. Updatethe [pipeline:admin_api] section in " -"keystone-paste.ini accordingly, as it will be removed in the O release." -msgstr "" -"Supprimez admin_crud_extension du pipeline de collage, l'extension " -"admin_crud est désormais toujours disponible. Mettez à jour la section " -"[pipeline:admin_api] dans le fichier keystone-paste.ini en conséquence, car " -"elle sera supprimée dans l'édition O." - -msgid "" -"Remove endpoint_filter_extension from the paste pipeline, the endpoint " -"filter extension is now always available. Update the [pipeline:api_v3] " -"section in keystone-paste.ini accordingly as it will be removed in the O " -"release." -msgstr "" -"Supprimez endpoint_filter_extension du pipeline de collage, l'extension du " -"filtre de nœud final est désormais toujours disponible. Mettez à jour la " -"section [pipeline:api_v3] dans le fichier keystone-paste.ini en conséquence " -"car elle sera supprimée dans l'édition O." - -msgid "" -"Remove federation_extension from the paste pipeline, the federation " -"extension is now always available. 
Update the [pipeline:api_v3] section in " -"keystone-paste.ini accordingly, as it will be removed in the O release." -msgstr "" -"Supprimez federation_extension du pipeline de collage, l'extension de " -"fédération est désormais toujours disponible. Mettez à jour la section " -"[pipeline:api_v3] dans le fichier keystone-paste.ini en conséquence, car " -"elle sera supprimée dans l'édition O." - -msgid "" -"Remove oauth1_extension from the paste pipeline, the oauth1 extension is now " -"always available. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"Supprimez oauth1_extension du pipeline de collage, l'extension oauth1 est " -"désormais toujours disponible. Mettez à jour la section [pipeline:api_v3] " -"dans le fichier keystone-paste.ini en conséquence, car elle sera supprimée " -"dans l'édition O." - -msgid "" -"Remove revoke_extension from the paste pipeline, the revoke extension is now " -"always available. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"Supprimez revoke_extension du pipeline de collage, l'extension revoke est " -"désormais toujours disponible. Mettez à jour la section [pipeline:api_v3] " -"dans le fichier keystone-paste.ini en conséquence, car elle sera supprimée " -"dans l'édition O." - -msgid "" -"Remove simple_cert from the paste pipeline, the PKI and PKIz token providers " -"are now deprecated and simple_cert was only used insupport of these token " -"providers. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"Supprimez simple_cert du pipeline de collage, les fournisseurs de jetons " -"PKI et PKIz sont désormais obsolètes et simple_cert n'a été utilisé que pour " -"la prise en charge de ces fournisseurs. 
Mettez à jour la section [pipeline:" -"api_v3] dans le fichier keystone-paste.ini en conséquence, car elle sera " -"supprimée dans l'édition O." - -msgid "" -"Remove user_crud_extension from the paste pipeline, the user_crud extension " -"is now always available. Updatethe [pipeline:public_api] section in keystone-" -"paste.ini accordingly, as it will be removed in the O release." -msgstr "" -"Supprimez user_crud_extension du pipeline de collage, l'extension user_crud " -"est désormais toujours disponible. Mettez à jour la section [pipeline:" -"public_api] dans le fichier keystone-paste.ini en conséquence, car elle sera " -"supprimée de l'édition O." - -msgid "Request Token does not have an authorizing user id" -msgstr "Le jeton de demande ne comporte pas d'ID utilisateur d'autorisation" - -#, python-format -msgid "" -"Request attribute %(attribute)s must be less than or equal to %(size)i. The " -"server could not comply with the request because the attribute size is " -"invalid (too large). The client is assumed to be in error." -msgstr "" -"La valeur de l'attribut de demande %(attribute)s doit être inférieure ou " -"égale à %(size)i. Le serveur n'a pas pu se conformer à la demande car la " -"taille de l'attribut n'est pas valide. Le client est considéré comme étant à " -"l'état d'erreur." 
- -msgid "Request must have an origin query parameter" -msgstr "La demande doit avoir un paramètre de requête d'origine" - -msgid "Request token is expired" -msgstr "Le jeton de la demande a expiré" - -msgid "Request token not found" -msgstr "Jeton de demande non trouvé" - -msgid "Requested expiration time is more than redelegated trust can provide" -msgstr "" -"Le délai d'expiration demandé dépasse celui que la fiducie redéléguée peut " -"fournir" - -#, python-format -msgid "" -"Requested redelegation depth of %(requested_count)d is greater than allowed " -"%(max_count)d" -msgstr "" -"La profondeur de redélégation demandée %(requested_count)d est supérieure à " -"la limite autorisée %(max_count)d" - -msgid "" -"Running keystone via eventlet is deprecated as of Kilo in favor of running " -"in a WSGI server (e.g. mod_wsgi). Support for keystone under eventlet will " -"be removed in the \"M\"-Release." -msgstr "" -"L'exécution de Keystone via eventlet est obsolète depuis Kilo et remplacée " -"par l'exécution sur un serveur WSGI (par exemple, mod_wsgi). La prise en " -"charge pour Keystone sous l'eventlet sera supprimée dans \"M\"-Release." 
- -msgid "Scoping to both domain and project is not allowed" -msgstr "La configuration du domaine et du projet n'est pas autorisée" - -msgid "Scoping to both domain and trust is not allowed" -msgstr "" -"La configuration du domaine et du certificat de confiance n'est pas autorisée" - -msgid "Scoping to both project and trust is not allowed" -msgstr "" -"La configuration du projet et du certificat de confiance n'est pas autorisée" - -#, python-format -msgid "Service Provider %(sp)s is disabled" -msgstr "Le fournisseur de services %(sp)s est désactivé" - -msgid "Some of requested roles are not in redelegated trust" -msgstr "Certains rôles demandés ne font pas partie de la fiducie redéléguée" - -msgid "Specify a domain or project, not both" -msgstr "Indiquez un domaine ou un projet, mais pas les deux" - -msgid "Specify a user or group, not both" -msgstr "Indiquez un utilisateur ou un groupe, mais pas les deux" - -msgid "Specify one of domain or project" -msgstr "Indiquez un domaine ou un projet" - -msgid "Specify one of user or group" -msgstr "Indiquez un utilisateur ou un groupe" - -#, python-format -msgid "" -"String length exceeded.The length of string '%(string)s' exceeded the limit " -"of column %(type)s(CHAR(%(length)d))." -msgstr "" -"Longueur de chaîne dépassée. La longueur de la chaîne '%(string)s a dépassé " -"la limite de la colonne %(type)s(CHAR(%(length)d))." - -msgid "Tenant name cannot contain reserved characters." -msgstr "Le nom de locataire ne peut pas contenir des caractères réservés." - -#, python-format -msgid "" -"The %s extension has been moved into keystone core and as such its " -"migrations are maintained by the main keystone database control. Use the " -"command: keystone-manage db_sync" -msgstr "" -"L'extension %s a été déplacée vers le noyau Keystone et ses migrations sont " -"donc gérées par le contrôle de la base de données Keystone principale. 
" -"Utilisez la commande : keystone-manage db_sync" - -msgid "" -"The 'expires_at' must not be before now. The server could not comply with " -"the request since it is either malformed or otherwise incorrect. The client " -"is assumed to be in error." -msgstr "" -"La valeur de 'expires_at' ne doit pas être située dans le passé. Le serveur " -"n'a pas pu se conformer à la demande car elle est incorrectement formée ou " -"incorrecte. Le client est considéré comme étant à l'état d'erreur." - -msgid "The --all option cannot be used with the --domain-name option" -msgstr "L'option --all ne peut pas être utilisée avec l'option --domain-name" - -#, python-format -msgid "The Keystone configuration file %(config_file)s could not be found." -msgstr "Le fichier de configuration Keystone %(config_file)s est introuvable." - -#, python-format -msgid "" -"The Keystone domain-specific configuration has specified more than one SQL " -"driver (only one is permitted): %(source)s." -msgstr "" -"La configuration spécifique au domaine Keystone a spécifié plusieurs pilotes " -"SQL (un seul est autorisé) : %(source)s." - -msgid "The action you have requested has not been implemented." -msgstr "L'action que vous avez demandée n'est pas implémentée." - -msgid "The authenticated user should match the trustor." -msgstr "L'utilisateur authentifié doit correspondre au fiduciant." - -msgid "" -"The certificates you requested are not available. It is likely that this " -"server does not use PKI tokens otherwise this is the result of " -"misconfiguration." -msgstr "" -"Les certificats que vous avez demandés ne sont pas disponibles. Il est " -"probable que ce serveur n'utilise pas des jetons PKI ; sinon, ceci est le " -"résultat d'un problème de configuration." - -msgid "The configured token provider does not support bind authentication." -msgstr "" -"Le fournisseur de jeton configuré ne prend pas en charge l'authentification " -"par liaison." 
- -msgid "The creation of projects acting as domains is not allowed in v2." -msgstr "" -"La création de projets faisant office de domaines n'est pas autorisée en v2." - -#, python-format -msgid "" -"The password length must be less than or equal to %(size)i. The server could " -"not comply with the request because the password is invalid." -msgstr "" -"La longueur du mot de passe doit être inférieure ou égale à %(size)i. Le " -"serveur n'a pas pu se conformer à la demande car le mot de passe n'est pas " -"valide." - -msgid "The request you have made requires authentication." -msgstr "La demande que vous avez présentée requiert une authentification." - -msgid "The resource could not be found." -msgstr "La ressource est introuvable." - -msgid "" -"The revoke call must not have both domain_id and project_id. This is a bug " -"in the Keystone server. The current request is aborted." -msgstr "" -"L'appel de révocation ne doit pas contenir à la fois domain_id et " -"project_id. Il s'agit d'un bogue sur le serveur Keystone. La demande en " -"cours est abandonnée." - -msgid "The service you have requested is no longer available on this server." -msgstr "Le service que vous avez demandé n'est plus disponible sur ce serveur." - -#, python-format -msgid "" -"The specified parent region %(parent_region_id)s would create a circular " -"region hierarchy." -msgstr "" -"La région parente spécifiée %(parent_region_id)s pourrait créer une " -"hiérarchie de région circulaire." 
- -#, python-format -msgid "" -"The value of group %(group)s specified in the config should be a dictionary " -"of options" -msgstr "" -"La valeur du groupe %(group)s spécifié dans la configuration doit être un " -"dictionnaire d'options" - -msgid "There should not be any non-oauth parameters" -msgstr "Il ne doit y avoir aucun paramètre non-oauth" - -#, python-format -msgid "This is not a recognized Fernet payload version: %s" -msgstr "Il ne s'agit pas d'une version de contenu Fernet reconnue : %s" - -#, python-format -msgid "This is not a recognized Fernet token %s" -msgstr "Il ne s'agit pas d'un jeton Fernet reconnu %s" - -msgid "" -"Timestamp not in expected format. The server could not comply with the " -"request since it is either malformed or otherwise incorrect. The client is " -"assumed to be in error." -msgstr "" -"L'horodatage n'est pas au format attendu. Le serveur n'a pas pu se conformer " -"à la demande car elle est incorrectement formée ou incorrecte. Le client est " -"considéré comme étant à l'état d'erreur." - -#, python-format -msgid "" -"To get a more detailed information on this error, re-run this command for " -"the specific domain, i.e.: keystone-manage domain_config_upload --domain-" -"name %s" -msgstr "" -"Pour obtenir des informations plus détaillées sur cette erreur, réexécutez " -"cette commande pour le domaine spécifique, par exemple : keystone-manage " -"domain_config_upload --domain-name %s" - -msgid "Token belongs to another user" -msgstr "Le jeton appartient à un autre utilisateur" - -msgid "Token does not belong to specified tenant." -msgstr "Le jeton n'appartient pas au locataire spécifié." - -msgid "Token version is unrecognizable or unsupported." -msgstr "Version de jeton non reconnue ou non prise en charge." - -msgid "Trustee has no delegated roles." -msgstr "Le fiduciaire n'a aucun rôle délégué." - -msgid "Trustor is disabled." -msgstr "Le fiduciant est désactivé. 
" - -#, python-format -msgid "" -"Trying to update group %(group)s, so that, and only that, group must be " -"specified in the config" -msgstr "" -"Tentative de mise à jour du groupe %(group)s, de sorte que le groupe est " -"spécifié dans la configuration uniquement" - -#, python-format -msgid "" -"Trying to update option %(option)s in group %(group)s, but config provided " -"contains option %(option_other)s instead" -msgstr "" -"Tentative de mise à jour de l'option %(option)s dans le groupe %(group)s, " -"mais la configuration fournie contient l'option %(option_other)s à la place" - -#, python-format -msgid "" -"Trying to update option %(option)s in group %(group)s, so that, and only " -"that, option must be specified in the config" -msgstr "" -"Tentative de mise à jour de l'option %(option)s dans le groupe %(group)s, de " -"sorte que l'option est spécifiée dans la configuration uniquement" - -msgid "" -"Unable to access the keystone database, please check it is configured " -"correctly." -msgstr "" -"Impossible d'accéder à la base de données Keystone, vérifiez qu'elle est " -"correctement configurée." - -#, python-format -msgid "Unable to consume trust %(trust_id)s, unable to acquire lock." -msgstr "" -"Impossible de consommer la confiance %(trust_id)s et d'acquérir un verrou." - -#, python-format -msgid "" -"Unable to delete region %(region_id)s because it or its child regions have " -"associated endpoints." -msgstr "" -"Impossible de supprimer la région %(region_id)s car la région ou ses régions " -"enfants comportent des nœuds finaux associés." 
- -msgid "Unable to downgrade schema" -msgstr "Impossible de rétrograder le schéma" - -#, python-format -msgid "Unable to find valid groups while using mapping %(mapping_id)s" -msgstr "" -"Impossible de trouver des groupes valides en utilisant le mappage " -"%(mapping_id)s" - -#, python-format -msgid "Unable to locate domain config directory: %s" -msgstr "Impossible de localiser le répertoire de configuration de domaine : %s" - -#, python-format -msgid "Unable to lookup user %s" -msgstr "Impossible de rechercher l'utilisateur %s" - -#, python-format -msgid "" -"Unable to reconcile identity attribute %(attribute)s as it has conflicting " -"values %(new)s and %(old)s" -msgstr "" -"Impossible de rapprocher l'attribut d'identité %(attribute)s car il comporte " -"des valeurs en conflit : %(new)s et %(old)s" - -#, python-format -msgid "" -"Unable to sign SAML assertion. It is likely that this server does not have " -"xmlsec1 installed, or this is the result of misconfiguration. Reason " -"%(reason)s" -msgstr "" -"Impossible de signer l'assertion SAML. Il est probable que xmlsec1 ne soit " -"pas installé sur ce serveur ; sinon, cela est dû à un problème de " -"configuration. Raison : %(reason)s" - -msgid "Unable to sign token." -msgstr "Impossible de signer le jeton." - -#, python-format -msgid "Unexpected assignment type encountered, %s" -msgstr "Type d'affectation inattendu, %s" - -#, python-format -msgid "" -"Unexpected combination of grant attributes - User: %(user_id)s, Group: " -"%(group_id)s, Project: %(project_id)s, Domain: %(domain_id)s" -msgstr "" -"Combinaison inattendue d'attributs d'octroi - Utilisateur : %(user_id)s. " -"Groupe : %(group_id)s. Projet : %(project_id)s. 
Domaine : %(domain_id)s" - -#, python-format -msgid "Unexpected status requested for JSON Home response, %s" -msgstr "Statut inattendu demandé pour la réponse JSON Home, %s" - -msgid "Unknown Target" -msgstr "Cible inconnue" - -#, python-format -msgid "Unknown domain '%(name)s' specified by --domain-name" -msgstr "Domaine inconnu '%(name)s' spécifié par --domain-name" - -#, python-format -msgid "Unknown token version %s" -msgstr "Version de jeton inconnue %s" - -#, python-format -msgid "Unregistered dependency: %(name)s for %(targets)s" -msgstr "Dépendance désenregistrée : %(name)s pour %(targets)s" - -msgid "Update of `domain_id` is not allowed." -msgstr "La mise à jour de `domain_id` n'est pas autorisée." - -msgid "Update of `is_domain` is not allowed." -msgstr "La mise à jour de `is_domain` n'est pas autorisée." - -msgid "Update of `parent_id` is not allowed." -msgstr "La mise à jour de `parent_id` n'est pas autorisée." - -msgid "Update of domain_id is only allowed for root projects." -msgstr "" -"La mise à jour de l'ID de domaine (domain_id) est autorisée uniquement pour " -"les projets racine." - -msgid "Update of domain_id of projects acting as domains is not allowed." -msgstr "" -"La mise à jour de l'ID de domaine (domain_id) des projets faisant office de " -"domaines n'est pas autorisée." - -msgid "Use a project scoped token when attempting to create a SAML assertion" -msgstr "" -"Utilisez un jeton configuré du projet lorsque vous essayez de créer une " -"assertion SAML" - -msgid "" -"Use of the identity driver config to automatically configure the same " -"assignment driver has been deprecated, in the \"O\" release, the assignment " -"driver will need to be expicitly configured if different than the default " -"(SQL)." -msgstr "" -"L'utilisation de la configuration du pilote d'identité pour configurer " -"automatiquement le même pilote d'affectation est désormais obsolète. 
Dans " -"l'édition \"O\", le pilote d'affectation doit être configuré de manière " -"explicite s'il est différent de la valeur par défaut (SQL)." - -#, python-format -msgid "User %(u_id)s is unauthorized for tenant %(t_id)s" -msgstr "L'utilisateur %(u_id)s n'est pas autorisé pour le locataire %(t_id)s" - -#, python-format -msgid "User %(user_id)s has no access to domain %(domain_id)s" -msgstr "L'utilisateur %(user_id)s n'a pas accès au domaine %(domain_id)s" - -#, python-format -msgid "User %(user_id)s has no access to project %(project_id)s" -msgstr "L'utilisateur %(user_id)s n'a pas accès au projet %(project_id)s" - -#, python-format -msgid "User %(user_id)s is already a member of group %(group_id)s" -msgstr "L'utilisateur %(user_id)s est déjà membre du groupe %(group_id)s" - -#, python-format -msgid "User '%(user_id)s' not found in group '%(group_id)s'" -msgstr "Utilisateur '%(user_id)s' non trouvé dans le groupe '%(group_id)s'" - -msgid "User IDs do not match" -msgstr "Les ID utilisateur ne correspondent pas." - -msgid "" -"User auth cannot be built due to missing either user id, or user name with " -"domain id, or user name with domain name." -msgstr "" -"L'authentification utilisateur ne peut pas être créée en raison de l'absence " -"d'un ID, utilisateur, nom d'utilisateur avec ID de domaine ou nom " -"d'utilisateur avec nom de domaine." - -#, python-format -msgid "User is disabled: %s" -msgstr "Utilisateur désactivé : %s" - -msgid "User is not a member of the requested project" -msgstr "L'utilisateur n'est pas membre du projet demandé" - -msgid "User is not a trustee." -msgstr "L'utilisateur n'est pas un fiduciaire." - -msgid "User not found" -msgstr "Utilisateur introuvable" - -msgid "User not valid for tenant." -msgstr "Utilisateur non valide pour le locataire." 
- -msgid "User roles not supported: tenant_id required" -msgstr "Rôles utilisateur non pris en charge : tenant_id est obligatoire" - -#, python-format -msgid "User type %s not supported" -msgstr "Type d'utilisateur %s non pris en charge" - -msgid "You are not authorized to perform the requested action." -msgstr "Vous n'êtes pas autorisé à effectuer l'action demandée." - -#, python-format -msgid "You are not authorized to perform the requested action: %(action)s" -msgstr "Vous n'êtes pas autorisé à effectuer l'action demandée : %(action)s" - -msgid "" -"You have tried to create a resource using the admin token. As this token is " -"not within a domain you must explicitly include a domain for this resource " -"to belong to." -msgstr "" -"Vous avez essayé de créer une ressource à l'aide du jeton admin. Comme ce " -"jeton ne figure pas dans un domaine, vous devez inclure explicitement un " -"domaine auquel cette ressource doit appartenir." - -msgid "`key_mangler` functions must be callable." -msgstr "Les fonctions `key_mangler` doivent pouvoir être appelées." - -msgid "`key_mangler` option must be a function reference" -msgstr "L'option `key_mangler` doit être une référence de fonction" - -msgid "any options" -msgstr "toutes les options" - -msgid "auth_type is not Negotiate" -msgstr "auth_type n'est pas négocié" - -msgid "authorizing user does not have role required" -msgstr "l'utilisateur qui autorise n'a pas de rôle obligatoire" - -#, python-format -msgid "cannot create a project in a branch containing a disabled project: %s" -msgstr "" -"Impossible de créer un projet dans une branche contenant un projet " -"désactivé : %s" - -#, python-format -msgid "" -"cannot delete an enabled project acting as a domain. Please disable the " -"project %s first." -msgstr "" -"impossible de supprimer un projet activé faisant office de domaine. Veuillez " -"d'abord désactiver le projet %s." 
- -#, python-format -msgid "group %(group)s" -msgstr "groupe %(group)s" - -msgid "" -"idp_contact_type must be one of: [technical, other, support, administrative " -"or billing." -msgstr "" -"idp_contact_type doit avoir l'une des valeurs suivantes : [technical, other, " -"support, administrative ou billing." - -#, python-format -msgid "invalid date format %s" -msgstr "Format de date non valide %s" - -#, python-format -msgid "" -"it is not permitted to have two projects acting as domains with the same " -"name: %s" -msgstr "" -"il est interdit d'avoir deux projets faisant office de domaines avec le même " -"nom : %s" - -#, python-format -msgid "" -"it is not permitted to have two projects within a domain with the same " -"name : %s" -msgstr "" -"il est interdit d'avoir deux projets au sein d'un domaine avec le même nom : " -"%s" - -msgid "only root projects are allowed to act as domains." -msgstr "seuls les projets racine sont autorisés à faire office de domaines." - -#, python-format -msgid "option %(option)s in group %(group)s" -msgstr "option %(option)s dans le groupe %(group)s" - -msgid "provided consumer key does not match stored consumer key" -msgstr "" -"la clé de consommateur fournie ne correspond pas à la clé de consommateur " -"stockée" - -msgid "provided request key does not match stored request key" -msgstr "" -"la clé de demande fournie ne correspond pas à la clé de demande stockée" - -msgid "provided verifier does not match stored verifier" -msgstr "le vérificateur fourni ne correspond pas au vérificateur stocké" - -msgid "remaining_uses must be a positive integer or null." -msgstr "remaining_uses doit être un entier positif ou nul." 
- -msgid "remaining_uses must not be set if redelegation is allowed" -msgstr "" -"remaining_uses ne doit pas être défini si la redélégation est autorisée" - -#, python-format -msgid "" -"request to update group %(group)s, but config provided contains group " -"%(group_other)s instead" -msgstr "" -"demande de mise à jour du groupe %(group)s, mais la configuration fournie " -"contient le groupe %(group_other)s à la place" - -msgid "rescope a scoped token" -msgstr "Reconfigurer un jeton configuré" - -#, python-format -msgid "role %s is not defined" -msgstr "Le rôle %s n'est pas défini" - -msgid "scope.project.id must be specified if include_subtree is also specified" -msgstr "" -"scope.project.id doit être spécifié si include_subtree est également spécifié" - -#, python-format -msgid "tls_cacertdir %s not found or is not a directory" -msgstr "tls_cacertdir %s non trouvé ou il ne s'agit pas d'un répertoire" - -#, python-format -msgid "tls_cacertfile %s not found or is not a file" -msgstr "tls_cacertfile %s non trouvé ou il ne s'agit pas d'un fichier" - -#, python-format -msgid "token reference must be a KeystoneToken type, got: %s" -msgstr "La référence de jeton doit être un type KeystoneToken, obtenu : %s" - -msgid "" -"update of domain_id is deprecated as of Mitaka and will be removed in O." -msgstr "" -"la mise à jour de domain_id est obsolète depuis Mitaka et sera supprimée " -"dans l'édition O." - -#, python-format -msgid "" -"validated expected to find %(param_name)r in function signature for " -"%(func_name)r." -msgstr "" -"La validation s'attendait %(param_name)r dans la signature de fonction pour " -"%(func_name)r." diff --git a/keystone-moon/keystone/locale/hu/LC_MESSAGES/keystone-log-critical.po b/keystone-moon/keystone/locale/hu/LC_MESSAGES/keystone-log-critical.po deleted file mode 100644 index ff8ff2d6..00000000 --- a/keystone-moon/keystone/locale/hu/LC_MESSAGES/keystone-log-critical.po +++ /dev/null @@ -1,25 +0,0 @@ -# Translations template for keystone. 
-# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# OpenStack Infra , 2015. #zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2014-08-31 03:19+0000\n" -"Last-Translator: openstackjenkins \n" -"Language: hu\n" -"Plural-Forms: nplurals=2; plural=(n != 1);\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Hungarian\n" - -#, python-format -msgid "Unable to open template file %s" -msgstr "Nem nyitható meg a sablonfájl: %s" diff --git a/keystone-moon/keystone/locale/it/LC_MESSAGES/keystone-log-critical.po b/keystone-moon/keystone/locale/it/LC_MESSAGES/keystone-log-critical.po deleted file mode 100644 index 35960a34..00000000 --- a/keystone-moon/keystone/locale/it/LC_MESSAGES/keystone-log-critical.po +++ /dev/null @@ -1,25 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# OpenStack Infra , 2015. 
#zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2014-08-31 03:19+0000\n" -"Last-Translator: openstackjenkins \n" -"Language: it\n" -"Plural-Forms: nplurals=2; plural=(n != 1);\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Italian\n" - -#, python-format -msgid "Unable to open template file %s" -msgstr "Impossibile aprire il file di template %s" diff --git a/keystone-moon/keystone/locale/it/LC_MESSAGES/keystone-log-error.po b/keystone-moon/keystone/locale/it/LC_MESSAGES/keystone-log-error.po deleted file mode 100644 index d6ac2cf7..00000000 --- a/keystone-moon/keystone/locale/it/LC_MESSAGES/keystone-log-error.po +++ /dev/null @@ -1,173 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. 
-# -# Translators: -msgid "" -msgstr "" -"Project-Id-Version: Keystone\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/keystone\n" -"POT-Creation-Date: 2015-03-09 06:03+0000\n" -"PO-Revision-Date: 2015-03-07 04:31+0000\n" -"Last-Translator: openstackjenkins \n" -"Language-Team: Italian (http://www.transifex.com/projects/p/keystone/" -"language/it/)\n" -"Language: it\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 1.3\n" -"Plural-Forms: nplurals=2; plural=(n != 1);\n" - -#: keystone/notifications.py:304 -msgid "Failed to construct notifier" -msgstr "" - -#: keystone/notifications.py:389 -#, python-format -msgid "Failed to send %(res_id)s %(event_type)s notification" -msgstr "" - -#: keystone/notifications.py:606 -#, python-format -msgid "Failed to send %(action)s %(event_type)s notification" -msgstr "" - -#: keystone/catalog/core.py:62 -#, python-format -msgid "Malformed endpoint - %(url)r is not a string" -msgstr "" - -#: keystone/catalog/core.py:66 -#, python-format -msgid "Malformed endpoint %(url)s - unknown key %(keyerror)s" -msgstr "Endpoint %(url)s non valdio - chiave sconosciuta %(keyerror)s" - -#: keystone/catalog/core.py:71 -#, python-format -msgid "" -"Malformed endpoint '%(url)s'. The following type error occurred during " -"string substitution: %(typeerror)s" -msgstr "" - -#: keystone/catalog/core.py:77 -#, python-format -msgid "" -"Malformed endpoint %s - incomplete format (are you missing a type notifier ?)" -msgstr "" - -#: keystone/common/openssl.py:93 -#, python-format -msgid "Command %(to_exec)s exited with %(retcode)s- %(output)s" -msgstr "" - -#: keystone/common/openssl.py:121 -#, python-format -msgid "Failed to remove file %(file_path)r: %(error)s" -msgstr "" - -#: keystone/common/utils.py:239 -msgid "" -"Error setting up the debug environment. 
Verify that the option --debug-url " -"has the format : and that a debugger processes is listening on " -"that port." -msgstr "" - -#: keystone/common/cache/core.py:100 -#, python-format -msgid "" -"Unable to build cache config-key. Expected format \":\". " -"Skipping unknown format: %s" -msgstr "" - -#: keystone/common/environment/eventlet_server.py:99 -#, python-format -msgid "Could not bind to %(host)s:%(port)s" -msgstr "Impossible fare il bind verso %(host)s:%(port)s" - -#: keystone/common/environment/eventlet_server.py:185 -msgid "Server error" -msgstr "Errore del server" - -#: keystone/contrib/endpoint_policy/core.py:129 -#: keystone/contrib/endpoint_policy/core.py:228 -#, python-format -msgid "" -"Circular reference or a repeated entry found in region tree - %(region_id)s." -msgstr "" - -#: keystone/contrib/federation/idp.py:410 -#, python-format -msgid "Error when signing assertion, reason: %(reason)s" -msgstr "" - -#: keystone/contrib/oauth1/core.py:136 -msgid "Cannot retrieve Authorization headers" -msgstr "" - -#: keystone/openstack/common/loopingcall.py:95 -msgid "in fixed duration looping call" -msgstr "chiamata in loop a durata fissa" - -#: keystone/openstack/common/loopingcall.py:138 -msgid "in dynamic looping call" -msgstr "chiamata in loop dinamico" - -#: keystone/openstack/common/service.py:268 -msgid "Unhandled exception" -msgstr "Eccezione non gestita" - -#: keystone/resource/core.py:477 -#, python-format -msgid "" -"Circular reference or a repeated entry found projects hierarchy - " -"%(project_id)s." -msgstr "" - -#: keystone/resource/core.py:939 -#, python-format -msgid "" -"Unexpected results in response for domain config - %(count)s responses, " -"first option is %(option)s, expected option %(expected)s" -msgstr "" - -#: keystone/resource/backends/sql.py:102 keystone/resource/backends/sql.py:121 -#, python-format -msgid "" -"Circular reference or a repeated entry found in projects hierarchy - " -"%(project_id)s." 
-msgstr "" - -#: keystone/token/provider.py:292 -#, python-format -msgid "Unexpected error or malformed token determining token expiry: %s" -msgstr "" - -#: keystone/token/persistence/backends/kvs.py:226 -#, python-format -msgid "" -"Reinitializing revocation list due to error in loading revocation list from " -"backend. Expected `list` type got `%(type)s`. Old revocation list data: " -"%(list)r" -msgstr "" - -#: keystone/token/providers/common.py:611 -msgid "Failed to validate token" -msgstr "" - -#: keystone/token/providers/pki.py:47 -msgid "Unable to sign token" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:38 -#, python-format -msgid "" -"Either [fernet_tokens] key_repository does not exist or Keystone does not " -"have sufficient permission to access it: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:79 -msgid "" -"Failed to create [fernet_tokens] key_repository: either it already exists or " -"you don't have sufficient permissions to create it" -msgstr "" diff --git a/keystone-moon/keystone/locale/it/LC_MESSAGES/keystone-log-info.po b/keystone-moon/keystone/locale/it/LC_MESSAGES/keystone-log-info.po deleted file mode 100644 index b88a5de8..00000000 --- a/keystone-moon/keystone/locale/it/LC_MESSAGES/keystone-log-info.po +++ /dev/null @@ -1,211 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. 
-# -# Translators: -msgid "" -msgstr "" -"Project-Id-Version: Keystone\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/keystone\n" -"POT-Creation-Date: 2015-03-09 06:03+0000\n" -"PO-Revision-Date: 2015-03-07 04:31+0000\n" -"Last-Translator: openstackjenkins \n" -"Language-Team: Italian (http://www.transifex.com/projects/p/keystone/" -"language/it/)\n" -"Language: it\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 1.3\n" -"Plural-Forms: nplurals=2; plural=(n != 1);\n" - -#: keystone/assignment/core.py:250 -#, python-format -msgid "Creating the default role %s because it does not exist." -msgstr "" - -#: keystone/assignment/core.py:258 -#, python-format -msgid "Creating the default role %s failed because it was already created" -msgstr "" - -#: keystone/auth/controllers.py:64 -msgid "Loading auth-plugins by class-name is deprecated." -msgstr "" - -#: keystone/auth/controllers.py:106 -#, python-format -msgid "" -"\"expires_at\" has conflicting values %(existing)s and %(new)s. Will use " -"the earliest value." 
-msgstr "" - -#: keystone/common/openssl.py:81 -#, python-format -msgid "Running command - %s" -msgstr "" - -#: keystone/common/wsgi.py:79 -msgid "No bind information present in token" -msgstr "" - -#: keystone/common/wsgi.py:83 -#, python-format -msgid "Named bind mode %s not in bind information" -msgstr "" - -#: keystone/common/wsgi.py:90 -msgid "Kerberos credentials required and not present" -msgstr "" - -#: keystone/common/wsgi.py:94 -msgid "Kerberos credentials do not match those in bind" -msgstr "" - -#: keystone/common/wsgi.py:98 -msgid "Kerberos bind authentication successful" -msgstr "" - -#: keystone/common/wsgi.py:105 -#, python-format -msgid "Couldn't verify unknown bind: {%(bind_type)s: %(identifier)s}" -msgstr "" - -#: keystone/common/environment/eventlet_server.py:103 -#, python-format -msgid "Starting %(arg0)s on %(host)s:%(port)s" -msgstr "Avvio %(arg0)s in %(host)s:%(port)s" - -#: keystone/common/kvs/core.py:138 -#, python-format -msgid "Adding proxy '%(proxy)s' to KVS %(name)s." -msgstr "" - -#: keystone/common/kvs/core.py:188 -#, python-format -msgid "Using %(func)s as KVS region %(name)s key_mangler" -msgstr "" - -#: keystone/common/kvs/core.py:200 -#, python-format -msgid "Using default dogpile sha1_mangle_key as KVS region %s key_mangler" -msgstr "" - -#: keystone/common/kvs/core.py:210 -#, python-format -msgid "KVS region %s key_mangler disabled." 
-msgstr "" - -#: keystone/contrib/example/core.py:64 keystone/contrib/example/core.py:73 -#, python-format -msgid "" -"Received the following notification: service %(service)s, resource_type: " -"%(resource_type)s, operation %(operation)s payload %(payload)s" -msgstr "" - -#: keystone/openstack/common/eventlet_backdoor.py:146 -#, python-format -msgid "Eventlet backdoor listening on %(port)s for process %(pid)d" -msgstr "Ascolto di eventlet backdoor su %(port)s per il processo %(pid)d" - -#: keystone/openstack/common/service.py:173 -#, python-format -msgid "Caught %s, exiting" -msgstr "Rilevato %s, esistente" - -#: keystone/openstack/common/service.py:231 -msgid "Parent process has died unexpectedly, exiting" -msgstr "Il processo principale è stato interrotto inaspettatamente, uscire" - -#: keystone/openstack/common/service.py:262 -#, python-format -msgid "Child caught %s, exiting" -msgstr "Cogliere Child %s, uscendo" - -#: keystone/openstack/common/service.py:301 -msgid "Forking too fast, sleeping" -msgstr "Sblocco troppo veloce, attendere" - -#: keystone/openstack/common/service.py:320 -#, python-format -msgid "Started child %d" -msgstr "Child avviato %d" - -#: keystone/openstack/common/service.py:330 -#, python-format -msgid "Starting %d workers" -msgstr "Avvio %d operatori" - -#: keystone/openstack/common/service.py:347 -#, python-format -msgid "Child %(pid)d killed by signal %(sig)d" -msgstr "Child %(pid)d interrotto dal segnale %(sig)d" - -#: keystone/openstack/common/service.py:351 -#, python-format -msgid "Child %(pid)s exited with status %(code)d" -msgstr "Child %(pid)s terminato con stato %(code)d" - -#: keystone/openstack/common/service.py:390 -#, python-format -msgid "Caught %s, stopping children" -msgstr "Intercettato %s, arresto in corso dei children" - -#: keystone/openstack/common/service.py:399 -msgid "Wait called after thread killed. Cleaning up." 
-msgstr "" - -#: keystone/openstack/common/service.py:415 -#, python-format -msgid "Waiting on %d children to exit" -msgstr "In attesa %d degli elementi secondari per uscire" - -#: keystone/token/persistence/backends/sql.py:279 -#, python-format -msgid "Total expired tokens removed: %d" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:72 -msgid "" -"[fernet_tokens] key_repository does not appear to exist; attempting to " -"create it" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:130 -#, python-format -msgid "Created a new key: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:143 -msgid "Key repository is already initialized; aborting." -msgstr "" - -#: keystone/token/providers/fernet/utils.py:179 -#, python-format -msgid "Starting key rotation with %(count)s key files: %(list)s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:185 -#, python-format -msgid "Current primary key is: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:187 -#, python-format -msgid "Next primary key will be: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:197 -#, python-format -msgid "Promoted key 0 to be the primary: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:213 -#, python-format -msgid "Excess keys to purge: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:237 -#, python-format -msgid "Loaded %(count)s encryption keys from: %(dir)s" -msgstr "" diff --git a/keystone-moon/keystone/locale/it/LC_MESSAGES/keystone.po b/keystone-moon/keystone/locale/it/LC_MESSAGES/keystone.po deleted file mode 100644 index bf854577..00000000 --- a/keystone-moon/keystone/locale/it/LC_MESSAGES/keystone.po +++ /dev/null @@ -1,1631 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# Remo Mattei , 2016. 
#zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2016-04-28 07:07+0000\n" -"Last-Translator: Alessandra \n" -"Language: it\n" -"Plural-Forms: nplurals=2; plural=(n != 1);\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Italian\n" - -#, python-format -msgid "%(detail)s" -msgstr "%(detail)s" - -#, python-format -msgid "%(driver)s is not supported driver version" -msgstr "%(driver)s non è una versione driver supportata" - -#, python-format -msgid "" -"%(entity)s name cannot contain the following reserved characters: %(chars)s" -msgstr "Il nome %(entity)s non può contenere caratteri riservati: %(chars)s" - -#, python-format -msgid "" -"%(event)s is not a valid notification event, must be one of: %(actions)s" -msgstr "" -"%(event)s non è un evento di notifica valido, deve essere uno tra: " -"%(actions)s" - -#, python-format -msgid "%(host)s is not a trusted dashboard host" -msgstr "%(host)s non è un host di dashboard attendibile" - -#, python-format -msgid "%(message)s %(amendment)s" -msgstr "%(message)s %(amendment)s" - -#, python-format -msgid "" -"%(mod_name)s doesn't provide database migrations. The migration repository " -"path at %(path)s doesn't exist or isn't a directory." -msgstr "" -"%(mod_name)s non fornisce le migrazioni del database. Il percorso del " -"repository di migrazione in %(path)s non esiste o non è una directory." - -#, python-format -msgid "%(prior_role_id)s does not imply %(implied_role_id)s" -msgstr "%(prior_role_id)s non implica %(implied_role_id)s" - -#, python-format -msgid "%(property_name)s cannot be less than %(min_length)s characters." -msgstr "%(property_name)s non può essere inferiore a %(min_length)s caratteri." 
- -#, python-format -msgid "%(property_name)s is not a %(display_expected_type)s" -msgstr "%(property_name)s non è un %(display_expected_type)s" - -#, python-format -msgid "%(property_name)s should not be greater than %(max_length)s characters." -msgstr "%(property_name)s non può essere superiore a %(max_length)s caratteri." - -#, python-format -msgid "%(role_id)s cannot be an implied roles" -msgstr "%(role_id)s non può essere un ruolo implicato" - -#, python-format -msgid "%s cannot be empty." -msgstr "%s non può essere vuoto." - -#, python-format -msgid "%s extension does not exist." -msgstr "L'estensione %s non esiste." - -#, python-format -msgid "%s field is required and cannot be empty" -msgstr "Il campo %s è obbligatorio e non può essere vuoto" - -#, python-format -msgid "%s field(s) cannot be empty" -msgstr "i campi %s non possono essere vuoti" - -#, python-format -msgid "" -"%s for the LDAP identity backend has been deprecated in the Mitaka release " -"in favor of read-only identity LDAP access. It will be removed in the \"O\" " -"release." -msgstr "" -"%s per il backend di 'identità LDAP è obsoleto nella release Mitaka rispetto " -"all'accesso LDAP di sola lettura. Verrà rimosso nella release \"O\"." - -msgid "(Disable insecure_debug mode to suppress these details.)" -msgstr "" -"(Disabilitare la modalità insecure_debug per eliminare questi dettagli)." - -msgid "--all option cannot be mixed with other options" -msgstr "--l'opzione all non può essere combinata con altre opzioni" - -msgid "A project-scoped token is required to produce a service catalog." -msgstr "" -"È necessario un token in ambito progetto per produrre un catalogo del " -"servizio." - -msgid "Access token is expired" -msgstr "Il token di accesso è scaduto" - -msgid "Access token not found" -msgstr "Token di accesso non trovato" - -msgid "Additional authentications steps required." -msgstr "Sono richiesti ulteriori passi per le autenticazioni." 
- -msgid "An unexpected error occurred when retrieving domain configs" -msgstr "" -"Si è verificato un errore non previsto durante il richiamo delle " -"configurazioni del dominio" - -#, python-format -msgid "An unexpected error occurred when trying to store %s" -msgstr "Si è verificato un errore quando si tenta di archiviare %s" - -msgid "An unexpected error prevented the server from fulfilling your request." -msgstr "" -"Si è verificato un errore non previsto che ha impedito al server di " -"soddisfare la richiesta." - -#, python-format -msgid "" -"An unexpected error prevented the server from fulfilling your request: " -"%(exception)s" -msgstr "" -"Si è verificato un errore imprevisto che impedisce al server di soddisfare " -"la richiesta: %(exception)s" - -msgid "An unhandled exception has occurred: Could not find metadata." -msgstr "" -"Si è verificata un'eccezione non gestita: impossibile trovare i metadati." - -msgid "At least one option must be provided" -msgstr "È necessario fornire almeno un'opzione" - -msgid "At least one option must be provided, use either --all or --domain-name" -msgstr "" -"È necessario fornire almeno un'opzione, utilizzare --all o --domain-name" - -msgid "At least one role should be specified." -msgstr "Specificare almeno un ruolo." - -#, python-format -msgid "" -"Attempted automatic driver selection for assignment based upon " -"[identity]\\driver option failed since driver %s is not found. Set " -"[assignment]/driver to a valid driver in keystone config." -msgstr "" -"Tentata selezione automatica del driver per l'assegnazione basata su " -"[identity]. Opzione \\driver non riuscita in quanto il driver %s non è stato " -"trovato. Impostare [assignment]/driver su un driver valido nella " -"configurazione keystone." - -msgid "Attempted to authenticate with an unsupported method." -msgstr "Tentativo di autenticazione con un metodo non supportato." 
- -msgid "" -"Attempting to use OS-FEDERATION token with V2 Identity Service, use V3 " -"Authentication" -msgstr "" -"Tentativo di utilizzare il token OS-FEDERATION con il servizio identità V2, " -"utilizzare l'autenticazione V3" - -msgid "Authentication plugin error." -msgstr "errore di autenticazione plugin." - -#, python-format -msgid "" -"Backend `%(backend)s` is not a valid memcached backend. Valid backends: " -"%(backend_list)s" -msgstr "" -"Il backend `%(backend)s` non è un backend memcached valido. Backend validi: " -"%(backend_list)s" - -msgid "Cannot authorize a request token with a token issued via delegation." -msgstr "" -"Impossibile autorizzare un token di richiesta con un token emesso mediante " -"delega." - -#, python-format -msgid "Cannot change %(option_name)s %(attr)s" -msgstr "Impossibile modificare %(option_name)s %(attr)s" - -msgid "Cannot change Domain ID" -msgstr "Impossibile modificare l'ID dominio" - -msgid "Cannot change user ID" -msgstr "Impossibile modificare l'ID utente" - -msgid "Cannot change user name" -msgstr "Impossibile modificare il nome utente" - -#, python-format -msgid "Cannot create an endpoint with an invalid URL: %(url)s" -msgstr "Impossibile creare un endpoint con un URL non valido: %(url)s" - -#, python-format -msgid "Cannot create project with parent: %(project_id)s" -msgstr "Impossibile creare il progetto con l'elemento parent: %(project_id)s" - -#, python-format -msgid "" -"Cannot create project, since it specifies its owner as domain %(domain_id)s, " -"but specifies a parent in a different domain (%(parent_domain_id)s)." -msgstr "" -"Impossibile creare un progetto in quanto specifica il relativo proprietario " -"come un dominio (%(domain_id)s) ma specifica un elemento parent in un altro " -"dominio (%(parent_domain_id)s)." 
- -#, python-format -msgid "" -"Cannot create project, since its parent (%(domain_id)s) is acting as a " -"domain, but project's specified parent_id (%(parent_id)s) does not match " -"this domain_id." -msgstr "" -"Impossibile creare un progetto in quanto il relativo parent (%(domain_id)s) " -"agisce come un dominio, ma l'id_parent (%(parent_id)s) specificato del " -"progetto non corrisponde all'id_dominio." - -msgid "Cannot delete a domain that is enabled, please disable it first." -msgstr "" -"Impossibile eliminare un dominio abilitato; è necessario prima disabilitarlo." - -#, python-format -msgid "" -"Cannot delete project %(project_id)s since its subtree contains enabled " -"projects." -msgstr "" -"Impossibile eliminare il progetto %(project_id)s perché la relativa " -"struttura ad albero secondaria contiene progetti abilitati." - -#, python-format -msgid "" -"Cannot delete the project %s since it is not a leaf in the hierarchy. Use " -"the cascade option if you want to delete a whole subtree." -msgstr "" -"Impossibile eliminare il progetto %s perché non è una foglia nella " -"gerarchia. Se si desidera eliminare un'intera struttura ad albero secondaria " -"utilizza l'opzione a catena." - -#, python-format -msgid "" -"Cannot disable project %(project_id)s since its subtree contains enabled " -"projects." -msgstr "" -"Impossibile disabilitare il progetto %(project_id)s perché la relativa " -"struttura ad albero secondaria contiene progetti abilitati." - -#, python-format -msgid "Cannot enable project %s since it has disabled parents" -msgstr "" -"Impossibile abilitare il progetto %s perché dispone di elementi parent " -"disabilitati" - -msgid "Cannot list assignments sourced from groups and filtered by user ID." -msgstr "" -"Impossibile elencare le assegnazione originate da gruppi e filtrate da ID " -"utente." - -msgid "Cannot list request tokens with a token issued via delegation." 
-msgstr "" -"Impossibile elencare i token della richiesta con un token emesso mediante " -"delega." - -#, python-format -msgid "Cannot open certificate %(cert_file)s. Reason: %(reason)s" -msgstr "Impossibile aprire il certificato %(cert_file)s. Motivo: %(reason)s" - -#, python-format -msgid "Cannot remove role that has not been granted, %s" -msgstr "Impossibile rimuovere un ruolo che non è stato concesso, %s" - -msgid "" -"Cannot truncate a driver call without hints list as first parameter after " -"self " -msgstr "" -"Impossibile troncare una chiamata al driver senza hints list come primo " -"parametro dopo self " - -msgid "Cannot update domain_id of a project that has children." -msgstr "Impossibile aggiornare domain_id di un progetto con elementi child." - -msgid "" -"Cannot use parents_as_list and parents_as_ids query params at the same time." -msgstr "" -"Impossibile utilizzare i parametri della query parents_as_list e " -"parents_as_ids contemporaneamente." - -msgid "" -"Cannot use subtree_as_list and subtree_as_ids query params at the same time." -msgstr "" -"Impossibile utilizzare i parametri della query subtree_as_list e " -"subtree_as_ids contemporaneamente." - -msgid "Cascade update is only allowed for enabled attribute." -msgstr "L'aggiornamento a catena è consentito solo per un attributo abilitato." - -msgid "" -"Combining effective and group filter will always result in an empty list." -msgstr "" -"La combinazione del filtro operativo e di gruppo avrà sempre come risultato " -"un elenco vuoto." - -msgid "" -"Combining effective, domain and inherited filters will always result in an " -"empty list." -msgstr "" -"La combinazione di filtri operativi, di dominio ed ereditati avrà sempre " -"come risultato un elenco vuoto." 
- -#, python-format -msgid "Config API entity at /domains/%s/config" -msgstr "Entità API config in /domains/%s/config" - -#, python-format -msgid "Conflict occurred attempting to store %(type)s - %(details)s" -msgstr "" -"Si è verificato un conflitto nel tentativo di archiviare %(type)s - " -"%(details)s" - -#, python-format -msgid "Conflicting region IDs specified: \"%(url_id)s\" != \"%(ref_id)s\"" -msgstr "" -"Sono stati specificati ID regione in conflitto: \"%(url_id)s\" != " -"\"%(ref_id)s\"" - -msgid "Consumer not found" -msgstr "Consumer non trovato" - -#, python-format -msgid "" -"Could not change immutable attribute(s) '%(attributes)s' in target %(target)s" -msgstr "" -"Impossibile modificare gli attributi non modificabili '%(attributes)s' nella " -"destinazione %(target)s" - -#, python-format -msgid "" -"Could not determine Identity Provider ID. The configuration option " -"%(issuer_attribute)s was not found in the request environment." -msgstr "" -"Impossibile determinare l'ID del provider di identità. L'opzione di " -"configurazione %(issuer_attribute)s non è stata trovata nell'ambiente di " -"richiesta. 
" - -#, python-format -msgid "" -"Could not find %(group_or_option)s in domain configuration for domain " -"%(domain_id)s" -msgstr "" -"Impossibile trovare %(group_or_option)s nella configurazione del dominio per " -"il dominio %(domain_id)s" - -#, python-format -msgid "Could not find Endpoint Group: %(endpoint_group_id)s" -msgstr "Impossibile trovare il gruppo di endpoint: %(endpoint_group_id)s" - -msgid "Could not find Identity Provider identifier in environment" -msgstr "" -"Impossibile trovare l'identificativo del provider identità nell'ambiente" - -#, python-format -msgid "Could not find Identity Provider: %(idp_id)s" -msgstr "Impossibile trovare il provider identità: %(idp_id)s" - -#, python-format -msgid "Could not find Service Provider: %(sp_id)s" -msgstr "Impossibile trovare il provider del servizio: %(sp_id)s" - -#, python-format -msgid "Could not find credential: %(credential_id)s" -msgstr "Impossibile trovare la credenziale: %(credential_id)s" - -#, python-format -msgid "Could not find domain: %(domain_id)s" -msgstr "Impossibile trovare il dominio: %(domain_id)s" - -#, python-format -msgid "Could not find endpoint: %(endpoint_id)s" -msgstr "Impossibile trovare l'endpoint: %(endpoint_id)s" - -#, python-format -msgid "" -"Could not find federated protocol %(protocol_id)s for Identity Provider: " -"%(idp_id)s" -msgstr "" -"Impossibile trovare il protocollo federato %(protocol_id)s per il provider " -"identità: %(idp_id)s" - -#, python-format -msgid "Could not find group: %(group_id)s" -msgstr "Impossibile trovare il gruppo: %(group_id)s" - -#, python-format -msgid "Could not find mapping: %(mapping_id)s" -msgstr "Impossibile trovare l'associazione: %(mapping_id)s" - -msgid "Could not find policy association" -msgstr "Impossibile trovare l'associazione della politica" - -#, python-format -msgid "Could not find policy: %(policy_id)s" -msgstr "Impossibile trovare la politica: %(policy_id)s" - -#, python-format -msgid "Could not find project: %(project_id)s" 
-msgstr "Impossibile trovare il progetto: %(project_id)s" - -#, python-format -msgid "Could not find region: %(region_id)s" -msgstr "Impossibile trovare la regione: %(region_id)s" - -#, python-format -msgid "" -"Could not find role assignment with role: %(role_id)s, user or group: " -"%(actor_id)s, project or domain: %(target_id)s" -msgstr "" -"Impossibile trovare l'assegnazione ruolo con il ruolo: %(role_id)s, utente o " -"gruppo: %(actor_id)s, progetto o dominio: %(target_id)s" - -#, python-format -msgid "Could not find role: %(role_id)s" -msgstr "Impossibile trovare il ruolo: %(role_id)s" - -#, python-format -msgid "Could not find service: %(service_id)s" -msgstr "Impossibile trovare il servizio: %(service_id)s" - -#, python-format -msgid "Could not find token: %(token_id)s" -msgstr "Impossibile trovare il token: %(token_id)s" - -#, python-format -msgid "Could not find trust: %(trust_id)s" -msgstr "Impossibile trovare il trust: %(trust_id)s" - -#, python-format -msgid "Could not find user: %(user_id)s" -msgstr "Impossibile trovare l'utente: %(user_id)s" - -#, python-format -msgid "Could not find version: %(version)s" -msgstr "Impossibile trovare la versione: %(version)s" - -#, python-format -msgid "Could not find: %(target)s" -msgstr "Impossibile trovare: %(target)s" - -msgid "" -"Could not map any federated user properties to identity values. Check debug " -"logs or the mapping used for additional details." -msgstr "" -"Impossibile associare le proprietà dell'utente federato per identificare i " -"valori. Controllare i log di debug o l'associazione utilizzata per ulteriori " -"dettagli." - -msgid "" -"Could not map user while setting ephemeral user identity. Either mapping " -"rules must specify user id/name or REMOTE_USER environment variable must be " -"set." -msgstr "" -"Impossibile associare l'utente durante l'impostazione dell'identità utente " -"temporanea. 
Le regole di associazione devono specificare nome/id utente o la " -"variabile di ambiente REMOTE_USER deve essereimpostata." - -msgid "Could not validate the access token" -msgstr "Impossibile convalidare il token di accesso" - -msgid "Credential belongs to another user" -msgstr "La credenziale appartiene ad un altro utente" - -msgid "Credential signature mismatch" -msgstr "Mancata corrispondenza della firma delle credenziali" - -#, python-format -msgid "" -"Direct import of auth plugin %(name)r is deprecated as of Liberty in favor " -"of its entrypoint from %(namespace)r and may be removed in N." -msgstr "" -"L'importazione diretta di auth plugin %(name)r è obsoleta a partire da " -"Liberty rispetto al relativo entrypoint da %(namespace)r e potrebbe essere " -"rimossa in N." - -#, python-format -msgid "" -"Direct import of driver %(name)r is deprecated as of Liberty in favor of its " -"entrypoint from %(namespace)r and may be removed in N." -msgstr "" -"L'importazione diretta del driver %(name)r è obsoleta a partire da Liberty " -"rispetto al relativo entrypoint da %(namespace)r e potrebbe essere rimossa " -"in N." - -msgid "" -"Disabling an entity where the 'enable' attribute is ignored by configuration." -msgstr "" -"Disabilitazione di un'entità in cui l'attributo 'enable' è ignorato dalla " -"configurazione." - -#, python-format -msgid "Domain (%s)" -msgstr "Dominio (%s)" - -#, python-format -msgid "Domain cannot be named %s" -msgstr "Il dominio non può essere denominato %s" - -#, python-format -msgid "Domain cannot have ID %s" -msgstr "Il dominio non può avere l'ID %s" - -#, python-format -msgid "Domain is disabled: %s" -msgstr "Il dominio è disabilitato: %s" - -msgid "Domain name cannot contain reserved characters." -msgstr "Il nome dominio non può contenere caratteri riservati." 
- -msgid "Domain scoped token is not supported" -msgstr "L'ambito del dominio token non è supportato" - -msgid "Domain specific roles are not supported in the V8 role driver" -msgstr "Ruoli specifici di dominio non sono supportati nel driver ruolo V8" - -#, python-format -msgid "" -"Domain: %(domain)s already has a configuration defined - ignoring file: " -"%(file)s." -msgstr "" -"Il dominio: %(domain)s dispone già di una configurazione definita - si sta " -"ignorando il file: %(file)s." - -msgid "Duplicate Entry" -msgstr "Duplica voce" - -#, python-format -msgid "Duplicate ID, %s." -msgstr "ID duplicato, %s." - -#, python-format -msgid "Duplicate entry: %s" -msgstr "Voce duplicata: %s" - -#, python-format -msgid "Duplicate name, %s." -msgstr "Nome duplicato, %s." - -#, python-format -msgid "Duplicate remote ID: %s" -msgstr "ID remoto duplicato: %s" - -msgid "EC2 access key not found." -msgstr "Chiave di accesso EC2 non trovata." - -msgid "EC2 signature not supplied." -msgstr "Firma EC2 non fornita." - -msgid "" -"Either --bootstrap-password argument or OS_BOOTSTRAP_PASSWORD must be set." -msgstr "" -"Entrambi gli argomenti bootstrap-password o OS_BOOTSTRAP_PASSWORD devono " -"essere impostati." - -msgid "Enabled field must be a boolean" -msgstr "Il campo Abilitato deve essere un valore booleano" - -msgid "Enabled field should be a boolean" -msgstr "Il campo Abilitato deve essere un valore booleano" - -#, python-format -msgid "Endpoint %(endpoint_id)s not found in project %(project_id)s" -msgstr "Endpoint %(endpoint_id)s non trovato nel progetto %(project_id)s" - -msgid "Endpoint Group Project Association not found" -msgstr "Associazione al progetto del gruppo di endpoint non trovata" - -msgid "Ensure configuration option idp_entity_id is set." -msgstr "" -"Accertarsi che l'opzione di configurazione idp_entity_id sia impostata." - -msgid "Ensure configuration option idp_sso_endpoint is set." 
-msgstr "" -"Accertarsi che l'opzione di configurazione idp_sso_endpoint sia impostata." - -#, python-format -msgid "" -"Error parsing configuration file for domain: %(domain)s, file: %(file)s." -msgstr "" -"Errore durante l'analisi del file di configurazione per il dominio: " -"%(domain)s, file: %(file)s." - -#, python-format -msgid "Error while opening file %(path)s: %(err)s" -msgstr "Errore durante l'apertura del file %(path)s: %(err)s" - -#, python-format -msgid "Error while parsing line: '%(line)s': %(err)s" -msgstr "Errore durante l'analisi della riga: '%(line)s': %(err)s" - -#, python-format -msgid "Error while parsing rules %(path)s: %(err)s" -msgstr "Errore durante l'analisi delle regole %(path)s: %(err)s" - -#, python-format -msgid "Error while reading metadata file, %(reason)s" -msgstr "Errore durante le lettura del file di metadati, %(reason)s" - -#, python-format -msgid "" -"Exceeded attempts to register domain %(domain)s to use the SQL driver, the " -"last domain that appears to have had it is %(last_domain)s, giving up" -msgstr "" -"Superato il numero di tentativi per registrare il dominio %(domain)s al fine " -"di utilizzare il driver SQL, l'ultimo dominio che sembra avere avuto quel " -"driver è %(last_domain)s, operazione terminata" - -#, python-format -msgid "Expected dict or list: %s" -msgstr "Previsto dict o list: %s" - -msgid "" -"Expected signing certificates are not available on the server. Please check " -"Keystone configuration." -msgstr "" -"I certificati di firma previsti non sono disponibili sul server. Controllare " -"la configurazione Keystone." - -#, python-format -msgid "" -"Expecting to find %(attribute)s in %(target)s - the server could not comply " -"with the request since it is either malformed or otherwise incorrect. The " -"client is assumed to be in error." -msgstr "" -"previsto di trovare %(attribute)s in %(target)s - il server non è in grado " -"di soddisfare la richiesta perché non è valido o non è corretto. 
Si ritiene " -"che il client sia in errore." - -#, python-format -msgid "Failed to start the %(name)s server" -msgstr "Impossibile avviare il server %(name)s" - -msgid "Failed to validate token" -msgstr "Impossibile convalidare il token" - -msgid "Federation token is expired" -msgstr "Il token comune è scaduto" - -#, python-format -msgid "" -"Field \"remaining_uses\" is set to %(value)s while it must not be set in " -"order to redelegate a trust" -msgstr "" -"Il campo \"remaining_uses\" è impostato su %(value)s mentre non deve essere " -"impostato per assegnare una nuova delega ad un trust" - -msgid "Found invalid token: scoped to both project and domain." -msgstr "trovato token non valido: in ambito sia di progetto che di dominio." - -#, python-format -msgid "Group %s not found in config" -msgstr "Gruppo %s non trovato in config" - -#, python-format -msgid "Group %(group)s is not supported for domain specific configurations" -msgstr "" -"Il gruppo %(group)s non è supportato per le configurazioni specifiche del " -"dominio" - -#, python-format -msgid "" -"Group %(group_id)s returned by mapping %(mapping_id)s was not found in the " -"backend." -msgstr "" -"Il gruppo %(group_id)s restituito dall'associazione %(mapping_id)s non è " -"stato trovato nel backend." - -#, python-format -msgid "" -"Group membership across backend boundaries is not allowed, group in question " -"is %(group_id)s, user is %(user_id)s" -msgstr "" -"L'appartenenza al gruppo tra i limiti di backend non è consentita, il gruppo " -"in questione è %(group_id)s, l'utente è %(user_id)s" - -#, python-format -msgid "ID attribute %(id_attr)s not found in LDAP object %(dn)s" -msgstr "Attributo ID %(id_attr)s non trovato nell'oggetto LDAP %(dn)s" - -#, python-format -msgid "Identity Provider %(idp)s is disabled" -msgstr "Il provider identità %(idp)s è disabilitato" - -msgid "" -"Incoming identity provider identifier not included among the accepted " -"identifiers." 
-msgstr "" -"L'identificativo del provider identità in entrata non è incluso tra gli " -"identificativi accettati." - -msgid "Invalid EC2 signature." -msgstr "Firma EC2 non valida." - -#, python-format -msgid "Invalid LDAP TLS certs option: %(option)s. Choose one of: %(options)s" -msgstr "" -"Opzione certificazioni (certs) LDAP TLS non valida: %(option)s. Scegliere " -"una delle seguenti: %(options)s" - -#, python-format -msgid "Invalid LDAP TLS_AVAIL option: %s. TLS not available" -msgstr "Opzione LDAP TLS_AVAIL non valida: %s. TLS non disponibile" - -#, python-format -msgid "Invalid LDAP deref option: %(option)s. Choose one of: %(options)s" -msgstr "" -"Opzione deref LDAP non valida: %(option)s. Scegliere una tra: %(options)s" - -#, python-format -msgid "Invalid LDAP scope: %(scope)s. Choose one of: %(options)s" -msgstr "" -"Ambito LDAP non valido: %(scope)s. Scegliere uno dei seguenti: %(options)s" - -msgid "Invalid TLS / LDAPS combination" -msgstr "Combinazione TLS / LDAPS non valida" - -#, python-format -msgid "Invalid audit info data type: %(data)s (%(type)s)" -msgstr "" -"Tipo di dati delle informazioni di verifica non valido: %(data)s (%(type)s)" - -msgid "Invalid blob in credential" -msgstr "Blob non valido nella credenziale" - -#, python-format -msgid "" -"Invalid domain name: %(domain)s found in config file name: %(file)s - " -"ignoring this file." -msgstr "" -"Nome dominio non valido: %(domain)s trovato nel nome file di configurazione: " -"%(file)s - si sta ignorando questo file." - -#, python-format -msgid "Invalid domain specific configuration: %(reason)s" -msgstr "Configurazione specifica del dominio non valida: %(reason)s" - -#, python-format -msgid "Invalid input for field '%(path)s'. The value is '%(value)s'." -msgstr "Input non valido per il campo '%(path)s'. Il valore è '%(value)s'." 
- -msgid "Invalid limit value" -msgstr "Valore del limite non valido" - -#, python-format -msgid "" -"Invalid mix of entities for policy association - only Endpoint, Service or " -"Region+Service allowed. Request was - Endpoint: %(endpoint_id)s, Service: " -"%(service_id)s, Region: %(region_id)s" -msgstr "" -"combinazione di entità non valida per l'associazione della politica - È " -"consentito solo endpoint, servizio o regione+servizio. La richiesta era - " -"Endpoint: %(endpoint_id)s, Servizio: %(service_id)s, Regione: %(region_id)s" - -#, python-format -msgid "" -"Invalid rule: %(identity_value)s. Both 'groups' and 'domain' keywords must " -"be specified." -msgstr "" -"Regola non valida: %(identity_value)s. Entrambi le parole chiave 'groups' e " -"'domain' devono essere specificate." - -msgid "Invalid signature" -msgstr "Firma non valida" - -msgid "Invalid user / password" -msgstr "Utente/password non validi" - -msgid "Invalid username or TOTP passcode" -msgstr "username o passcode TOTP non validi" - -msgid "Invalid username or password" -msgstr "username o password non validi" - -#, python-format -msgid "KVS region %s is already configured. Cannot reconfigure." -msgstr "La regione KVS %s è già configurata. Impossibile riconfigurare." - -#, python-format -msgid "Key Value Store not configured: %s" -msgstr "KVS (Key Value Store) non configurato: %s" - -#, python-format -msgid "LDAP %s create" -msgstr "LDAP %s crea" - -#, python-format -msgid "LDAP %s delete" -msgstr "LDAP %s elimina" - -#, python-format -msgid "LDAP %s update" -msgstr "LDAP %s aggiorna" - -msgid "" -"Length of transformable resource id > 64, which is max allowed characters" -msgstr "" -"La lunghezza dell'id risorsa trasformabile è > 64, che rappresenta il numero " -"massimo di caratteri consentiti" - -#, python-format -msgid "" -"Local section in mapping %(mapping_id)s refers to a remote match that " -"doesn't exist (e.g. {0} in a local section)." 
-msgstr "" -"La sezione locale nell'associazione %(mapping_id)s si riferisce ad una " -"corrispondenza remota che non esiste (ad esempio {0} in una sezione locale)." - -#, python-format -msgid "Lock Timeout occurred for key, %(target)s" -msgstr "Si è verificato un timeout di blocco per la chiave, %(target)s" - -#, python-format -msgid "Lock key must match target key: %(lock)s != %(target)s" -msgstr "" -"La chiave di blocco deve corrispondere alla chiave di destinazione: " -"%(lock)s != %(target)s" - -#, python-format -msgid "Malformed endpoint URL (%(endpoint)s), see ERROR log for details." -msgstr "" -"Url dell'endpoint non corretto (%(endpoint)s), consultare il log ERROR per " -"ulteriori dettagli." - -msgid "Marker could not be found" -msgstr "Impossibile trovare l'indicatore" - -#, python-format -msgid "Max hierarchy depth reached for %s branch." -msgstr "Profondità massima della gerarchia raggiunta per il ramo %s." - -#, python-format -msgid "Maximum lock attempts on %s occurred." -msgstr "È stato raggiunto il numero massimo di tentativi di blocco su %s." - -#, python-format -msgid "Member %(member)s is already a member of group %(group)s" -msgstr "Il membro %(member)s è già un membro del gruppo %(group)s" - -#, python-format -msgid "Method not callable: %s" -msgstr "Metodo non richiamabile: %s" - -msgid "Missing entity ID from environment" -msgstr "ID entità mancante dall'ambiente" - -msgid "" -"Modifying \"redelegation_count\" upon redelegation is forbidden. Omitting " -"this parameter is advised." -msgstr "" -"La modifica di \"redelegation_count\" dopo la riassegnazione della delega " -"non è consentita. Si consiglia di omettere questo parametro." - -msgid "Multiple domains are not supported" -msgstr "Non sono supportati più domini" - -msgid "Must be called within an active lock context." -msgstr "Deve essere richiamato all'interno di un contesto di blocco attivo." 
- -msgid "Must specify either domain or project" -msgstr "È necessario specificare il dominio o il progetto" - -msgid "Name field is required and cannot be empty" -msgstr "Il campo relativo al nome è obbligatorio e non può essere vuoto" - -msgid "Neither Project Domain ID nor Project Domain Name was provided." -msgstr "" -"Non è stato fornito l'ID dominio progetto né il nome dominio progetto. " - -msgid "" -"No Authorization headers found, cannot proceed with OAuth related calls, if " -"running under HTTPd or Apache, ensure WSGIPassAuthorization is set to On." -msgstr "" -"Nessuna intestazione di autorizzazione trovata, impossibile procedere con le " -"chiamate correlate a OAuth, se l'esecuzione avviene in ambito HTTPd o " -"Apache, assicurarsi che WSGIPassAuthorization sia impostato su Attivo." - -msgid "No authenticated user" -msgstr "Nessun utente autenticato" - -msgid "" -"No encryption keys found; run keystone-manage fernet_setup to bootstrap one." -msgstr "" -"Nessuna chiave di codifica trovata; eseguire keystone-manage fernet_setup " -"per eseguire un avvio." - -msgid "No options specified" -msgstr "Nessuna opzione specificata" - -#, python-format -msgid "No policy is associated with endpoint %(endpoint_id)s." -msgstr "Nessuna politica associata all'endpoint %(endpoint_id)s." 
- -#, python-format -msgid "No remaining uses for trust: %(trust_id)s" -msgstr "Nessun utilizzo residuo per trust: %(trust_id)s" - -msgid "No token in the request" -msgstr "Nessun token nella richiesta" - -msgid "Non-default domain is not supported" -msgstr "Il dominio non predefinito non è supportato" - -msgid "One of the trust agents is disabled or deleted" -msgstr "Uno degli agent trust è disabilitato o eliminato" - -#, python-format -msgid "" -"Option %(option)s found with no group specified while checking domain " -"configuration request" -msgstr "" -"L'opzione %(option)s è stato trovato senza alcun gruppo specificato durante " -"il controllo della richiesta di configurazione del dominio" - -#, python-format -msgid "" -"Option %(option)s in group %(group)s is not supported for domain specific " -"configurations" -msgstr "" -"L'opzione %(option)s nel gruppo %(group)s non è supportata per le " -"configurazioni specifiche del dominio" - -#, python-format -msgid "Project (%s)" -msgstr "Progetto (%s)" - -#, python-format -msgid "Project ID not found: %(t_id)s" -msgstr "ID progetto non trovato: %(t_id)s " - -msgid "Project field is required and cannot be empty." -msgstr "Il campo progetto è obbligatorio e non può essere vuoto." - -#, python-format -msgid "Project is disabled: %s" -msgstr "Il progetto è disabilitato: %s" - -msgid "Project name cannot contain reserved characters." -msgstr "Il nome progetto non può contenere caratteri riservati." 
- -msgid "Query string is not UTF-8 encoded" -msgstr "La stringa di query non è codificata in UTF-8 " - -#, python-format -msgid "" -"Reading the default for option %(option)s in group %(group)s is not supported" -msgstr "" -"La lettura dell'impostazione predefinita per l'opzione %(option)s nel gruppo " -"%(group)s non è supportata" - -msgid "Redelegation allowed for delegated by trust only" -msgstr "" -"Assegnazione di una nuova delega consentita solo per i delegati dal trust" - -#, python-format -msgid "" -"Remaining redelegation depth of %(redelegation_depth)d out of allowed range " -"of [0..%(max_count)d]" -msgstr "" -"profondità di riassegnazione della delega rimanente %(redelegation_depth)d " -"non compresa nell'intervallo consentito [0..%(max_count)d]" - -msgid "" -"Remove admin_crud_extension from the paste pipeline, the admin_crud " -"extension is now always available. Updatethe [pipeline:admin_api] section in " -"keystone-paste.ini accordingly, as it will be removed in the O release." -msgstr "" -"Rimuovere admin_crud_extension dalla pipeline paste, l'estensione admin_crud " -"è ora sempre disponibile. Aggiornare la sezione [pipeline:admin_api] in " -"keystone-paste.ini di conseguenza, in quanto verrà rimossa nella release O." - -msgid "" -"Remove endpoint_filter_extension from the paste pipeline, the endpoint " -"filter extension is now always available. Update the [pipeline:api_v3] " -"section in keystone-paste.ini accordingly as it will be removed in the O " -"release." -msgstr "" -"Rimuovere endpoint_filter_extension dalla pipeline paste, l'estensione del " -"filtro di endpoint è ora sempre disponibile. Aggiornare la sezione [pipeline:" -"api_v3] in keystone-paste.ini di conseguenza, in quanto verrà rimossa nella " -"release O." - -msgid "" -"Remove federation_extension from the paste pipeline, the federation " -"extension is now always available. 
Update the [pipeline:api_v3] section in " -"keystone-paste.ini accordingly, as it will be removed in the O release." -msgstr "" -"Rimuovere federation_extension dalla pipeline paste, l'estensione federation " -"è ora sempre disponibile. Aggiornare la sezione [pipeline:api_v3] in " -"keystone-paste.ini di conseguenza, in quanto verrà rimossa nella release O." - -msgid "" -"Remove oauth1_extension from the paste pipeline, the oauth1 extension is now " -"always available. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"Rimuovere oauth1_extension dalla pipeline paste, l'estensione oauth1 è ora " -"sempre disponibile. Aggiornare la sezione [pipeline:api_v3] in keystone-" -"paste.ini di conseguenza, in quanto verrà rimossa nella release O." - -msgid "" -"Remove revoke_extension from the paste pipeline, the revoke extension is now " -"always available. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"Rimuovere revoke_extension dalla pipeline paste, l'estensione revoke è ora " -"sempre disponibile. Aggiornare la sezione [pipeline:api_v3] in keystone-" -"paste.ini di conseguenza, in quanto verrà rimossa nella release O." - -msgid "" -"Remove simple_cert from the paste pipeline, the PKI and PKIz token providers " -"are now deprecated and simple_cert was only used insupport of these token " -"providers. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"Rimuovere simple_cert dalla pipeline paste, i provider di token PKI e PKIz " -"sono ora obsoleti e simple_cert è stato utilizzato solo in supporto di " -"questi provider di token. Aggiornare la sezione [pipeline:api_v3] in " -"keystone-paste.ini di conseguenza, in quanto verrà rimossa nella release O." 
- -msgid "" -"Remove user_crud_extension from the paste pipeline, the user_crud extension " -"is now always available. Updatethe [pipeline:public_api] section in keystone-" -"paste.ini accordingly, as it will be removed in the O release." -msgstr "" -"Rimuovere user_crud_extension dalla pipeline paste, l'estensione user_crud è " -"ora sempre disponibile. Aggiornare la sezione [pipeline:admin_api] in " -"keystone-paste.ini di conseguenza, in quanto verrà rimossa nella release O." - -msgid "Request Token does not have an authorizing user id" -msgstr "" -"Il token della richiesta non dispone di un id utente per l'autorizzazione" - -#, python-format -msgid "" -"Request attribute %(attribute)s must be less than or equal to %(size)i. The " -"server could not comply with the request because the attribute size is " -"invalid (too large). The client is assumed to be in error." -msgstr "" -"L'attributo della richiesta %(attribute)s deve essere minore o uguale a " -"%(size)i. Il server non è riuscito a soddisfare la richiesta poiché la " -"dimensione dell'attributo non è valido (troppo grande). Si ritiene che il " -"client sia in errore." 
- -msgid "Request must have an origin query parameter" -msgstr "La richiesta deve avere un parametro della query di origine" - -msgid "Request token is expired" -msgstr "Il token della richiesta è scaduto" - -msgid "Request token not found" -msgstr "token della richiesta non trovata" - -msgid "Requested expiration time is more than redelegated trust can provide" -msgstr "" -"Il tempo di scadenza richiesto è maggiore di quello che può essere fornito " -"dal trust con delega riassegnata" - -#, python-format -msgid "" -"Requested redelegation depth of %(requested_count)d is greater than allowed " -"%(max_count)d" -msgstr "" -"La profondità di riassegnazione della delega richiesta %(requested_count)d è " -"maggiore del valore consentito %(max_count)d" - -msgid "" -"Running keystone via eventlet is deprecated as of Kilo in favor of running " -"in a WSGI server (e.g. mod_wsgi). Support for keystone under eventlet will " -"be removed in the \"M\"-Release." -msgstr "" -"L'esecuzione del keystone via eventlet è obsoleta in Kilo, rispetto " -"all'esecuzione in un server WSGI (ad esempio mod_wsgi). Il supporto per il " -"keystone in eventlet verrà rimosso in \"M\"-Release." 
- -msgid "Scoping to both domain and project is not allowed" -msgstr "Il controllo sia del dominio che del progetto non è consentito" - -msgid "Scoping to both domain and trust is not allowed" -msgstr "Il controllo sia del dominio che di trust non è consentito" - -msgid "Scoping to both project and trust is not allowed" -msgstr "Il controllo sia delprogetto che di trust non è consentito" - -#, python-format -msgid "Service Provider %(sp)s is disabled" -msgstr "Il Provider del servizio %(sp)s è disabilitato" - -msgid "Some of requested roles are not in redelegated trust" -msgstr "" -"Alcuni dei ruoli richiesti non sono presenti nel trust con delega riassegnata" - -msgid "Specify a domain or project, not both" -msgstr "Specificare un dominio o un progetto, non entrambi" - -msgid "Specify a user or group, not both" -msgstr "Specificare un utente o un gruppo, non entrambi" - -msgid "Specify one of domain or project" -msgstr "Specificare uno valore di dominio o progetto" - -msgid "Specify one of user or group" -msgstr "Specificare un valore di utente o gruppo" - -#, python-format -msgid "" -"String length exceeded.The length of string '%(string)s' exceeded the limit " -"of column %(type)s(CHAR(%(length)d))." -msgstr "" -"È stata superata la lunghezza della stringa. La lunghezza della stringa " -"'%(string)s' ha superato il limite della colonna %(type)s(CHAR(%(length)d))." - -msgid "Tenant name cannot contain reserved characters." -msgstr "Il nome tenant non può contenere caratteri riservati." - -#, python-format -msgid "" -"The %s extension has been moved into keystone core and as such its " -"migrations are maintained by the main keystone database control. Use the " -"command: keystone-manage db_sync" -msgstr "" -"L'estensione %s è stata spostata nel keystone di base e le relative " -"migrazioni vengono mantenute dal controllo di database keystone principale. " -"Utilizzare il comando: keystone-manage db_sync" - -msgid "" -"The 'expires_at' must not be before now. 
The server could not comply with " -"the request since it is either malformed or otherwise incorrect. The client " -"is assumed to be in error." -msgstr "" -"'expires_at' non deve essere prima ora. Il server non è riuscito a " -"rispettare larichiesta perché è in formato errato o non corretta. Il client " -"viene considerato in errore." - -msgid "The --all option cannot be used with the --domain-name option" -msgstr "L'opzione --all non può essere utilizzata con l'opzione --domain-name" - -#, python-format -msgid "The Keystone configuration file %(config_file)s could not be found." -msgstr "" -"Impossibile trovare il file di configurazione Keystone %(config_file)s." - -#, python-format -msgid "" -"The Keystone domain-specific configuration has specified more than one SQL " -"driver (only one is permitted): %(source)s." -msgstr "" -"La configurazione specifica del dominio keystone ha specificato più di un " -"driver SQL (solo uno è consentito): %(source)s." - -msgid "The action you have requested has not been implemented." -msgstr "L'azione richiesta non è stata implementata." - -msgid "The authenticated user should match the trustor." -msgstr "L'utente autenticato deve corrispondere al ruolo trustor." - -msgid "" -"The certificates you requested are not available. It is likely that this " -"server does not use PKI tokens otherwise this is the result of " -"misconfiguration." -msgstr "" -"I certificati richiesti non sono disponibili. È probabile che questo server " -"non utilizzi i token PKI, altrimenti questo è il risultato di una " -"configurazione errata." - -msgid "The configured token provider does not support bind authentication." -msgstr "Il provider di token configurato non supporta l'autenticazione bind. " - -msgid "The creation of projects acting as domains is not allowed in v2." -msgstr "" -"La creazione di progetti che agiscono come domini non è consentita in v2. 
" - -#, python-format -msgid "" -"The password length must be less than or equal to %(size)i. The server could " -"not comply with the request because the password is invalid." -msgstr "" -"La lunghezza della password deve essere minore o uguale a %(size)i. Il " -"server non è in grado di soddisfare la richiesta perché la password non è " -"valida." - -msgid "The request you have made requires authentication." -msgstr "La richiesta che è stata fatta richiede l'autenticazione." - -msgid "The resource could not be found." -msgstr "Impossibile trovare la risorsa." - -msgid "" -"The revoke call must not have both domain_id and project_id. This is a bug " -"in the Keystone server. The current request is aborted." -msgstr "" -"La chiamata di revoca non deve avere entrambi domain_id e project_id. Questo " -"è un bug nel server Keystone. La richiesta corrente è stata interrotta." - -msgid "The service you have requested is no longer available on this server." -msgstr "Il servizio richiesto non è più disponibile su questo server." - -#, python-format -msgid "" -"The specified parent region %(parent_region_id)s would create a circular " -"region hierarchy." -msgstr "" -"La regione parent specificata %(parent_region_id)s crea una gerarchia di " -"regione circolare." - -#, python-format -msgid "" -"The value of group %(group)s specified in the config should be a dictionary " -"of options" -msgstr "" -"Il valore del gruppo %(group)s specificato nella configurazione deve essere " -"un dizionario di opzioni" - -msgid "There should not be any non-oauth parameters" -msgstr "Non deve essere presente nessun parametro non-oauth" - -#, python-format -msgid "This is not a recognized Fernet payload version: %s" -msgstr "Questa non è una versione di payload Fernet riconosciuta: %s" - -#, python-format -msgid "This is not a recognized Fernet token %s" -msgstr "Questo non è un token Fernet %s riconosciuto " - -msgid "" -"Timestamp not in expected format. 
The server could not comply with the " -"request since it is either malformed or otherwise incorrect. The client is " -"assumed to be in error." -msgstr "" -"Data/ora non nel formato previsto. Il server non è riuscito a rispettare la " -"richiesta perché è in formato errato o non corretta. Il client viene " -"considerato in errore." - -#, python-format -msgid "" -"To get a more detailed information on this error, re-run this command for " -"the specific domain, i.e.: keystone-manage domain_config_upload --domain-" -"name %s" -msgstr "" -"Per ottenere informazioni più dettagliate su questo errore, eseguire di " -"nuovo questo comando per il dominio specificato, ad esempio: keystone-manage " -"domain_config_upload --domain-name %s" - -msgid "Token belongs to another user" -msgstr "Il token appartiene ad un altro utente" - -msgid "Token does not belong to specified tenant." -msgstr "Il token non appartiene al tenant specificato." - -msgid "Token version is unrecognizable or unsupported." -msgstr "La versione token non è riconoscibile o non supportata. " - -msgid "Trustee has no delegated roles." -msgstr "Trustee non ha ruoli delegati." - -msgid "Trustor is disabled." -msgstr "Trustor è disabilitato." 
- -#, python-format -msgid "" -"Trying to update group %(group)s, so that, and only that, group must be " -"specified in the config" -msgstr "" -"Tentativo di aggiornare il gruppo %(group)s, pertanto, solo quel gruppo deve " -"essere specificato nella configurazione" - -#, python-format -msgid "" -"Trying to update option %(option)s in group %(group)s, but config provided " -"contains option %(option_other)s instead" -msgstr "" -"Tentativo di aggiornare l'opzione %(option)s nel gruppo %(group)s, ma la " -"configurazione fornita contiene l'opzione %(option_other)s" - -#, python-format -msgid "" -"Trying to update option %(option)s in group %(group)s, so that, and only " -"that, option must be specified in the config" -msgstr "" -"Tentativo di aggiornare l'opzione %(option)s nel gruppo %(group)s, pertanto, " -"solo quell'opzione deve essere specificata nella configurazione" - -msgid "" -"Unable to access the keystone database, please check it is configured " -"correctly." -msgstr "" -"Impossibile accedere al database del keystone, controllare se è configurato " -"correttamente." - -#, python-format -msgid "Unable to consume trust %(trust_id)s, unable to acquire lock." -msgstr "" -"Impossibile utilizzare trust %(trust_id)s, impossibile acquisire il blocco." - -#, python-format -msgid "" -"Unable to delete region %(region_id)s because it or its child regions have " -"associated endpoints." -msgstr "" -"Impossibile eliminare la regione %(region_id)s perché la regione o le " -"relative regioni child hanno degli endpoint associati." 
- -msgid "Unable to downgrade schema" -msgstr "Impossibile eseguire il downgrade dello schema" - -#, python-format -msgid "Unable to find valid groups while using mapping %(mapping_id)s" -msgstr "" -"Impossibile trovare i gruppi validi durante l'utilizzo dell'associazione " -"%(mapping_id)s" - -#, python-format -msgid "Unable to locate domain config directory: %s" -msgstr "Impossibile individuare la directory config del dominio: %s" - -#, python-format -msgid "Unable to lookup user %s" -msgstr "Impossibile eseguire la ricerca dell'utente %s" - -#, python-format -msgid "" -"Unable to reconcile identity attribute %(attribute)s as it has conflicting " -"values %(new)s and %(old)s" -msgstr "" -"Impossibile riconciliare l'attributo identity %(attribute)s poiché ha " -"valori in conflitto tra i %(new)s e i %(old)s" - -#, python-format -msgid "" -"Unable to sign SAML assertion. It is likely that this server does not have " -"xmlsec1 installed, or this is the result of misconfiguration. Reason " -"%(reason)s" -msgstr "" -"Impossibile firmare l'asserzione SAML. Probabilmente questo server non " -"dispone di xmlsec1 installato o è il risultato di una configurazione " -"sbagliata. Motivo %(reason)s" - -msgid "Unable to sign token." -msgstr "Impossibile firmare il token." 
- -#, python-format -msgid "Unexpected assignment type encountered, %s" -msgstr "È stato rilevato un tipo di assegnazione non previsto, %s" - -#, python-format -msgid "" -"Unexpected combination of grant attributes - User: %(user_id)s, Group: " -"%(group_id)s, Project: %(project_id)s, Domain: %(domain_id)s" -msgstr "" -"Combinazione non prevista degli attributi di autorizzazione - Utente: " -"%(user_id)s, Gruppo: %(group_id)s, Progetto: %(project_id)s, Dominio: " -"%(domain_id)s" - -#, python-format -msgid "Unexpected status requested for JSON Home response, %s" -msgstr "Stato non previsto richiesto per la risposta JSON Home, %s" - -msgid "Unknown Target" -msgstr "Destinazione sconosciuta" - -#, python-format -msgid "Unknown domain '%(name)s' specified by --domain-name" -msgstr "Dominio sconosciuto '%(name)s' specificato da --domain-name" - -#, python-format -msgid "Unknown token version %s" -msgstr "Versione di token sconosciuta %s" - -#, python-format -msgid "Unregistered dependency: %(name)s for %(targets)s" -msgstr "Dipendenza non registrata: %(name)s per %(targets)s" - -msgid "Update of `domain_id` is not allowed." -msgstr "Aggiornamento di `domain_id` non consentito." - -msgid "Update of `is_domain` is not allowed." -msgstr "Aggiornamento di `is_domain` non consentito." - -msgid "Update of `parent_id` is not allowed." -msgstr "Aggiornamento di `parent_id` non consentito." - -msgid "Update of domain_id is only allowed for root projects." -msgstr "L'aggiornamento di domain_id è consentito solo per progetti root." - -msgid "Update of domain_id of projects acting as domains is not allowed." -msgstr "" -"L'aggiornamento di domain_id di progetti che agiscono come domini non è " -"consentito." 
- -msgid "Use a project scoped token when attempting to create a SAML assertion" -msgstr "" -"Utilizzare un token nell'ambito del progetto quando si tenta di creare " -"un'asserzione SAML" - -msgid "" -"Use of the identity driver config to automatically configure the same " -"assignment driver has been deprecated, in the \"O\" release, the assignment " -"driver will need to be expicitly configured if different than the default " -"(SQL)." -msgstr "" -"Utilizzare la configurazione del driver di identità per configurare " -"automaticamente la stessa assegnazione. Il driver è obsoleto nella release " -"\"O\". Il driver di assegnazione dovrà essere configurato esplicitamente se " -"diverso dal driver predefinito (SQL)." - -#, python-format -msgid "User %(u_id)s is unauthorized for tenant %(t_id)s" -msgstr "L'utente %(u_id)s non è autorizzato per il tenant %(t_id)s" - -#, python-format -msgid "User %(user_id)s has no access to domain %(domain_id)s" -msgstr "L'utente %(user_id)s non ha accesso al dominio %(domain_id)s" - -#, python-format -msgid "User %(user_id)s has no access to project %(project_id)s" -msgstr "L'utente %(user_id)s non ha accesso al progetto %(project_id)s" - -#, python-format -msgid "User %(user_id)s is already a member of group %(group_id)s" -msgstr "L'utente %(user_id)s è già membro del gruppo %(group_id)s" - -#, python-format -msgid "User '%(user_id)s' not found in group '%(group_id)s'" -msgstr "L'utente '%(user_id)s' non è stato trovato nel gruppo '%(group_id)s'" - -msgid "User IDs do not match" -msgstr "Gli ID utente non corrispondono" - -msgid "" -"User auth cannot be built due to missing either user id, or user name with " -"domain id, or user name with domain name." -msgstr "" -"L'autorizzazione utente non può essere creata perché manca l'id utente o il " -"nome utente con l'id dominio o il nome utente con il nome dominio. 
" - -#, python-format -msgid "User is disabled: %s" -msgstr "L'utente è disabilitato: %s" - -msgid "User is not a member of the requested project" -msgstr "L'utente non è un membro del progetto richiesto" - -msgid "User is not a trustee." -msgstr "L'utente non è un amministratore." - -msgid "User not found" -msgstr "Utente non trovato" - -msgid "User not valid for tenant." -msgstr "Utente non valido per il tenant." - -msgid "User roles not supported: tenant_id required" -msgstr "Ruoli utente non supportati: richiesto tenant_id" - -#, python-format -msgid "User type %s not supported" -msgstr "Tipo utente %s non supportato" - -msgid "You are not authorized to perform the requested action." -msgstr "Non si possiede l'autorizzazione per eseguire l'operazione richiesta." - -#, python-format -msgid "You are not authorized to perform the requested action: %(action)s" -msgstr "L'utente non è autorizzato ad eseguire l'azione richiesta: %(action)s" - -msgid "" -"You have tried to create a resource using the admin token. As this token is " -"not within a domain you must explicitly include a domain for this resource " -"to belong to." -msgstr "" -"Si è cercato di creare una risorsa utilizzando il token admin. Poiché questo " -"token non si trova all'interno di un dominio, è necessario includere " -"esplicitamente un dominio per fare in modo che questa risorsa vi appartenga." - -msgid "`key_mangler` functions must be callable." -msgstr "Le funzioni `key_mangler` devono essere disponibili per la chiamata." 
- -msgid "`key_mangler` option must be a function reference" -msgstr "L'opzione `key_mangler` deve essere un riferimento funzione" - -msgid "any options" -msgstr "qualsiasi opzione" - -msgid "auth_type is not Negotiate" -msgstr "auth_type non è Negotiate" - -msgid "authorizing user does not have role required" -msgstr "l'utente per l'autorizzazione non dispone del ruolo richiesto" - -#, python-format -msgid "cannot create a project in a branch containing a disabled project: %s" -msgstr "" -"impossibile creare un progetto in un ramo che contiene un progetto " -"disabilitato: %s" - -#, python-format -msgid "" -"cannot delete an enabled project acting as a domain. Please disable the " -"project %s first." -msgstr "" -"impossibile eliminare un progetto abilitato che agisce come un dominio. " -"Disabilitare prima il progetto %s." - -#, python-format -msgid "group %(group)s" -msgstr "gruppo %(group)s" - -msgid "" -"idp_contact_type must be one of: [technical, other, support, administrative " -"or billing." -msgstr "" -"idp_contact_type deve essere uno tra: [tecnico, altro, supporto, " -"amministrativo o di fatturazione." - -#, python-format -msgid "invalid date format %s" -msgstr "formato data non valido %s" - -#, python-format -msgid "" -"it is not permitted to have two projects acting as domains with the same " -"name: %s" -msgstr "" -"non è consentito avere due progetti che agiscono con lo stesso nome: %s" - -#, python-format -msgid "" -"it is not permitted to have two projects within a domain with the same " -"name : %s" -msgstr "" -"non è consentito avere due progetti all'interno di un dominio con lo stesso " -"nome: %s" - -msgid "only root projects are allowed to act as domains." -msgstr "Solo ai progetti root è consentito agire come domini." 
- -#, python-format -msgid "option %(option)s in group %(group)s" -msgstr "opzione %(option)s nel gruppo %(group)s" - -msgid "provided consumer key does not match stored consumer key" -msgstr "" -"La chiave consumer fornita non corrisponde alla chiave consumer memorizzata" - -msgid "provided request key does not match stored request key" -msgstr "" -"La chiave della richiesta fornita non corrisponde alla chiave della " -"richiesta memorizzata" - -msgid "provided verifier does not match stored verifier" -msgstr "il verificatore fornito non corrisponde al verificatore memorizzato" - -msgid "remaining_uses must be a positive integer or null." -msgstr "remaining_uses deve essere un numero intero positivo o nullo." - -msgid "remaining_uses must not be set if redelegation is allowed" -msgstr "" -"remaining_uses non deve essere impostato se è consentita la riassegnazione " -"della delega" - -#, python-format -msgid "" -"request to update group %(group)s, but config provided contains group " -"%(group_other)s instead" -msgstr "" -"Richiesta di aggiornamento del gruppo %(group)s, ma la configurazione " -"fornita contiene il gruppo %(group_other)s" - -msgid "rescope a scoped token" -msgstr "riassegna ambito a token con ambito" - -#, python-format -msgid "role %s is not defined" -msgstr "il ruolo %s non è definito" - -msgid "scope.project.id must be specified if include_subtree is also specified" -msgstr "" -"scope.project.id deve essere specificato se è specificato anche " -"include_subtree" - -#, python-format -msgid "tls_cacertdir %s not found or is not a directory" -msgstr "Impossibile trovare tls_cacertdir %s o non è una directory" - -#, python-format -msgid "tls_cacertfile %s not found or is not a file" -msgstr "Impossibile trovare tls_cacertfile %s o non è un file" - -#, python-format -msgid "token reference must be a KeystoneToken type, got: %s" -msgstr "" -"il riferimento al token deve essere un tipo KeystoneToken, ottenuto: %s" - -msgid "" -"update of domain_id is 
deprecated as of Mitaka and will be removed in O." -msgstr "" -"l'aggiornamento di domain_id è obsoleto a partire da Mitaka e verrà rimosso " -"in O." - -#, python-format -msgid "" -"validated expected to find %(param_name)r in function signature for " -"%(func_name)r." -msgstr "" -"la convalida prevede di trovare %(param_name)r nella firma funzione per " -"%(func_name)r." diff --git a/keystone-moon/keystone/locale/ja/LC_MESSAGES/keystone-log-critical.po b/keystone-moon/keystone/locale/ja/LC_MESSAGES/keystone-log-critical.po deleted file mode 100644 index b9224fea..00000000 --- a/keystone-moon/keystone/locale/ja/LC_MESSAGES/keystone-log-critical.po +++ /dev/null @@ -1,25 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# Akihiro Motoki , 2015. #zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2014-08-31 03:19+0000\n" -"Last-Translator: openstackjenkins \n" -"Language: ja\n" -"Plural-Forms: nplurals=1; plural=0;\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Japanese\n" - -#, python-format -msgid "Unable to open template file %s" -msgstr "テンプレートファイル %s を開けません" diff --git a/keystone-moon/keystone/locale/ja/LC_MESSAGES/keystone-log-error.po b/keystone-moon/keystone/locale/ja/LC_MESSAGES/keystone-log-error.po deleted file mode 100644 index d3e6062f..00000000 --- a/keystone-moon/keystone/locale/ja/LC_MESSAGES/keystone-log-error.po +++ /dev/null @@ -1,177 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. 
-# -# Translators: -# Kuo(Kyohei MORIYAMA) <>, 2014 -msgid "" -msgstr "" -"Project-Id-Version: Keystone\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/keystone\n" -"POT-Creation-Date: 2015-03-09 06:03+0000\n" -"PO-Revision-Date: 2015-03-07 04:31+0000\n" -"Last-Translator: openstackjenkins \n" -"Language-Team: Japanese (http://www.transifex.com/projects/p/keystone/" -"language/ja/)\n" -"Language: ja\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 1.3\n" -"Plural-Forms: nplurals=1; plural=0;\n" - -#: keystone/notifications.py:304 -msgid "Failed to construct notifier" -msgstr "" - -#: keystone/notifications.py:389 -#, python-format -msgid "Failed to send %(res_id)s %(event_type)s notification" -msgstr "" - -#: keystone/notifications.py:606 -#, python-format -msgid "Failed to send %(action)s %(event_type)s notification" -msgstr "" - -#: keystone/catalog/core.py:62 -#, python-format -msgid "Malformed endpoint - %(url)r is not a string" -msgstr "" - -#: keystone/catalog/core.py:66 -#, python-format -msgid "Malformed endpoint %(url)s - unknown key %(keyerror)s" -msgstr "不正な形式のエンドポイント %(url)s - 未知のキー %(keyerror)s" - -#: keystone/catalog/core.py:71 -#, python-format -msgid "" -"Malformed endpoint '%(url)s'. The following type error occurred during " -"string substitution: %(typeerror)s" -msgstr "" - -#: keystone/catalog/core.py:77 -#, python-format -msgid "" -"Malformed endpoint %s - incomplete format (are you missing a type notifier ?)" -msgstr "" - -#: keystone/common/openssl.py:93 -#, python-format -msgid "Command %(to_exec)s exited with %(retcode)s- %(output)s" -msgstr "" - -#: keystone/common/openssl.py:121 -#, python-format -msgid "Failed to remove file %(file_path)r: %(error)s" -msgstr "" - -#: keystone/common/utils.py:239 -msgid "" -"Error setting up the debug environment. 
Verify that the option --debug-url " -"has the format : and that a debugger processes is listening on " -"that port." -msgstr "" -"デバッグ環境のセットアップ中にエラーが発生しました。オプション --debug-url " -"が : の形式を持ち、デバッガープロセスがそのポートにおいてリッスン" -"していることを確認してください。" - -#: keystone/common/cache/core.py:100 -#, python-format -msgid "" -"Unable to build cache config-key. Expected format \":\". " -"Skipping unknown format: %s" -msgstr "" - -#: keystone/common/environment/eventlet_server.py:99 -#, python-format -msgid "Could not bind to %(host)s:%(port)s" -msgstr "%(host)s:%(port)s がバインドできません。" - -#: keystone/common/environment/eventlet_server.py:185 -msgid "Server error" -msgstr "内部サーバーエラー" - -#: keystone/contrib/endpoint_policy/core.py:129 -#: keystone/contrib/endpoint_policy/core.py:228 -#, python-format -msgid "" -"Circular reference or a repeated entry found in region tree - %(region_id)s." -msgstr "" - -#: keystone/contrib/federation/idp.py:410 -#, python-format -msgid "Error when signing assertion, reason: %(reason)s" -msgstr "サインアサーション時にエラーが発生しました。理由:%(reason)s" - -#: keystone/contrib/oauth1/core.py:136 -msgid "Cannot retrieve Authorization headers" -msgstr "" - -#: keystone/openstack/common/loopingcall.py:95 -msgid "in fixed duration looping call" -msgstr "一定期間の呼び出しループ" - -#: keystone/openstack/common/loopingcall.py:138 -msgid "in dynamic looping call" -msgstr "動的呼び出しループ" - -#: keystone/openstack/common/service.py:268 -msgid "Unhandled exception" -msgstr "未処理例外" - -#: keystone/resource/core.py:477 -#, python-format -msgid "" -"Circular reference or a repeated entry found projects hierarchy - " -"%(project_id)s." 
-msgstr "" - -#: keystone/resource/core.py:939 -#, python-format -msgid "" -"Unexpected results in response for domain config - %(count)s responses, " -"first option is %(option)s, expected option %(expected)s" -msgstr "" - -#: keystone/resource/backends/sql.py:102 keystone/resource/backends/sql.py:121 -#, python-format -msgid "" -"Circular reference or a repeated entry found in projects hierarchy - " -"%(project_id)s." -msgstr "" - -#: keystone/token/provider.py:292 -#, python-format -msgid "Unexpected error or malformed token determining token expiry: %s" -msgstr "" - -#: keystone/token/persistence/backends/kvs.py:226 -#, python-format -msgid "" -"Reinitializing revocation list due to error in loading revocation list from " -"backend. Expected `list` type got `%(type)s`. Old revocation list data: " -"%(list)r" -msgstr "" - -#: keystone/token/providers/common.py:611 -msgid "Failed to validate token" -msgstr "" - -#: keystone/token/providers/pki.py:47 -msgid "Unable to sign token" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:38 -#, python-format -msgid "" -"Either [fernet_tokens] key_repository does not exist or Keystone does not " -"have sufficient permission to access it: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:79 -msgid "" -"Failed to create [fernet_tokens] key_repository: either it already exists or " -"you don't have sufficient permissions to create it" -msgstr "" diff --git a/keystone-moon/keystone/locale/ja/LC_MESSAGES/keystone.po b/keystone-moon/keystone/locale/ja/LC_MESSAGES/keystone.po deleted file mode 100644 index 8f460602..00000000 --- a/keystone-moon/keystone/locale/ja/LC_MESSAGES/keystone.po +++ /dev/null @@ -1,1614 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# Tomoyuki KATO , 2012-2013 -# Akihiro Motoki , 2015. #zanata -# 笹原 昌美 , 2016. 
#zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2016-04-04 01:18+0000\n" -"Last-Translator: 笹原 昌美 \n" -"Language: ja\n" -"Plural-Forms: nplurals=1; plural=0;\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Japanese\n" - -#, python-format -msgid "%(detail)s" -msgstr "%(detail)s" - -#, python-format -msgid "%(driver)s is not supported driver version" -msgstr "%(driver)s はサポートされるドライバーバージョンではありません" - -#, python-format -msgid "" -"%(entity)s name cannot contain the following reserved characters: %(chars)s" -msgstr "%(entity)s 名に以下の予約済み文字を含めることはできません: %(chars)s" - -#, python-format -msgid "" -"%(event)s is not a valid notification event, must be one of: %(actions)s" -msgstr "" -"%(event)s は有効な通知イベントではありません。%(actions)s のいずれかでなけれ" -"ばなりません。" - -#, python-format -msgid "%(host)s is not a trusted dashboard host" -msgstr "%(host)s は信頼されたダッシュボードホストではありません" - -#, python-format -msgid "%(message)s %(amendment)s" -msgstr "%(message)s %(amendment)s" - -#, python-format -msgid "" -"%(mod_name)s doesn't provide database migrations. The migration repository " -"path at %(path)s doesn't exist or isn't a directory." -msgstr "" -"%(mod_name)s はデータベースマイグレーションを提供していません。%(path)s のマ" -"イグレーションリポジトリーのパスが存在しないか、ディレクトリーではないかのい" -"ずれかです。" - -#, python-format -msgid "%(prior_role_id)s does not imply %(implied_role_id)s" -msgstr "%(prior_role_id)s は %(implied_role_id)s を暗黙的に示しません" - -#, python-format -msgid "%(property_name)s cannot be less than %(min_length)s characters." 
-msgstr "%(property_name)s は %(min_length)s 文字より短くできません。" - -#, python-format -msgid "%(property_name)s is not a %(display_expected_type)s" -msgstr "%(property_name)s が %(display_expected_type)s ではありません。" - -#, python-format -msgid "%(property_name)s should not be greater than %(max_length)s characters." -msgstr "%(property_name)s は %(max_length)s 文字より長くできません。" - -#, python-format -msgid "%(role_id)s cannot be an implied roles" -msgstr "%(role_id)s は暗黙的ロールにできません" - -#, python-format -msgid "%s cannot be empty." -msgstr "%s は空にはできません。" - -#, python-format -msgid "%s extension does not exist." -msgstr "%s 拡張が存在しません。" - -#, python-format -msgid "%s field is required and cannot be empty" -msgstr "フィールド %s は必須フィールドであるため、空にできません" - -#, python-format -msgid "%s field(s) cannot be empty" -msgstr "フィールド %s を空にすることはできません" - -#, python-format -msgid "" -"%s for the LDAP identity backend has been deprecated in the Mitaka release " -"in favor of read-only identity LDAP access. It will be removed in the \"O\" " -"release." -msgstr "" -"LDAP ID バックエンドの %s は Mitaka リリースにおいて読み取り専用の ID LDAP ア" -"クセスを選択したため、提供を終了しています。これは \"O\" リリースで削除される" -"予定です。" - -msgid "(Disable insecure_debug mode to suppress these details.)" -msgstr "(これらの詳細を抑制するには、insecure_debug モードを無効にします。)" - -msgid "--all option cannot be mixed with other options" -msgstr "--all オプションを他のオプションと組み合わせて使用することはできません" - -msgid "A project-scoped token is required to produce a service catalog." -msgstr "" -"サービスカタログを生成するには、プロジェクトにスコープが設定されたトークンが" -"必要です。" - -msgid "Access token is expired" -msgstr "アクセストークンの有効期限が切れています" - -msgid "Access token not found" -msgstr "アクセストークンが見つかりません" - -msgid "Additional authentications steps required." 
-msgstr "追加認証手順が必要です。" - -msgid "An unexpected error occurred when retrieving domain configs" -msgstr "ドメイン設定の取得中に予期しないエラーが発生しました" - -#, python-format -msgid "An unexpected error occurred when trying to store %s" -msgstr "%s の保存中に予期しないエラーが発生しました" - -msgid "An unexpected error prevented the server from fulfilling your request." -msgstr "予期しないエラーが発生したため、サーバーが要求を完了できませんでした。" - -#, python-format -msgid "" -"An unexpected error prevented the server from fulfilling your request: " -"%(exception)s" -msgstr "" -"予期しないエラーが発生したため、サーバーが要求を完了できませんでした: " -"%(exception)s" - -msgid "An unhandled exception has occurred: Could not find metadata." -msgstr "処理できない例外が発生しました。メタデータが見つかりませんでした。" - -msgid "At least one option must be provided" -msgstr "少なくとも 1 つはオプションを指定する必要があります" - -msgid "At least one option must be provided, use either --all or --domain-name" -msgstr "" -"少なくとも 1 つのオプションを指定する必要があります。--all または --domain-" -"name を使用してください" - -msgid "At least one role should be specified." -msgstr "少なくとも 1 つのロールを指定する必要があります。" - -#, python-format -msgid "" -"Attempted automatic driver selection for assignment based upon " -"[identity]\\driver option failed since driver %s is not found. Set " -"[assignment]/driver to a valid driver in keystone config." -msgstr "" -"[identity]\\driver オプションに基づく割り当て用にドライバーの自動選択を試みま" -"したが、ドライバー %s が見つからなかったため失敗しました。[assignment]/" -"driver を Keystone 設定の有効なドライバーに設定してください。" - -msgid "Attempted to authenticate with an unsupported method." -msgstr "サポートされていないメソッドを使用して認証を行おうとしました。" - -msgid "" -"Attempting to use OS-FEDERATION token with V2 Identity Service, use V3 " -"Authentication" -msgstr "" -"V2 Identity Service で OS-FEDERATION トークンを使用しようとしています。V3 認" -"証を使用してください" - -msgid "Authentication plugin error." -msgstr "認証プラグインエラー。" - -#, python-format -msgid "" -"Backend `%(backend)s` is not a valid memcached backend. 
Valid backends: " -"%(backend_list)s" -msgstr "" -"バックエンド `%(backend)s` は有効な memcached バックエンドではありません。有" -"効なバックエンド: %(backend_list)s" - -msgid "Cannot authorize a request token with a token issued via delegation." -msgstr "" -"委任によって発行されたトークンを使用して要求トークンを許可することはできませ" -"ん。" - -#, python-format -msgid "Cannot change %(option_name)s %(attr)s" -msgstr "%(option_name)s %(attr)s を変更できません" - -msgid "Cannot change Domain ID" -msgstr "ドメイン ID を変更できません" - -msgid "Cannot change user ID" -msgstr "ユーザー ID を変更できません" - -msgid "Cannot change user name" -msgstr "ユーザー名を変更できません" - -#, python-format -msgid "Cannot create an endpoint with an invalid URL: %(url)s" -msgstr "以下の無効な URL を持つエンドポイントを作成できません: %(url)s" - -#, python-format -msgid "Cannot create project with parent: %(project_id)s" -msgstr "親を持つプロジェクト: %(project_id)s を作成できません" - -#, python-format -msgid "" -"Cannot create project, since it specifies its owner as domain %(domain_id)s, " -"but specifies a parent in a different domain (%(parent_domain_id)s)." -msgstr "" -"プロジェクトでその所有者をドメイン %(domain_id)s として指定しているが、別のド" -"メイン (%(parent_domain_id)s) に親を指定しているため、そのプロジェクトを作成" -"できません。" - -#, python-format -msgid "" -"Cannot create project, since its parent (%(domain_id)s) is acting as a " -"domain, but project's specified parent_id (%(parent_id)s) does not match " -"this domain_id." -msgstr "" -"プロジェクトの親 (%(domain_id)s) がドメインとして動作しているが、プロジェク" -"トで指定される parent_id (%(parent_id)s) がこの domain_id と一致しないため、" -"そのプロジェクトを作成できません。" - -msgid "Cannot delete a domain that is enabled, please disable it first." -msgstr "" -"有効になっているドメインは削除できません。最初にそのドメインを無効にしてくだ" -"さい。" - -#, python-format -msgid "" -"Cannot delete project %(project_id)s since its subtree contains enabled " -"projects." -msgstr "" -"プロジェクト %(project_id)s はそのサブツリーに有効になっているプロジェクトが" -"含まれているため削除できません。" - -#, python-format -msgid "" -"Cannot delete the project %s since it is not a leaf in the hierarchy. Use " -"the cascade option if you want to delete a whole subtree." 
-msgstr "" -"プロジェクト %s は階層内の末端ではないため、削除できません。サブツリー全体を" -"削除する場合、カスケードオプションを使用してください。" - -#, python-format -msgid "" -"Cannot disable project %(project_id)s since its subtree contains enabled " -"projects." -msgstr "" -"プロジェクト %(project_id)s はそのサブツリーに有効になっているプロジェクトが" -"含まれているため、無効にできません。" - -#, python-format -msgid "Cannot enable project %s since it has disabled parents" -msgstr "親が無効になっているプロジェクト %s は有効にできません" - -msgid "Cannot list assignments sourced from groups and filtered by user ID." -msgstr "" -"グループから取得し、ユーザー ID でフィルター処理した割り当てをリストできませ" -"ん。" - -msgid "Cannot list request tokens with a token issued via delegation." -msgstr "" -"委任によって発行されたトークンを使用して要求トークンをリストすることはできま" -"せん。" - -#, python-format -msgid "Cannot open certificate %(cert_file)s. Reason: %(reason)s" -msgstr "証明書 %(cert_file)s を開くことができません。理由: %(reason)s" - -#, python-format -msgid "Cannot remove role that has not been granted, %s" -msgstr "許可されていないロールを削除できません、%s" - -#, fuzzy -msgid "" -"Cannot truncate a driver call without hints list as first parameter after " -"self " -msgstr "" -"セルフの後に最初のパラメーターとしてヒントリストなしでドライバー呼び出しを切" -"り捨てることはできません" - -msgid "Cannot update domain_id of a project that has children." -msgstr "子を持つプロジェクトの domain_id を更新できません。" - -msgid "" -"Cannot use parents_as_list and parents_as_ids query params at the same time." -msgstr "" -"問い合わせパラメーター parents_as_list と parents_as_ids を同時に使用すること" -"はできません。" - -msgid "" -"Cannot use subtree_as_list and subtree_as_ids query params at the same time." -msgstr "" -"問い合わせパラメーター subtree_as_list と subtree_as_ids を同時に使用すること" -"はできません。" - -msgid "Cascade update is only allowed for enabled attribute." -msgstr "カスケード更新は有効になっている属性にのみ許可されます。" - -#, fuzzy -msgid "" -"Combining effective and group filter will always result in an empty list." -msgstr "" -"有効フィルターとグループフィルターの組み合わせは常に空のリストになります。" - -#, fuzzy -msgid "" -"Combining effective, domain and inherited filters will always result in an " -"empty list." 
-msgstr "" -"有効フィルター、ドメインフィルター、および継承フィルターの組み合わせは常に空" -"のリストになります。" - -#, python-format -msgid "Config API entity at /domains/%s/config" -msgstr "/domains/%s/config の Config API エンティティー" - -#, python-format -msgid "Conflict occurred attempting to store %(type)s - %(details)s" -msgstr "%(type)s を保存するときに競合が発生しました - %(details)s" - -#, python-format -msgid "Conflicting region IDs specified: \"%(url_id)s\" != \"%(ref_id)s\"" -msgstr "" -"矛盾するリージョン ID が指定されました: \"%(url_id)s\" != \"%(ref_id)s\"" - -msgid "Consumer not found" -msgstr "コンシューマーが見つかりません" - -#, python-format -msgid "" -"Could not change immutable attribute(s) '%(attributes)s' in target %(target)s" -msgstr "" -"ターゲット %(target)s の変更不可の属性 '%(attributes)s' を変更できませんでし" -"た" - -#, python-format -msgid "" -"Could not determine Identity Provider ID. The configuration option " -"%(issuer_attribute)s was not found in the request environment." -msgstr "" -"認証プロバイダー ID を判別できませんでした。設定オプション " -"%(issuer_attribute)s が要求環境内で見つかりませんでした。" - -#, python-format -msgid "" -"Could not find %(group_or_option)s in domain configuration for domain " -"%(domain_id)s" -msgstr "" -"%(group_or_option)s がドメイン %(domain_id)s のドメイン設定に見つかりませんで" -"した" - -#, python-format -msgid "Could not find Endpoint Group: %(endpoint_group_id)s" -msgstr "エンドポイントグループ %(endpoint_group_id)s が見つかりませんでした" - -msgid "Could not find Identity Provider identifier in environment" -msgstr "Identity Provider ID が環境情報内に見つかりませんでした" - -#, python-format -msgid "Could not find Identity Provider: %(idp_id)s" -msgstr "ID プロバイダー %(idp_id)s が見つかりませんでした" - -#, python-format -msgid "Could not find Service Provider: %(sp_id)s" -msgstr "サービスプロバイダー %(sp_id)s が見つかりませんでした" - -#, python-format -msgid "Could not find credential: %(credential_id)s" -msgstr "クレデンシャルが見つかりませんでした: %(credential_id)s" - -#, python-format -msgid "Could not find domain: %(domain_id)s" -msgstr "ドメイン %(domain_id)s が見つかりませんでした" - -#, python-format -msgid "Could not find endpoint: %(endpoint_id)s" -msgstr "エンドポイント 
%(endpoint_id)sが見つかりませんでした" - -#, python-format -msgid "" -"Could not find federated protocol %(protocol_id)s for Identity Provider: " -"%(idp_id)s" -msgstr "" -"Identity Provider の連携プロトコル %(protocol_id)s が見つかりませんでした: " -"%(idp_id)s" - -#, python-format -msgid "Could not find group: %(group_id)s" -msgstr "グループ %(group_id)s が見つかりませんでした" - -#, python-format -msgid "Could not find mapping: %(mapping_id)s" -msgstr "マッピング %(mapping_id)s が見つかりませんでした" - -msgid "Could not find policy association" -msgstr "ポリシー関連付けが見つかりませんでした" - -#, python-format -msgid "Could not find policy: %(policy_id)s" -msgstr "ポリシー %(policy_id)s が見つかりませんでした" - -#, python-format -msgid "Could not find project: %(project_id)s" -msgstr "プロジェクト %(project_id)s が見つかりませんでした" - -#, python-format -msgid "Could not find region: %(region_id)s" -msgstr "リージョン %(region_id)s が見つかりませんでした" - -#, python-format -msgid "" -"Could not find role assignment with role: %(role_id)s, user or group: " -"%(actor_id)s, project or domain: %(target_id)s" -msgstr "" -"ロール %(role_id)s を持つ割り当てが見つかりませんでした。ユーザーまたはグルー" -"プは %(actor_id)s で、プロジェクトまたはドメインが %(target_id)s です" - -#, python-format -msgid "Could not find role: %(role_id)s" -msgstr "ロール %(role_id)s が見つかりませんでした" - -#, python-format -msgid "Could not find service: %(service_id)s" -msgstr "サービス %(service_id)s が見つかりませんでした" - -#, python-format -msgid "Could not find token: %(token_id)s" -msgstr "トークン %(token_id)s が見つかりませんでした" - -#, python-format -msgid "Could not find trust: %(trust_id)s" -msgstr "トラスト %(trust_id)s が見つかりませんでした" - -#, python-format -msgid "Could not find user: %(user_id)s" -msgstr "ユーザー %(user_id)s が見つかりませんでした:" - -#, python-format -msgid "Could not find version: %(version)s" -msgstr "バージョン %(version)s が見つかりませんでした" - -#, python-format -msgid "Could not find: %(target)s" -msgstr "%(target)s が見つかりませんでした" - -msgid "" -"Could not map any federated user properties to identity values. Check debug " -"logs or the mapping used for additional details." 
-msgstr "" -"フェデレーションしたユーザープロパティーのいずれも ID 値にマップすることがで" -"きませんでした。デバッグログまたは追加の詳細に使用したマッピングを確認してく" -"ださい。" - -msgid "" -"Could not map user while setting ephemeral user identity. Either mapping " -"rules must specify user id/name or REMOTE_USER environment variable must be " -"set." -msgstr "" -"一時的なユーザー ID の設定中にユーザーをマップすることができませんでした。" -"マッピング規則によってユーザー ID/ユーザー名を指定するか、REMOTE_USER 環境変" -"数を設定するか、いずれかを行う必要があります。" - -msgid "Could not validate the access token" -msgstr "アクセストークンを検証できませんでした" - -msgid "Credential belongs to another user" -msgstr "クレデンシャルが別のユーザーに属しています" - -msgid "Credential signature mismatch" -msgstr "クレデンシャルのシグニチャーが一致しません" - -#, python-format -msgid "" -"Direct import of auth plugin %(name)r is deprecated as of Liberty in favor " -"of its entrypoint from %(namespace)r and may be removed in N." -msgstr "" -"認証プラグイン %(name)r の直接インポートは、Liberty の時点で %(namespace)r の" -"エンドポイントを選択したため、提供を終了しました。N では削除される予定です。" - -#, python-format -msgid "" -"Direct import of driver %(name)r is deprecated as of Liberty in favor of its " -"entrypoint from %(namespace)r and may be removed in N." -msgstr "" -"ドライバー %(name)r の直接インポートは、Liberty の時点で %(namespace)r からの" -"エントリーポイントを選択したため、 提供を終了しました。N では削除される予定で" -"す。" - -msgid "" -"Disabling an entity where the 'enable' attribute is ignored by configuration." -msgstr "" -"「enable」属性が設定によって無視されているエンティティーを無効化中です。" - -#, python-format -msgid "Domain (%s)" -msgstr "ドメイン (%s)" - -#, python-format -msgid "Domain cannot be named %s" -msgstr "ドメインに %s という名前を付けることはできません" - -#, python-format -msgid "Domain cannot have ID %s" -msgstr "ドメインに %s という ID を付けることはできません" - -#, python-format -msgid "Domain is disabled: %s" -msgstr "ドメイン %s が無効になっています" - -msgid "Domain name cannot contain reserved characters." 
-msgstr "ドメイン名に予約済み文字が含まれていてはなりません。" - -msgid "Domain scoped token is not supported" -msgstr "ドメインをスコープにしたトークンはサポートされていません" - -msgid "Domain specific roles are not supported in the V8 role driver" -msgstr "ドメイン固有のロールは、V8 のロールドライバーではサポートされません" - -#, python-format -msgid "" -"Domain: %(domain)s already has a configuration defined - ignoring file: " -"%(file)s." -msgstr "" -"ドメイン %(domain)s には既に定義された設定があります。ファイル %(file)s は無" -"視されます。" - -msgid "Duplicate Entry" -msgstr "重複する項目" - -#, python-format -msgid "Duplicate ID, %s." -msgstr "重複した ID、%s。" - -#, python-format -msgid "Duplicate entry: %s" -msgstr "重複する項目: %s" - -#, python-format -msgid "Duplicate name, %s." -msgstr "重複した名前、%s。" - -#, python-format -msgid "Duplicate remote ID: %s" -msgstr "重複するリモート ID: %s" - -msgid "EC2 access key not found." -msgstr "EC2 アクセスキーが見つかりません。" - -msgid "EC2 signature not supplied." -msgstr "EC2 の署名が提供されていません。" - -msgid "" -"Either --bootstrap-password argument or OS_BOOTSTRAP_PASSWORD must be set." -msgstr "" -"--bootstrap-password 引数または OS_BOOTSTRAP_PASSWORD いずれかを設定する必要" -"があります。" - -msgid "Enabled field must be a boolean" -msgstr "「有効」フィールドはブール値でなければなりません" - -msgid "Enabled field should be a boolean" -msgstr "「有効」フィールドはブール値でなければなりません" - -#, python-format -msgid "Endpoint %(endpoint_id)s not found in project %(project_id)s" -msgstr "" -"エンドポイント %(endpoint_id)s がプロジェクト %(project_id)s に見つかりません" - -msgid "Endpoint Group Project Association not found" -msgstr "エンドポイントグループとプロジェクトの関連付けが見つかりません" - -msgid "Ensure configuration option idp_entity_id is set." -msgstr "設定オプション idp_entity_id が設定されていることを確認してください。" - -msgid "Ensure configuration option idp_sso_endpoint is set." -msgstr "" -"設定オプション idp_sso_endpoint が設定されていることを確認してください。" - -#, python-format -msgid "" -"Error parsing configuration file for domain: %(domain)s, file: %(file)s." 
-msgstr "" -"ドメイン: %(domain)s、ファイル: %(file)s の設定ファイルの構文解析エラー。" - -#, python-format -msgid "Error while opening file %(path)s: %(err)s" -msgstr "ファイル %(path)s のオープン中にエラーが発生しました: %(err)s" - -#, python-format -msgid "Error while parsing line: '%(line)s': %(err)s" -msgstr "行: '%(line)s' の解析中にエラーが発生しました: %(err)s" - -#, python-format -msgid "Error while parsing rules %(path)s: %(err)s" -msgstr "ルール %(path)s の解析中にエラーが発生しました: %(err)s" - -#, python-format -msgid "Error while reading metadata file, %(reason)s" -msgstr "メタデータファイルの読み取り中にエラーが発生しました。%(reason)s" - -#, python-format -msgid "" -"Exceeded attempts to register domain %(domain)s to use the SQL driver, the " -"last domain that appears to have had it is %(last_domain)s, giving up" -msgstr "" -"SQL ドライバーを使用するためのドメイン %(domain)s の登録の試行回数が制限を超" -"過しました。最後に登録されたと思われるドメインは %(last_domain)s です。中断し" -"ます" - -#, python-format -msgid "Expected dict or list: %s" -msgstr "期待される辞書またはリスト: %s" - -msgid "" -"Expected signing certificates are not available on the server. Please check " -"Keystone configuration." -msgstr "" -"想定された署名証明書がサーバーにありません。Keystone の設定を確認してくださ" -"い。" - -#, python-format -msgid "" -"Expecting to find %(attribute)s in %(target)s - the server could not comply " -"with the request since it is either malformed or otherwise incorrect. The " -"client is assumed to be in error." 
-msgstr "" -"%(target)s に %(attribute)s があることが想定されています。要求の形式が不正も" -"しくは正しくないため、サーバーは要求に応じることができませんでした。クライア" -"ントでエラーが発生していると考えられます。" - -#, python-format -msgid "Failed to start the %(name)s server" -msgstr "%(name)s サーバーの起動に失敗しました" - -msgid "Failed to validate token" -msgstr "トークンの検証に失敗しました" - -msgid "Federation token is expired" -msgstr "統合トークンの有効期限が切れています" - -#, python-format -msgid "" -"Field \"remaining_uses\" is set to %(value)s while it must not be set in " -"order to redelegate a trust" -msgstr "" -"フィールド \"remaining_uses\" は %(value)s になっていますが、トラストを再委任" -"するにはこのフィールドが設定されていてはなりません" - -msgid "Found invalid token: scoped to both project and domain." -msgstr "" -"無効なトークンが見つかりました: スコープがプロジェクトとドメインの両方に対し" -"て設定されています。" - -#, python-format -msgid "Group %s not found in config" -msgstr "グループ %s が設定内に見つかりません" - -#, python-format -msgid "Group %(group)s is not supported for domain specific configurations" -msgstr "ドメイン固有の設定ではグループ %(group)s はサポートされません" - -#, python-format -msgid "" -"Group %(group_id)s returned by mapping %(mapping_id)s was not found in the " -"backend." -msgstr "" -"マッピング %(mapping_id)s が返したグループ %(group_id)s がバックエンドにあり" -"ませんでした。" - -#, python-format -msgid "" -"Group membership across backend boundaries is not allowed, group in question " -"is %(group_id)s, user is %(user_id)s" -msgstr "" -"バックエンド境界をまたぐグループメンバーシップは許可されていません。問題と" -"なっているグループは %(group_id)s、ユーザーは %(user_id)s です" - -#, python-format -msgid "ID attribute %(id_attr)s not found in LDAP object %(dn)s" -msgstr "ID 属性 %(id_attr)s が LDAP オブジェクト %(dn)s に見つかりません" - -#, python-format -msgid "Identity Provider %(idp)s is disabled" -msgstr "ID プロバイダー %(idp)s は無効になっています" - -msgid "" -"Incoming identity provider identifier not included among the accepted " -"identifiers." -msgstr "受諾した ID の中に着信 ID プロバイダーの ID が含まれません。" - -msgid "Invalid EC2 signature." -msgstr "無効な EC2 の署名。" - -#, python-format -msgid "Invalid LDAP TLS certs option: %(option)s. 
Choose one of: %(options)s" -msgstr "" -"無効な LDAP TLS 証明書オプション %(option)s です。 %(options)s のいずれかを選" -"択してください" - -#, python-format -msgid "Invalid LDAP TLS_AVAIL option: %s. TLS not available" -msgstr "無効な LDAP TLS_AVAIL オプション %s です。TLS が利用できません。" - -#, python-format -msgid "Invalid LDAP deref option: %(option)s. Choose one of: %(options)s" -msgstr "" -"無効な LDAP deref オプション %(option)s です。%(options)s のいずれかを選択し" -"てください" - -#, python-format -msgid "Invalid LDAP scope: %(scope)s. Choose one of: %(options)s" -msgstr "" -"無効な LDAP スコープ %(scope)s です。 %(options)s のいずれかを選んでくださ" -"い: " - -msgid "Invalid TLS / LDAPS combination" -msgstr "無効な TLS / LDAPS の組み合わせです" - -#, python-format -msgid "Invalid audit info data type: %(data)s (%(type)s)" -msgstr "無効な監査情報データタイプ %(data)s (%(type)s) です" - -msgid "Invalid blob in credential" -msgstr "クレデンシャル内の blob が無効です" - -#, python-format -msgid "" -"Invalid domain name: %(domain)s found in config file name: %(file)s - " -"ignoring this file." -msgstr "" -"無効なドメイン名 %(domain)s が設定ファイル名 %(file)s に見つかりました。この" -"ファイルは無視されます。" - -#, python-format -msgid "Invalid domain specific configuration: %(reason)s" -msgstr "無効なドメイン固有の設定です: %(reason)s" - -#, python-format -msgid "Invalid input for field '%(path)s'. The value is '%(value)s'." -msgstr "フィールド '%(path)s' の入力が無効です。値は '%(value)s' です。" - -#, fuzzy -msgid "Invalid limit value" -msgstr "制限値が無効です" - -#, python-format -msgid "" -"Invalid mix of entities for policy association - only Endpoint, Service or " -"Region+Service allowed. Request was - Endpoint: %(endpoint_id)s, Service: " -"%(service_id)s, Region: %(region_id)s" -msgstr "" -"ポリシー関連付けのエンティティーの組み合わせが無効です。エンドポイント、サー" -"ビス、または領域とサービスのみ許可されています。要求 - エンドポイント: " -"%(endpoint_id)s、サービス: %(service_id)s、領域: %(region_id)s" - -#, python-format -msgid "" -"Invalid rule: %(identity_value)s. Both 'groups' and 'domain' keywords must " -"be specified." 
-msgstr "" -"無効なルール: %(identity_value)s。「グループ」と「ドメイン」の両方のキーワー" -"ドを指定する必要があります。" - -msgid "Invalid signature" -msgstr "シグニチャーが無効です" - -msgid "Invalid user / password" -msgstr "ユーザー/パスワードが無効です" - -msgid "Invalid username or TOTP passcode" -msgstr "無効なユーザー名または TOTP パスコード" - -msgid "Invalid username or password" -msgstr "無効なユーザー名かパスワード" - -#, python-format -msgid "KVS region %s is already configured. Cannot reconfigure." -msgstr "KVS 領域 %s は既に構成されています。再構成はできません。" - -#, python-format -msgid "Key Value Store not configured: %s" -msgstr "キーバリューストアが設定されていません: %s" - -#, python-format -msgid "LDAP %s create" -msgstr "LDAP %s の作成" - -#, python-format -msgid "LDAP %s delete" -msgstr "LDAP %s の削除" - -#, python-format -msgid "LDAP %s update" -msgstr "LDAP %s の更新" - -msgid "" -"Length of transformable resource id > 64, which is max allowed characters" -msgstr "" -"変換可能なリソース ID の長さは最大許容文字数である、64 文字より少なくなりま" -"す。" - -#, python-format -msgid "" -"Local section in mapping %(mapping_id)s refers to a remote match that " -"doesn't exist (e.g. {0} in a local section)." -msgstr "" -"マッピング %(mapping_id)s にあるローカルセクションは、存在しないリモートの一" -"致 (例えばローカルセクションの {0}) を参照します。" - -#, python-format -msgid "Lock Timeout occurred for key, %(target)s" -msgstr "キー %(target)s についてロックタイムアウトが発生しました" - -#, python-format -msgid "Lock key must match target key: %(lock)s != %(target)s" -msgstr "" -"ロックキーはターゲットキーと一致しなければなりません: %(lock)s != %(target)s" - -#, python-format -msgid "Malformed endpoint URL (%(endpoint)s), see ERROR log for details." -msgstr "" -"エンドポイント URL (%(endpoint)s) の形式が正しくありません。詳しくはエラーロ" -"グを参照してください。" - -msgid "Marker could not be found" -msgstr "マーカーが見つかりませんでした" - -#, python-format -msgid "Max hierarchy depth reached for %s branch." -msgstr "%s ブランチに到達する最大の階層の深さ。" - -#, python-format -msgid "Maximum lock attempts on %s occurred." 
-msgstr "%s に対してロックが最大回数まで試みられました。" - -#, python-format -msgid "Member %(member)s is already a member of group %(group)s" -msgstr "メンバー %(member)s は既にグループ %(group)s のメンバーです" - -#, python-format -msgid "Method not callable: %s" -msgstr "メソッドが呼び出し可能ではありません: %s" - -msgid "Missing entity ID from environment" -msgstr "環境情報にエンティティー ID が見つかりません" - -msgid "" -"Modifying \"redelegation_count\" upon redelegation is forbidden. Omitting " -"this parameter is advised." -msgstr "" -"再委任時の「redelegation_count」の変更は禁止されています。このパラメーターは" -"指定しないでください。" - -msgid "Multiple domains are not supported" -msgstr "複数のドメインはサポートされていません" - -msgid "Must be called within an active lock context." -msgstr "アクティブなロックコンテキスト内で呼び出されなければなりません。" - -msgid "Must specify either domain or project" -msgstr "ドメインまたはプロジェクトのいずれかを指定する必要があります" - -msgid "Name field is required and cannot be empty" -msgstr "「名前」フィールドは必須フィールドであり、空にできません" - -msgid "Neither Project Domain ID nor Project Domain Name was provided." -msgstr "" -"プロジェクトドメイン ID および プロジェクトドメイン名のいずれも指定されません" -"でした。" - -msgid "" -"No Authorization headers found, cannot proceed with OAuth related calls, if " -"running under HTTPd or Apache, ensure WSGIPassAuthorization is set to On." -msgstr "" -"認可に使用するヘッダーが見つからず、OAuth 関連の呼び出しを続行できません。" -"HTTPd または Apache の下で実行している場合は、WSGIPassAuthorization が On に" -"設定されていることを確認してください。" - -msgid "No authenticated user" -msgstr "認証されていないユーザー" - -msgid "" -"No encryption keys found; run keystone-manage fernet_setup to bootstrap one." -msgstr "" -"暗号鍵が見つかりません。keystone-manage fernet_setup を実行して暗号鍵を初期設" -"定します。" - -msgid "No options specified" -msgstr "オプションが指定されていません" - -#, python-format -msgid "No policy is associated with endpoint %(endpoint_id)s." 
-msgstr "" -"エンドポイント %(endpoint_id)s に関連付けられているポリシーはありません。" - -#, python-format -msgid "No remaining uses for trust: %(trust_id)s" -msgstr "トラストはこれ以上使用できません: %(trust_id)s" - -msgid "No token in the request" -msgstr "要求にトークンがありません" - -msgid "Non-default domain is not supported" -msgstr "デフォルト以外のドメインはサポートされません" - -msgid "One of the trust agents is disabled or deleted" -msgstr "トラストエージェントの 1 つが無効になっているか削除されています" - -#, python-format -msgid "" -"Option %(option)s found with no group specified while checking domain " -"configuration request" -msgstr "" -"ドメイン設定要求の検査中に、グループが指定されていないオプション %(option)s " -"が見つかりました" - -#, python-format -msgid "" -"Option %(option)s in group %(group)s is not supported for domain specific " -"configurations" -msgstr "" -"ドメイン固有の設定ではグループ %(group)s のオプション %(option)s はサポートさ" -"れていません" - -#, python-format -msgid "Project (%s)" -msgstr "プロジェクト (%s)" - -#, python-format -msgid "Project ID not found: %(t_id)s" -msgstr "プロジェクト ID が見つかりません: %(t_id)s" - -msgid "Project field is required and cannot be empty." -msgstr "プロジェクトフィールドは必須であり、空にできません。" - -#, python-format -msgid "Project is disabled: %s" -msgstr "プロジェクト %s が無効になっています" - -msgid "Project name cannot contain reserved characters." -msgstr "プロジェクト名に予約済み文字が含まれていてはなりません。" - -msgid "Query string is not UTF-8 encoded" -msgstr "照会文字列は、UTF-8 でエンコードされていません" - -#, python-format -msgid "" -"Reading the default for option %(option)s in group %(group)s is not supported" -msgstr "" -"グループ %(group)s のオプション %(option)s のデフォルトの読み取りはサポートさ" -"れません" - -msgid "Redelegation allowed for delegated by trust only" -msgstr "再委任はトラストによる委任にのみ許可されます" - -#, python-format -msgid "" -"Remaining redelegation depth of %(redelegation_depth)d out of allowed range " -"of [0..%(max_count)d]" -msgstr "" -"%(redelegation_depth)d の残りの再委任の深さが、許可された範囲 [0.." -"%(max_count)d] を超えています" - -msgid "" -"Remove admin_crud_extension from the paste pipeline, the admin_crud " -"extension is now always available. 
Updatethe [pipeline:admin_api] section in " -"keystone-paste.ini accordingly, as it will be removed in the O release." -msgstr "" -"admin_crud_extension を Paste のパイプラインから削除したため、admin_crud 拡張" -"を常時使用できるようになりました。これは O リリースで削除される予定であるた" -"め、それに応じて keystone-paste.ini 内の [pipeline:admin_api] セクションを更" -"新してください。" - -msgid "" -"Remove endpoint_filter_extension from the paste pipeline, the endpoint " -"filter extension is now always available. Update the [pipeline:api_v3] " -"section in keystone-paste.ini accordingly as it will be removed in the O " -"release." -msgstr "" -"endpoint_filter_extension を Paste パイプラインから削除したため、エンドポイン" -"トフィルター拡張を常時使用できるようになりました。これは O リリースで削除され" -"る予定であるため、それに応じて keystone-paste.ini 内の [pipeline:api_v3] セク" -"ションを更新してください。" - -msgid "" -"Remove federation_extension from the paste pipeline, the federation " -"extension is now always available. Update the [pipeline:api_v3] section in " -"keystone-paste.ini accordingly, as it will be removed in the O release." -msgstr "" -"federation_extension を Paste パイプラインから削除したため、フェデレーション" -"拡張を常時使用できるようになりました。これは O リリースで削除される予定である" -"ため、それに応じて keystone-paste.ini 内の [pipeline:api_v3] セクションを更新" -"してください。" - -msgid "" -"Remove oauth1_extension from the paste pipeline, the oauth1 extension is now " -"always available. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"oauth1_extension を Paste パイプラインから削除したため、oauth1 拡張を常時使用" -"できるようになりました。これは O リリースで削除される予定であるため、それに応" -"じて keystone-paste.ini 内の [pipeline:api_v3] セクションを更新してくださ" -"い。" - -msgid "" -"Remove revoke_extension from the paste pipeline, the revoke extension is now " -"always available. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." 
-msgstr "" -"revoke_extension を Paste パイプラインから削除したため、取り消し拡張を常時使" -"用できるようになりました。これは O リリースで削除される予定であるため、それに" -"応じて keystone-paste.ini 内の [pipeline:api_v3] セクションを更新してくださ" -"い。" - -msgid "" -"Remove simple_cert from the paste pipeline, the PKI and PKIz token providers " -"are now deprecated and simple_cert was only used insupport of these token " -"providers. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"simple_cert を Paste パイプラインから削除したため、PKI および PKIz のトークン" -"プロバイダーは非推奨となりました。これらのトークンプロバイダーのサポートに使" -"用されていたのは simple_cert のみでした。これは O リリースで削除される予定で" -"あるため、それに応じて keystone-paste.ini 内の [pipeline:api_v3] セクションを" -"更新してください。" - -msgid "" -"Remove user_crud_extension from the paste pipeline, the user_crud extension " -"is now always available. Updatethe [pipeline:public_api] section in keystone-" -"paste.ini accordingly, as it will be removed in the O release." -msgstr "" -"user_crud_extension を Paste パイプラインから削除したため、user_crud 拡張を常" -"時使用できるようになりました。 これは O リリースで削除される予定であるため、" -"それに応じて keystone-paste.ini 内の [pipeline:public_api] セクションを更新し" -"てください。" - -msgid "Request Token does not have an authorizing user id" -msgstr "要求されたトークンに許可ユーザー ID が含まれていません" - -#, python-format -msgid "" -"Request attribute %(attribute)s must be less than or equal to %(size)i. The " -"server could not comply with the request because the attribute size is " -"invalid (too large). The client is assumed to be in error." 
-msgstr "" -"要求された属性 %(attribute)s のサイズは %(size)i 以下でなければなりません。属" -"性のサイズが無効である (大きすぎる) ため、サーバーは要求に応じることができま" -"せんでした。クライアントでエラーが発生していると考えられます。" - -#, fuzzy -msgid "Request must have an origin query parameter" -msgstr "要求には起点照会パラメーターが必要です" - -msgid "Request token is expired" -msgstr "要求トークンの有効期限が切れています" - -msgid "Request token not found" -msgstr "要求されたトークンが見つかりません" - -msgid "Requested expiration time is more than redelegated trust can provide" -msgstr "要求された有効期限は再委任されたトラストが提供可能な期間を超えています" - -#, python-format -msgid "" -"Requested redelegation depth of %(requested_count)d is greater than allowed " -"%(max_count)d" -msgstr "" -"要求された再委任の深さ %(requested_count)d が、許可された上限 %(max_count)d " -"を超えています" - -msgid "" -"Running keystone via eventlet is deprecated as of Kilo in favor of running " -"in a WSGI server (e.g. mod_wsgi). Support for keystone under eventlet will " -"be removed in the \"M\"-Release." -msgstr "" -"eventlet を介した keystone の実行は Kilo 以降では推奨されておらず、WSGI サー" -"バー (mod_wsgi など) での実行が推奨されています。eventlet 下での keystone の" -"サポートは「M」リリースで削除される予定です。" - -msgid "Scoping to both domain and project is not allowed" -msgstr "ドメインとプロジェクトの両方にスコープを設定することはできません" - -msgid "Scoping to both domain and trust is not allowed" -msgstr "ドメインとトラストの両方にスコープを設定することはできません" - -msgid "Scoping to both project and trust is not allowed" -msgstr "プロジェクトとトラストの両方にスコープを設定することはできません" - -#, python-format -msgid "Service Provider %(sp)s is disabled" -msgstr "サービスプロバイダー %(sp)s は無効になっています" - -msgid "Some of requested roles are not in redelegated trust" -msgstr "要求されたロールの一部が再委任されたトラスト内にありません" - -msgid "Specify a domain or project, not both" -msgstr "ドメインかプロジェクトを指定してください。両方は指定しないでください" - -msgid "Specify a user or group, not both" -msgstr "ユーザーかグループを指定してください。両方は指定しないでください" - -msgid "Specify one of domain or project" -msgstr "ドメインまたはプロジェクトのいずれかを指定してください" - -msgid "Specify one of user or group" -msgstr "ユーザーまたはグループのいずれかを指定してください" - -#, python-format -msgid "" -"String length exceeded.The length of string 
'%(string)s' exceeded the limit " -"of column %(type)s(CHAR(%(length)d))." -msgstr "" -"文字列が長過ぎます。文字列 %(string)s' の長さが列 %(type)s(CHAR(%(length)d)) " -"の制限を超えました。" - -msgid "Tenant name cannot contain reserved characters." -msgstr "テナント名に予約済み文字が含まれていてはなりません。" - -#, python-format -msgid "" -"The %s extension has been moved into keystone core and as such its " -"migrations are maintained by the main keystone database control. Use the " -"command: keystone-manage db_sync" -msgstr "" -"%s 拡張が keystone コアに移動されているため、そのマイグレーションはメインの " -"keystone データベース制御によって維持されます。次のコマンドを使用します: " -"keystone-manage db_sync" - -msgid "" -"The 'expires_at' must not be before now. The server could not comply with " -"the request since it is either malformed or otherwise incorrect. The client " -"is assumed to be in error." -msgstr "" -"'expires_at' は現時点以前であってはなりません。要求の形式が誤っているか、要求" -"が正しくないために、サーバーはこの要求に応じることが出来ませんでした。クライ" -"アントでエラーが発生していると考えられます。" - -msgid "The --all option cannot be used with the --domain-name option" -msgstr "--all オプションを --domain-name オプションと併用することはできません" - -#, python-format -msgid "The Keystone configuration file %(config_file)s could not be found." -msgstr "Keystone 設定ファイル %(config_file)s が見つかりませんでした。" - -#, python-format -msgid "" -"The Keystone domain-specific configuration has specified more than one SQL " -"driver (only one is permitted): %(source)s." -msgstr "" -"keystone ドメイン固有設定で複数の SQL ドライバーが指定されています (1 つしか" -"指定できません): %(source)s。" - -msgid "The action you have requested has not been implemented." -msgstr "要求したアクションは実装されていません。" - -#, fuzzy -msgid "The authenticated user should match the trustor." -msgstr "認証ユーザーは委託者と一致している必要があります。" - -msgid "" -"The certificates you requested are not available. It is likely that this " -"server does not use PKI tokens otherwise this is the result of " -"misconfiguration." 
-msgstr "" -"要求された証明書がありません。このサーバーでは PKI トークンが使用されていない" -"か、そうでない場合は設定が間違っていると考えられます。 " - -msgid "The configured token provider does not support bind authentication." -msgstr "設定済みトークンプロバイダーはバインド認証をサポートしません。" - -msgid "The creation of projects acting as domains is not allowed in v2." -msgstr "v2 では、ドメインとして動作するプロジェクトの作成は許可されません。" - -#, python-format -msgid "" -"The password length must be less than or equal to %(size)i. The server could " -"not comply with the request because the password is invalid." -msgstr "" -"パスワードの長さは %(size)i 以下でなければなりません。パスワードが無効である" -"ため、サーバーは要求に応じることができませんでした。" - -msgid "The request you have made requires authentication." -msgstr "実行された要求には認証が必要です。" - -msgid "The resource could not be found." -msgstr "リソースが見つかりませんでした。" - -msgid "" -"The revoke call must not have both domain_id and project_id. This is a bug " -"in the Keystone server. The current request is aborted." -msgstr "" -"取り消し呼び出しに domain_id と project_id の両方を使用することはできません。" -"これは、Keystone サーバーにおけるバグです。現在の要求は打ち切られます。" - -msgid "The service you have requested is no longer available on this server." -msgstr "要求したサービスは現在このサーバーでは使用できません。" - -#, python-format -msgid "" -"The specified parent region %(parent_region_id)s would create a circular " -"region hierarchy." -msgstr "" -"指定された親リージョン %(parent_region_id)s では、リージョン階層構造でループ" -"が発生してしまいます。" - -#, python-format -msgid "" -"The value of group %(group)s specified in the config should be a dictionary " -"of options" -msgstr "" -"設定で指定されたグループ %(group)s の値はオプションの辞書にする必要があります" - -msgid "There should not be any non-oauth parameters" -msgstr "oauth 関連以外のパラメーターが含まれていてはいけません" - -#, python-format -msgid "This is not a recognized Fernet payload version: %s" -msgstr "これは認識可能な Fernet ペイロードバージョンではありません: %s" - -#, python-format -msgid "This is not a recognized Fernet token %s" -msgstr "これは認識可能な Fernet トークン %s ではありません" - -msgid "" -"Timestamp not in expected format. 
The server could not comply with the " -"request since it is either malformed or otherwise incorrect. The client is " -"assumed to be in error." -msgstr "" -"タイムスタンプが想定された形式になっていません。要求の形式が不正もしくは正し" -"くないため、サーバーは要求に応じることができませんでした。クライアントでエ" -"ラーが発生していると考えられます。" - -#, python-format -msgid "" -"To get a more detailed information on this error, re-run this command for " -"the specific domain, i.e.: keystone-manage domain_config_upload --domain-" -"name %s" -msgstr "" -"このエラーに関する詳細を得るには、特定ドメインに対してこのコマンドを再実行し" -"てください: keystone-manage domain_config_upload --domain-name %s" - -msgid "Token belongs to another user" -msgstr "トークンが別のユーザーに属しています" - -msgid "Token does not belong to specified tenant." -msgstr "トークンが指定されたテナントに所属していません。" - -msgid "Token version is unrecognizable or unsupported." -msgstr "トークンバージョンが認識できないかサポートされません。" - -#, fuzzy -msgid "Trustee has no delegated roles." -msgstr "受託者に委任された役割がありません。" - -#, fuzzy -msgid "Trustor is disabled." -msgstr "委託者は無効です。" - -#, python-format -msgid "" -"Trying to update group %(group)s, so that, and only that, group must be " -"specified in the config" -msgstr "" -"グループ %(group)s を更新しようとしていますが、その場合は設定でグループのみを" -"指定する必要があります" - -#, python-format -msgid "" -"Trying to update option %(option)s in group %(group)s, but config provided " -"contains option %(option_other)s instead" -msgstr "" -"グループ %(group)s のオプション %(option)s を更新しようとしましたが、指定され" -"た設定には代わりにオプション %(option_other)s が含まれています" - -#, python-format -msgid "" -"Trying to update option %(option)s in group %(group)s, so that, and only " -"that, option must be specified in the config" -msgstr "" -"グループ %(group)s のオプション %(option)s を更新しようとしていますが、その場" -"合は設定でオプションのみを指定する必要があります" - -msgid "" -"Unable to access the keystone database, please check it is configured " -"correctly." -msgstr "" -"keystone データベースにアクセスできません。このデータベースが正しく設定されて" -"いるかどうかを確認してください。" - -#, fuzzy, python-format -msgid "Unable to consume trust %(trust_id)s, unable to acquire lock." 
-msgstr "トラスト %(trust_id)s を消費できず、ロックを取得できません。" - -#, python-format -msgid "" -"Unable to delete region %(region_id)s because it or its child regions have " -"associated endpoints." -msgstr "" -"リージョン %(region_id)s またはその子リージョンがエンドポイントに関連付けられ" -"ているため、このリージョンを削除できません。" - -msgid "Unable to downgrade schema" -msgstr "スキーマをダウングレードすることができません" - -#, python-format -msgid "Unable to find valid groups while using mapping %(mapping_id)s" -msgstr "" -"マッピング %(mapping_id)s を使用する際に、有効なグループが見つかりませんでし" -"た" - -#, python-format -msgid "Unable to locate domain config directory: %s" -msgstr "ドメイン設定ディレクトリーが見つかりません: %s" - -#, python-format -msgid "Unable to lookup user %s" -msgstr "ユーザー %s を検索できません" - -#, fuzzy, python-format -msgid "" -"Unable to reconcile identity attribute %(attribute)s as it has conflicting " -"values %(new)s and %(old)s" -msgstr "" -"ID 属性 %(attribute)s に競合する値 %(new)s と %(old)s が含まれているため、調" -"整できません" - -#, python-format -msgid "" -"Unable to sign SAML assertion. It is likely that this server does not have " -"xmlsec1 installed, or this is the result of misconfiguration. Reason " -"%(reason)s" -msgstr "" -"SAML アサーションに署名できません。このサーバーに xmlsec1 がインストールされ" -"ていないか、設定が誤っているためと考えられます。理由: %(reason)s" - -msgid "Unable to sign token." 
-msgstr "トークンに署名できません。" - -#, python-format -msgid "Unexpected assignment type encountered, %s" -msgstr "予期しない割り当てタイプが検出されました。%s" - -#, python-format -msgid "" -"Unexpected combination of grant attributes - User: %(user_id)s, Group: " -"%(group_id)s, Project: %(project_id)s, Domain: %(domain_id)s" -msgstr "" -"認可属性 の組み合わせ (ユーザー: %(user_id)s、グループ: %(group_id)s、プロ" -"ジェクト: %(project_id)s、ドメイン: %(domain_id)s) が正しくありません。" - -#, python-format -msgid "Unexpected status requested for JSON Home response, %s" -msgstr "JSON Home 応答に対して予期しない状況が要求されました。%s" - -msgid "Unknown Target" -msgstr "不明なターゲット" - -#, python-format -msgid "Unknown domain '%(name)s' specified by --domain-name" -msgstr "不明なドメイン '%(name)s' が --domain-name で指定されました" - -#, python-format -msgid "Unknown token version %s" -msgstr "トークンバージョン %s は不明です" - -#, python-format -msgid "Unregistered dependency: %(name)s for %(targets)s" -msgstr "未登録の依存関係: %(targets)s に対する %(name)s" - -msgid "Update of `domain_id` is not allowed." -msgstr "`domain_id` の更新は許可されていません。" - -msgid "Update of `is_domain` is not allowed." -msgstr "`is_domain` の更新は許可されません。" - -msgid "Update of `parent_id` is not allowed." -msgstr "\"parent_id\" の更新は許可されていません。" - -msgid "Update of domain_id is only allowed for root projects." -msgstr "domain_id の更新が許可されるのは root プロジェクトのみです。" - -msgid "Update of domain_id of projects acting as domains is not allowed." -msgstr "" -"ドメインとして動作するプロジェクトの domain_id の更新は許可されません。" - -msgid "Use a project scoped token when attempting to create a SAML assertion" -msgstr "" -"SAML アサーションの作成を行うときは、プロジェクトにスコープが設定されたトーク" -"ンを使用してください" - -msgid "" -"Use of the identity driver config to automatically configure the same " -"assignment driver has been deprecated, in the \"O\" release, the assignment " -"driver will need to be expicitly configured if different than the default " -"(SQL)." 
-msgstr "" -"同一の割り当てドライバーを自動的に設定するための ID ドライバー設定の使用は、" -"提供を終了しました。 \"O\" リリースでは、デフォルト (SQL) 以外の場合は割り当" -"てドライバーを明示的に設定する必要があります。" - -#, python-format -msgid "User %(u_id)s is unauthorized for tenant %(t_id)s" -msgstr "ユーザー %(u_id)s はテナント %(t_id)s のアクセス権限がありません。" - -#, python-format -msgid "User %(user_id)s has no access to domain %(domain_id)s" -msgstr "" -"ユーザー %(user_id)s はドメイン %(domain_id)s へのアクセス権限がありません" - -#, python-format -msgid "User %(user_id)s has no access to project %(project_id)s" -msgstr "" -"ユーザー %(user_id)s はプロジェクト %(project_id)s へのアクセス権限がありませ" -"ん" - -#, python-format -msgid "User %(user_id)s is already a member of group %(group_id)s" -msgstr "ユーザー %(user_id)s はすでにグループ %(group_id)s のメンバーです" - -#, python-format -msgid "User '%(user_id)s' not found in group '%(group_id)s'" -msgstr "ユーザー '%(user_id)s' がグループ '%(group_id)s' で見つかりません" - -msgid "User IDs do not match" -msgstr "ユーザー ID が一致しません" - -msgid "" -"User auth cannot be built due to missing either user id, or user name with " -"domain id, or user name with domain name." -msgstr "" -"ユーザー ID、ドメイン ID が指定されたユーザー名、ドメイン名が指定されたユー" -"ザー名のいずれかが欠落しているため、ユーザー認証を作成できません。" - -#, python-format -msgid "User is disabled: %s" -msgstr "ユーザーが無効になっています: %s" - -msgid "User is not a member of the requested project" -msgstr "ユーザーは、要求されたプロジェクトのメンバーではありません" - -#, fuzzy -msgid "User is not a trustee." -msgstr "ユーザーは受託者ではありません。" - -msgid "User not found" -msgstr "ユーザーが見つかりません" - -msgid "User not valid for tenant." -msgstr "ユーザーはテナントに対して無効です。" - -msgid "User roles not supported: tenant_id required" -msgstr "ユーザーロールがサポートされません: tenant_id が必要です" - -#, fuzzy, python-format -msgid "User type %s not supported" -msgstr "ユーザータイプ %s はサポートされていません" - -msgid "You are not authorized to perform the requested action." 
-msgstr "要求されたアクションを実行する許可がありません。" - -#, python-format -msgid "You are not authorized to perform the requested action: %(action)s" -msgstr "要求されたアクションを実行する許可がありません: %(action)s" - -msgid "" -"You have tried to create a resource using the admin token. As this token is " -"not within a domain you must explicitly include a domain for this resource " -"to belong to." -msgstr "" -"管理トークンを使用してリソースを作成しようとしています。このトークンはドメイ" -"ン内にないため、このリソースが属するドメインを明示的に含める必要があります。" - -msgid "`key_mangler` functions must be callable." -msgstr "`key_mangler` 関数は呼び出し可能でなければなりません。" - -msgid "`key_mangler` option must be a function reference" -msgstr "`key_mangler` オプションは関数参照でなければなりません" - -msgid "any options" -msgstr "任意のオプション" - -msgid "auth_type is not Negotiate" -msgstr "auth_type はネゴシエートではありません" - -msgid "authorizing user does not have role required" -msgstr "ユーザーを認可するのに必要なロールがありません" - -#, python-format -msgid "cannot create a project in a branch containing a disabled project: %s" -msgstr "" -"無効になっているプロジェクトを含むブランチにプロジェクトを作成することはでき" -"ません: %s" - -#, python-format -msgid "" -"cannot delete an enabled project acting as a domain. Please disable the " -"project %s first." -msgstr "" -"ドメインとして動作する有効になっているプロジェクトを削除できません。最初にプ" -"ロジェクト %s を無効にしてください。" - -#, python-format -msgid "group %(group)s" -msgstr "グループ %(group)s" - -msgid "" -"idp_contact_type must be one of: [technical, other, support, administrative " -"or billing." 
-msgstr "" -"idp_contact_type は technical、other、support、administrative、billing のいず" -"れかでなければなりません。" - -#, python-format -msgid "invalid date format %s" -msgstr "日付形式 %s は無効です" - -#, python-format -msgid "" -"it is not permitted to have two projects acting as domains with the same " -"name: %s" -msgstr "" -"ドメインとして動作する同じ名前の 2 つのプロジェクトが存在することは許可されま" -"せん: %s" - -#, python-format -msgid "" -"it is not permitted to have two projects within a domain with the same " -"name : %s" -msgstr "" -"1 つのドメイン内に同じ名前の 2 つのプロジェクトが存在することは許可されませ" -"ん : %s" - -msgid "only root projects are allowed to act as domains." -msgstr "ドメインとして動作することが許可されるのは root プロジェクトのみです。" - -#, python-format -msgid "option %(option)s in group %(group)s" -msgstr "グループ %(group)s のオプション %(option)s" - -msgid "provided consumer key does not match stored consumer key" -msgstr "" -"指定されたコンシューマー鍵は保存されているコンシューマー鍵と一致しません" - -msgid "provided request key does not match stored request key" -msgstr "指定された要求鍵は保管されている要求鍵と一致しません" - -#, fuzzy -msgid "provided verifier does not match stored verifier" -msgstr "指定されたベリファイヤーは保管済みベリファイヤーと一致しません" - -msgid "remaining_uses must be a positive integer or null." 
-msgstr "remaining_uses は正整数またはヌルでなければなりません。" - -msgid "remaining_uses must not be set if redelegation is allowed" -msgstr "再委任が許可されている場合は remaining_uses を設定してはなりません" - -#, python-format -msgid "" -"request to update group %(group)s, but config provided contains group " -"%(group_other)s instead" -msgstr "" -"グループ %(group)s の更新を要求しましたが、指定された設定には代わりにグルー" -"プ %(group_other)s が含まれています" - -msgid "rescope a scoped token" -msgstr "スコープが設定されたトークンのスコープを設定し直します" - -#, python-format -msgid "role %s is not defined" -msgstr "ロール %s は定義されていません" - -msgid "scope.project.id must be specified if include_subtree is also specified" -msgstr "" -"include_subtree も指定される場合、scope.project.id を指定する必要があります。" - -#, python-format -msgid "tls_cacertdir %s not found or is not a directory" -msgstr "" -"tls_cacertdir %s が見つからない、もしくは、ディレクトリではありません。" - -#, python-format -msgid "tls_cacertfile %s not found or is not a file" -msgstr "tls_cacertfile %s が見つからない、もしくは、ファイルではありません。" - -#, python-format -msgid "token reference must be a KeystoneToken type, got: %s" -msgstr "トークン参照は KeystoneToken 型である必要があります。%s を受信しました" - -msgid "" -"update of domain_id is deprecated as of Mitaka and will be removed in O." -msgstr "" -"domain_id の更新は Mitaka の時点で提供を終了し、O で削除される予定です。" - -#, python-format -msgid "" -"validated expected to find %(param_name)r in function signature for " -"%(func_name)r." -msgstr "" -"検証され、%(func_name)r の関数のシグニチャーで %(param_name)r が見つかること" -"が予期されます" diff --git a/keystone-moon/keystone/locale/keystone-log-critical.pot b/keystone-moon/keystone/locale/keystone-log-critical.pot deleted file mode 100644 index f071ef0f..00000000 --- a/keystone-moon/keystone/locale/keystone-log-critical.pot +++ /dev/null @@ -1,24 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2016 ORGANIZATION -# This file is distributed under the same license as the keystone project. -# FIRST AUTHOR , 2016. 
-# -#, fuzzy -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-04 06:55+0000\n" -"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" -"Last-Translator: FULL NAME \n" -"Language-Team: LANGUAGE \n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=utf-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 2.2.0\n" - -#: keystone/catalog/backends/templated.py:106 -#, python-format -msgid "Unable to open template file %s" -msgstr "" - diff --git a/keystone-moon/keystone/locale/keystone-log-error.pot b/keystone-moon/keystone/locale/keystone-log-error.pot deleted file mode 100644 index 7b38a370..00000000 --- a/keystone-moon/keystone/locale/keystone-log-error.pot +++ /dev/null @@ -1,177 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2016 ORGANIZATION -# This file is distributed under the same license as the keystone project. -# FIRST AUTHOR , 2016. -# -#, fuzzy -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-04 06:55+0000\n" -"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" -"Last-Translator: FULL NAME \n" -"Language-Team: LANGUAGE \n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=utf-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 2.2.0\n" - -#: keystone/notifications.py:336 -msgid "Failed to construct notifier" -msgstr "" - -#: keystone/notifications.py:439 -#, python-format -msgid "Failed to send %(res_id)s %(event_type)s notification" -msgstr "" - -#: keystone/notifications.py:706 -#, python-format -msgid "Failed to send %(action)s %(event_type)s notification" -msgstr "" - -#: keystone/assignment/core.py:688 -#, python-format -msgid "Circular reference found role inference rules - %(prior_role_id)s." 
-msgstr "" - -#: keystone/catalog/core.py:75 -#, python-format -msgid "Malformed endpoint - %(url)r is not a string" -msgstr "" - -#: keystone/catalog/core.py:80 -#, python-format -msgid "Malformed endpoint %(url)s - unknown key %(keyerror)s" -msgstr "" - -#: keystone/catalog/core.py:88 -#, python-format -msgid "" -"Malformed endpoint '%(url)s'. The following type error occurred during " -"string substitution: %(typeerror)s" -msgstr "" - -#: keystone/catalog/core.py:94 -#, python-format -msgid "" -"Malformed endpoint %s - incomplete format (are you missing a type " -"notifier ?)" -msgstr "" - -#: keystone/common/openssl.py:90 -#, python-format -msgid "Command %(to_exec)s exited with %(retcode)s - %(output)s" -msgstr "" - -#: keystone/common/openssl.py:114 -#, python-format -msgid "Failed to remove file %(file_path)r: %(error)s" -msgstr "" - -#: keystone/common/utils.py:267 -msgid "" -"Error setting up the debug environment. Verify that the option --debug-" -"url has the format : and that a debugger processes is " -"listening on that port." -msgstr "" - -#: keystone/common/environment/eventlet_server.py:112 -#, python-format -msgid "Could not bind to %(host)s:%(port)s" -msgstr "" - -#: keystone/common/environment/eventlet_server.py:211 -msgid "Server error" -msgstr "" - -#: keystone/endpoint_policy/core.py:131 keystone/endpoint_policy/core.py:231 -#, python-format -msgid "" -"Circular reference or a repeated entry found in region tree - " -"%(region_id)s." 
-msgstr "" - -#: keystone/federation/idp.py:440 -#, python-format -msgid "Error when signing assertion, reason: %(reason)s%(output)s" -msgstr "" - -#: keystone/oauth1/core.py:135 -msgid "Cannot retrieve Authorization headers" -msgstr "" - -#: keystone/resource/core.py:728 -#, python-format -msgid "" -"Asked to convert a non-domain project into a domain - Domain: " -"%(domain_id)s, Project ID: %(id)s, Project Name: %(project_name)s" -msgstr "" - -#: keystone/resource/core.py:831 -#, python-format -msgid "" -"Circular reference or a repeated entry found projects hierarchy - " -"%(project_id)s." -msgstr "" - -#: keystone/resource/core.py:904 -msgid "Failed to create the default domain." -msgstr "" - -#: keystone/resource/core.py:1479 keystone/resource/V8_backends/sql.py:100 -#: keystone/resource/V8_backends/sql.py:119 -#: keystone/resource/backends/sql.py:137 keystone/resource/backends/sql.py:156 -#, python-format -msgid "" -"Circular reference or a repeated entry found in projects hierarchy - " -"%(project_id)s." -msgstr "" - -#: keystone/resource/core.py:1660 -#, python-format -msgid "" -"Unexpected results in response for domain config - %(count)s responses, " -"first option is %(option)s, expected option %(expected)s" -msgstr "" - -#: keystone/token/provider.py:334 -#, python-format -msgid "Unexpected error or malformed token determining token expiry: %s" -msgstr "" - -#: keystone/token/persistence/backends/kvs.py:236 -#, python-format -msgid "" -"Reinitializing revocation list due to error in loading revocation list " -"from backend. Expected `list` type got `%(type)s`. 
Old revocation list " -"data: %(list)r" -msgstr "" - -#: keystone/token/providers/common.py:728 -msgid "Failed to validate token" -msgstr "" - -#: keystone/token/providers/pki.py:52 -msgid "Unable to sign token" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:42 -#, python-format -msgid "" -"Either [fernet_tokens] key_repository does not exist or Keystone does not" -" have sufficient permission to access it: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:66 -#, python-format -msgid "Unable to convert Keystone user or group ID. Error: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:83 -msgid "" -"Failed to create [fernet_tokens] key_repository: either it already exists" -" or you don't have sufficient permissions to create it" -msgstr "" - diff --git a/keystone-moon/keystone/locale/keystone-log-info.pot b/keystone-moon/keystone/locale/keystone-log-info.pot deleted file mode 100644 index 664cf0fa..00000000 --- a/keystone-moon/keystone/locale/keystone-log-info.pot +++ /dev/null @@ -1,238 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2016 ORGANIZATION -# This file is distributed under the same license as the keystone project. -# FIRST AUTHOR , 2016. -# -#, fuzzy -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-04 06:55+0000\n" -"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" -"Last-Translator: FULL NAME \n" -"Language-Team: LANGUAGE \n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=utf-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 2.2.0\n" - -#: keystone/assignment/core.py:200 -#, python-format -msgid "Creating the default role %s because it does not exist." 
-msgstr "" - -#: keystone/assignment/core.py:208 -#, python-format -msgid "Creating the default role %s failed because it was already created" -msgstr "" - -#: keystone/auth/controllers.py:112 -#, python-format -msgid "" -"\"expires_at\" has conflicting values %(existing)s and %(new)s. Will use" -" the earliest value." -msgstr "" - -#: keystone/cmd/cli.py:188 -#, python-format -msgid "Created domain %s" -msgstr "" - -#: keystone/cmd/cli.py:191 -#, python-format -msgid "Domain %s already exists, skipping creation." -msgstr "" - -#: keystone/cmd/cli.py:204 -#, python-format -msgid "Created project %s" -msgstr "" - -#: keystone/cmd/cli.py:206 -#, python-format -msgid "Project %s already exists, skipping creation." -msgstr "" - -#: keystone/cmd/cli.py:216 -#, python-format -msgid "User %s already exists, skipping creation." -msgstr "" - -#: keystone/cmd/cli.py:226 -#, python-format -msgid "Created user %s" -msgstr "" - -#: keystone/cmd/cli.py:235 -#, python-format -msgid "Created Role %s" -msgstr "" - -#: keystone/cmd/cli.py:237 -#, python-format -msgid "Role %s exists, skipping creation." -msgstr "" - -#: keystone/cmd/cli.py:254 -#, python-format -msgid "Granted %(role)s on %(project)s to user %(username)s." -msgstr "" - -#: keystone/cmd/cli.py:260 -#, python-format -msgid "User %(username)s already has %(role)s on %(project)s." -msgstr "" - -#: keystone/cmd/cli.py:271 -#, python-format -msgid "Created Region %s" -msgstr "" - -#: keystone/cmd/cli.py:273 -#, python-format -msgid "Region %s exists, skipping creation." 
-msgstr "" - -#: keystone/cmd/cli.py:330 -#, python-format -msgid "Created %(interface)s endpoint %(url)s" -msgstr "" - -#: keystone/cmd/cli.py:335 -#, python-format -msgid "Skipping %s endpoint as already created" -msgstr "" - -#: keystone/cmd/cli.py:639 -#, python-format -msgid "Scanning %r for domain config files" -msgstr "" - -#: keystone/common/openssl.py:80 -#, python-format -msgid "Running command - %s" -msgstr "" - -#: keystone/common/wsgi.py:80 -msgid "No bind information present in token" -msgstr "" - -#: keystone/common/wsgi.py:87 -#, python-format -msgid "Named bind mode %s not in bind information" -msgstr "" - -#: keystone/common/wsgi.py:94 -msgid "Kerberos credentials required and not present" -msgstr "" - -#: keystone/common/wsgi.py:98 -msgid "Kerberos credentials do not match those in bind" -msgstr "" - -#: keystone/common/wsgi.py:102 -msgid "Kerberos bind authentication successful" -msgstr "" - -#: keystone/common/wsgi.py:109 -#, python-format -msgid "Couldn't verify unknown bind: {%(bind_type)s: %(identifier)s}" -msgstr "" - -#: keystone/common/environment/eventlet_server.py:116 -#, python-format -msgid "Starting %(arg0)s on %(host)s:%(port)s" -msgstr "" - -#: keystone/common/kvs/core.py:159 -#, python-format -msgid "Adding proxy '%(proxy)s' to KVS %(name)s." -msgstr "" - -#: keystone/common/kvs/core.py:209 -#, python-format -msgid "Using %(func)s as KVS region %(name)s key_mangler" -msgstr "" - -#: keystone/common/kvs/core.py:221 -#, python-format -msgid "" -"Using default keystone.common.kvs.sha1_mangle_key as KVS region %s " -"key_mangler" -msgstr "" - -#: keystone/common/kvs/core.py:231 -#, python-format -msgid "KVS region %s key_mangler disabled." -msgstr "" - -#: keystone/middleware/auth.py:172 -#, python-format -msgid "Cannot find client issuer in env by the issuer attribute - %s." 
-msgstr "" - -#: keystone/middleware/auth.py:180 -#, python-format -msgid "" -"The client issuer %(client_issuer)s does not match with the trusted " -"issuer %(trusted_issuer)s" -msgstr "" - -#: keystone/token/persistence/backends/sql.py:286 -#, python-format -msgid "Total expired tokens removed: %d" -msgstr "" - -#: keystone/token/providers/fernet/token_formatters.py:174 -#, python-format -msgid "" -"Fernet token created with length of %d characters, which exceeds 255 " -"characters" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:76 -msgid "" -"[fernet_tokens] key_repository does not appear to exist; attempting to " -"create it" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:134 -#, python-format -msgid "Created a new key: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:147 -msgid "Key repository is already initialized; aborting." -msgstr "" - -#: keystone/token/providers/fernet/utils.py:188 -#, python-format -msgid "Starting key rotation with %(count)s key files: %(list)s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:194 -#, python-format -msgid "Current primary key is: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:196 -#, python-format -msgid "Next primary key will be: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:206 -#, python-format -msgid "Promoted key 0 to be the primary: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:227 -#, python-format -msgid "Excess key to purge: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:262 -#, python-format -msgid "Loaded %(count)d encryption keys (max_active_keys=%(max)d) from: %(dir)s" -msgstr "" - diff --git a/keystone-moon/keystone/locale/keystone-log-warning.pot b/keystone-moon/keystone/locale/keystone-log-warning.pot deleted file mode 100644 index 6282f2c6..00000000 --- a/keystone-moon/keystone/locale/keystone-log-warning.pot +++ /dev/null @@ -1,315 +0,0 @@ -# Translations template for keystone. 
-# Copyright (C) 2016 ORGANIZATION -# This file is distributed under the same license as the keystone project. -# FIRST AUTHOR , 2016. -# -#, fuzzy -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-04 06:55+0000\n" -"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" -"Last-Translator: FULL NAME \n" -"Language-Team: LANGUAGE \n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=utf-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 2.2.0\n" - -#: keystone/exception.py:66 -msgid "missing exception kwargs (programmer error)" -msgstr "" - -#: keystone/assignment/core.py:1394 -#, python-format -msgid "" -"delete_domain_assignments method not found in custom assignment driver. " -"Domain assignments for domain (%s) to users from other domains will not " -"be removed. This was added in V9 of the assignment driver." -msgstr "" - -#: keystone/auth/controllers.py:468 -#, python-format -msgid "" -"User %(user_id)s doesn't have access to default project %(project_id)s. " -"The token will be unscoped rather than scoped to the project." -msgstr "" - -#: keystone/auth/controllers.py:476 -#, python-format -msgid "" -"User %(user_id)s's default project %(project_id)s is disabled. The token " -"will be unscoped rather than scoped to the project." -msgstr "" - -#: keystone/auth/controllers.py:485 -#, python-format -msgid "" -"User %(user_id)s's default project %(project_id)s not found. The token " -"will be unscoped rather than scoped to the project." -msgstr "" - -#: keystone/cmd/cli.py:455 -msgid "" -"keystone-manage pki_setup is deprecated as of Mitaka in favor of not " -"using PKI tokens and may be removed in 'O' release." -msgstr "" - -#: keystone/cmd/cli.py:458 -msgid "keystone-manage pki_setup is not recommended for production use." -msgstr "" - -#: keystone/cmd/cli.py:477 -msgid "keystone-manage ssl_setup is not recommended for production use." 
-msgstr "" - -#: keystone/cmd/cli.py:650 -#, python-format -msgid "Ignoring file (%s) while scanning domain config directory" -msgstr "" - -#: keystone/common/authorization.py:69 -msgid "RBAC: Invalid user data in token" -msgstr "" - -#: keystone/common/controller.py:102 keystone/middleware/auth.py:102 -msgid "RBAC: Invalid token" -msgstr "" - -#: keystone/common/controller.py:127 keystone/common/controller.py:246 -#: keystone/common/controller.py:799 -msgid "RBAC: Bypassing authorization" -msgstr "" - -#: keystone/common/controller.py:735 -msgid "No domain information specified as part of list request" -msgstr "" - -#: keystone/common/controller.py:771 -msgid "" -"Not specifying a domain during a create user, group or project call, and " -"relying on falling back to the default domain, is deprecated as of " -"Liberty and will be removed in the N release. Specify the domain " -"explicitly or use a domain-scoped token" -msgstr "" - -#: keystone/common/openssl.py:74 -msgid "Failed to invoke ``openssl version``, assuming is v1.0 or newer" -msgstr "" - -#: keystone/common/utils.py:129 -#, python-format -msgid "Truncating user password to %d characters." -msgstr "" - -#: keystone/common/utils.py:552 -msgid "Couldn't find the auth context." -msgstr "" - -#: keystone/common/wsgi.py:252 -#, python-format -msgid "Authorization failed. %(exception)s from %(remote_addr)s" -msgstr "" - -#: keystone/common/kvs/core.py:153 -#, python-format -msgid "%s is not a dogpile.proxy.ProxyBackend" -msgstr "" - -#: keystone/common/kvs/core.py:428 -#, python-format -msgid "KVS lock released (timeout reached) for: %s" -msgstr "" - -#: keystone/common/ldap/core.py:1033 -msgid "" -"LDAP Server does not support paging. Disable paging in keystone.conf to " -"avoid this message." -msgstr "" - -#: keystone/common/ldap/core.py:1232 -#, python-format -msgid "" -"Invalid additional attribute mapping: \"%s\". 
Format must be " -":" -msgstr "" - -#: keystone/common/ldap/core.py:1343 -#, python-format -msgid "" -"ID attribute %(id_attr)s for LDAP object %(dn)s has multiple values and " -"therefore cannot be used as an ID. Will get the ID from DN instead" -msgstr "" - -#: keystone/common/ldap/core.py:1704 -#, python-format -msgid "" -"When deleting entries for %(search_base)s, could not delete nonexistent " -"entries %(entries)s%(dots)s" -msgstr "" - -#: keystone/endpoint_policy/core.py:94 -#, python-format -msgid "" -"Endpoint %(endpoint_id)s referenced in association for policy " -"%(policy_id)s not found." -msgstr "" - -#: keystone/endpoint_policy/core.py:181 -#, python-format -msgid "" -"Unsupported policy association found - Policy %(policy_id)s, Endpoint " -"%(endpoint_id)s, Service %(service_id)s, Region %(region_id)s, " -msgstr "" - -#: keystone/endpoint_policy/core.py:197 -#, python-format -msgid "" -"Policy %(policy_id)s referenced in association for endpoint " -"%(endpoint_id)s not found." -msgstr "" - -#: keystone/federation/utils.py:615 -msgid "Ignoring user name" -msgstr "" - -#: keystone/identity/controllers.py:145 -#, python-format -msgid "Unable to remove user %(user)s from %(tenant)s." -msgstr "" - -#: keystone/identity/controllers.py:164 -#, python-format -msgid "Unable to add user %(user)s to %(tenant)s." -msgstr "" - -#: keystone/identity/core.py:131 -#, python-format -msgid "Invalid domain name (%s) found in config file name" -msgstr "" - -#: keystone/identity/core.py:169 -#, python-format -msgid "Unable to locate domain config directory: %s" -msgstr "" - -#: keystone/identity/core.py:691 -#, python-format -msgid "" -"Found multiple domains being mapped to a driver that does not support " -"that (e.g. 
LDAP) - Domain ID: %(domain)s, Default Driver: %(driver)s" -msgstr "" - -#: keystone/middleware/auth.py:81 -msgid "" -"build_auth_context middleware checking for the admin token is deprecated " -"as of the Mitaka release and will be removed in the O release. If your " -"deployment requires use of the admin token, update keystone-paste.ini so " -"that admin_token_auth is before build_auth_context in the paste " -"pipelines, otherwise remove the admin_token_auth middleware from the " -"paste pipelines." -msgstr "" - -#: keystone/middleware/auth.py:195 -msgid "" -"Auth context already exists in the request environment; it will be used " -"for authorization instead of creating a new one." -msgstr "" - -#: keystone/middleware/core.py:63 -msgid "" -"The admin_token_auth middleware presents a security risk and should be " -"removed from the [pipeline:api_v3], [pipeline:admin_api], and " -"[pipeline:public_api] sections of your paste ini file." -msgstr "" - -#: keystone/resource/core.py:896 -msgid "" -"The default domain was created automatically to contain V2 resources. " -"This is deprecated in the M release and will not be supported in the O " -"release. Create the default domain manually or use the keystone-manage " -"bootstrap command." -msgstr "" - -#: keystone/resource/core.py:1945 -#, python-format -msgid "" -"Found what looks like an unmatched config option substitution reference -" -" domain: %(domain)s, group: %(group)s, option: %(option)s, value: " -"%(value)s. Perhaps the config option to which it refers has yet to be " -"added?" -msgstr "" - -#: keystone/resource/core.py:1952 -#, python-format -msgid "" -"Found what looks like an incorrectly constructed config option " -"substitution reference - domain: %(domain)s, group: %(group)s, option: " -"%(option)s, value: %(value)s." -msgstr "" - -#: keystone/resource/backends/sql.py:222 -#, python-format -msgid "Project %s does not exist and was not deleted." 
-msgstr "" - -#: keystone/server/common.py:42 -msgid "insecure_debug is enabled so responses may include sensitive information." -msgstr "" - -#: keystone/token/persistence/core.py:220 -#, python-format -msgid "" -"`token_api.%s` is deprecated as of Juno in favor of utilizing methods on " -"`token_provider_api` and may be removed in Kilo." -msgstr "" - -#: keystone/token/persistence/backends/kvs.py:58 -msgid "" -"It is recommended to only use the base key-value-store implementation for" -" the token driver for testing purposes. Please use 'memcache' or 'sql' " -"instead." -msgstr "" - -#: keystone/token/persistence/backends/kvs.py:207 -#, python-format -msgid "Token `%s` is expired, not adding to the revocation list." -msgstr "" - -#: keystone/token/persistence/backends/kvs.py:250 -#, python-format -msgid "" -"Removing `%s` from revocation list due to invalid expires data in " -"revocation list." -msgstr "" - -#: keystone/token/providers/fernet/utils.py:50 -#, python-format -msgid "[fernet_tokens] key_repository is world readable: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:94 -#, python-format -msgid "" -"Unable to change the ownership of [fernet_tokens] key_repository without " -"a keystone user ID and keystone group ID both being provided: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:116 -#, python-format -msgid "" -"Unable to change the ownership of the new key without a keystone user ID " -"and keystone group ID both being provided: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:214 -msgid "" -"[fernet_tokens] max_active_keys must be at least 1 to maintain a primary " -"key." -msgstr "" - -#: keystone/version/service.py:77 -msgid "'local conf' from PasteDeploy INI is being ignored." 
-msgstr "" - diff --git a/keystone-moon/keystone/locale/keystone.pot b/keystone-moon/keystone/locale/keystone.pot deleted file mode 100644 index b5838aab..00000000 --- a/keystone-moon/keystone/locale/keystone.pot +++ /dev/null @@ -1,1711 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2016 ORGANIZATION -# This file is distributed under the same license as the keystone project. -# FIRST AUTHOR , 2016. -# -#, fuzzy -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-04 06:55+0000\n" -"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" -"Last-Translator: FULL NAME \n" -"Language-Team: LANGUAGE \n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=utf-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 2.2.0\n" - -#: keystone/exception.py:83 -#, python-format -msgid "" -"Expecting to find %(attribute)s in %(target)s - the server could not " -"comply with the request since it is either malformed or otherwise " -"incorrect. The client is assumed to be in error." -msgstr "" - -#: keystone/exception.py:92 -#, python-format -msgid "Cannot create an endpoint with an invalid URL: %(url)s" -msgstr "" - -#: keystone/exception.py:99 -#, python-format -msgid "%(detail)s" -msgstr "" - -#: keystone/exception.py:103 -msgid "" -"Timestamp not in expected format. The server could not comply with the " -"request since it is either malformed or otherwise incorrect. The client " -"is assumed to be in error." -msgstr "" - -#: keystone/exception.py:112 -msgid "" -"The 'expires_at' must not be before now. The server could not comply with" -" the request since it is either malformed or otherwise incorrect. The " -"client is assumed to be in error." -msgstr "" - -#: keystone/exception.py:121 -#, python-format -msgid "" -"String length exceeded.The length of string '%(string)s' exceeded the " -"limit of column %(type)s(CHAR(%(length)d))." 
-msgstr "" - -#: keystone/exception.py:127 -#, python-format -msgid "" -"Request attribute %(attribute)s must be less than or equal to %(size)i. " -"The server could not comply with the request because the attribute size " -"is invalid (too large). The client is assumed to be in error." -msgstr "" - -#: keystone/exception.py:137 -#, python-format -msgid "" -"The specified parent region %(parent_region_id)s would create a circular " -"region hierarchy." -msgstr "" - -#: keystone/exception.py:156 -#, python-format -msgid "" -"The password length must be less than or equal to %(size)i. The server " -"could not comply with the request because the password is invalid." -msgstr "" - -#: keystone/exception.py:162 -#, python-format -msgid "" -"Unable to delete region %(region_id)s because it or its child regions " -"have associated endpoints." -msgstr "" - -#: keystone/exception.py:167 -msgid "" -"The certificates you requested are not available. It is likely that this " -"server does not use PKI tokens otherwise this is the result of " -"misconfiguration." -msgstr "" - -#: keystone/exception.py:179 -msgid "(Disable insecure_debug mode to suppress these details.)" -msgstr "" - -#: keystone/exception.py:189 -#, python-format -msgid "%(message)s %(amendment)s" -msgstr "" - -#: keystone/exception.py:197 -msgid "The request you have made requires authentication." -msgstr "" - -#: keystone/exception.py:203 -msgid "Authentication plugin error." -msgstr "" - -#: keystone/exception.py:211 -#, python-format -msgid "Unable to find valid groups while using mapping %(mapping_id)s" -msgstr "" - -#: keystone/exception.py:216 -msgid "Attempted to authenticate with an unsupported method." -msgstr "" - -#: keystone/exception.py:224 -msgid "Additional authentications steps required." -msgstr "" - -#: keystone/exception.py:232 -msgid "You are not authorized to perform the requested action." 
-msgstr "" - -#: keystone/exception.py:239 -#, python-format -msgid "You are not authorized to perform the requested action: %(action)s" -msgstr "" - -#: keystone/exception.py:244 -#, python-format -msgid "" -"Could not change immutable attribute(s) '%(attributes)s' in target " -"%(target)s" -msgstr "" - -#: keystone/exception.py:249 -#, python-format -msgid "" -"Group membership across backend boundaries is not allowed, group in " -"question is %(group_id)s, user is %(user_id)s" -msgstr "" - -#: keystone/exception.py:255 -#, python-format -msgid "" -"Invalid mix of entities for policy association - only Endpoint, Service " -"or Region+Service allowed. Request was - Endpoint: %(endpoint_id)s, " -"Service: %(service_id)s, Region: %(region_id)s" -msgstr "" - -#: keystone/exception.py:262 -#, python-format -msgid "Invalid domain specific configuration: %(reason)s" -msgstr "" - -#: keystone/exception.py:266 -#, python-format -msgid "Could not find: %(target)s" -msgstr "" - -#: keystone/exception.py:272 -#, python-format -msgid "Could not find endpoint: %(endpoint_id)s" -msgstr "" - -#: keystone/exception.py:279 -msgid "An unhandled exception has occurred: Could not find metadata." 
-msgstr "" - -#: keystone/exception.py:284 -#, python-format -msgid "Could not find policy: %(policy_id)s" -msgstr "" - -#: keystone/exception.py:288 -msgid "Could not find policy association" -msgstr "" - -#: keystone/exception.py:292 -#, python-format -msgid "Could not find role: %(role_id)s" -msgstr "" - -#: keystone/exception.py:296 -#, python-format -msgid "%(prior_role_id)s does not imply %(implied_role_id)s" -msgstr "" - -#: keystone/exception.py:300 -#, python-format -msgid "%(role_id)s cannot be an implied roles" -msgstr "" - -#: keystone/exception.py:304 -#, python-format -msgid "" -"Could not find role assignment with role: %(role_id)s, user or group: " -"%(actor_id)s, project or domain: %(target_id)s" -msgstr "" - -#: keystone/exception.py:310 -#, python-format -msgid "Could not find region: %(region_id)s" -msgstr "" - -#: keystone/exception.py:314 -#, python-format -msgid "Could not find service: %(service_id)s" -msgstr "" - -#: keystone/exception.py:318 -#, python-format -msgid "Could not find domain: %(domain_id)s" -msgstr "" - -#: keystone/exception.py:322 -#, python-format -msgid "Could not find project: %(project_id)s" -msgstr "" - -#: keystone/exception.py:326 -#, python-format -msgid "Cannot create project with parent: %(project_id)s" -msgstr "" - -#: keystone/exception.py:330 -#, python-format -msgid "Could not find token: %(token_id)s" -msgstr "" - -#: keystone/exception.py:334 -#, python-format -msgid "Could not find user: %(user_id)s" -msgstr "" - -#: keystone/exception.py:338 -#, python-format -msgid "Could not find group: %(group_id)s" -msgstr "" - -#: keystone/exception.py:342 -#, python-format -msgid "Could not find mapping: %(mapping_id)s" -msgstr "" - -#: keystone/exception.py:346 -#, python-format -msgid "Could not find trust: %(trust_id)s" -msgstr "" - -#: keystone/exception.py:350 -#, python-format -msgid "No remaining uses for trust: %(trust_id)s" -msgstr "" - -#: keystone/exception.py:354 -#, python-format -msgid "Could not find 
credential: %(credential_id)s" -msgstr "" - -#: keystone/exception.py:358 -#, python-format -msgid "Could not find version: %(version)s" -msgstr "" - -#: keystone/exception.py:362 -#, python-format -msgid "Could not find Endpoint Group: %(endpoint_group_id)s" -msgstr "" - -#: keystone/exception.py:366 -#, python-format -msgid "Could not find Identity Provider: %(idp_id)s" -msgstr "" - -#: keystone/exception.py:370 -#, python-format -msgid "Could not find Service Provider: %(sp_id)s" -msgstr "" - -#: keystone/exception.py:374 -#, python-format -msgid "" -"Could not find federated protocol %(protocol_id)s for Identity Provider: " -"%(idp_id)s" -msgstr "" - -#: keystone/exception.py:385 -#, python-format -msgid "" -"Could not find %(group_or_option)s in domain configuration for domain " -"%(domain_id)s" -msgstr "" - -#: keystone/exception.py:403 -#, python-format -msgid "Conflict occurred attempting to store %(type)s - %(details)s" -msgstr "" - -#: keystone/exception.py:412 -msgid "An unexpected error prevented the server from fulfilling your request." -msgstr "" - -#: keystone/exception.py:415 -#, python-format -msgid "" -"An unexpected error prevented the server from fulfilling your request: " -"%(exception)s" -msgstr "" - -#: keystone/exception.py:433 -#, python-format -msgid "Unable to consume trust %(trust_id)s, unable to acquire lock." -msgstr "" - -#: keystone/exception.py:438 -msgid "" -"Expected signing certificates are not available on the server. Please " -"check Keystone configuration." -msgstr "" - -#: keystone/exception.py:444 -#, python-format -msgid "Malformed endpoint URL (%(endpoint)s), see ERROR log for details." -msgstr "" - -#: keystone/exception.py:449 -#, python-format -msgid "" -"Group %(group_id)s returned by mapping %(mapping_id)s was not found in " -"the backend." 
-msgstr "" - -#: keystone/exception.py:454 -#, python-format -msgid "Error while reading metadata file, %(reason)s" -msgstr "" - -#: keystone/exception.py:458 -#, python-format -msgid "" -"Local section in mapping %(mapping_id)s refers to a remote match that " -"doesn't exist (e.g. {0} in a local section)." -msgstr "" - -#: keystone/exception.py:464 -#, python-format -msgid "" -"Unexpected combination of grant attributes - User: %(user_id)s, Group: " -"%(group_id)s, Project: %(project_id)s, Domain: %(domain_id)s" -msgstr "" - -#: keystone/exception.py:471 -msgid "The action you have requested has not been implemented." -msgstr "" - -#: keystone/exception.py:478 -msgid "The service you have requested is no longer available on this server." -msgstr "" - -#: keystone/exception.py:485 -#, python-format -msgid "The Keystone configuration file %(config_file)s could not be found." -msgstr "" - -#: keystone/exception.py:490 -msgid "" -"No encryption keys found; run keystone-manage fernet_setup to bootstrap " -"one." -msgstr "" - -#: keystone/exception.py:495 -#, python-format -msgid "" -"The Keystone domain-specific configuration has specified more than one " -"SQL driver (only one is permitted): %(source)s." -msgstr "" - -#: keystone/exception.py:502 -#, python-format -msgid "" -"%(mod_name)s doesn't provide database migrations. The migration " -"repository path at %(path)s doesn't exist or isn't a directory." -msgstr "" - -#: keystone/exception.py:509 -msgid "Token version is unrecognizable or unsupported." -msgstr "" - -#: keystone/exception.py:514 -#, python-format -msgid "" -"Unable to sign SAML assertion. It is likely that this server does not " -"have xmlsec1 installed, or this is the result of misconfiguration. Reason" -" %(reason)s" -msgstr "" - -#: keystone/exception.py:521 -msgid "" -"No Authorization headers found, cannot proceed with OAuth related calls, " -"if running under HTTPd or Apache, ensure WSGIPassAuthorization is set to " -"On." 
-msgstr "" - -#: keystone/exception.py:528 -#, python-format -msgid "" -"Could not determine Identity Provider ID. The configuration option " -"%(issuer_attribute)s was not found in the request environment." -msgstr "" - -#: keystone/exception.py:536 -#, python-format -msgid "" -"The %s extension has been moved into keystone core and as such its " -"migrations are maintained by the main keystone database control. Use the " -"command: keystone-manage db_sync" -msgstr "" - -#: keystone/exception.py:544 -#, python-format -msgid "%(driver)s is not supported driver version" -msgstr "" - -#: keystone/notifications.py:232 -#, python-format -msgid "%(event)s is not a valid notification event, must be one of: %(actions)s" -msgstr "" - -#: keystone/notifications.py:241 -#, python-format -msgid "Method not callable: %s" -msgstr "" - -#: keystone/assignment/controllers.py:100 keystone/identity/controllers.py:71 -#: keystone/resource/controllers.py:90 -msgid "Name field is required and cannot be empty" -msgstr "" - -#: keystone/assignment/controllers.py:146 -#: keystone/assignment/controllers.py:163 -#: keystone/assignment/controllers.py:182 -msgid "User roles not supported: tenant_id required" -msgstr "" - -#: keystone/assignment/controllers.py:567 -#: keystone/assignment/controllers.py:856 -msgid "Specify a domain or project, not both" -msgstr "" - -#: keystone/assignment/controllers.py:570 -msgid "Specify one of domain or project" -msgstr "" - -#: keystone/assignment/controllers.py:575 -#: keystone/assignment/controllers.py:861 -msgid "Specify a user or group, not both" -msgstr "" - -#: keystone/assignment/controllers.py:578 -msgid "Specify one of user or group" -msgstr "" - -#: keystone/assignment/controllers.py:845 -msgid "Combining effective and group filter will always result in an empty list." -msgstr "" - -#: keystone/assignment/controllers.py:850 -msgid "" -"Combining effective, domain and inherited filters will always result in " -"an empty list." 
-msgstr "" - -#: keystone/assignment/controllers.py:952 -msgid "scope.project.id must be specified if include_subtree is also specified" -msgstr "" - -#: keystone/assignment/core.py:77 -msgid "" -"Use of the identity driver config to automatically configure the same " -"assignment driver has been deprecated, in the \"O\" release, the " -"assignment driver will need to be expicitly configured if different than " -"the default (SQL)." -msgstr "" - -#: keystone/assignment/core.py:88 -#, python-format -msgid "" -"Attempted automatic driver selection for assignment based upon " -"[identity]\\driver option failed since driver %s is not found. Set " -"[assignment]/driver to a valid driver in keystone config." -msgstr "" - -#: keystone/assignment/core.py:179 -msgid "Must specify either domain or project" -msgstr "" - -#: keystone/assignment/core.py:848 -msgid "Cannot list assignments sourced from groups and filtered by user ID." -msgstr "" - -#: keystone/assignment/core.py:1058 -#, python-format -msgid "Project (%s)" -msgstr "" - -#: keystone/assignment/core.py:1060 -#, python-format -msgid "Domain (%s)" -msgstr "" - -#: keystone/assignment/core.py:1062 -msgid "Unknown Target" -msgstr "" - -#: keystone/assignment/core.py:1518 -msgid "Update of `domain_id` is not allowed." -msgstr "" - -#: keystone/assignment/core.py:1743 -msgid "Domain specific roles are not supported in the V8 role driver" -msgstr "" - -#: keystone/assignment/V8_backends/sql.py:287 -#: keystone/assignment/backends/sql.py:137 -#, python-format -msgid "Cannot remove role that has not been granted, %s" -msgstr "" - -#: keystone/assignment/V8_backends/sql.py:363 -#: keystone/assignment/backends/sql.py:213 -#, python-format -msgid "Unexpected assignment type encountered, %s" -msgstr "" - -#: keystone/auth/controllers.py:60 -#, python-format -msgid "" -"Direct import of auth plugin %(name)r is deprecated as of Liberty in " -"favor of its entrypoint from %(namespace)r and may be removed in N." 
-msgstr "" - -#: keystone/auth/controllers.py:121 -#, python-format -msgid "" -"Unable to reconcile identity attribute %(attribute)s as it has " -"conflicting values %(new)s and %(old)s" -msgstr "" - -#: keystone/auth/controllers.py:182 -msgid "Domain name cannot contain reserved characters." -msgstr "" - -#: keystone/auth/controllers.py:205 -msgid "Project name cannot contain reserved characters." -msgstr "" - -#: keystone/auth/controllers.py:355 keystone/middleware/auth.py:130 -msgid "Scoping to both domain and project is not allowed" -msgstr "" - -#: keystone/auth/controllers.py:358 -msgid "Scoping to both domain and trust is not allowed" -msgstr "" - -#: keystone/auth/controllers.py:361 -msgid "Scoping to both project and trust is not allowed" -msgstr "" - -#: keystone/auth/controllers.py:530 -msgid "User not found" -msgstr "" - -#: keystone/auth/controllers.py:644 -msgid "A project-scoped token is required to produce a service catalog." -msgstr "" - -#: keystone/auth/plugins/external.py:42 -msgid "No authenticated user" -msgstr "" - -#: keystone/auth/plugins/external.py:52 -#, python-format -msgid "Unable to lookup user %s" -msgstr "" - -#: keystone/auth/plugins/external.py:100 -msgid "auth_type is not Negotiate" -msgstr "" - -#: keystone/auth/plugins/mapped.py:246 -msgid "" -"Could not map user while setting ephemeral user identity. Either mapping " -"rules must specify user id/name or REMOTE_USER environment variable must " -"be set." 
-msgstr "" - -#: keystone/auth/plugins/oauth1.py:46 -msgid "Access token is expired" -msgstr "" - -#: keystone/auth/plugins/oauth1.py:60 -msgid "Could not validate the access token" -msgstr "" - -#: keystone/auth/plugins/password.py:39 -msgid "Invalid username or password" -msgstr "" - -#: keystone/auth/plugins/token.py:70 keystone/token/controllers.py:160 -msgid "rescope a scoped token" -msgstr "" - -#: keystone/auth/plugins/totp.py:96 -msgid "Invalid username or TOTP passcode" -msgstr "" - -#: keystone/catalog/controllers.py:215 -#, python-format -msgid "Conflicting region IDs specified: \"%(url_id)s\" != \"%(ref_id)s\"" -msgstr "" - -#: keystone/catalog/core.py:149 keystone/common/ldap/core.py:1411 -#, python-format -msgid "Duplicate ID, %s." -msgstr "" - -#: keystone/catalog/backends/sql.py:389 -#, python-format -msgid "Endpoint %(endpoint_id)s not found in project %(project_id)s" -msgstr "" - -#: keystone/catalog/backends/sql.py:492 -msgid "Endpoint Group Project Association not found" -msgstr "" - -#: keystone/cmd/cli.py:173 -msgid "Either --bootstrap-password argument or OS_BOOTSTRAP_PASSWORD must be set." -msgstr "" - -#: keystone/cmd/cli.py:586 -msgid "At least one option must be provided" -msgstr "" - -#: keystone/cmd/cli.py:593 -msgid "--all option cannot be mixed with other options" -msgstr "" - -#: keystone/cmd/cli.py:600 -#, python-format -msgid "Unknown domain '%(name)s' specified by --domain-name" -msgstr "" - -#: keystone/cmd/cli.py:679 keystone/tests/unit/test_cli.py:411 -msgid "At least one option must be provided, use either --all or --domain-name" -msgstr "" - -#: keystone/cmd/cli.py:685 keystone/tests/unit/test_cli.py:427 -msgid "The --all option cannot be used with the --domain-name option" -msgstr "" - -#: keystone/cmd/cli.py:710 keystone/tests/unit/test_cli.py:444 -#, python-format -msgid "" -"Invalid domain name: %(domain)s found in config file name: %(file)s - " -"ignoring this file." 
-msgstr "" - -#: keystone/cmd/cli.py:718 keystone/tests/unit/test_cli.py:385 -#, python-format -msgid "" -"Domain: %(domain)s already has a configuration defined - ignoring file: " -"%(file)s." -msgstr "" - -#: keystone/cmd/cli.py:732 -#, python-format -msgid "Error parsing configuration file for domain: %(domain)s, file: %(file)s." -msgstr "" - -#: keystone/cmd/cli.py:765 -#, python-format -msgid "" -"To get a more detailed information on this error, re-run this command for" -" the specific domain, i.e.: keystone-manage domain_config_upload " -"--domain-name %s" -msgstr "" - -#: keystone/cmd/cli.py:783 -#, python-format -msgid "Unable to locate domain config directory: %s" -msgstr "" - -#: keystone/cmd/cli.py:803 -msgid "" -"Unable to access the keystone database, please check it is configured " -"correctly." -msgstr "" - -#: keystone/cmd/cli.py:866 -#, python-format -msgid "Error while parsing rules %(path)s: %(err)s" -msgstr "" - -#: keystone/cmd/cli.py:875 -#, python-format -msgid "Error while opening file %(path)s: %(err)s" -msgstr "" - -#: keystone/cmd/cli.py:885 -#, python-format -msgid "Error while parsing line: '%(line)s': %(err)s" -msgstr "" - -#: keystone/common/authorization.py:61 keystone/common/wsgi.py:67 -#, python-format -msgid "token reference must be a KeystoneToken type, got: %s" -msgstr "" - -#: keystone/common/clean.py:24 -#, python-format -msgid "%s cannot be empty." -msgstr "" - -#: keystone/common/clean.py:26 -#, python-format -msgid "%(property_name)s cannot be less than %(min_length)s characters." -msgstr "" - -#: keystone/common/clean.py:31 -#, python-format -msgid "%(property_name)s should not be greater than %(max_length)s characters." 
-msgstr "" - -#: keystone/common/clean.py:40 -#, python-format -msgid "%(property_name)s is not a %(display_expected_type)s" -msgstr "" - -#: keystone/common/controller.py:349 keystone/common/controller.py:377 -#: keystone/identity/core.py:595 keystone/resource/core.py:1145 -#, python-format -msgid "Expected dict or list: %s" -msgstr "" - -#: keystone/common/controller.py:390 -msgid "Marker could not be found" -msgstr "" - -#: keystone/common/controller.py:401 -msgid "Invalid limit value" -msgstr "" - -#: keystone/common/controller.py:705 -msgid "Cannot change Domain ID" -msgstr "" - -#: keystone/common/controller.py:751 -msgid "" -"You have tried to create a resource using the admin token. As this token " -"is not within a domain you must explicitly include a domain for this " -"resource to belong to." -msgstr "" - -#: keystone/common/dependency.py:65 -#, python-format -msgid "Unregistered dependency: %(name)s for %(targets)s" -msgstr "" - -#: keystone/common/driver_hints.py:38 -msgid "" -"Cannot truncate a driver call without hints list as first parameter after" -" self " -msgstr "" - -#: keystone/common/json_home.py:76 -#, python-format -msgid "Unexpected status requested for JSON Home response, %s" -msgstr "" - -#: keystone/common/manager.py:82 -#, python-format -msgid "" -"Direct import of driver %(name)r is deprecated as of Liberty in favor of " -"its entrypoint from %(namespace)r and may be removed in N." -msgstr "" - -#: keystone/common/tokenless_auth.py:73 -msgid "Neither Project Domain ID nor Project Domain Name was provided." -msgstr "" - -#: keystone/common/tokenless_auth.py:165 -msgid "" -"User auth cannot be built due to missing either user id, or user name " -"with domain id, or user name with domain name." 
-msgstr "" - -#: keystone/common/utils.py:63 -msgid "Length of transformable resource id > 64, which is max allowed characters" -msgstr "" - -#: keystone/common/utils.py:192 keystone/credential/controllers.py:44 -msgid "Invalid blob in credential" -msgstr "" - -#: keystone/common/wsgi.py:208 -msgid "Query string is not UTF-8 encoded" -msgstr "" - -#: keystone/common/wsgi.py:341 -#, python-format -msgid "%s field is required and cannot be empty" -msgstr "" - -#: keystone/common/wsgi.py:353 -#, python-format -msgid "%s field(s) cannot be empty" -msgstr "" - -#: keystone/common/wsgi.py:548 -msgid "The resource could not be found." -msgstr "" - -#: keystone/common/kvs/core.py:88 -#, python-format -msgid "Lock Timeout occurred for key, %(target)s" -msgstr "" - -#: keystone/common/kvs/core.py:123 -#, python-format -msgid "KVS region %s is already configured. Cannot reconfigure." -msgstr "" - -#: keystone/common/kvs/core.py:166 -#, python-format -msgid "Key Value Store not configured: %s" -msgstr "" - -#: keystone/common/kvs/core.py:219 -msgid "`key_mangler` option must be a function reference" -msgstr "" - -#: keystone/common/kvs/core.py:376 -#, python-format -msgid "Lock key must match target key: %(lock)s != %(target)s" -msgstr "" - -#: keystone/common/kvs/core.py:380 -msgid "Must be called within an active lock context." -msgstr "" - -#: keystone/common/kvs/backends/memcached.py:68 -#, python-format -msgid "Maximum lock attempts on %s occurred." -msgstr "" - -#: keystone/common/kvs/backends/memcached.py:109 -#, python-format -msgid "" -"Backend `%(backend)s` is not a valid memcached backend. Valid backends: " -"%(backend_list)s" -msgstr "" - -#: keystone/common/kvs/backends/memcached.py:185 -msgid "`key_mangler` functions must be callable." -msgstr "" - -#: keystone/common/ldap/core.py:199 -#, python-format -msgid "Invalid LDAP deref option: %(option)s. 
Choose one of: %(options)s" -msgstr "" - -#: keystone/common/ldap/core.py:209 -#, python-format -msgid "Invalid LDAP TLS certs option: %(option)s. Choose one of: %(options)s" -msgstr "" - -#: keystone/common/ldap/core.py:221 -#, python-format -msgid "Invalid LDAP scope: %(scope)s. Choose one of: %(options)s" -msgstr "" - -#: keystone/common/ldap/core.py:591 -msgid "Invalid TLS / LDAPS combination" -msgstr "" - -#: keystone/common/ldap/core.py:596 -#, python-format -msgid "Invalid LDAP TLS_AVAIL option: %s. TLS not available" -msgstr "" - -#: keystone/common/ldap/core.py:606 -#, python-format -msgid "tls_cacertfile %s not found or is not a file" -msgstr "" - -#: keystone/common/ldap/core.py:618 -#, python-format -msgid "tls_cacertdir %s not found or is not a directory" -msgstr "" - -#: keystone/common/ldap/core.py:1333 -#, python-format -msgid "ID attribute %(id_attr)s not found in LDAP object %(dn)s" -msgstr "" - -#: keystone/common/ldap/core.py:1378 -#, python-format -msgid "LDAP %s create" -msgstr "" - -#: keystone/common/ldap/core.py:1383 -#, python-format -msgid "LDAP %s update" -msgstr "" - -#: keystone/common/ldap/core.py:1388 -#, python-format -msgid "LDAP %s delete" -msgstr "" - -#: keystone/common/ldap/core.py:1400 -#, python-format -msgid "Duplicate name, %s." -msgstr "" - -#: keystone/common/ldap/core.py:1557 -msgid "" -"Disabling an entity where the 'enable' attribute is ignored by " -"configuration." 
-msgstr "" - -#: keystone/common/ldap/core.py:1568 -#, python-format -msgid "Cannot change %(option_name)s %(attr)s" -msgstr "" - -#: keystone/common/ldap/core.py:1655 -#, python-format -msgid "Member %(member)s is already a member of group %(group)s" -msgstr "" - -#: keystone/common/sql/core.py:413 -msgid "Duplicate Entry" -msgstr "" - -#: keystone/common/sql/core.py:429 -#, python-format -msgid "An unexpected error occurred when trying to store %s" -msgstr "" - -#: keystone/common/sql/migration_helpers.py:167 -msgid "Unable to downgrade schema" -msgstr "" - -#: keystone/common/sql/migration_helpers.py:185 -#: keystone/common/sql/migration_helpers.py:231 -#, python-format -msgid "%s extension does not exist." -msgstr "" - -#: keystone/common/validation/__init__.py:44 -#, python-format -msgid "" -"validated expected to find %(param_name)r in function signature for " -"%(func_name)r." -msgstr "" - -#: keystone/common/validation/validators.py:53 -#, python-format -msgid "Invalid input for field '%(path)s'. The value is '%(value)s'." -msgstr "" - -#: keystone/contrib/admin_crud/core.py:28 -msgid "" -"Remove admin_crud_extension from the paste pipeline, the admin_crud " -"extension is now always available. Updatethe [pipeline:admin_api] section" -" in keystone-paste.ini accordingly, as it will be removed in the O " -"release." -msgstr "" - -#: keystone/contrib/ec2/controllers.py:80 keystone/contrib/s3/core.py:111 -#: keystone/contrib/s3/core.py:114 -msgid "Invalid EC2 signature." -msgstr "" - -#: keystone/contrib/ec2/controllers.py:83 -#: keystone/contrib/ec2/controllers.py:87 -#: keystone/contrib/ec2/controllers.py:125 -msgid "EC2 signature not supplied." -msgstr "" - -#: keystone/contrib/ec2/controllers.py:159 -msgid "User not valid for tenant." -msgstr "" - -#: keystone/contrib/ec2/controllers.py:260 -msgid "EC2 access key not found." 
-msgstr "" - -#: keystone/contrib/ec2/controllers.py:326 -msgid "Token belongs to another user" -msgstr "" - -#: keystone/contrib/ec2/controllers.py:354 -msgid "Credential belongs to another user" -msgstr "" - -#: keystone/contrib/endpoint_filter/routers.py:29 -msgid "" -"Remove endpoint_filter_extension from the paste pipeline, the endpoint " -"filter extension is now always available. Update the [pipeline:api_v3] " -"section in keystone-paste.ini accordingly as it will be removed in the O " -"release." -msgstr "" - -#: keystone/contrib/federation/routers.py:27 -msgid "" -"Remove federation_extension from the paste pipeline, the federation " -"extension is now always available. Update the [pipeline:api_v3] section " -"in keystone-paste.ini accordingly, as it will be removed in the O " -"release." -msgstr "" - -#: keystone/contrib/oauth1/routers.py:29 -msgid "" -"Remove oauth1_extension from the paste pipeline, the oauth1 extension is " -"now always available. Update the [pipeline:api_v3] section in keystone-" -"paste.ini accordingly, as it will be removed in the O release." -msgstr "" - -#: keystone/contrib/revoke/routers.py:27 -msgid "" -"Remove revoke_extension from the paste pipeline, the revoke extension is " -"now always available. Update the [pipeline:api_v3] section in keystone-" -"paste.ini accordingly, as it will be removed in the O release." -msgstr "" - -#: keystone/contrib/s3/core.py:82 -msgid "Credential signature mismatch" -msgstr "" - -#: keystone/contrib/simple_cert/routers.py:27 -msgid "" -"Remove simple_cert from the paste pipeline, the PKI and PKIz token " -"providers are now deprecated and simple_cert was only used insupport of " -"these token providers. Update the [pipeline:api_v3] section in keystone-" -"paste.ini accordingly, as it will be removed in the O release." -msgstr "" - -#: keystone/contrib/user_crud/core.py:28 -msgid "" -"Remove user_crud_extension from the paste pipeline, the user_crud " -"extension is now always available. 
Updatethe [pipeline:public_api] " -"section in keystone-paste.ini accordingly, as it will be removed in the O" -" release." -msgstr "" - -#: keystone/endpoint_policy/core.py:264 -#, python-format -msgid "No policy is associated with endpoint %(endpoint_id)s." -msgstr "" - -#: keystone/federation/controllers.py:269 -msgid "Request must have an origin query parameter" -msgstr "" - -#: keystone/federation/controllers.py:278 -#, python-format -msgid "%(host)s is not a trusted dashboard host" -msgstr "" - -#: keystone/federation/controllers.py:309 -msgid "Missing entity ID from environment" -msgstr "" - -#: keystone/federation/controllers.py:357 -msgid "Use a project scoped token when attempting to create a SAML assertion" -msgstr "" - -#: keystone/federation/idp.py:486 -#, python-format -msgid "Cannot open certificate %(cert_file)s. Reason: %(reason)s" -msgstr "" - -#: keystone/federation/idp.py:552 -msgid "Ensure configuration option idp_entity_id is set." -msgstr "" - -#: keystone/federation/idp.py:555 -msgid "Ensure configuration option idp_sso_endpoint is set." -msgstr "" - -#: keystone/federation/idp.py:574 -msgid "" -"idp_contact_type must be one of: [technical, other, support, " -"administrative or billing." -msgstr "" - -#: keystone/federation/utils.py:234 -msgid "Federation token is expired" -msgstr "" - -#: keystone/federation/utils.py:286 -msgid "Could not find Identity Provider identifier in environment" -msgstr "" - -#: keystone/federation/utils.py:290 -msgid "" -"Incoming identity provider identifier not included among the accepted " -"identifiers." -msgstr "" - -#: keystone/federation/utils.py:585 -#, python-format -msgid "User type %s not supported" -msgstr "" - -#: keystone/federation/utils.py:605 -msgid "" -"Could not map any federated user properties to identity values. Check " -"debug logs or the mapping used for additional details." -msgstr "" - -#: keystone/federation/utils.py:629 -#, python-format -msgid "" -"Invalid rule: %(identity_value)s. 
Both 'groups' and 'domain' keywords " -"must be specified." -msgstr "" - -#: keystone/federation/utils.py:854 -#, python-format -msgid "Identity Provider %(idp)s is disabled" -msgstr "" - -#: keystone/federation/utils.py:862 -#, python-format -msgid "Service Provider %(sp)s is disabled" -msgstr "" - -#: keystone/federation/backends/sql.py:182 -#, python-format -msgid "Duplicate remote ID: %s" -msgstr "" - -#: keystone/federation/backends/sql.py:184 -#, python-format -msgid "Duplicate entry: %s" -msgstr "" - -#: keystone/identity/controllers.py:74 -msgid "Enabled field must be a boolean" -msgstr "" - -#: keystone/identity/controllers.py:103 -msgid "Enabled field should be a boolean" -msgstr "" - -#: keystone/identity/core.py:265 -#, python-format -msgid "Config API entity at /domains/%s/config" -msgstr "" - -#: keystone/identity/core.py:271 -#, python-format -msgid "" -"Exceeded attempts to register domain %(domain)s to use the SQL driver, " -"the last domain that appears to have had it is %(last_domain)s, giving up" -msgstr "" - -#: keystone/identity/core.py:450 keystone/identity/backends/ldap.py:62 -#: keystone/identity/backends/ldap.py:64 keystone/identity/backends/ldap.py:70 -#: keystone/identity/backends/ldap.py:72 keystone/identity/backends/sql.py:210 -#: keystone/identity/backends/sql.py:212 -msgid "Invalid user / password" -msgstr "" - -#: keystone/identity/core.py:895 -#, python-format -msgid "User is disabled: %s" -msgstr "" - -#: keystone/identity/core.py:928 keystone/resource/core.py:375 -msgid "update of domain_id is deprecated as of Mitaka and will be removed in O." -msgstr "" - -#: keystone/identity/core.py:947 -msgid "Cannot change user ID" -msgstr "" - -#: keystone/identity/backends/ldap.py:35 -#, python-format -msgid "" -"%s for the LDAP identity backend has been deprecated in the Mitaka " -"release in favor of read-only identity LDAP access. It will be removed in" -" the \"O\" release." 
-msgstr "" - -#: keystone/identity/backends/ldap.py:106 -msgid "Cannot change user name" -msgstr "" - -#: keystone/identity/backends/ldap.py:214 keystone/identity/backends/sql.py:292 -#: keystone/identity/backends/sql.py:310 -#, python-format -msgid "User '%(user_id)s' not found in group '%(group_id)s'" -msgstr "" - -#: keystone/identity/backends/ldap.py:366 -#, python-format -msgid "User %(user_id)s is already a member of group %(group_id)s" -msgstr "" - -#: keystone/models/token_model.py:62 -msgid "Found invalid token: scoped to both project and domain." -msgstr "" - -#: keystone/oauth1/controllers.py:126 -msgid "Cannot list request tokens with a token issued via delegation." -msgstr "" - -#: keystone/oauth1/controllers.py:187 keystone/oauth1/backends/sql.py:256 -msgid "User IDs do not match" -msgstr "" - -#: keystone/oauth1/controllers.py:243 -msgid "Invalid signature" -msgstr "" - -#: keystone/oauth1/controllers.py:294 keystone/oauth1/controllers.py:372 -msgid "Request token is expired" -msgstr "" - -#: keystone/oauth1/controllers.py:308 -msgid "There should not be any non-oauth parameters" -msgstr "" - -#: keystone/oauth1/controllers.py:312 -msgid "provided consumer key does not match stored consumer key" -msgstr "" - -#: keystone/oauth1/controllers.py:316 -msgid "provided verifier does not match stored verifier" -msgstr "" - -#: keystone/oauth1/controllers.py:320 -msgid "provided request key does not match stored request key" -msgstr "" - -#: keystone/oauth1/controllers.py:324 -msgid "Request Token does not have an authorizing user id" -msgstr "" - -#: keystone/oauth1/controllers.py:361 -msgid "Cannot authorize a request token with a token issued via delegation." 
-msgstr "" - -#: keystone/oauth1/controllers.py:388 -msgid "authorizing user does not have role required" -msgstr "" - -#: keystone/oauth1/controllers.py:401 -msgid "User is not a member of the requested project" -msgstr "" - -#: keystone/oauth1/backends/sql.py:91 -msgid "Consumer not found" -msgstr "" - -#: keystone/oauth1/backends/sql.py:177 -msgid "Request token not found" -msgstr "" - -#: keystone/oauth1/backends/sql.py:237 -msgid "Access token not found" -msgstr "" - -#: keystone/resource/controllers.py:94 -msgid "The creation of projects acting as domains is not allowed in v2." -msgstr "" - -#: keystone/resource/controllers.py:284 -msgid "" -"Cannot use parents_as_list and parents_as_ids query params at the same " -"time." -msgstr "" - -#: keystone/resource/controllers.py:290 -msgid "" -"Cannot use subtree_as_list and subtree_as_ids query params at the same " -"time." -msgstr "" - -#: keystone/resource/core.py:106 -#, python-format -msgid "Max hierarchy depth reached for %s branch." -msgstr "" - -#: keystone/resource/core.py:123 -msgid "Multiple domains are not supported" -msgstr "" - -#: keystone/resource/core.py:129 -msgid "only root projects are allowed to act as domains." -msgstr "" - -#: keystone/resource/core.py:152 -#, python-format -msgid "" -"Cannot create project, since its parent (%(domain_id)s) is acting as a " -"domain, but project's specified parent_id (%(parent_id)s) does not match " -"this domain_id." -msgstr "" - -#: keystone/resource/core.py:163 -#, python-format -msgid "" -"Cannot create project, since it specifies its owner as domain " -"%(domain_id)s, but specifies a parent in a different domain " -"(%(parent_domain_id)s)." 
-msgstr "" - -#: keystone/resource/core.py:183 -#, python-format -msgid "cannot create a project in a branch containing a disabled project: %s" -msgstr "" - -#: keystone/resource/core.py:191 -#, python-format -msgid "" -"%(entity)s name cannot contain the following reserved characters: " -"%(chars)s" -msgstr "" - -#: keystone/resource/core.py:201 -#, python-format -msgid "" -"it is not permitted to have two projects acting as domains with the same " -"name: %s" -msgstr "" - -#: keystone/resource/core.py:205 -#, python-format -msgid "" -"it is not permitted to have two projects within a domain with the same " -"name : %s" -msgstr "" - -#: keystone/resource/core.py:262 -#, python-format -msgid "Domain is disabled: %s" -msgstr "" - -#: keystone/resource/core.py:279 -#, python-format -msgid "Domain cannot be named %s" -msgstr "" - -#: keystone/resource/core.py:282 -#, python-format -msgid "Domain cannot have ID %s" -msgstr "" - -#: keystone/resource/core.py:297 -#, python-format -msgid "Project is disabled: %s" -msgstr "" - -#: keystone/resource/core.py:304 -#, python-format -msgid "Cannot enable project %s since it has disabled parents" -msgstr "" - -#: keystone/resource/core.py:340 -msgid "Update of `parent_id` is not allowed." -msgstr "" - -#: keystone/resource/core.py:345 -msgid "Update of `is_domain` is not allowed." -msgstr "" - -#: keystone/resource/core.py:359 -msgid "Update of domain_id of projects acting as domains is not allowed." -msgstr "" - -#: keystone/resource/core.py:366 -msgid "Update of domain_id is only allowed for root projects." -msgstr "" - -#: keystone/resource/core.py:371 -msgid "Cannot update domain_id of a project that has children." -msgstr "" - -#: keystone/resource/core.py:396 -#, python-format -msgid "" -"Cannot disable project %(project_id)s since its subtree contains enabled " -"projects." -msgstr "" - -#: keystone/resource/core.py:443 -msgid "Cascade update is only allowed for enabled attribute." 
-msgstr "" - -#: keystone/resource/core.py:507 -#, python-format -msgid "" -"cannot delete an enabled project acting as a domain. Please disable the " -"project %s first." -msgstr "" - -#: keystone/resource/core.py:513 -#, python-format -msgid "" -"Cannot delete the project %s since it is not a leaf in the hierarchy. Use" -" the cascade option if you want to delete a whole subtree." -msgstr "" - -#: keystone/resource/core.py:526 -#, python-format -msgid "" -"Cannot delete project %(project_id)s since its subtree contains enabled " -"projects." -msgstr "" - -#: keystone/resource/core.py:554 -msgid "Project field is required and cannot be empty." -msgstr "" - -#: keystone/resource/core.py:795 -msgid "Cannot delete a domain that is enabled, please disable it first." -msgstr "" - -#: keystone/resource/core.py:1570 -msgid "No options specified" -msgstr "" - -#: keystone/resource/core.py:1576 -#, python-format -msgid "" -"The value of group %(group)s specified in the config should be a " -"dictionary of options" -msgstr "" - -#: keystone/resource/core.py:1600 -#, python-format -msgid "" -"Option %(option)s found with no group specified while checking domain " -"configuration request" -msgstr "" - -#: keystone/resource/core.py:1607 -#, python-format -msgid "Group %(group)s is not supported for domain specific configurations" -msgstr "" - -#: keystone/resource/core.py:1614 -#, python-format -msgid "" -"Option %(option)s in group %(group)s is not supported for domain specific" -" configurations" -msgstr "" - -#: keystone/resource/core.py:1666 -msgid "An unexpected error occurred when retrieving domain configs" -msgstr "" - -#: keystone/resource/core.py:1745 keystone/resource/core.py:1828 -#: keystone/resource/core.py:1898 keystone/resource/config_backends/sql.py:76 -#, python-format -msgid "option %(option)s in group %(group)s" -msgstr "" - -#: keystone/resource/core.py:1748 keystone/resource/core.py:1833 -#: keystone/resource/core.py:1894 -#, python-format -msgid "group 
%(group)s" -msgstr "" - -#: keystone/resource/core.py:1750 -msgid "any options" -msgstr "" - -#: keystone/resource/core.py:1793 -#, python-format -msgid "" -"Trying to update option %(option)s in group %(group)s, so that, and only " -"that, option must be specified in the config" -msgstr "" - -#: keystone/resource/core.py:1798 -#, python-format -msgid "" -"Trying to update group %(group)s, so that, and only that, group must be " -"specified in the config" -msgstr "" - -#: keystone/resource/core.py:1807 -#, python-format -msgid "" -"request to update group %(group)s, but config provided contains group " -"%(group_other)s instead" -msgstr "" - -#: keystone/resource/core.py:1814 -#, python-format -msgid "" -"Trying to update option %(option)s in group %(group)s, but config " -"provided contains option %(option_other)s instead" -msgstr "" - -#: keystone/resource/core.py:2006 -#, python-format -msgid "Group %s not found in config" -msgstr "" - -#: keystone/resource/core.py:2016 -#, python-format -msgid "" -"Reading the default for option %(option)s in group %(group)s is not " -"supported" -msgstr "" - -#: keystone/revoke/controllers.py:33 -#, python-format -msgid "invalid date format %s" -msgstr "" - -#: keystone/revoke/core.py:156 -msgid "" -"The revoke call must not have both domain_id and project_id. This is a " -"bug in the Keystone server. The current request is aborted." -msgstr "" - -#: keystone/revoke/core.py:226 keystone/token/provider.py:217 -#: keystone/token/provider.py:256 keystone/token/provider.py:336 -#: keystone/token/provider.py:343 -msgid "Failed to validate token" -msgstr "" - -#: keystone/server/eventlet.py:77 -msgid "" -"Running keystone via eventlet is deprecated as of Kilo in favor of " -"running in a WSGI server (e.g. mod_wsgi). Support for keystone under " -"eventlet will be removed in the \"M\"-Release." 
-msgstr "" - -#: keystone/server/eventlet.py:90 -#, python-format -msgid "Failed to start the %(name)s server" -msgstr "" - -#: keystone/token/controllers.py:372 -msgid "Tenant name cannot contain reserved characters." -msgstr "" - -#: keystone/token/controllers.py:392 -#, python-format -msgid "Project ID not found: %(t_id)s" -msgstr "" - -#: keystone/token/controllers.py:396 -#, python-format -msgid "User %(u_id)s is unauthorized for tenant %(t_id)s" -msgstr "" - -#: keystone/token/controllers.py:415 keystone/token/controllers.py:418 -msgid "Token does not belong to specified tenant." -msgstr "" - -#: keystone/token/provider.py:269 keystone/token/provider.py:293 -msgid "No token in the request" -msgstr "" - -#: keystone/token/persistence/backends/kvs.py:132 -#, python-format -msgid "Unknown token version %s" -msgstr "" - -#: keystone/token/providers/common.py:313 -#: keystone/token/providers/common.py:445 -#, python-format -msgid "User %(user_id)s has no access to project %(project_id)s" -msgstr "" - -#: keystone/token/providers/common.py:318 -#: keystone/token/providers/common.py:450 -#, python-format -msgid "User %(user_id)s has no access to domain %(domain_id)s" -msgstr "" - -#: keystone/token/providers/common.py:345 -msgid "Trustor is disabled." -msgstr "" - -#: keystone/token/providers/common.py:434 -msgid "Trustee has no delegated roles." -msgstr "" - -#: keystone/token/providers/common.py:496 -#, python-format -msgid "Invalid audit info data type: %(data)s (%(type)s)" -msgstr "" - -#: keystone/token/providers/common.py:560 -#: keystone/token/providers/common.py:587 -msgid "The configured token provider does not support bind authentication." -msgstr "" - -#: keystone/token/providers/common.py:598 -msgid "User is not a trustee." 
-msgstr "" - -#: keystone/token/providers/common.py:665 -msgid "" -"Attempting to use OS-FEDERATION token with V2 Identity Service, use V3 " -"Authentication" -msgstr "" - -#: keystone/token/providers/common.py:675 -msgid "Non-default domain is not supported" -msgstr "" - -#: keystone/token/providers/common.py:679 -msgid "Domain scoped token is not supported" -msgstr "" - -#: keystone/token/providers/pki.py:53 keystone/token/providers/pkiz.py:31 -msgid "Unable to sign token." -msgstr "" - -#: keystone/token/providers/fernet/token_formatters.py:102 -#, python-format -msgid "This is not a recognized Fernet token %s" -msgstr "" - -#: keystone/token/providers/fernet/token_formatters.py:198 -#, python-format -msgid "This is not a recognized Fernet payload version: %s" -msgstr "" - -#: keystone/trust/controllers.py:107 -#, python-format -msgid "role %s is not defined" -msgstr "" - -#: keystone/trust/controllers.py:131 -msgid "Redelegation allowed for delegated by trust only" -msgstr "" - -#: keystone/trust/controllers.py:164 -msgid "The authenticated user should match the trustor." -msgstr "" - -#: keystone/trust/controllers.py:169 -msgid "At least one role should be specified." -msgstr "" - -#: keystone/trust/core.py:58 -#, python-format -msgid "" -"Remaining redelegation depth of %(redelegation_depth)d out of allowed " -"range of [0..%(max_count)d]" -msgstr "" - -#: keystone/trust/core.py:67 -#, python-format -msgid "" -"Field \"remaining_uses\" is set to %(value)s while it must not be set in " -"order to redelegate a trust" -msgstr "" - -#: keystone/trust/core.py:78 -msgid "Requested expiration time is more than redelegated trust can provide" -msgstr "" - -#: keystone/trust/core.py:88 -msgid "Some of requested roles are not in redelegated trust" -msgstr "" - -#: keystone/trust/core.py:112 -msgid "One of the trust agents is disabled or deleted" -msgstr "" - -#: keystone/trust/core.py:131 -msgid "remaining_uses must be a positive integer or null." 
-msgstr "" - -#: keystone/trust/core.py:137 -#, python-format -msgid "" -"Requested redelegation depth of %(requested_count)d is greater than " -"allowed %(max_count)d" -msgstr "" - -#: keystone/trust/core.py:144 -msgid "remaining_uses must not be set if redelegation is allowed" -msgstr "" - -#: keystone/trust/core.py:154 -msgid "" -"Modifying \"redelegation_count\" upon redelegation is forbidden. Omitting" -" this parameter is advised." -msgstr "" - diff --git a/keystone-moon/keystone/locale/ko_KR/LC_MESSAGES/keystone-log-critical.po b/keystone-moon/keystone/locale/ko_KR/LC_MESSAGES/keystone-log-critical.po deleted file mode 100644 index acf44efb..00000000 --- a/keystone-moon/keystone/locale/ko_KR/LC_MESSAGES/keystone-log-critical.po +++ /dev/null @@ -1,25 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# Sungjin Kang , 2016. #zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2014-08-31 03:19+0000\n" -"Last-Translator: openstackjenkins \n" -"Language: ko-KR\n" -"Plural-Forms: nplurals=1; plural=0;\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Korean (South Korea)\n" - -#, python-format -msgid "Unable to open template file %s" -msgstr "템플릿 파일 %s을(를) 열 수 없음" diff --git a/keystone-moon/keystone/locale/ko_KR/LC_MESSAGES/keystone-log-error.po b/keystone-moon/keystone/locale/ko_KR/LC_MESSAGES/keystone-log-error.po deleted file mode 100644 index d531e9d5..00000000 --- a/keystone-moon/keystone/locale/ko_KR/LC_MESSAGES/keystone-log-error.po +++ /dev/null @@ -1,165 +0,0 @@ -# Translations template for keystone. 
-# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# Sungjin Kang , 2016. #zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2016-04-19 04:32+0000\n" -"Last-Translator: SeYeon Lee \n" -"Language: ko-KR\n" -"Plural-Forms: nplurals=1; plural=0;\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Korean (South Korea)\n" - -#, python-format -msgid "" -"Asked to convert a non-domain project into a domain - Domain: %(domain_id)s, " -"Project ID: %(id)s, Project Name: %(project_name)s" -msgstr "" -"비도메인 프로젝트를 도메인으로 변환하도록 요청 - 도메인: %(domain_id)s, 프로" -"젝트 ID: %(id)s, 프로젝트 이름: %(project_name)s" - -msgid "Cannot retrieve Authorization headers" -msgstr "인증 헤더를 검색할 수 없음" - -#, python-format -msgid "Circular reference found role inference rules - %(prior_role_id)s." -msgstr "순환 참조에서 역할 추론 규칙 발견 - %(prior_role_id)s." - -#, python-format -msgid "" -"Circular reference or a repeated entry found in projects hierarchy - " -"%(project_id)s." -msgstr "" -"프로젝트 계층 - %(project_id)s에서 순환 참조 또는 반복 항목을 발견했습니다." - -#, python-format -msgid "" -"Circular reference or a repeated entry found in region tree - %(region_id)s." -msgstr "지역 트리에서 순환 참조 또는 반복 항목이 발견됨 - %(region_id)s." - -#, python-format -msgid "" -"Circular reference or a repeated entry found projects hierarchy - " -"%(project_id)s." -msgstr "순환 참조 또는 반복 항목에서 프로젝트 계층을 발견 - %(project_id)s." 
- -#, python-format -msgid "Command %(to_exec)s exited with %(retcode)s - %(output)s" -msgstr "명령 %(to_exec)s이(가) 종료되고 %(retcode)s - %(output)s이(가) 표시됨" - -#, python-format -msgid "Could not bind to %(host)s:%(port)s" -msgstr "%(host)s:%(port)s에 바인드할 수 없음" - -#, python-format -msgid "" -"Either [fernet_tokens] key_repository does not exist or Keystone does not " -"have sufficient permission to access it: %s" -msgstr "" -"[fernet_tokens] key_repository가 없거나 Keystone에서 액세스할 권한이 충분하" -"지 않음: %s" - -msgid "" -"Error setting up the debug environment. Verify that the option --debug-url " -"has the format : and that a debugger processes is listening on " -"that port." -msgstr "" -"디버그 환경을 설정하는 중에 오류가 발생했습니다. --debug-url 옵션에 :" -" 형식이 있으며 디버거 프로세스가 해당 포트에서 청취 중인지 확인하십시" -"오." - -#, python-format -msgid "Error when signing assertion, reason: %(reason)s%(output)s" -msgstr "어설션에 서명할 때 오류 발생, 이유: %(reason)s%(output)s" - -msgid "Failed to construct notifier" -msgstr "알리미를 구성하는 데 실패" - -msgid "" -"Failed to create [fernet_tokens] key_repository: either it already exists or " -"you don't have sufficient permissions to create it" -msgstr "" -"[fernet_tokens] key_repository 생성 실패: 이미 있거나 생성할 권한이 충분하지 " -"않음" - -msgid "Failed to create the default domain." -msgstr "기본 도메인을 생성하지 못했습니다." 
- -#, python-format -msgid "Failed to remove file %(file_path)r: %(error)s" -msgstr "파일 %(file_path)r을(를) 제거하는 데 실패: %(error)s" - -#, python-format -msgid "Failed to send %(action)s %(event_type)s notification" -msgstr "%(action)s %(event_type)s 알림을 보내는 데 실패" - -#, python-format -msgid "Failed to send %(res_id)s %(event_type)s notification" -msgstr "%(res_id)s %(event_type)s 알림을 보내는 데 실패" - -msgid "Failed to validate token" -msgstr "토큰을 유효성 검증하지 못했음" - -#, python-format -msgid "Malformed endpoint %(url)s - unknown key %(keyerror)s" -msgstr "형식이 잘못된 엔드포인트 %(url)s - 알 수 없는 키 %(keyerror)s" - -#, python-format -msgid "" -"Malformed endpoint %s - incomplete format (are you missing a type notifier ?)" -msgstr "" -"잘못된 형식의 엔드포인트 %s - 불완전한 형식(유형 알리미가 누락되었습니까?)" - -#, python-format -msgid "" -"Malformed endpoint '%(url)s'. The following type error occurred during " -"string substitution: %(typeerror)s" -msgstr "" -"잘못된 형식의 엔드포인트 '%(url)s'입니다. 문자열 대체 중에 다음 입력 오류 발" -"생: %(typeerror)s" - -#, python-format -msgid "Malformed endpoint - %(url)r is not a string" -msgstr "잘못된 형식의 엔드포인트 - %(url)r이(가) 문자열이 아님" - -#, python-format -msgid "" -"Reinitializing revocation list due to error in loading revocation list from " -"backend. Expected `list` type got `%(type)s`. Old revocation list data: " -"%(list)r" -msgstr "" -"백엔드에서 취소 목록을 로드하는 중에 발생한 오류로 인해 취소 목록을 다시 초기" -"화합니다. 예상되는`list` 유형이 `%(type)s`이(가) 되었습니다. 이전 취소 목록 " -"데이터: %(list)r" - -msgid "Server error" -msgstr "서버 오류" - -#, python-format -msgid "Unable to convert Keystone user or group ID. Error: %s" -msgstr "Keystone 사용자 또는 그룹 ID를 변환할 수 없습니다. 
오류: %s" - -msgid "Unable to sign token" -msgstr "토큰에 서명할 수 없음" - -#, python-format -msgid "Unexpected error or malformed token determining token expiry: %s" -msgstr "토큰 만료를 판별하는 잘못된 형식의 토큰 또는 예상치 못한 오류: %s" - -#, python-format -msgid "" -"Unexpected results in response for domain config - %(count)s responses, " -"first option is %(option)s, expected option %(expected)s" -msgstr "" -"도메인 구성에 대한 응답의 예기치 않은 결과 - %(count)s 응답, 첫 번째 옵션 " -"%(option)s, 예상 옵션 %(expected)s" diff --git a/keystone-moon/keystone/locale/ko_KR/LC_MESSAGES/keystone-log-info.po b/keystone-moon/keystone/locale/ko_KR/LC_MESSAGES/keystone-log-info.po deleted file mode 100644 index 1fb0edd5..00000000 --- a/keystone-moon/keystone/locale/ko_KR/LC_MESSAGES/keystone-log-info.po +++ /dev/null @@ -1,210 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# Yongbok Kim , 2015 -# Sungjin Kang , 2016. #zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2016-04-19 04:30+0000\n" -"Last-Translator: SeYeon Lee \n" -"Language: ko-KR\n" -"Plural-Forms: nplurals=1; plural=0;\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Korean (South Korea)\n" - -#, python-format -msgid "" -"\"expires_at\" has conflicting values %(existing)s and %(new)s. Will use " -"the earliest value." -msgstr "" -"\"expires_at\"에 충돌되는 값 %(existing)s 및 %(new)s이(가) 있습니다. 가장 이" -"른 값을 사용합니다." - -#, python-format -msgid "Adding proxy '%(proxy)s' to KVS %(name)s." -msgstr "KVS %(name)s에 프록시 '%(proxy)s'을(를) 추가합니다." - -#, python-format -msgid "Cannot find client issuer in env by the issuer attribute - %s." 
-msgstr "" -"Issuer 속성 - %s을(를) 사용하여 환경에서 클라이언트 issuer를 찾을 수 없습니" -"다." - -#, python-format -msgid "Couldn't verify unknown bind: {%(bind_type)s: %(identifier)s}" -msgstr "알 수 없는 바인드를 확인할 수 없음: {%(bind_type)s: %(identifier)s}" - -#, python-format -msgid "Created %(interface)s endpoint %(url)s" -msgstr "%(interface)s 엔드포인트 %(url)s이(가)생성됨" - -#, python-format -msgid "Created Region %s" -msgstr "지역 %s이(가) 생성됨" - -#, python-format -msgid "Created Role %s" -msgstr "역할 %s이(가) 생성됨" - -#, python-format -msgid "Created a new key: %s" -msgstr "새로운 키 생성: %s" - -#, python-format -msgid "Created domain %s" -msgstr "도메인 %s이(가) 생성됨" - -#, python-format -msgid "Created project %s" -msgstr "프로젝트 %s이(가) 생성됨" - -#, python-format -msgid "Created user %s" -msgstr "사용자 \"%s\"이(가) 생성됨" - -#, python-format -msgid "Creating the default role %s because it does not exist." -msgstr "기본 역할 %s이(가) 없으므로 작성합니다." - -#, python-format -msgid "Creating the default role %s failed because it was already created" -msgstr "기본 역할 %s이(가) 이미 생성되었으므로 작성에 실패" - -#, python-format -msgid "Current primary key is: %s" -msgstr "현재 기본 키: %s" - -#, python-format -msgid "Domain %s already exists, skipping creation." -msgstr "도메인 %s이(가) 이미 있으므로, 생성을 건너뜁니다." - -#, python-format -msgid "Excess key to purge: %s" -msgstr "제거할 초과 키: %s" - -#, python-format -msgid "" -"Fernet token created with length of %d characters, which exceeds 255 " -"characters" -msgstr "길이가 255자를 초과하는 %d자로 fernet 토큰이 생성됨" - -#, python-format -msgid "Granted %(role)s on %(project)s to user %(username)s." -msgstr "" -"%(project)s에 대한 %(role)s이(가) 사용자 %(username)s에 부여되었습니다." - -#, python-format -msgid "KVS region %s key_mangler disabled." -msgstr "KVS 지역 %s key_mangler가 사용되지 않습니다." 
- -msgid "Kerberos bind authentication successful" -msgstr "Kerberos 바인드 인증 성공" - -msgid "Kerberos credentials do not match those in bind" -msgstr "Kerberos 자격 증명이 바인드에 있는 자격 증명과 일치하지 않음" - -msgid "Kerberos credentials required and not present" -msgstr "Kerberos 자격 증명이 필요하지만 없음" - -msgid "Key repository is already initialized; aborting." -msgstr "키 저장소가 이미 초기화되었습니다. 중단합니다." - -#, python-format -msgid "" -"Loaded %(count)d encryption keys (max_active_keys=%(max)d) from: %(dir)s" -msgstr "%(dir)s에서 %(count)d 암호화 키(max_active_keys=%(max)d)를 로드함" - -#, python-format -msgid "Named bind mode %s not in bind information" -msgstr "바인드 정보에 이름 지정된 바인드 모드 %s이(가) 없음" - -#, python-format -msgid "Next primary key will be: %s" -msgstr "다음 기본 키: %s" - -msgid "No bind information present in token" -msgstr "토큰에 바인드 정보가 없음" - -#, python-format -msgid "Project %s already exists, skipping creation." -msgstr "프로젝트 %s이(가) 이미 있으므로, 생성을 건너뜁니다." - -#, python-format -msgid "Promoted key 0 to be the primary: %s" -msgstr "승격된 키 0이 기본이 됨: %s" - -#, python-format -msgid "Region %s exists, skipping creation." -msgstr "지역 %s이(가) 이미 있으므로, 생성을 건너뜁니다." - -#, python-format -msgid "Role %s exists, skipping creation." -msgstr "역할 %s이(가) 이미 있으므로, 생성을 건너뜁니다." 
- -#, python-format -msgid "Running command - %s" -msgstr "%s - 명령 실행" - -#, python-format -msgid "Scanning %r for domain config files" -msgstr "%r에서 도메인 구성 파일 스캔" - -#, python-format -msgid "Skipping %s endpoint as already created" -msgstr "%s 엔드포인트가 이미 생성되었으므로 건너뜀" - -#, python-format -msgid "Starting %(arg0)s on %(host)s:%(port)s" -msgstr "%(host)s:%(port)s에서 %(arg0)s 시작 중" - -#, python-format -msgid "Starting key rotation with %(count)s key files: %(list)s" -msgstr "%(count)s 키 파일로 키 순환 시작: %(list)s" - -#, python-format -msgid "" -"The client issuer %(client_issuer)s does not match with the trusted issuer " -"%(trusted_issuer)s" -msgstr "" -"클라이언트 issuer %(client_issuer)s이(가) 신뢰할 수 있는 issuer " -"%(trusted_issuer)s과(와) 일치하지 않음" - -#, python-format -msgid "Total expired tokens removed: %d" -msgstr "제거된 만료 토큰 총계: %d" - -#, python-format -msgid "User %(username)s already has %(role)s on %(project)s." -msgstr "" -"사용자 %(username)s이(가) 이미 %(project)s에 대한 %(role)s이(가) 있습니다." - -#, python-format -msgid "User %s already exists, skipping creation." -msgstr "사용자 %s이(가) 이미 있으므로, 생성을 건너뜁니다." - -#, python-format -msgid "Using %(func)s as KVS region %(name)s key_mangler" -msgstr "%(func)s을(를) KVS region %(name)s key_mangler(으)로 사용" - -#, python-format -msgid "" -"Using default keystone.common.kvs.sha1_mangle_key as KVS region %s " -"key_mangler" -msgstr "" -"기본 keystone.common.kvs.sha1_mangle_key을(를) KVS 지역 %s key_mangler(으)로 " -"사용" - -msgid "" -"[fernet_tokens] key_repository does not appear to exist; attempting to " -"create it" -msgstr "" -"[fernet_tokens] key_repository가 없는 것으로 보입니다. 생성하려고 시도합니다." diff --git a/keystone-moon/keystone/locale/ko_KR/LC_MESSAGES/keystone-log-warning.po b/keystone-moon/keystone/locale/ko_KR/LC_MESSAGES/keystone-log-warning.po deleted file mode 100644 index 0a931724..00000000 --- a/keystone-moon/keystone/locale/ko_KR/LC_MESSAGES/keystone-log-warning.po +++ /dev/null @@ -1,325 +0,0 @@ -# Translations template for keystone. 
-# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# Yongbok Kim , 2015 -# Sungjin Kang , 2016. #zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2016-04-19 04:27+0000\n" -"Last-Translator: SeYeon Lee \n" -"Language: ko-KR\n" -"Plural-Forms: nplurals=1; plural=0;\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Korean (South Korea)\n" - -#, python-format -msgid "%s is not a dogpile.proxy.ProxyBackend" -msgstr "%s이(가) dogpile.proxy.ProxyBackend가 아님" - -msgid "'local conf' from PasteDeploy INI is being ignored." -msgstr "PasteDeploy INI의 'local conf'가 무시됩니다." - -msgid "" -"Auth context already exists in the request environment; it will be used for " -"authorization instead of creating a new one." -msgstr "" -"요청 환경에 인증 컨텍스트가 이미 있습니다. 새로 생성하지 않고 이 인증 컨텍스" -"트를 인증에 사용합니다." - -#, python-format -msgid "Authorization failed. %(exception)s from %(remote_addr)s" -msgstr "%(remote_addr)s 에서 %(exception)s 인증에 실패 하였습니다." - -msgid "Couldn't find the auth context." -msgstr "인증 컨텍스트를 찾을 수 없습니다." - -#, python-format -msgid "" -"Endpoint %(endpoint_id)s referenced in association for policy %(policy_id)s " -"not found." -msgstr "" -"정책 %(policy_id)s의 연관에서 참조되는 엔드포인트 %(endpoint_id)s을(를) 찾을 " -"수 없습니다." - -msgid "Failed to invoke ``openssl version``, assuming is v1.0 or newer" -msgstr "v1.0 이상이라고 가정하여 ``openssl version``을 호출하는 데 실패" - -#, python-format -msgid "" -"Found multiple domains being mapped to a driver that does not support that " -"(e.g. 
LDAP) - Domain ID: %(domain)s, Default Driver: %(driver)s" -msgstr "" -"여러 도메인이 드라이버에 맵핑되어 있음을 발견했지만, 이 드라이버에서 이 기능" -"을 지원하지 않음(예: LDAP) - 도메인 ID: %(domain)s, 기본 드라이버: %(driver)s" - -#, python-format -msgid "" -"Found what looks like an incorrectly constructed config option substitution " -"reference - domain: %(domain)s, group: %(group)s, option: %(option)s, value: " -"%(value)s." -msgstr "" -"잘못 구성된 구성 옵션 대체 참조 발견 - 도메인: %(domain)s, 그룹: %(group)s, " -"옵션: %(option)s, 값: %(value)s." - -#, python-format -msgid "" -"Found what looks like an unmatched config option substitution reference - " -"domain: %(domain)s, group: %(group)s, option: %(option)s, value: %(value)s. " -"Perhaps the config option to which it refers has yet to be added?" -msgstr "" -"일치하지 않는 구성 옵션 대체 발견 - 도메인: %(domain)s, 그룹: %(group)s, 옵" -"션: %(option)s, 값: %(value)s. 참조하는 구성 옵션이 이미 추가되었을 가능성이 " -"있습니다." - -#, python-format -msgid "" -"ID attribute %(id_attr)s for LDAP object %(dn)s has multiple values and " -"therefore cannot be used as an ID. Will get the ID from DN instead" -msgstr "" -"LDAP 오브젝트 %(dn)s의 ID 속성 %(id_attr)s 값이 여러 개이므로, ID로 사용할 " -"수 없습니다. 대신 DN에서 ID를 얻습니다." - -#, python-format -msgid "Ignoring file (%s) while scanning domain config directory" -msgstr "도메인 구성 디렉토리를 스캔하는 중에 파일(%s) 무시" - -msgid "Ignoring user name" -msgstr "사용자 이름 무시" - -#, python-format -msgid "" -"Invalid additional attribute mapping: \"%s\". Format must be " -":" -msgstr "" -"잘못된 추가 속성 맵핑:\" %s\". 형식은 :" - -#, python-format -msgid "Invalid domain name (%s) found in config file name" -msgstr "설정 파일 이름에 잘못된 도메인 이름(%s)을 찾았습니다." - -msgid "" -"It is recommended to only use the base key-value-store implementation for " -"the token driver for testing purposes. Please use 'memcache' or 'sql' " -"instead." -msgstr "" -"테스트용으로만 토큰 드라이버의 기본 key-value-store 구현을 사용하는 것이 좋습" -"니다. 대신 'memcache' 또는 'sql'을 사용하십시오." 
- -#, python-format -msgid "KVS lock released (timeout reached) for: %s" -msgstr "%s에 대한 KVS 잠금이 해제됨(제한시간에 도달)" - -msgid "" -"LDAP Server does not support paging. Disable paging in keystone.conf to " -"avoid this message." -msgstr "" -"LDAP 서버가 페이징을 지원하지 않습니다. 이 메시지를 방지하려면 keystone.conf" -"에서 페이징을 사용 안함으로 설정하십시오." - -msgid "No domain information specified as part of list request" -msgstr "목록 요청의 일부로 도메인 정보가 지정되지 않음" - -msgid "" -"Not specifying a domain during a create user, group or project call, and " -"relying on falling back to the default domain, is deprecated as of Liberty " -"and will be removed in the N release. Specify the domain explicitly or use a " -"domain-scoped token" -msgstr "" -"사용자, 그룹 또는 프로젝트 호출 생성 중에 도메인을 지정하지 않고, 기본 도메인" -"으로 다시 돌아가는 기능은 Liberty에서는 더 이상 사용되지 않으므로 N 릴리스에" -"서 제거됩니다. 도메인을 명시적으로 지정하거나 도메인 범위 토큰을 사용하십시" -"오." - -#, python-format -msgid "" -"Policy %(policy_id)s referenced in association for endpoint %(endpoint_id)s " -"not found." -msgstr "" -"엔드포인트 %(endpoint_id)s의 연관에서 참조되는 정책 %(policy_id)s을(를) 찾을 " -"수 없습니다." - -#, python-format -msgid "Project %s does not exist and was not deleted." -msgstr "프로젝트 %s이(가) 없으므로 삭제되지 않았습니다." - -msgid "RBAC: Bypassing authorization" -msgstr "RBAC: 권한 무시" - -msgid "RBAC: Invalid token" -msgstr "RBAC: 올바르지 않은 토큰" - -msgid "RBAC: Invalid user data in token" -msgstr "RBAC: 토큰에 잘못된 사용자 데이터" - -#, python-format -msgid "" -"Removing `%s` from revocation list due to invalid expires data in revocation " -"list." -msgstr "" -"유효하지 않아 취소 목록에서 `%s`을(를) 제거하면 취소 목록의 데이터가 만료됩니" -"다." - -msgid "" -"The admin_token_auth middleware presents a security risk and should be " -"removed from the [pipeline:api_v3], [pipeline:admin_api], and [pipeline:" -"public_api] sections of your paste ini file." -msgstr "" -"admin_token_auth 미들웨어에서는 보안 위험이 제기되므로 paste ini 파일의 " -"[pipeline:api_v3], [pipeline:admin_api] 및 [pipeline:public_api] 섹션에서 제" -"거해야 합니다." 
- -msgid "" -"The default domain was created automatically to contain V2 resources. This " -"is deprecated in the M release and will not be supported in the O release. " -"Create the default domain manually or use the keystone-manage bootstrap " -"command." -msgstr "" -"V2 자원을 포함하도록 기본 도메인이 자동으로 생성되었습니다. 이 기능은 M 릴리" -"스에서 더 이상 사용되지 않으며 O 릴리스에서 지원되지 않습니다. 수동으로 기본 " -"도메인을 생성하거나 keystone-manage 부트스트랩 명령을 사용하십시오." - -#, python-format -msgid "Token `%s` is expired, not adding to the revocation list." -msgstr "토큰 `%s`를 해지 목록에 추가 하지 않으면 만료 됩니다." - -#, python-format -msgid "Truncating user password to %d characters." -msgstr "사용자 비밀번호를 %d자로 자릅니다." - -#, python-format -msgid "Unable to add user %(user)s to %(tenant)s." -msgstr "%(tenant)s 에 사용자 %(user)s 를 추가 할 수 없습니다." - -#, python-format -msgid "" -"Unable to change the ownership of [fernet_tokens] key_repository without a " -"keystone user ID and keystone group ID both being provided: %s" -msgstr "" -"Keystone 사용자 ID와 keystone 그룹 ID가 모두 제공되지 않으면 [fernet_tokens] " -"key_repository의 소유권은 변경할 수 없음: %s" - -#, python-format -msgid "" -"Unable to change the ownership of the new key without a keystone user ID and " -"keystone group ID both being provided: %s" -msgstr "" -"keystone 사용자 ID와 keystone 그룹 ID가 모두 제공되지 않으면 새 키의 소유권" -"을 변경할 수 없음: %s" - -#, python-format -msgid "Unable to locate domain config directory: %s" -msgstr "%s: 도메인 설정 디렉토리를 찾을 수 없습니다." - -#, python-format -msgid "Unable to remove user %(user)s from %(tenant)s." -msgstr "%(tenant)s 에서 %(user)s 를 제거 할 수 없습니다." - -#, python-format -msgid "" -"Unsupported policy association found - Policy %(policy_id)s, Endpoint " -"%(endpoint_id)s, Service %(service_id)s, Region %(region_id)s, " -msgstr "" -"지원되지 않는 정책 연관 발견 - 정책 %(policy_id)s, 엔드포인트 " -"%(endpoint_id)s, 서비스 %(service_id)s, 지역 %(region_id)s, " - -#, python-format -msgid "" -"User %(user_id)s doesn't have access to default project %(project_id)s. The " -"token will be unscoped rather than scoped to the project." 
-msgstr "" -"사용자 %(user_id)s이(가) 기본 프로젝트 %(project_id)s에 대한 액세스 권한이 없" -"습니다. 토큰의 범위가 프로젝트로 지정되지 않고 범위 지정이 해제됩니다." - -#, python-format -msgid "" -"User %(user_id)s's default project %(project_id)s is disabled. The token " -"will be unscoped rather than scoped to the project." -msgstr "" -"%(user_id)s 사용자의 기본 프로젝트 %(project_id)s을(를) 사용하지 않습니다. 토" -"큰의 범위가 프로젝트로 지정되지 않고 범위 지정이 해제됩니다." - -#, python-format -msgid "" -"User %(user_id)s's default project %(project_id)s not found. The token will " -"be unscoped rather than scoped to the project." -msgstr "" -"사용자 %(user_id)s의 기본 프로젝트 %(project_id)s을(를) 찾을 수 없습니다. 토" -"큰의 범위가 프로젝트로 지정되지 않고 범위 지정이 해제됩니다." - -#, python-format -msgid "" -"When deleting entries for %(search_base)s, could not delete nonexistent " -"entries %(entries)s%(dots)s" -msgstr "" -"%(search_base)s의 항목을 삭제할 때 존재하지 않는 항목 %(entries)s%(dots)s을" -"(를) 삭제할 수 없음" - -#, python-format -msgid "[fernet_tokens] key_repository is world readable: %s" -msgstr "[fernet_tokens] key_repository는 읽을 수 있음: %s" - -msgid "" -"[fernet_tokens] max_active_keys must be at least 1 to maintain a primary key." -msgstr "" -"기본 키를 유지 보수하려면 [fernet_tokens] max_active_keys가 최소 1이어야 합니" -"다." - -#, python-format -msgid "" -"`token_api.%s` is deprecated as of Juno in favor of utilizing methods on " -"`token_provider_api` and may be removed in Kilo." -msgstr "" -"Juno에서는 `token_provider_api`의 메소드를 활용하기 위해 `token_api.%s`이" -"(가) 더 이상 사용되지 않으므로 Kilo에서 제거될 수 있습니다." - -msgid "" -"build_auth_context middleware checking for the admin token is deprecated as " -"of the Mitaka release and will be removed in the O release. If your " -"deployment requires use of the admin token, update keystone-paste.ini so " -"that admin_token_auth is before build_auth_context in the paste pipelines, " -"otherwise remove the admin_token_auth middleware from the paste pipelines." -msgstr "" -"build_auth_context 미들웨어에서 관리 토큰을 확인하는 기능은 Mitaka 릴리스에" -"서 더 이상 사용되지 않으므로, O 릴리스에서 제거됩니다. 
배포에서 관리 토큰을 " -"사용해야 하는 경우 붙여넣기 파이프라인에서 build_auth_context 전에 " -"admin_token_auth가 오도록 keystone-paste.ini를 업데이트하십시오. 그렇지 않으" -"면 붙여넣기 파이프라인에서 admin_token_auth 미들웨어를 제거하십시오." - -#, python-format -msgid "" -"delete_domain_assignments method not found in custom assignment driver. " -"Domain assignments for domain (%s) to users from other domains will not be " -"removed. This was added in V9 of the assignment driver." -msgstr "" -"사용자 정의 할당 드라이버에서 delete_domain_assignments 메소드를 찾을 수 없습" -"니다. 다른 도메인의 사용자에게 할당한 도메인(%s)은 제거되지 않습니다. 이 기능" -"은 할당 드라이버의 V9에서 추가되었습니다." - -msgid "" -"insecure_debug is enabled so responses may include sensitive information." -msgstr "insecure_debug가 사용되므로 응답에 민감한 정보가 포함될 수 있습니다." - -msgid "" -"keystone-manage pki_setup is deprecated as of Mitaka in favor of not using " -"PKI tokens and may be removed in 'O' release." -msgstr "" -"Mitaka에서 PKI 토큰을 사용하지 않기 위해 keystone-manage pki_setup이 더 이상 " -"사용되지 않으므로, 'O' 릴리스에서 제거할 수 있습니다." - -msgid "keystone-manage pki_setup is not recommended for production use." -msgstr "keystone-manage pki_setup은 프로덕션에서 사용하지 않는 것이 좋습니다.." - -msgid "keystone-manage ssl_setup is not recommended for production use." -msgstr "keystone-manage ssl_setup은 프로덕션에서 사용하지 않는 것이 좋습니다." - -msgid "missing exception kwargs (programmer error)" -msgstr "누락된 예외 kwargs(프로그래머 오류)" diff --git a/keystone-moon/keystone/locale/ko_KR/LC_MESSAGES/keystone.po b/keystone-moon/keystone/locale/ko_KR/LC_MESSAGES/keystone.po deleted file mode 100644 index 850b3e39..00000000 --- a/keystone-moon/keystone/locale/ko_KR/LC_MESSAGES/keystone.po +++ /dev/null @@ -1,1530 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# Sungjin Kang , 2013 -# Sungjin Kang , 2013 -# Sungjin Kang , 2016. 
#zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2016-04-19 04:43+0000\n" -"Last-Translator: SeYeon Lee \n" -"Language: ko-KR\n" -"Plural-Forms: nplurals=1; plural=0;\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Korean (South Korea)\n" - -#, python-format -msgid "%(detail)s" -msgstr "%(detail)s" - -#, python-format -msgid "%(driver)s is not supported driver version" -msgstr "%(driver)s은(는) 지원되는 드라이버 버전이 아님" - -#, python-format -msgid "" -"%(entity)s name cannot contain the following reserved characters: %(chars)s" -msgstr "%(entity)s 이름에는 다음과 같은 예약 문자가 포함될 수 없음: %(chars)s" - -#, python-format -msgid "" -"%(event)s is not a valid notification event, must be one of: %(actions)s" -msgstr "" -"%(event)s은(는) 올바른 알림 이벤트가 아니며 %(actions)s 중 하나여야 합니다." - -#, python-format -msgid "%(host)s is not a trusted dashboard host" -msgstr "%(host)s이(가) 신뢰 대시보드 호스트가 아님" - -#, python-format -msgid "%(message)s %(amendment)s" -msgstr "%(message)s %(amendment)s" - -#, python-format -msgid "" -"%(mod_name)s doesn't provide database migrations. The migration repository " -"path at %(path)s doesn't exist or isn't a directory." -msgstr "" -"%(mod_name)s은(는) 데이터베이스 마이그레이션을 제공하지 않습니다. 마이그레이" -"션 저장소 경로가 %(path)s에 존재하지 않거나 디렉토리가 아닙니다." - -#, python-format -msgid "%(prior_role_id)s does not imply %(implied_role_id)s" -msgstr "%(prior_role_id)s은(는) %(implied_role_id)s을(를) 내포하지 않음" - -#, python-format -msgid "%(property_name)s cannot be less than %(min_length)s characters." -msgstr "%(property_name)s은(는) %(min_length)s자 미만일 수 없습니다. 
" - -#, python-format -msgid "%(property_name)s is not a %(display_expected_type)s" -msgstr "%(property_name)s이(가) %(display_expected_type)s이(가) 아님" - -#, python-format -msgid "%(property_name)s should not be greater than %(max_length)s characters." -msgstr "%(property_name)s은(는) %(max_length)s자 이하여야 합니다. " - -#, python-format -msgid "%(role_id)s cannot be an implied roles" -msgstr "%(role_id)s은(는) 내포된 역할일 수 없음" - -#, python-format -msgid "%s cannot be empty." -msgstr "%s은(는) 공백일 수 없습니다. " - -#, python-format -msgid "%s extension does not exist." -msgstr "%s 확장자가 존재하지 않습니다." - -#, python-format -msgid "%s field is required and cannot be empty" -msgstr "%s 필드가 필요하며 비어 있을 수 없음" - -#, python-format -msgid "%s field(s) cannot be empty" -msgstr "%s 필드는 비어 있을 수 없음" - -#, python-format -msgid "" -"%s for the LDAP identity backend has been deprecated in the Mitaka release " -"in favor of read-only identity LDAP access. It will be removed in the \"O\" " -"release." -msgstr "" -"Mitaka 릴리스에서는 읽기 전용 ID LDAP 액세스를 사용하기 위해 LDAP ID 백엔드" -"의 %s이(가) 더 이상 사용되지 않으므로, \"O\" 릴리스에서 제거됩니다." - -msgid "(Disable insecure_debug mode to suppress these details.)" -msgstr "" -"(이러한 세부사항을 억제하려면 insecure_debug 모드를 사용 안함으로 설정하십시" -"오.)" - -msgid "--all option cannot be mixed with other options" -msgstr "--all 옵션은 다른 옵션과 함께 사용할 수 없음" - -msgid "A project-scoped token is required to produce a service catalog." -msgstr "서비스 카탈로그를 생성하려면 프로젝트 범위 토큰이 필요합니다." - -msgid "Access token is expired" -msgstr "액세스 토큰이 만료됨" - -msgid "Access token not found" -msgstr "액세스 토큰을 찾을 수 없음" - -msgid "Additional authentications steps required." -msgstr "추가 인증 단계가 필요합니다." - -msgid "An unexpected error occurred when retrieving domain configs" -msgstr "도메인 구성 검색 중 예상치 못한 오류 발생" - -#, python-format -msgid "An unexpected error occurred when trying to store %s" -msgstr "%s을(를) 저장하려 할 때 예기치 않은 오류가 발생했음" - -msgid "An unexpected error prevented the server from fulfilling your request." -msgstr "예상치 않은 오류가 발생하여 서버가 사용자 요청을 이행하지 못함." 
- -#, python-format -msgid "" -"An unexpected error prevented the server from fulfilling your request: " -"%(exception)s" -msgstr "" -"예상치 않은 오류가 발생하여 서버가 사용자 요청을 이행하지 못함:%(exception)s" - -msgid "An unhandled exception has occurred: Could not find metadata." -msgstr "처리되지 않은 예외가 발생함: 메타데이터를 찾을 수 없음." - -msgid "At least one option must be provided" -msgstr "하나 이상의 옵션을 제공해야 함" - -msgid "At least one option must be provided, use either --all or --domain-name" -msgstr "" -"하나 이상의 옵션을 제공해야 합니다. --all 또는 --domain-name을 사용하십시오. " - -msgid "At least one role should be specified." -msgstr "최소한 하나의 역할을 지정해야 합니다." - -#, python-format -msgid "" -"Attempted automatic driver selection for assignment based upon " -"[identity]\\driver option failed since driver %s is not found. Set " -"[assignment]/driver to a valid driver in keystone config." -msgstr "" -"드라이버 %s을(를) 찾을 수 없으므로 [identity]\\driver 옵션을 기반으로 할당할 " -"드라이버를 자동으로 선택하는 데 실패했습니다. keystone 구성에서 [assignment]/" -"driver를 올바른 드라이버로 설정하십시오." - -msgid "Attempted to authenticate with an unsupported method." -msgstr "지원되지 않는 방법으로 인증을 시도했습니다." - -msgid "" -"Attempting to use OS-FEDERATION token with V2 Identity Service, use V3 " -"Authentication" -msgstr "" -"V2 ID 서비스에서 OS-FEDERATION 토큰을 사용할 경우 V3 인증을 사용하십시오." - -msgid "Authentication plugin error." -msgstr "인증 플러그인 오류." - -#, python-format -msgid "" -"Backend `%(backend)s` is not a valid memcached backend. Valid backends: " -"%(backend_list)s" -msgstr "" -"백엔드 `%(backend)s`이(가) 올바른 memcached 백엔드가 아닙니다. 올바른 백엔" -"드: %(backend_list)s" - -msgid "Cannot authorize a request token with a token issued via delegation." -msgstr "위임을 통해 발행된 토큰으로 요청 토큰에 권한을 부여할 수 없습니다." 
- -#, python-format -msgid "Cannot change %(option_name)s %(attr)s" -msgstr "%(option_name)s %(attr)s을(를) 변경할 수 없음" - -msgid "Cannot change Domain ID" -msgstr "도메인 ID를 변경할 수 없음" - -msgid "Cannot change user ID" -msgstr "사용자 ID를 변경할 수 없음" - -msgid "Cannot change user name" -msgstr "사용자 이름을 변경할 수 없음" - -#, python-format -msgid "Cannot create an endpoint with an invalid URL: %(url)s" -msgstr "올바르지 않은 URL을 사용하여 엔드포인트를 작성할 수 없음: %(url)s" - -#, python-format -msgid "Cannot create project with parent: %(project_id)s" -msgstr "상위로 프로젝트를 작성할 수 없음: %(project_id)s" - -#, python-format -msgid "" -"Cannot create project, since it specifies its owner as domain %(domain_id)s, " -"but specifies a parent in a different domain (%(parent_domain_id)s)." -msgstr "" -"소유자를 도메인 %(domain_id)s(으)로 지정하지만 다른 도메인 " -"(%(parent_domain_id)s)의 상위를 지정하므로 프로젝트를 생성할 수 없습니다." - -#, python-format -msgid "" -"Cannot create project, since its parent (%(domain_id)s) is acting as a " -"domain, but project's specified parent_id (%(parent_id)s) does not match " -"this domain_id." -msgstr "" -"상위(%(domain_id)s)가 도메인 역할을 수행하지만 프로젝트 지정 " -"parent_id(%(parent_id)s)가 이 domain_id와 일치하지 않으므로 프로젝트를 생성" -"할 수 없습니다." - -msgid "Cannot delete a domain that is enabled, please disable it first." -msgstr "" -"사용으로 설정된 도메인을 삭제할 수 없습니다. 먼저 해당 도메인을 사용 안함으" -"로 설정하십시오." - -#, python-format -msgid "" -"Cannot delete project %(project_id)s since its subtree contains enabled " -"projects." -msgstr "" -"서브트리에 사용 설정된 프로젝트가 있으므로 프로젝트 %(project_id)s을(를) 삭제" -"할 수 없습니다." - -#, python-format -msgid "" -"Cannot delete the project %s since it is not a leaf in the hierarchy. Use " -"the cascade option if you want to delete a whole subtree." -msgstr "" -"계층 구조의 리프가 아니므로 프로젝트 %s을(를) 삭제할 수 없습니다. 전체 하위 " -"트리를 삭제하려면 계단식 옵션을 사용하십시오." - -#, python-format -msgid "" -"Cannot disable project %(project_id)s since its subtree contains enabled " -"projects." -msgstr "" -"서브트리에 사용 설정된 프로젝트가 있으므로 프로젝트 %(project_id)s을(를) 사" -"용 안함으로 설정할 수 없습니다." 
- -#, python-format -msgid "Cannot enable project %s since it has disabled parents" -msgstr "프로젝트 %s에 사용 안함으로 설정된 상위가 있어서 이를 사용할 수 없음" - -msgid "Cannot list assignments sourced from groups and filtered by user ID." -msgstr "" -"그룹에서 소스가 공급되고 사용자 ID별로 필터링된 할당을 나열할 수 없습니다." - -msgid "Cannot list request tokens with a token issued via delegation." -msgstr "위임을 통해 발행된 토큰으로 요청 토큰을 나열할 수 없습니다." - -#, python-format -msgid "Cannot open certificate %(cert_file)s. Reason: %(reason)s" -msgstr "%(cert_file)s 인증서를 열수 없습니다. 이유: %(reason)s" - -#, python-format -msgid "Cannot remove role that has not been granted, %s" -msgstr "권한이 부여되지 않은 역할을 제거할 수 없음: %s" - -msgid "" -"Cannot truncate a driver call without hints list as first parameter after " -"self " -msgstr "" -"자신 뒤의 첫 번째 매개변수와 같은 힌트 목록 없이 드라이버 호출을 자를 수 없음" - -msgid "Cannot update domain_id of a project that has children." -msgstr "하위가 있는 프로젝트의 domain_id를 업데이트할 수 없습니다." - -msgid "" -"Cannot use parents_as_list and parents_as_ids query params at the same time." -msgstr "" -"parents_as_list 및 parents_as_ids 조회 매개변수를 동시에 사용할 수 없습니다." - -msgid "" -"Cannot use subtree_as_list and subtree_as_ids query params at the same time." -msgstr "" -"subtree_as_list 및 subtree_as_ids 조회 매개변수를 동시에 사용할 수 없습니다." - -msgid "Cascade update is only allowed for enabled attribute." -msgstr "사용된 속성에만 계단식 업데이트가 허용됩니다." - -msgid "" -"Combining effective and group filter will always result in an empty list." -msgstr "결합에 효율적인 그룹 필터는 항상 빈 목록을 생성합니다." - -msgid "" -"Combining effective, domain and inherited filters will always result in an " -"empty list." -msgstr "결합에 효율적인 도메인과 상속 필터는 항상 빈 목록을 생성합니다." 
- -#, python-format -msgid "Config API entity at /domains/%s/config" -msgstr "/domains/%s/config의 구성 API 엔티티" - -#, python-format -msgid "Conflict occurred attempting to store %(type)s - %(details)s" -msgstr "%(type)s을(를) 저장하는 중에 충돌이 발생함 - %(details)s" - -#, python-format -msgid "Conflicting region IDs specified: \"%(url_id)s\" != \"%(ref_id)s\"" -msgstr "지정된 리젼 ID가 충돌함: \"%(url_id)s\" != \"%(ref_id)s\"" - -msgid "Consumer not found" -msgstr "이용자를 찾을 수 없음" - -#, python-format -msgid "" -"Could not change immutable attribute(s) '%(attributes)s' in target %(target)s" -msgstr "%(target)s 대상에서 불변 속성 '%(attributes)s'을(를) 변경할 수 없음" - -#, python-format -msgid "" -"Could not determine Identity Provider ID. The configuration option " -"%(issuer_attribute)s was not found in the request environment." -msgstr "" -"ID 제공자 ID를 판별할 수 없습니다. 구성 옵션 %(issuer_attribute)s이(가) 요청 " -"환경에 없습니다. " - -#, python-format -msgid "" -"Could not find %(group_or_option)s in domain configuration for domain " -"%(domain_id)s" -msgstr "" -"다음 도메인의 도메인 구성에서 %(group_or_option)s을(를) 찾을 수 없습니다. 
" -"%(domain_id)s" - -#, python-format -msgid "Could not find Endpoint Group: %(endpoint_group_id)s" -msgstr "엔드포인트 그룹을 찾을 수 없음: %(endpoint_group_id)s" - -msgid "Could not find Identity Provider identifier in environment" -msgstr "환경에서 ID 제공자의 ID를 찾을 수 없음" - -#, python-format -msgid "Could not find Identity Provider: %(idp_id)s" -msgstr "%(idp_id)s ID 제공자를 찾을 수 없음" - -#, python-format -msgid "Could not find Service Provider: %(sp_id)s" -msgstr "서비스 제공자를 찾을 수 없음: %(sp_id)s" - -#, python-format -msgid "Could not find credential: %(credential_id)s" -msgstr "%(credential_id)s 신임 정보를 찾을 수 없음" - -#, python-format -msgid "Could not find domain: %(domain_id)s" -msgstr "%(domain_id)s 도메인을 찾을 수 없음" - -#, python-format -msgid "Could not find endpoint: %(endpoint_id)s" -msgstr "%(endpoint_id)s 엔드포인트를 찾을 수 없음" - -#, python-format -msgid "" -"Could not find federated protocol %(protocol_id)s for Identity Provider: " -"%(idp_id)s" -msgstr "" -"ID 제공자 %(idp_id)s에 대한 연합 프로토콜 %(protocol_id)s을(를) 찾을 수 없음" - -#, python-format -msgid "Could not find group: %(group_id)s" -msgstr "%(group_id)s 그룹을 찾을 수 없음" - -#, python-format -msgid "Could not find mapping: %(mapping_id)s" -msgstr "%(mapping_id)s 맵핑을 찾을 수 없음" - -msgid "Could not find policy association" -msgstr "정책 연관을 찾을 수 없음" - -#, python-format -msgid "Could not find policy: %(policy_id)s" -msgstr "%(policy_id)s 정책을 찾을 수 없음" - -#, python-format -msgid "Could not find project: %(project_id)s" -msgstr "%(project_id)s 프로젝트를 찾을 수 없음" - -#, python-format -msgid "Could not find region: %(region_id)s" -msgstr "%(region_id)s 리젼을 찾을 수 없음" - -#, python-format -msgid "" -"Could not find role assignment with role: %(role_id)s, user or group: " -"%(actor_id)s, project or domain: %(target_id)s" -msgstr "" -"%(role_id)s 역할에 대한 역할 지정을 찾을 수 없음. 
사용자 또는 그룹: " -"%(actor_id)s, 프로젝트 또는 도메인: %(target_id)s" - -#, python-format -msgid "Could not find role: %(role_id)s" -msgstr "%(role_id)s 규칙을 찾을 수 없음" - -#, python-format -msgid "Could not find service: %(service_id)s" -msgstr "%(service_id)s 서비스를 찾을 수 없음" - -#, python-format -msgid "Could not find token: %(token_id)s" -msgstr "%(token_id)s 토큰을 찾을 수 없음" - -#, python-format -msgid "Could not find trust: %(trust_id)s" -msgstr "%(trust_id)s 신뢰를 찾을 수 없음" - -#, python-format -msgid "Could not find user: %(user_id)s" -msgstr "%(user_id)s 사용자를 찾을 수 없음" - -#, python-format -msgid "Could not find version: %(version)s" -msgstr "%(version)s 버전을 찾을 수 없음" - -#, python-format -msgid "Could not find: %(target)s" -msgstr "%(target)s을(를) 찾을 수 없음" - -msgid "" -"Could not map any federated user properties to identity values. Check debug " -"logs or the mapping used for additional details." -msgstr "" -"연합 사용자 특성을 ID 값에 맵핑할 수 없습니다. 추가 세부 사항은 사용된 맵핑 " -"또는 디버그 로그를 확인하십시오." - -msgid "" -"Could not map user while setting ephemeral user identity. Either mapping " -"rules must specify user id/name or REMOTE_USER environment variable must be " -"set." -msgstr "" -"임시 사용자 ID를 설정하는 중에 사용자를 맵핑할 수 없습니다. 맵핑 규칙이 사용" -"자 ID/이름을 지정해야 하거나 REMOTE_USER 환경 변수를 설정해야 합니다. " - -msgid "Could not validate the access token" -msgstr "액세스 토큰을 유효성 검증할 수 없음" - -msgid "Credential belongs to another user" -msgstr "신임 정보가 다른 사용자에 속함" - -msgid "Credential signature mismatch" -msgstr "자격 증명 서명 불일치" - -#, python-format -msgid "" -"Direct import of auth plugin %(name)r is deprecated as of Liberty in favor " -"of its entrypoint from %(namespace)r and may be removed in N." -msgstr "" -"Liberty에서 %(namespace)r의 입력점을 사용하기 위해 인증 플러그인 %(name)r의 " -"직접 가져오기는 더 이상 사용되지 않으므로, N에서 제거될 수 있습니다." - -#, python-format -msgid "" -"Direct import of driver %(name)r is deprecated as of Liberty in favor of its " -"entrypoint from %(namespace)r and may be removed in N." 
-msgstr "" -"Liberty에서 %(namespace)r의 입력점을 사용하기 위해 드라이버 %(name)r의 직접 " -"가져오기는 더 이상 사용되지 않으므로, N에서 제거될 수 있습니다." - -msgid "" -"Disabling an entity where the 'enable' attribute is ignored by configuration." -msgstr "구성에서 'enable' 속성이 있는 엔티티의 사용 안함 설정을 무시합니다." - -#, python-format -msgid "Domain (%s)" -msgstr "도메인(%s)" - -#, python-format -msgid "Domain cannot be named %s" -msgstr "도메인 이름은 %s일 수 없음" - -#, python-format -msgid "Domain cannot have ID %s" -msgstr "도메인 ID가 %s일 수 없음" - -#, python-format -msgid "Domain is disabled: %s" -msgstr "도메인을 사용 안함: %s" - -msgid "Domain name cannot contain reserved characters." -msgstr "도메인 이름에는 예약된 문자가 포함될 수 없습니다." - -msgid "Domain scoped token is not supported" -msgstr "도메인 범위 지정 토큰은 지원되지 않음" - -msgid "Domain specific roles are not supported in the V8 role driver" -msgstr "V8 역할 드라이버에서는 도메인 특정 역할이 지원되지 않음" - -#, python-format -msgid "" -"Domain: %(domain)s already has a configuration defined - ignoring file: " -"%(file)s." -msgstr "" -"%(domain)s 도메인에 이미 정의된 구성이 있음 - 다음 파일을 무시하십시오. " -"%(file)s." - -msgid "Duplicate Entry" -msgstr "중복 항목" - -#, python-format -msgid "Duplicate ID, %s." -msgstr "중복 ID, %s." - -#, python-format -msgid "Duplicate entry: %s" -msgstr "중복된 항목: %s" - -#, python-format -msgid "Duplicate name, %s." -msgstr "중복 이름, %s." - -#, python-format -msgid "Duplicate remote ID: %s" -msgstr "중복된 원격 ID: %s" - -msgid "EC2 access key not found." -msgstr "EC2 접근 키를 찾을 수 없습니다." - -msgid "EC2 signature not supplied." -msgstr "EC2 서명이 제공되지 않았습니다." - -msgid "" -"Either --bootstrap-password argument or OS_BOOTSTRAP_PASSWORD must be set." -msgstr "--bootstrap-password 인수나 OS_BOOTSTRAP_PASSWORD를 설정해야 합니다." 
- -msgid "Enabled field must be a boolean" -msgstr "사용으로 설정된 필드는 부울이어야 함" - -msgid "Enabled field should be a boolean" -msgstr "사용으로 설정된 필드는 부울이어야 함" - -#, python-format -msgid "Endpoint %(endpoint_id)s not found in project %(project_id)s" -msgstr "%(endpoint_id)s 엔드포인트가 %(project_id)s 프로젝트에 없음 " - -msgid "Endpoint Group Project Association not found" -msgstr "엔드포인트 그룹 프로젝트 연관을 찾을 수 없음" - -msgid "Ensure configuration option idp_entity_id is set." -msgstr "구성 옵션 idp_entity_id가 설정되어 있는지 확인하십시오." - -msgid "Ensure configuration option idp_sso_endpoint is set." -msgstr "구성 옵션 idp_sso_endpoint가 설정되어 있는지 확인하십시오." - -#, python-format -msgid "" -"Error parsing configuration file for domain: %(domain)s, file: %(file)s." -msgstr "" -"%(domain)s 도메인에 대한 구성 파일을 구문 분석하는 중 오류 발생. 파일: " -"%(file)s." - -#, python-format -msgid "Error while opening file %(path)s: %(err)s" -msgstr "파일 %(path)s 여는 중 오류 발생: %(err)s" - -#, python-format -msgid "Error while parsing line: '%(line)s': %(err)s" -msgstr "행: '%(line)s' 구문 분석 중 오류 발생: %(err)s" - -#, python-format -msgid "Error while parsing rules %(path)s: %(err)s" -msgstr "규칙 %(path)s 구문 분석 중 오류 발생: %(err)s" - -#, python-format -msgid "Error while reading metadata file, %(reason)s" -msgstr "메타데이터 파일을 읽는 중에 오류 발생, %(reason)s" - -#, python-format -msgid "" -"Exceeded attempts to register domain %(domain)s to use the SQL driver, the " -"last domain that appears to have had it is %(last_domain)s, giving up" -msgstr "" -"SQL 드라이버를 사용하기 위해 도메인 %(domain)s을(를) 등록하는 시도가 초과되었" -"습니다. 드라이버를 보유한 것으로 보이는 마지막 도메인은 %(last_domain)s입니" -"다. 포기하는 중" - -#, python-format -msgid "Expected dict or list: %s" -msgstr "예상된 사전 또는 목록: %s" - -msgid "" -"Expected signing certificates are not available on the server. Please check " -"Keystone configuration." -msgstr "" -"예상 서명 인증서를 서버에서 사용할 수 없습니다. 키스톤 구성을 확인하십시오." 
- -#, python-format -msgid "" -"Expecting to find %(attribute)s in %(target)s - the server could not comply " -"with the request since it is either malformed or otherwise incorrect. The " -"client is assumed to be in error." -msgstr "" -"%(target)s에 %(attribute)s이(가) 있어야 합니다- 서버의 형식이나 다른 항목이 " -"올바르지 않기 때문에 서버가 요청을 준수할 수 없습니다. 클라이언트가 오류 상태" -"로 간주됩니다." - -#, python-format -msgid "Failed to start the %(name)s server" -msgstr "%(name)s 서버를 시작하지 못함" - -msgid "Failed to validate token" -msgstr "토큰을 유효성 검증하지 못했음" - -msgid "Federation token is expired" -msgstr "연합 토큰이 만료됨" - -#, python-format -msgid "" -"Field \"remaining_uses\" is set to %(value)s while it must not be set in " -"order to redelegate a trust" -msgstr "" -"필드 \"remaining_uses\"가 %(value)s(으)로 설정되었으나 신뢰를 재위임하려면 설" -"정하지 않아야 함" - -msgid "Found invalid token: scoped to both project and domain." -msgstr "" -"올바르지 않은 토큰이 있습니다. 프로젝트와 도메인 둘 다 범위에 포함됩니다." - -#, python-format -msgid "Group %s not found in config" -msgstr "구성에 그룹 %s을(를) 찾을 수 없음" - -#, python-format -msgid "Group %(group)s is not supported for domain specific configurations" -msgstr "도메인 특정 구성에 대해 %(group)s 그룹이 지원되지 않음" - -#, python-format -msgid "" -"Group %(group_id)s returned by mapping %(mapping_id)s was not found in the " -"backend." -msgstr "" -"맵핑 %(mapping_id)s별로 리턴된 그룹 %(group_id)s을(를) 백엔드에서 찾지 못했습" -"니다." - -#, python-format -msgid "" -"Group membership across backend boundaries is not allowed, group in question " -"is %(group_id)s, user is %(user_id)s" -msgstr "" -"경계를 초월한 그룹 멤버십이 허용되지 않습니다. 관련 그룹은 %(group_id)s이고 " -"사용자는 %(user_id)s입니다." - -#, python-format -msgid "ID attribute %(id_attr)s not found in LDAP object %(dn)s" -msgstr "ID 속성 %(id_attr)s을(를) LDAP 오브젝트 %(dn)s에서 찾을 수 없음" - -#, python-format -msgid "Identity Provider %(idp)s is disabled" -msgstr "ID 제공자 %(idp)s이(가) 사용 안함으로 설정됨" - -msgid "" -"Incoming identity provider identifier not included among the accepted " -"identifiers." -msgstr "승인 ID에 수신 ID 제공자가 포함되지 않습니다." 
- -msgid "Invalid EC2 signature." -msgstr "올바르지 않은 EC2 서명입니다." - -#, python-format -msgid "Invalid LDAP TLS certs option: %(option)s. Choose one of: %(options)s" -msgstr "" -"올바르지 않은 LDAP TLS 인증 옵션: %(option)s. 다음 중 하나 선택: %(options)s" - -#, python-format -msgid "Invalid LDAP TLS_AVAIL option: %s. TLS not available" -msgstr "올바르지 않은 LDAP TLS_AVAIL 옵션: %s. TLS를 사용할 수 없음" - -#, python-format -msgid "Invalid LDAP deref option: %(option)s. Choose one of: %(options)s" -msgstr "" -"올바르지 않은 LDAP deref 옵션: %(option)s. 다음 중 하나 선택: %(options)s" - -#, python-format -msgid "Invalid LDAP scope: %(scope)s. Choose one of: %(options)s" -msgstr "올바르지 않은 LDAP 범위: %(scope)s. 다음 중 하나를 선택: %(options)s" - -msgid "Invalid TLS / LDAPS combination" -msgstr "잘못된 TLS / LDAPS 결합." - -#, python-format -msgid "Invalid audit info data type: %(data)s (%(type)s)" -msgstr "올바르지 않은 감사 정보 데이터 유형: %(data)s (%(type)s)" - -msgid "Invalid blob in credential" -msgstr "신임 정보에 올바르지 blob가 있음" - -#, python-format -msgid "" -"Invalid domain name: %(domain)s found in config file name: %(file)s - " -"ignoring this file." -msgstr "" -"구성 파일 이름에 올바르지 않은 도메인 이름 %(domain)s이(가) 있음: %(file)s - " -"이 파일을 무시하십시오." - -#, python-format -msgid "Invalid domain specific configuration: %(reason)s" -msgstr "올바르지 않은 도메인 특정 구성: %(reason)s" - -#, python-format -msgid "Invalid input for field '%(path)s'. The value is '%(value)s'." -msgstr "'%(path)s' 필드에 올바르지 않은 입력입니다. 값은 '%(value)s'입니다." - -msgid "Invalid limit value" -msgstr "올바르지 않은 한계 값" - -#, python-format -msgid "" -"Invalid mix of entities for policy association - only Endpoint, Service or " -"Region+Service allowed. Request was - Endpoint: %(endpoint_id)s, Service: " -"%(service_id)s, Region: %(region_id)s" -msgstr "" -"정책 연관에 대한 엔티티의 올바르지 않은 조합인 엔드포인트, 서비스 또는 리젼" -"+서비스가 허용되었습니다. 요청은 엔드포인트: %(endpoint_id)s, 서비스: " -"%(service_id)s, 리젼: %(region_id)s입니다." - -#, python-format -msgid "" -"Invalid rule: %(identity_value)s. 
Both 'groups' and 'domain' keywords must " -"be specified." -msgstr "" -"올바르지 않은 규칙: %(identity_value)s. 'groups' 및 'domain' 키워드가 둘 다 " -"지정되어야 합니다." - -msgid "Invalid signature" -msgstr "올바르지 않은 서명" - -msgid "Invalid user / password" -msgstr "올바르지 않은 사용자 / 비밀번호" - -msgid "Invalid username or TOTP passcode" -msgstr "올바르지 않은 사용자 이름 또는 TOTP 비밀번호" - -msgid "Invalid username or password" -msgstr "올바르지 않은 사용자 이름 또는 비밀번호" - -#, python-format -msgid "KVS region %s is already configured. Cannot reconfigure." -msgstr "KVS 리젼 %s이(가) 이미 구성되어 있습니다. 재구성할 수 없습니다." - -#, python-format -msgid "Key Value Store not configured: %s" -msgstr "키 값 저장소가 구성되지 않음: %s" - -#, python-format -msgid "LDAP %s create" -msgstr "LDAP %s 작성" - -#, python-format -msgid "LDAP %s delete" -msgstr "LDAP %s 삭제" - -#, python-format -msgid "LDAP %s update" -msgstr "LDAP %s 업데이트" - -msgid "" -"Length of transformable resource id > 64, which is max allowed characters" -msgstr "변환 가능한 자원 id의 길이가 최대 허용 문자인 64보다 큼" - -#, python-format -msgid "" -"Local section in mapping %(mapping_id)s refers to a remote match that " -"doesn't exist (e.g. {0} in a local section)." -msgstr "" -"맵핑 %(mapping_id)s의 로컬 섹션에서 존재하지 않는 원격 일치를 참조합니다(예: " -"로컬 섹션의 {0})." - -#, python-format -msgid "Lock Timeout occurred for key, %(target)s" -msgstr "키 %(target)s에 대해 잠금 제한시간 초과가 발생함" - -#, python-format -msgid "Lock key must match target key: %(lock)s != %(target)s" -msgstr "잠금 키가 대상 키와 일치해야 함: %(lock)s != %(target)s" - -#, python-format -msgid "Malformed endpoint URL (%(endpoint)s), see ERROR log for details." -msgstr "" -"잘못된 형식의 엔드포인트 URL(%(endpoint)s). 세부사항은 오류 로그를 참조하십시" -"오." - -msgid "Marker could not be found" -msgstr "마커를 찾을 수 없음" - -#, python-format -msgid "Max hierarchy depth reached for %s branch." -msgstr "%s 분기에 대한 최대 계층 깊이에 도달했습니다." - -#, python-format -msgid "Maximum lock attempts on %s occurred." -msgstr "%s에서 최대 잠금 시도가 발생했습니다." 
- -#, python-format -msgid "Member %(member)s is already a member of group %(group)s" -msgstr "%(member)s 구성원은 이미 %(group)s 그룹의 구성원임" - -#, python-format -msgid "Method not callable: %s" -msgstr "메소드를 호출할 수 없음: %s" - -msgid "Missing entity ID from environment" -msgstr "환경에서 엔티티 ID가 누락됨" - -msgid "" -"Modifying \"redelegation_count\" upon redelegation is forbidden. Omitting " -"this parameter is advised." -msgstr "" -"재위임 시 \"redelegation_count\"를 수정할 수 없습니다. 이 매개변수는 생략하" -"는 것이 좋습니다." - -msgid "Multiple domains are not supported" -msgstr "여러 도메인이 지원되지 않음" - -msgid "Must be called within an active lock context." -msgstr "활성 잠금 컨텍스트 내에서 호출되어야 합니다." - -msgid "Must specify either domain or project" -msgstr "도메인 프로젝트 중 하나를 지정해야 함" - -msgid "Name field is required and cannot be empty" -msgstr "이름 필드가 필요하며 비어 있을 수 없음" - -msgid "Neither Project Domain ID nor Project Domain Name was provided." -msgstr "프로젝트 도메인 ID와 프로젝트 도메인 이름이 제공되지 않았습니다. " - -msgid "" -"No Authorization headers found, cannot proceed with OAuth related calls, if " -"running under HTTPd or Apache, ensure WSGIPassAuthorization is set to On." -msgstr "" -"권한 부여 헤더를 찾을 수 없습니다. HTTPd 또는 Apache에서 실행 중인 경우 " -"OAuth 관련 호출을 사용하여 계속 진행할 수 없습니다. WSGIPassAuthorization이 " -"On으로 설정되어 있는지 확인하십시오." - -msgid "No authenticated user" -msgstr "인증된 사용자가 없음" - -msgid "" -"No encryption keys found; run keystone-manage fernet_setup to bootstrap one." -msgstr "" -"암호화 키를 찾을 수 없음: keystone-manage fernet_setup을 부트스트랩 1로 실행" -"하십시오." - -msgid "No options specified" -msgstr "지정된 옵션 없음" - -#, python-format -msgid "No policy is associated with endpoint %(endpoint_id)s." -msgstr "엔드포인트 %(endpoint_id)s과(와) 연관된 정책이 없습니다." 
- -#, python-format -msgid "No remaining uses for trust: %(trust_id)s" -msgstr "신뢰 %(trust_id)s에 대해 남아 있는 사용이 없음" - -msgid "No token in the request" -msgstr "요청에 토큰이 없음" - -msgid "Non-default domain is not supported" -msgstr "기본이 아닌 도메인은 지원되지 않음" - -msgid "One of the trust agents is disabled or deleted" -msgstr "신뢰 에이전트 중 하나가 사용 안함으로 설정되었거나 삭제됨" - -#, python-format -msgid "" -"Option %(option)s found with no group specified while checking domain " -"configuration request" -msgstr "" -"%(option)s 옵션은 도메인 구성 요청 확인 중에 지정된 그룹이 없음을 발견함" - -#, python-format -msgid "" -"Option %(option)s in group %(group)s is not supported for domain specific " -"configurations" -msgstr "" -"도메인 특정 구성에 대해 %(group)s 그룹의 %(option)s 옵션이 지원되지않음" - -#, python-format -msgid "Project (%s)" -msgstr "프로젝트(%s)" - -#, python-format -msgid "Project ID not found: %(t_id)s" -msgstr "프로젝트 ID를 찾을 수 없음: %(t_id)s" - -msgid "Project field is required and cannot be empty." -msgstr "프로젝트 필드는 필수이므로 비어 있어서는 안 됩니다. " - -#, python-format -msgid "Project is disabled: %s" -msgstr "프로젝트를 사용 안함: %s" - -msgid "Project name cannot contain reserved characters." -msgstr "프로젝트 이름에 예약된 문자가 포함될 수 없습니다." - -msgid "Query string is not UTF-8 encoded" -msgstr "조회 문자열이 UTF-8로 인코딩되어 있지 않음" - -#, python-format -msgid "" -"Reading the default for option %(option)s in group %(group)s is not supported" -msgstr "그룹 %(group)s에서 옵션 %(option)s의 기본값 읽기는 지원되지 않음" - -msgid "Redelegation allowed for delegated by trust only" -msgstr "신뢰에서 위임한 경우에만 재위임 허용" - -#, python-format -msgid "" -"Remaining redelegation depth of %(redelegation_depth)d out of allowed range " -"of [0..%(max_count)d]" -msgstr "" -"%(redelegation_depth)d의 나머지 재위임 깊이가 허용 범위 [0..%(max_count)d]을" -"(를) 벗어남" - -msgid "" -"Remove admin_crud_extension from the paste pipeline, the admin_crud " -"extension is now always available. Updatethe [pipeline:admin_api] section in " -"keystone-paste.ini accordingly, as it will be removed in the O release." 
-msgstr "" -"붙여넣기 파이프라인에서 admin_crud_extension을 제거하십시오. admin_crud 확장" -"은 이제 항상 사용할 수 있습니다. O 릴리스에서는 제거되므로 keystone-paste.ini" -"에서 [pipeline:admin_api] 섹션을 적절하게 업데이트하십시오." - -msgid "" -"Remove endpoint_filter_extension from the paste pipeline, the endpoint " -"filter extension is now always available. Update the [pipeline:api_v3] " -"section in keystone-paste.ini accordingly as it will be removed in the O " -"release." -msgstr "" -"붙여넣기 파이프라인에서 endpoint_filter_extension을 제거하십시오. 엔드포인트 " -"필터 확장은 이제 항상 사용할 수 있습니다. O 릴리스에서는 제거되므로 keystone-" -"paste.ini에서 [pipeline:api_v3] 섹션을 적절하게 업데이트하십시오." - -msgid "" -"Remove federation_extension from the paste pipeline, the federation " -"extension is now always available. Update the [pipeline:api_v3] section in " -"keystone-paste.ini accordingly, as it will be removed in the O release." -msgstr "" -"붙여넣기 파이프라인에서 federation_extension을 제거하십시오. 연합 확장은 이" -"제 항상 사용할 수 있습니다. O 릴리스에서는 제거되므로 keystone-paste.ini에서 " -"[pipeline:api_v3]섹션을 적절하게 업데이트하십시오." - -msgid "" -"Remove oauth1_extension from the paste pipeline, the oauth1 extension is now " -"always available. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"붙여넣기 파이프라인에서 oauth1_extension을 제거하십시오. oauth1 확장은 이제 " -"항상 사용할 수 있습니다. O 릴리스에서는 제거되므로 keystone-paste.ini에서 " -"[pipeline:api_v3]섹션을 적절하게 업데이트하십시오." - -msgid "" -"Remove revoke_extension from the paste pipeline, the revoke extension is now " -"always available. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"붙여넣기 파이프라인에서 revoke_extension을 제거하십시오. 취소 확장은 이제 항" -"상 사용할 수 있습니다. O 릴리스에서는 제거되므로 keystone-paste.ini에서 " -"[pipeline:api_v3]섹션을 적절하게 업데이트하십시오." - -msgid "" -"Remove simple_cert from the paste pipeline, the PKI and PKIz token providers " -"are now deprecated and simple_cert was only used insupport of these token " -"providers. 
Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"붙여넣기 파이프라인에서 simple_cert를 제거하십시오. PKI 및 PKIz 토큰 제공자" -"는 이제 더 이상 사용되지 않으며 simple_cert는 이러한 토큰 제공자를 지원하는 " -"데만 사용됩니다. O 릴리스에서는 제거되므로 keystone-paste.ini에서 [pipeline:" -"api_v3]섹션을 적절하게 업데이트하십시오." - -msgid "" -"Remove user_crud_extension from the paste pipeline, the user_crud extension " -"is now always available. Updatethe [pipeline:public_api] section in keystone-" -"paste.ini accordingly, as it will be removed in the O release." -msgstr "" -"붙여넣기 파이프라인에서 user_crud_extension을 제거하십시오. user_crud 확장은 " -"이제 항상 사용할 수 있습니다. O 릴리스에서는 제거되므로 keystone-paste.ini에" -"서 [pipeline:public_api] 섹션을 적절하게 업데이트하십시오." - -msgid "Request Token does not have an authorizing user id" -msgstr "요청 토큰에 인증하는 사용자 ID가 없음" - -#, python-format -msgid "" -"Request attribute %(attribute)s must be less than or equal to %(size)i. The " -"server could not comply with the request because the attribute size is " -"invalid (too large). The client is assumed to be in error." -msgstr "" -"요청 속성 %(attribute)s이(가) %(size)i 이하여야 합니다. 속성 크기가 올바르지 " -"않기 때문에(너무 큼) 서버가 요청을 준수할 수 없습니다. 클라이언트가 오류 상태" -"로 간주됩니다." - -msgid "Request must have an origin query parameter" -msgstr "요청에는 원본 조회 매개변수가 있어야 함" - -msgid "Request token is expired" -msgstr "요청 토큰이 만료됨" - -msgid "Request token not found" -msgstr "요청 토큰을 찾을 수 없음" - -msgid "Requested expiration time is more than redelegated trust can provide" -msgstr "요청된 만기 시간이 재위임된 신뢰에서 제공할 수 있는 시간보다 큼" - -#, python-format -msgid "" -"Requested redelegation depth of %(requested_count)d is greater than allowed " -"%(max_count)d" -msgstr "" -"%(requested_count)d의 요청된 재위임 depth가 허용되는 %(max_count)d보다 깊음" - -msgid "" -"Running keystone via eventlet is deprecated as of Kilo in favor of running " -"in a WSGI server (e.g. mod_wsgi). Support for keystone under eventlet will " -"be removed in the \"M\"-Release." 
-msgstr "" -"Eventlet을 통한 키스톤 실행은 WSGI 서버 실행의 플레이버에 있는 Kilo부터 더 " -"이상 사용되지 않습니다(예: mod_wsgi). Eventlet 아래의 키스톤에 대한 지원은 " -"\"M\"-릴리스에서 제거됩니다." - -msgid "Scoping to both domain and project is not allowed" -msgstr "도메인과 프로젝트에 대한 범위 지정이 허용되지 않음" - -msgid "Scoping to both domain and trust is not allowed" -msgstr "도메인과 신뢰에 대한 범위 지정이 허용되지 않음" - -msgid "Scoping to both project and trust is not allowed" -msgstr "프로젝트와 신뢰에 대한 범위 지정이 허용되지 않음" - -#, python-format -msgid "Service Provider %(sp)s is disabled" -msgstr "서비스 제공자 %(sp)s이(가) 사용 안함으로 설정됨" - -msgid "Some of requested roles are not in redelegated trust" -msgstr "요청된 일부 역할이 재위임된 신뢰에 없음" - -msgid "Specify a domain or project, not both" -msgstr "도메인 또는 프로젝트 중 하나 지정" - -msgid "Specify a user or group, not both" -msgstr "사용자 또는 그룹 중 하나 지정" - -msgid "Specify one of domain or project" -msgstr "도메인 또는 프로젝트 중 하나 지정" - -msgid "Specify one of user or group" -msgstr "사용자 또는 그룹 중 하나 지정" - -#, python-format -msgid "" -"String length exceeded.The length of string '%(string)s' exceeded the limit " -"of column %(type)s(CHAR(%(length)d))." -msgstr "" -"문자열 길이 제한을 초과합니다. '%(string)s' 문자열 길이가 열의 한도 " -"%(type)s(CHAR(%(length)d))을(를) 초과합니다." - -msgid "Tenant name cannot contain reserved characters." -msgstr "Tenant 이름에 예약된 문자가 포함될 수 없습니다." - -#, python-format -msgid "" -"The %s extension has been moved into keystone core and as such its " -"migrations are maintained by the main keystone database control. Use the " -"command: keystone-manage db_sync" -msgstr "" -"%s 확장이 keystone 코어에 이동되었으므로 기본 keystone 데이터베이스 제어에서 " -"마이그레이션을 유지 관리합니다. keystone-manage db_sync 명령을 사용하십시오." - -msgid "" -"The 'expires_at' must not be before now. The server could not comply with " -"the request since it is either malformed or otherwise incorrect. The client " -"is assumed to be in error." -msgstr "" -"'expires_at'은 지금보다 이전이어서는 안 됩니다. 형식이 잘못되었거나 올바르지 " -"않기 때문에 서버가 요청을 준수할 수 없습니다. 클라이언트는 오류 상태로 간주됩" -"니다." 
- -msgid "The --all option cannot be used with the --domain-name option" -msgstr "--all 옵션은 --domain-name 옵션과 함께 사용할 수 없습니다." - -#, python-format -msgid "The Keystone configuration file %(config_file)s could not be found." -msgstr "키스톤 구성 파일 %(config_file)s을(를) 찾을 수 없습니다." - -#, python-format -msgid "" -"The Keystone domain-specific configuration has specified more than one SQL " -"driver (only one is permitted): %(source)s." -msgstr "" -"키스톤 도메인 특정 구성에 하나 이상의 SQL 드라이버가 지정됨(하나만 허용됨): " -"%(source)s." - -msgid "The action you have requested has not been implemented." -msgstr "요청한 조치가 구현되지 않았습니다." - -msgid "The authenticated user should match the trustor." -msgstr "인증된 사용자는 trustor와 일치해야 합니다." - -msgid "" -"The certificates you requested are not available. It is likely that this " -"server does not use PKI tokens otherwise this is the result of " -"misconfiguration." -msgstr "" -"요청한 인증서를 사용할 수 없습니다. 서버가 PKI 토큰을 사용하지 않거나 잘못된 " -"구성의 결과로 인해 발생했을 수 있습니다." - -msgid "The configured token provider does not support bind authentication." -msgstr "구성된 토큰 제공자가 바인드 인증을 지원하지 않습니다. " - -msgid "The creation of projects acting as domains is not allowed in v2." -msgstr "도메인 역할을 수행하는 프로젝트 생성은 v2에서 허용되지 않습니다. " - -#, python-format -msgid "" -"The password length must be less than or equal to %(size)i. The server could " -"not comply with the request because the password is invalid." -msgstr "" -"비밀번호 길이는 %(size)i 이하여야 합니다. 비밀번호가 올바르지 않아 서버가 요" -"청을 준수할 수 없습니다." - -msgid "The request you have made requires authentication." -msgstr "요청에 인증이 필요합니다." - -msgid "The resource could not be found." -msgstr "자원을 찾을 수 없습니다. " - -msgid "" -"The revoke call must not have both domain_id and project_id. This is a bug " -"in the Keystone server. The current request is aborted." -msgstr "" -"취소 호출은 domain_id와 project_id가 둘 다 있으면 안됩니다.키스톤 서버에서 이" -"는 버그입니다. 현재 요청이 중단됩니다." - -msgid "The service you have requested is no longer available on this server." -msgstr "요청한 서비스를 더 이상 이 서버에서 사용할 수 없습니다." 
- -#, python-format -msgid "" -"The specified parent region %(parent_region_id)s would create a circular " -"region hierarchy." -msgstr "지정된 상위 리젼 %(parent_region_id)s에서 순환 리젼 계층을 작성합니다." - -#, python-format -msgid "" -"The value of group %(group)s specified in the config should be a dictionary " -"of options" -msgstr "구성에 지정된 %(group)s 그룹의 값은 옵션의 사전이어야 함" - -msgid "There should not be any non-oauth parameters" -msgstr "non-oauth 매개변수가 없어야 함" - -#, python-format -msgid "This is not a recognized Fernet payload version: %s" -msgstr "인식되는 Fernet 페이로드 버전이 아님: %s" - -#, python-format -msgid "This is not a recognized Fernet token %s" -msgstr "인식되는 Fernet 토큰 %s이(가) 아님" - -msgid "" -"Timestamp not in expected format. The server could not comply with the " -"request since it is either malformed or otherwise incorrect. The client is " -"assumed to be in error." -msgstr "" -"시간소인이 예상된 형식이 아닙니다. 잘못 구성되었거나 올바르지 않으므로 서버" -"가 요청을 준수할 수 없습니다. 클라이언트가 오류 상태로 간주됩니다." - -#, python-format -msgid "" -"To get a more detailed information on this error, re-run this command for " -"the specific domain, i.e.: keystone-manage domain_config_upload --domain-" -"name %s" -msgstr "" -"이 오류에 대한 자세한 정보를 보려면 특정 도메인에 대해 이 명령을 다시 실행하" -"십시오. 예: keystone-manage domain_config_upload --domain-name %s" - -msgid "Token belongs to another user" -msgstr "토큰이 다른 사용자에 속함" - -msgid "Token does not belong to specified tenant." -msgstr "토큰이 지정된 테넌트에 속하지 않습니다." - -msgid "Token version is unrecognizable or unsupported." -msgstr "토큰 버전이 인식되지 않거나 지원되지 않습니다. " - -msgid "Trustee has no delegated roles." -msgstr "Trustee에 위임된 역할이 없습니다. " - -msgid "Trustor is disabled." -msgstr "Trustor를 사용하지 않습니다. " - -#, python-format -msgid "" -"Trying to update group %(group)s, so that, and only that, group must be " -"specified in the config" -msgstr "구성에서 그룹만 지정되도록 %(group)s 그룹을 업데이트하려고 합니다. 
" - -#, python-format -msgid "" -"Trying to update option %(option)s in group %(group)s, but config provided " -"contains option %(option_other)s instead" -msgstr "" -"%(group)s 그룹에서 %(option)s 옵션을 업데이트하려고 했지만 제공된 구성에 " -"%(option_other)s 옵션이 대신 포함되어 있습니다." - -#, python-format -msgid "" -"Trying to update option %(option)s in group %(group)s, so that, and only " -"that, option must be specified in the config" -msgstr "" -"구성에서 옵션만 지정되도록 %(group)s 그룹에서 %(option)s 옵션을 업데이트하려" -"고 합니다." - -msgid "" -"Unable to access the keystone database, please check it is configured " -"correctly." -msgstr "" -"Keystone 데이터베이스를 액세스할 수 없습니다. 데이터베이스가 제대로 구성되어 " -"있는지 확인하십시오. " - -#, python-format -msgid "Unable to consume trust %(trust_id)s, unable to acquire lock." -msgstr "%(trust_id)s 신뢰를 이용할 수 없어서 잠금을 획득할 수 없습니다." - -#, python-format -msgid "" -"Unable to delete region %(region_id)s because it or its child regions have " -"associated endpoints." -msgstr "" -"리젼 %(region_id)s 또는 하위 리젼에 연관된 엔드포인트가 있어 삭제할 수 없습니" -"다." - -msgid "Unable to downgrade schema" -msgstr "스키마를 다운그레이드할 수 없음" - -#, python-format -msgid "Unable to find valid groups while using mapping %(mapping_id)s" -msgstr "%(mapping_id)s 맵핑을 사용하는 중에 올바른 그룹을 찾을 수 없음 " - -#, python-format -msgid "Unable to locate domain config directory: %s" -msgstr "%s: 도메인 설정 디렉토리를 찾을 수 없습니다." - -#, python-format -msgid "Unable to lookup user %s" -msgstr "%s 사용자를 검색할 수 없음" - -#, python-format -msgid "" -"Unable to reconcile identity attribute %(attribute)s as it has conflicting " -"values %(new)s and %(old)s" -msgstr "" -"ID 속성 %(attribute)s에 서로 충돌하는 %(new)s 및 %(old)s 값이 있으므로 이 ID " -"속성을 조정할 수 없음" - -#, python-format -msgid "" -"Unable to sign SAML assertion. It is likely that this server does not have " -"xmlsec1 installed, or this is the result of misconfiguration. Reason " -"%(reason)s" -msgstr "" -"SAML 어설션에 서명할 수 없습니다. 이 서버에 xmlsec1이 설치되지 않았거나 잘못 " -"구성된 결과입니다. 이유%(reason)s" - -msgid "Unable to sign token." -msgstr "토큰을 부호화할 수 없습니다." 
- -#, python-format -msgid "Unexpected assignment type encountered, %s" -msgstr "예상치 못한 지정 유형 발생, %s" - -#, python-format -msgid "" -"Unexpected combination of grant attributes - User: %(user_id)s, Group: " -"%(group_id)s, Project: %(project_id)s, Domain: %(domain_id)s" -msgstr "" -"Grant 속성의 예상치 못한 조합 - 사용자: %(user_id)s, 그룹: %(group_id)s, 프로" -"젝트: %(project_id)s, 도메인: %(domain_id)s" - -#, python-format -msgid "Unexpected status requested for JSON Home response, %s" -msgstr "JSON 홈 응답에 대해 예상치 못한 상태가 요청됨. %s" - -msgid "Unknown Target" -msgstr "알 수 없는 대상" - -#, python-format -msgid "Unknown domain '%(name)s' specified by --domain-name" -msgstr "--domain-name으로 알 수 없는 도메인 '%(name)s'을(를) 지정했음" - -#, python-format -msgid "Unknown token version %s" -msgstr "알 수 없는 토큰 버전 %s" - -#, python-format -msgid "Unregistered dependency: %(name)s for %(targets)s" -msgstr "등록되지 않은 종속성: %(targets)s의 %(name)s" - -msgid "Update of `domain_id` is not allowed." -msgstr "`domain_id` 업데이트는 허용되지 않습니다." - -msgid "Update of `is_domain` is not allowed." -msgstr "`is_domain`의 업데이트는 허용되지 않습니다. " - -msgid "Update of `parent_id` is not allowed." -msgstr "`parent_id` 업데이트가 허용되지 않습니다." - -msgid "Update of domain_id is only allowed for root projects." -msgstr "domain_id의 업데이트는 루트 프로젝트에만 허용됩니다." - -msgid "Update of domain_id of projects acting as domains is not allowed." -msgstr "도메인 역할을 하는 프로젝트의 domain_id는 업데이트할 수 없습니다." - -msgid "Use a project scoped token when attempting to create a SAML assertion" -msgstr "SAML 어설션을 작성할 때 프로젝트 범위 지정 토큰 사용" - -msgid "" -"Use of the identity driver config to automatically configure the same " -"assignment driver has been deprecated, in the \"O\" release, the assignment " -"driver will need to be expicitly configured if different than the default " -"(SQL)." -msgstr "" -"ID 드라이버 구성을 사용하여 동일한 할당 드라이버를 자동으로 구성하는 기능은 " -"더 이상 사용되지 않습니다. \"O\" 릴리스에서는 기본값(SQL)과 다른 경우 할당 드" -"라이버를 명시적으로 구성해야 합니다." 
- -#, python-format -msgid "User %(u_id)s is unauthorized for tenant %(t_id)s" -msgstr "사용자 %(u_id)s이(는) tenant %(t_id)s에 대한 권한이 없습니다. " - -#, python-format -msgid "User %(user_id)s has no access to domain %(domain_id)s" -msgstr "" -"%(user_id)s 사용자는 %(domain_id)s 도메인에 대한 액세스 권한이 없습니다. " - -#, python-format -msgid "User %(user_id)s has no access to project %(project_id)s" -msgstr "" -"%(user_id)s 사용자는 %(project_id)s 프로젝트에 대한 액세스 권한이 없습니다. " - -#, python-format -msgid "User %(user_id)s is already a member of group %(group_id)s" -msgstr "%(user_id)s 사용자는 이미 %(group_id)s 그룹의 구성원임" - -#, python-format -msgid "User '%(user_id)s' not found in group '%(group_id)s'" -msgstr "'%(group_id)s' 그룹에 '%(user_id)s' 사용자가 없음" - -msgid "User IDs do not match" -msgstr "사용자 ID가 일치하지 않음" - -msgid "" -"User auth cannot be built due to missing either user id, or user name with " -"domain id, or user name with domain name." -msgstr "" -"사용자 ID, 도메인 ID가 포함된 사용자 이름 또는 도메인 이름이 포함된 사용자 이" -"름이 누락되어 사용자 인증을 빌드할 수 없습니다. " - -#, python-format -msgid "User is disabled: %s" -msgstr "사용자를 사용 안함: %s" - -msgid "User is not a member of the requested project" -msgstr "사용자가 요청한 프로젝트의 구성원이 아님" - -msgid "User is not a trustee." -msgstr "사용자는 trustee가 아닙니다." - -msgid "User not found" -msgstr "사용자를 찾을 수 없음" - -msgid "User not valid for tenant." -msgstr "Tenant 사용자가 올바르지 않습니다." - -msgid "User roles not supported: tenant_id required" -msgstr "사용자 역할이 지원되지 않음: tenant_id 필요" - -#, python-format -msgid "User type %s not supported" -msgstr "사용자 유형 %s이(가) 지원되지 않음" - -msgid "You are not authorized to perform the requested action." -msgstr "요청한 조치를 수행할 권한이 없습니다." - -#, python-format -msgid "You are not authorized to perform the requested action: %(action)s" -msgstr "요청한 조치(%(action)s)를 수행할 권한이 없습니다." - -msgid "" -"You have tried to create a resource using the admin token. As this token is " -"not within a domain you must explicitly include a domain for this resource " -"to belong to." 
-msgstr "" -"관리자 토큰을 사용하여 자원을 생성하려 했습니다. 이 토큰이 도메인에 없으므" -"로, 이 자원이 속할 도메인을 명시적으로 포함시켜야 합니다." - -msgid "`key_mangler` functions must be callable." -msgstr "`key_mangler` 기능을 호출할 수 있어야 합니다." - -msgid "`key_mangler` option must be a function reference" -msgstr "`key_mangler` 옵션은 기능 참조여야 함" - -msgid "any options" -msgstr "옵션" - -msgid "auth_type is not Negotiate" -msgstr "auth_type이 Negotiate가 아님" - -msgid "authorizing user does not have role required" -msgstr "인증하는 사용자에게 필요한 역할이 없음" - -#, python-format -msgid "cannot create a project in a branch containing a disabled project: %s" -msgstr "" -"사용 안함으로 설정된 프로젝트가 포함된 분기에 프로젝트를 작성할 수 없습니다. " -"%s" - -#, python-format -msgid "" -"cannot delete an enabled project acting as a domain. Please disable the " -"project %s first." -msgstr "" -"도메인 역할을 하는 사용 설정된 프로젝트를 삭제할 수 없습니다. 프로젝트 %s을" -"(를) 먼저 사용하지 않게 설정하십시오." - -#, python-format -msgid "group %(group)s" -msgstr "%(group)s 그룹" - -msgid "" -"idp_contact_type must be one of: [technical, other, support, administrative " -"or billing." -msgstr "" -"idp_contact_type은 [기술, 기타, 지원, 관리 또는 비용 청구 중 하나여야 합니다." - -#, python-format -msgid "invalid date format %s" -msgstr "올바르지 않은 날짜 형식 %s" - -#, python-format -msgid "" -"it is not permitted to have two projects acting as domains with the same " -"name: %s" -msgstr "이름이 같은 두 프로젝트가 도메인 역할을 수행할 수 없음: %s" - -#, python-format -msgid "" -"it is not permitted to have two projects within a domain with the same " -"name : %s" -msgstr "한 도메인에 이름이 같은 두 프로젝트가 있을 수 없음: %s" - -msgid "only root projects are allowed to act as domains." -msgstr "루트 프로젝트만 도메인 역할을 수행할 수 있습니다." 
- -#, python-format -msgid "option %(option)s in group %(group)s" -msgstr "%(group)s 그룹의 %(option)s 옵션" - -msgid "provided consumer key does not match stored consumer key" -msgstr "제공된 이용자 키가 저장된 이용자 키와 일치하지 않음" - -msgid "provided request key does not match stored request key" -msgstr "제공된 요청 키가 저장된 요청 키와 일치하지 않음" - -msgid "provided verifier does not match stored verifier" -msgstr "제공된 확인자가 저장된 확인자와 일치하지 않음 " - -msgid "remaining_uses must be a positive integer or null." -msgstr "remaining_uses는 양의 정수 또는 null이어야 합니다." - -msgid "remaining_uses must not be set if redelegation is allowed" -msgstr "재위임을 허용하는 경우 remaining_uses를 설정하지 않아야 함" - -#, python-format -msgid "" -"request to update group %(group)s, but config provided contains group " -"%(group_other)s instead" -msgstr "" -"%(group)s 그룹을 업데이트하도록 요청했지만 제공된 구성에 %(group_other)s 그룹" -"이 대신 포함되어 있습니다." - -msgid "rescope a scoped token" -msgstr "범위 지정된 토큰의 범위 재지정" - -#, python-format -msgid "role %s is not defined" -msgstr "역할 %s이(가) 정의되지 않음" - -msgid "scope.project.id must be specified if include_subtree is also specified" -msgstr "include_subtree도 지정된 경우 scope.project.id를 지정해야 함" - -#, python-format -msgid "tls_cacertdir %s not found or is not a directory" -msgstr "tls_cacertdir %s를 찾을 수 없으며, 이 디렉토리에 존재하지 않습니다." - -#, python-format -msgid "tls_cacertfile %s not found or is not a file" -msgstr "tls_cacertfile %s를 찾을 수 없스며, 그런 파일이 없습니다." - -#, python-format -msgid "token reference must be a KeystoneToken type, got: %s" -msgstr "토큰 참조는 KeystoneToken 유형이어야 합니다. %s을(를) 가져왔습니다." - -msgid "" -"update of domain_id is deprecated as of Mitaka and will be removed in O." -msgstr "" -"Mitaka에서 domain_id 업데이트는 더 이상 사용되지 않으므로, O에서 제거됩니다." - -#, python-format -msgid "" -"validated expected to find %(param_name)r in function signature for " -"%(func_name)r." -msgstr "" -"%(func_name)r에 대한 함수 서명에서 %(param_name)r을(를) 찾기 위해 유효성 검증" -"하고 예상했습니다. 
" diff --git a/keystone-moon/keystone/locale/pl_PL/LC_MESSAGES/keystone-log-critical.po b/keystone-moon/keystone/locale/pl_PL/LC_MESSAGES/keystone-log-critical.po deleted file mode 100644 index 0f2ca85c..00000000 --- a/keystone-moon/keystone/locale/pl_PL/LC_MESSAGES/keystone-log-critical.po +++ /dev/null @@ -1,26 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# OpenStack Infra , 2015. #zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2014-08-31 03:19+0000\n" -"Last-Translator: openstackjenkins \n" -"Language: pl-PL\n" -"Plural-Forms: nplurals=3; plural=(n==1 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 " -"|| n%100>=20) ? 1 : 2);\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Polish (Poland)\n" - -#, python-format -msgid "Unable to open template file %s" -msgstr "Błąd podczas otwierania pliku %s" diff --git a/keystone-moon/keystone/locale/pt_BR/LC_MESSAGES/keystone-log-critical.po b/keystone-moon/keystone/locale/pt_BR/LC_MESSAGES/keystone-log-critical.po deleted file mode 100644 index 6ed0adbe..00000000 --- a/keystone-moon/keystone/locale/pt_BR/LC_MESSAGES/keystone-log-critical.po +++ /dev/null @@ -1,25 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# OpenStack Infra , 2015. 
#zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2014-08-31 03:19+0000\n" -"Last-Translator: openstackjenkins \n" -"Language: pt-BR\n" -"Plural-Forms: nplurals=2; plural=(n > 1);\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Portuguese (Brazil)\n" - -#, python-format -msgid "Unable to open template file %s" -msgstr "Não é possível abrir o arquivo de modelo %s" diff --git a/keystone-moon/keystone/locale/pt_BR/LC_MESSAGES/keystone-log-error.po b/keystone-moon/keystone/locale/pt_BR/LC_MESSAGES/keystone-log-error.po deleted file mode 100644 index 2302f6a9..00000000 --- a/keystone-moon/keystone/locale/pt_BR/LC_MESSAGES/keystone-log-error.po +++ /dev/null @@ -1,57 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# OpenStack Infra , 2015. #zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 8.0.1.dev11\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/keystone\n" -"POT-Creation-Date: 2015-11-05 06:13+0000\n" -"PO-Revision-Date: 2015-06-26 05:13+0000\n" -"Last-Translator: openstackjenkins \n" -"Language-Team: Portuguese (Brazil)\n" -"Language: pt-BR\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Plural-Forms: nplurals=2; plural=(n > 1);\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.1\n" - -msgid "" -"Error setting up the debug environment. Verify that the option --debug-url " -"has the format : and that a debugger processes is listening on " -"that port." -msgstr "" -"Erro configurando o ambiente de debug. 
Verifique que a opção --debug-url " -"possui o formato : e que o processo debugger está escutando " -"nesta porta." - -#, python-format -msgid "Failed to send %(res_id)s %(event_type)s notification" -msgstr "Falha ao enviar notificação %(res_id)s %(event_type)s" - -msgid "Failed to validate token" -msgstr "Falha ao validar token" - -#, python-format -msgid "Malformed endpoint %(url)s - unknown key %(keyerror)s" -msgstr "Endpoint mal formado %(url)s - chave desconhecida %(keyerror)s" - -msgid "Server error" -msgstr "Erro do servidor" - -#, python-format -msgid "" -"Unable to build cache config-key. Expected format \":\". " -"Skipping unknown format: %s" -msgstr "" -"Não é possível construir chave de configuração do cache. Formato esperado " -"\":\". Pulando formato desconhecido: %s" - -#, python-format -msgid "Unexpected error or malformed token determining token expiry: %s" -msgstr "" -"Erro inesperado ou token mal formado ao determinar validade do token: %s" diff --git a/keystone-moon/keystone/locale/pt_BR/LC_MESSAGES/keystone.po b/keystone-moon/keystone/locale/pt_BR/LC_MESSAGES/keystone.po deleted file mode 100644 index 49a2f8ad..00000000 --- a/keystone-moon/keystone/locale/pt_BR/LC_MESSAGES/keystone.po +++ /dev/null @@ -1,1620 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# Gabriel Wainer, 2013 -# Gabriel Wainer, 2013 -# Lucas Ribeiro , 2014 -# Volmar Oliveira Junior , 2013 -# Volmar Oliveira Junior , 2013 -# Raildo Mascena , 2015. #zanata -# Carlos Marques , 2016. 
#zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2016-05-02 09:08+0000\n" -"Last-Translator: Carlos Marques \n" -"Language: pt-BR\n" -"Plural-Forms: nplurals=2; plural=(n > 1);\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Portuguese (Brazil)\n" - -#, python-format -msgid "%(detail)s" -msgstr "%(detail)s" - -#, python-format -msgid "%(driver)s is not supported driver version" -msgstr "O %(driver)s não é uma versão de driver suportada" - -#, python-format -msgid "" -"%(entity)s name cannot contain the following reserved characters: %(chars)s" -msgstr "" -"O nome %(entity)s não pode conter os caracteres reservados a seguir: " -"%(chars)s" - -#, python-format -msgid "" -"%(event)s is not a valid notification event, must be one of: %(actions)s" -msgstr "" -"%(event)s não é um evento de notificação válido, deve ser um de: %(actions)s" - -#, python-format -msgid "%(host)s is not a trusted dashboard host" -msgstr "%(host)s não é um host do painel confiável" - -#, python-format -msgid "%(message)s %(amendment)s" -msgstr "%(message)s %(amendment)s" - -#, python-format -msgid "" -"%(mod_name)s doesn't provide database migrations. The migration repository " -"path at %(path)s doesn't exist or isn't a directory." -msgstr "" -"O %(mod_name)s não fornece migrações de banco de dados. O caminho do " -"repositório de migração %(path)s não existe ou não é um diretório." - -#, python-format -msgid "%(prior_role_id)s does not imply %(implied_role_id)s" -msgstr "%(prior_role_id)s não implica %(implied_role_id)s" - -#, python-format -msgid "%(property_name)s cannot be less than %(min_length)s characters." -msgstr "%(property_name)s não pode ter menos de %(min_length)s caracteres." 
- -#, python-format -msgid "%(property_name)s is not a %(display_expected_type)s" -msgstr "%(property_name)s não é um %(display_expected_type)s" - -#, python-format -msgid "%(property_name)s should not be greater than %(max_length)s characters." -msgstr "%(property_name)s não deve ter mais de %(max_length)s caracteres." - -#, python-format -msgid "%(role_id)s cannot be an implied roles" -msgstr "%(role_id)s não pode ser uma função implícita" - -#, python-format -msgid "%s cannot be empty." -msgstr "%s não pode ficar vazio." - -#, python-format -msgid "%s extension does not exist." -msgstr "A extensão %s não existe." - -#, python-format -msgid "%s field is required and cannot be empty" -msgstr "O campo %s é obrigatório e não pode ficar vazio" - -#, python-format -msgid "%s field(s) cannot be empty" -msgstr "Os campos %s não podem ficar vazios" - -#, python-format -msgid "" -"%s for the LDAP identity backend has been deprecated in the Mitaka release " -"in favor of read-only identity LDAP access. It will be removed in the \"O\" " -"release." -msgstr "" -"%s para o backend de identidade LDAP foi descontinuado na liberação do " -"Mitaka a favor do acesso LDAP de identidade somente leitura. Ele será " -"removido na liberação \"O\"." - -msgid "(Disable insecure_debug mode to suppress these details.)" -msgstr "(Desative o modo insecure_debug para suprimir esses detalhes)." - -msgid "--all option cannot be mixed with other options" -msgstr "A opção --all não pode ser combinada com outras opções" - -msgid "A project-scoped token is required to produce a service catalog." -msgstr "" -"Um token de projeto com escopo é necessário para produzir um catálogo de " -"serviços." - -msgid "Access token is expired" -msgstr "O token de acesso expirou" - -msgid "Access token not found" -msgstr "Token de acesso não encontrado" - -msgid "Additional authentications steps required." -msgstr "Passos de autenticação adicionais necessários." 
- -msgid "An unexpected error occurred when retrieving domain configs" -msgstr "Ocorreu um erro inesperado ao recuperar as configurações de domínio" - -#, python-format -msgid "An unexpected error occurred when trying to store %s" -msgstr "Ocorreu um erro inesperado ao tentar armazenar %s" - -msgid "An unexpected error prevented the server from fulfilling your request." -msgstr "Um erro inesperado evitou que o servidor cumprisse sua solicitação." - -#, python-format -msgid "" -"An unexpected error prevented the server from fulfilling your request: " -"%(exception)s" -msgstr "" -"Um erro inesperado evitou que o servidor cumprisse sua solicitação: " -"%(exception)s" - -msgid "An unhandled exception has occurred: Could not find metadata." -msgstr "" -"Ocorreu uma exceção não manipulada: Não foi possível encontrar metadados." - -msgid "At least one option must be provided" -msgstr "Pelo menos uma opção deve ser fornecida" - -msgid "At least one option must be provided, use either --all or --domain-name" -msgstr "Pelo menos uma opção deve ser fornecida, use --all ou --domain-name" - -msgid "At least one role should be specified." -msgstr "Pelo menos um papel deve ser especificado." - -#, python-format -msgid "" -"Attempted automatic driver selection for assignment based upon " -"[identity]\\driver option failed since driver %s is not found. Set " -"[assignment]/driver to a valid driver in keystone config." -msgstr "" -"Uma tentativa de seleção de driver automática para designação com base na " -"opção [identity]\\driver falhou porque o driver %s não foi localizado. " -"Configure o [assignment]/driver para um driver válido na configuração do " -"keystone." - -msgid "Attempted to authenticate with an unsupported method." -msgstr "Tentativa de autenticação com um método não suportado." 
- -msgid "" -"Attempting to use OS-FEDERATION token with V2 Identity Service, use V3 " -"Authentication" -msgstr "" -"Ao tentar usar o token OS-FEDERATION com Serviço de identidade V2, use " -"autenticação V3" - -msgid "Authentication plugin error." -msgstr "Erro do plugin de autenticação." - -#, python-format -msgid "" -"Backend `%(backend)s` is not a valid memcached backend. Valid backends: " -"%(backend_list)s" -msgstr "" -"O backend `%(backend)s`não é um backend memcached válido. Backends válidos: " -"%(backend_list)s" - -msgid "Cannot authorize a request token with a token issued via delegation." -msgstr "" -"Não é possível autorizar um token de solicitação com um token emitido por " -"meio de delegação." - -#, python-format -msgid "Cannot change %(option_name)s %(attr)s" -msgstr "Não é possível alterar %(option_name)s %(attr)s" - -msgid "Cannot change Domain ID" -msgstr "Não é possível alterar o ID do Domínio" - -msgid "Cannot change user ID" -msgstr "Não é possível alterar o ID do usuário" - -msgid "Cannot change user name" -msgstr "Não é possível alterar o nome de usuário" - -#, python-format -msgid "Cannot create an endpoint with an invalid URL: %(url)s" -msgstr "Não é possível criar um terminal com uma URL inválida: %(url)s" - -#, python-format -msgid "Cannot create project with parent: %(project_id)s" -msgstr "Não é possível criar o projeto com o pai: %(project_id)s" - -#, python-format -msgid "" -"Cannot create project, since it specifies its owner as domain %(domain_id)s, " -"but specifies a parent in a different domain (%(parent_domain_id)s)." -msgstr "" -"Não é possível criar o projeto porque ele especifica seu proprietário como " -"domínio %(domain_id)s, mas especifica um pai em um domínio diferente " -"(%(parent_domain_id)s)." - -#, python-format -msgid "" -"Cannot create project, since its parent (%(domain_id)s) is acting as a " -"domain, but project's specified parent_id (%(parent_id)s) does not match " -"this domain_id." 
-msgstr "" -"Não é possível criar um projeto porque seu pai (%(domain_id)s) está agindo " -"como um domínio, mas o parent_id (%(parent_id)s) especificado do projeto não " -"corresponde com esse domain_id." - -msgid "Cannot delete a domain that is enabled, please disable it first." -msgstr "" -"Não é possível excluir um domínio que esteja ativado, desative-o primeiro." - -#, python-format -msgid "" -"Cannot delete project %(project_id)s since its subtree contains enabled " -"projects." -msgstr "" -"Não é possível excluir o projeto%(project_id)s porque sua subárvore contém " -"projetos ativados." - -#, python-format -msgid "" -"Cannot delete the project %s since it is not a leaf in the hierarchy. Use " -"the cascade option if you want to delete a whole subtree." -msgstr "" -"Não é possível excluir o projeto %s porque ele não é uma folha na " -"hierarquia. Use a opção em cascata se desejar excluir uma subárvore inteira." - -#, python-format -msgid "" -"Cannot disable project %(project_id)s since its subtree contains enabled " -"projects." -msgstr "" -"Não é possível desativar o projeto%(project_id)s porque sua subárvore " -"contém projetos ativados." - -#, python-format -msgid "Cannot enable project %s since it has disabled parents" -msgstr "Não é possível ativar o projeto %s porque ele possui pais desativados" - -msgid "Cannot list assignments sourced from groups and filtered by user ID." -msgstr "" -"Não é possível listar designações originadas a partir de grupos e filtradas " -"pelo ID do usuário." - -msgid "Cannot list request tokens with a token issued via delegation." -msgstr "" -"Não é possível listar os tokens de solicitação com um token emitido por meio " -"de delegação." - -#, python-format -msgid "Cannot open certificate %(cert_file)s. Reason: %(reason)s" -msgstr "Não é possível abrir o certificado %(cert_file)s. 
Motivo: %(reason)s" - -#, python-format -msgid "Cannot remove role that has not been granted, %s" -msgstr "Não é possível remover a função que não foi concedida, %s" - -msgid "" -"Cannot truncate a driver call without hints list as first parameter after " -"self " -msgstr "" -"Não é possível truncar uma chamada de driver sem uma lista de sugestões como " -"primeiro parâmetro após self " - -msgid "Cannot update domain_id of a project that has children." -msgstr "Não é possível atualizar domain_id de um projeto que possua filhos." - -msgid "" -"Cannot use parents_as_list and parents_as_ids query params at the same time." -msgstr "" -"Não é possível usar parâmetros de consulta parents_as_list e parents_as_ids " -"ao mesmo tempo." - -msgid "" -"Cannot use subtree_as_list and subtree_as_ids query params at the same time." -msgstr "" -"Não é possível usar parâmetros de consulta subtree_as_list e subtree_as_ids " -"ao mesmo tempo." - -msgid "Cascade update is only allowed for enabled attribute." -msgstr "A atualização em cascata é permitida somente para atributo ativado." - -msgid "" -"Combining effective and group filter will always result in an empty list." -msgstr "" -"Combinar filtros efetivos e de grupo sempre resultará em uma lista vazia." - -msgid "" -"Combining effective, domain and inherited filters will always result in an " -"empty list." -msgstr "" -"Combinar filtros efetivos, de domínio e herdados sempre resultará em uma " -"lista vazia." 
- -#, python-format -msgid "Config API entity at /domains/%s/config" -msgstr "Entidade de API de configuração em /domains/%s/config" - -#, python-format -msgid "Conflict occurred attempting to store %(type)s - %(details)s" -msgstr "Ocorreu um conflito ao tentar armazenar %(type)s -%(details)s" - -#, python-format -msgid "Conflicting region IDs specified: \"%(url_id)s\" != \"%(ref_id)s\"" -msgstr "" -"IDs de região conflitantes especificados: \"%(url_id)s\" != \"%(ref_id)s\"" - -msgid "Consumer not found" -msgstr "Consumidor não encontrado" - -#, python-format -msgid "" -"Could not change immutable attribute(s) '%(attributes)s' in target %(target)s" -msgstr "" -"Não foi possível alterar os atributos imutáveis '%(attributes)s' no destino " -"%(target)s" - -#, python-format -msgid "" -"Could not determine Identity Provider ID. The configuration option " -"%(issuer_attribute)s was not found in the request environment." -msgstr "" -"Não foi possível determinar o ID do Provedor de Identidade. A opção de " -"configuração %(issuer_attribute)s não foi encontrada no ambiente da " -"solicitação." 
- -#, python-format -msgid "" -"Could not find %(group_or_option)s in domain configuration for domain " -"%(domain_id)s" -msgstr "" -"Não foi possível localizar %(group_or_option)s na configuração de domínio " -"para o domínio %(domain_id)s" - -#, python-format -msgid "Could not find Endpoint Group: %(endpoint_group_id)s" -msgstr "Não foi possível localizar o Grupo do Terminal: %(endpoint_group_id)s" - -msgid "Could not find Identity Provider identifier in environment" -msgstr "" -"Não foi possível localizar o identificador do Provedor de Identidade no " -"ambiente" - -#, python-format -msgid "Could not find Identity Provider: %(idp_id)s" -msgstr "Não foi possível localizar o Provedor de Identidade: %(idp_id)s" - -#, python-format -msgid "Could not find Service Provider: %(sp_id)s" -msgstr "Não foi possível localizar o Provedor de Serviços: %(sp_id)s" - -#, python-format -msgid "Could not find credential: %(credential_id)s" -msgstr "Não foi possível localizar a credencial: %(credential_id)s" - -#, python-format -msgid "Could not find domain: %(domain_id)s" -msgstr "Não foi possível localizar o domínio: %(domain_id)s" - -#, python-format -msgid "Could not find endpoint: %(endpoint_id)s" -msgstr "Não foi possível localizar terminal: %(endpoint_id)s" - -#, python-format -msgid "" -"Could not find federated protocol %(protocol_id)s for Identity Provider: " -"%(idp_id)s" -msgstr "" -"Não foi possível localizar o protocolo federado %(protocol_id)s para o " -"Provedor de Identidade: %(idp_id)s" - -#, python-format -msgid "Could not find group: %(group_id)s" -msgstr "Não foi possível localizar o grupo: %(group_id)s" - -#, python-format -msgid "Could not find mapping: %(mapping_id)s" -msgstr "Não foi possível localizar o mapeamento: %(mapping_id)s" - -msgid "Could not find policy association" -msgstr "Não foi possível localizar a associação de política" - -#, python-format -msgid "Could not find policy: %(policy_id)s" -msgstr "Não foi possível localizar a política: 
%(policy_id)s" - -#, python-format -msgid "Could not find project: %(project_id)s" -msgstr "Não foi possível localizar o projeto: %(project_id)s" - -#, python-format -msgid "Could not find region: %(region_id)s" -msgstr "Não foi possível localizar a região: %(region_id)s" - -#, python-format -msgid "" -"Could not find role assignment with role: %(role_id)s, user or group: " -"%(actor_id)s, project or domain: %(target_id)s" -msgstr "" -"Não foi possível localizar a designação de função com a função: %(role_id)s, " -"usuário ou grupo: %(actor_id)s, projeto ou domínio: %(target_id)s" - -#, python-format -msgid "Could not find role: %(role_id)s" -msgstr "Não foi possível localizar a função: %(role_id)s" - -#, python-format -msgid "Could not find service: %(service_id)s" -msgstr "Não foi possível localizar o serviço: %(service_id)s" - -#, python-format -msgid "Could not find token: %(token_id)s" -msgstr "Não foi possível localizar o token: %(token_id)s" - -#, python-format -msgid "Could not find trust: %(trust_id)s" -msgstr "Não foi possível localizar a confiança: %(trust_id)s" - -#, python-format -msgid "Could not find user: %(user_id)s" -msgstr "Não foi possível localizar o usuário: %(user_id)s" - -#, python-format -msgid "Could not find version: %(version)s" -msgstr "Não foi possível localizar a versão: %(version)s" - -#, python-format -msgid "Could not find: %(target)s" -msgstr "Não foi possível localizar: %(target)s" - -msgid "" -"Could not map any federated user properties to identity values. Check debug " -"logs or the mapping used for additional details." -msgstr "" -"Não foi possível mapear nenhuma propriedade do usuário federado para valores " -"de identidade. Verifique os logs de depuração ou o mapeamento usado para " -"obter detalhes adicionais" - -msgid "" -"Could not map user while setting ephemeral user identity. Either mapping " -"rules must specify user id/name or REMOTE_USER environment variable must be " -"set." 
-msgstr "" -"Não foi possível mapear o usuário ao configurar a identidade do usuário " -"efêmera. As regras de mapeamento devem especificar o ID/nome do usuário ou " -"a variável de ambiente REMOTE_USER deve ser configurada." - -msgid "Could not validate the access token" -msgstr "Não foi possível validar o token de acesso" - -msgid "Credential belongs to another user" -msgstr "A credencial pertence a outro usuário" - -msgid "Credential signature mismatch" -msgstr "Incompatibilidade de assinatura de credencial" - -#, python-format -msgid "" -"Direct import of auth plugin %(name)r is deprecated as of Liberty in favor " -"of its entrypoint from %(namespace)r and may be removed in N." -msgstr "" -"A importação direta de um plug-in de autoria %(name)r foi descontinuada a " -"partir do Liberty a favor de seu ponto de entrada de %(namespace)r e pode " -"ser removida no N." - -#, python-format -msgid "" -"Direct import of driver %(name)r is deprecated as of Liberty in favor of its " -"entrypoint from %(namespace)r and may be removed in N." -msgstr "" -"A importação direta de um driver %(name)r foi descontinuada a partir do " -"Liberty a favor de seu ponto de entrada de %(namespace)r e pode ser removida " -"no N." - -msgid "" -"Disabling an entity where the 'enable' attribute is ignored by configuration." -msgstr "" -"A desativação de uma entidade em que o atributo ‘enable' é ignorado pela " -"configuração." - -#, python-format -msgid "Domain (%s)" -msgstr "Domínio (%s)" - -#, python-format -msgid "Domain cannot be named %s" -msgstr "O domínio não pode ser chamado %s" - -#, python-format -msgid "Domain cannot have ID %s" -msgstr "O domínio não pode ter o ID de %s" - -#, python-format -msgid "Domain is disabled: %s" -msgstr "O domínio está desativado: %s" - -msgid "Domain name cannot contain reserved characters." -msgstr "O nome do domínio não pode conter caracteres reservados." 
- -msgid "Domain scoped token is not supported" -msgstr "O token de escopo de domínio não é suportado" - -msgid "Domain specific roles are not supported in the V8 role driver" -msgstr "" -"Funções específicas de domínio não são suportadas no driver de função da V8" - -#, python-format -msgid "" -"Domain: %(domain)s already has a configuration defined - ignoring file: " -"%(file)s." -msgstr "" -"O domínio: %(domain)s já possui uma configuração definida - ignorando " -"arquivo: %(file)s." - -msgid "Duplicate Entry" -msgstr "Entrada Duplicada" - -#, python-format -msgid "Duplicate ID, %s." -msgstr "ID duplicado, %s." - -#, python-format -msgid "Duplicate entry: %s" -msgstr "Entrada duplicada: %s" - -#, python-format -msgid "Duplicate name, %s." -msgstr "Nome duplicado, %s." - -#, python-format -msgid "Duplicate remote ID: %s" -msgstr "ID remoto duplicado: %s" - -msgid "EC2 access key not found." -msgstr "Chave de acesso EC2 não encontrada." - -msgid "EC2 signature not supplied." -msgstr "Assinatura EC2 não fornecida." - -msgid "" -"Either --bootstrap-password argument or OS_BOOTSTRAP_PASSWORD must be set." -msgstr "" -"O argumento de senha de autoinicialização ou OS_BOOTSTRAP_PASSWORD deve ser " -"configurado." - -msgid "Enabled field must be a boolean" -msgstr "O campo habilitado precisa ser um booleano" - -msgid "Enabled field should be a boolean" -msgstr "O campo habilitado deve ser um booleano" - -#, python-format -msgid "Endpoint %(endpoint_id)s not found in project %(project_id)s" -msgstr "Endpoint %(endpoint_id)s não encontrado no projeto %(project_id)s" - -msgid "Endpoint Group Project Association not found" -msgstr "Associação de Projeto do Grupo do Terminal não localizada" - -msgid "Ensure configuration option idp_entity_id is set." -msgstr "" -"Assegure-se de que a opção de configuração idp_entity_id esteja definida." - -msgid "Ensure configuration option idp_sso_endpoint is set." 
-msgstr "" -"Assegure-se de que a opção de configuração idp_sso_endpoint esteja definida." - -#, python-format -msgid "" -"Error parsing configuration file for domain: %(domain)s, file: %(file)s." -msgstr "" -"Erro ao analisar o arquivo de configuração para o domínio: %(domain)s, " -"arquivo: %(file)s." - -#, python-format -msgid "Error while opening file %(path)s: %(err)s" -msgstr "Erro ao abrir arquivo %(path)s: %(err)s" - -#, python-format -msgid "Error while parsing line: '%(line)s': %(err)s" -msgstr "Erro ao analisar a linha %(line)s: %(err)s" - -#, python-format -msgid "Error while parsing rules %(path)s: %(err)s" -msgstr "Erro ao analisar regras %(path)s: %(err)s" - -#, python-format -msgid "Error while reading metadata file, %(reason)s" -msgstr "Erro ao ler arquivo de metadados, %(reason)s" - -#, python-format -msgid "" -"Exceeded attempts to register domain %(domain)s to use the SQL driver, the " -"last domain that appears to have had it is %(last_domain)s, giving up" -msgstr "" -"Tentativas excedidas de registrar o domínio %(domain)s para usar SQL driver, " -"o ultimo domínio que parece ter tido foi %(last_domain)s, desistindo" - -#, python-format -msgid "Expected dict or list: %s" -msgstr "Esperado dict ou list: %s" - -msgid "" -"Expected signing certificates are not available on the server. Please check " -"Keystone configuration." -msgstr "" -"Certificados de assinatura esperados não estão disponíveis no servidor. " -"Verifique a configuração de Keystone." - -#, python-format -msgid "" -"Expecting to find %(attribute)s in %(target)s - the server could not comply " -"with the request since it is either malformed or otherwise incorrect. The " -"client is assumed to be in error." -msgstr "" -"Esperando localizar %(attribute)s em %(target)s - o servidor não pôde " -"atender à solicitação porque ela está malformada ou de outra maneira " -"incorreta. Supõe-se que o cliente está em erro." 
- -#, python-format -msgid "Failed to start the %(name)s server" -msgstr "Falha ao iniciar o servidor do %(name)s" - -msgid "Failed to validate token" -msgstr "Falha ao validar token" - -msgid "Federation token is expired" -msgstr "O token de federação está expirado" - -#, python-format -msgid "" -"Field \"remaining_uses\" is set to %(value)s while it must not be set in " -"order to redelegate a trust" -msgstr "" -"O campo \"remaining_uses\" está configurado como %(value)s enquanto ele não " -"deve ser configurado para delegar novamente uma confiança" - -msgid "Found invalid token: scoped to both project and domain." -msgstr "Token inválido encontrado: escopo definido para o projeto e o domínio." - -#, python-format -msgid "Group %s not found in config" -msgstr "Grupo %s não localizado na configuração" - -#, python-format -msgid "Group %(group)s is not supported for domain specific configurations" -msgstr "" -"O grupo %(group)s não é suportado para configurações específicas do domínio" - -#, python-format -msgid "" -"Group %(group_id)s returned by mapping %(mapping_id)s was not found in the " -"backend." -msgstr "" -"O grupo %(group_id)s retornado pelo mapeamento %(mapping_id)s não foi " -"localizado no backend." - -#, python-format -msgid "" -"Group membership across backend boundaries is not allowed, group in question " -"is %(group_id)s, user is %(user_id)s" -msgstr "" -"Associação ao grupo pelos limites de backend não é permitida, o grupo em " -"questão é %(group_id)s, o usuário é %(user_id)s" - -#, python-format -msgid "ID attribute %(id_attr)s not found in LDAP object %(dn)s" -msgstr "Atributo do ID %(id_attr)s não localizado no objeto LDAP %(dn)s" - -#, python-format -msgid "Identity Provider %(idp)s is disabled" -msgstr "O Provedor de Identidade %(idp)s está desativado" - -msgid "" -"Incoming identity provider identifier not included among the accepted " -"identifiers." 
-msgstr "" -"O identificador do provedor de identidade recebido não está incluído entre " -"os identificadores aceitos." - -msgid "Invalid EC2 signature." -msgstr "Assinatura EC2 inválida." - -#, python-format -msgid "Invalid LDAP TLS certs option: %(option)s. Choose one of: %(options)s" -msgstr "" -"Opção de certificados LADP TLS inválida: %(option)s. Escolha uma de: " -"%(options)s" - -#, python-format -msgid "Invalid LDAP TLS_AVAIL option: %s. TLS not available" -msgstr "Opção LDAP TLS_AVAIL inválida: %s. TLS não dsponível" - -#, python-format -msgid "Invalid LDAP deref option: %(option)s. Choose one of: %(options)s" -msgstr "Opção deref LDAP inválida: %(option)s. Escolha uma destas: %(options)s" - -#, python-format -msgid "Invalid LDAP scope: %(scope)s. Choose one of: %(options)s" -msgstr "Escopo LDAP inválido: %(scope)s. Escolha um de: %(options)s" - -msgid "Invalid TLS / LDAPS combination" -msgstr "Combinação TLS / LADPS inválida" - -#, python-format -msgid "Invalid audit info data type: %(data)s (%(type)s)" -msgstr "" -"Tipo de dados de informações de auditoria inválido: %(data)s (%(type)s)" - -msgid "Invalid blob in credential" -msgstr "BLOB inválido na credencial" - -#, python-format -msgid "" -"Invalid domain name: %(domain)s found in config file name: %(file)s - " -"ignoring this file." -msgstr "" -"Nome de domínio inválido: %(domain)s localizado no nome do arquivo de " -"configuração: %(file)s - ignorando este arquivo." - -#, python-format -msgid "Invalid domain specific configuration: %(reason)s" -msgstr "Configuração específica de domínio inválida: %(reason)s" - -#, python-format -msgid "Invalid input for field '%(path)s'. The value is '%(value)s'." -msgstr "Entrada inválida para o campo '%(path)s'. O valor é '%(value)s'." - -msgid "Invalid limit value" -msgstr "Valor limite inválido" - -#, python-format -msgid "" -"Invalid mix of entities for policy association - only Endpoint, Service or " -"Region+Service allowed. 
Request was - Endpoint: %(endpoint_id)s, Service: " -"%(service_id)s, Region: %(region_id)s" -msgstr "" -"Combinação de entidades inválida para associação de política - somente " -"Terminal, Serviço ou Região+Serviço permitido. A solicitação foi - Terminal: " -"%(endpoint_id)s, Serviço: %(service_id)s, Região: %(region_id)s" - -#, python-format -msgid "" -"Invalid rule: %(identity_value)s. Both 'groups' and 'domain' keywords must " -"be specified." -msgstr "" -"Regra inválida: %(identity_value)s. As palavras-chave 'groups' e 'domain' " -"devem ser especificadas." - -msgid "Invalid signature" -msgstr "Assinatura inválida" - -msgid "Invalid user / password" -msgstr "Usuário / senha inválido" - -msgid "Invalid username or TOTP passcode" -msgstr "Nome de usuário ou passcode TOTP inválido" - -msgid "Invalid username or password" -msgstr "Nome de usuário ou senha inválidos" - -#, python-format -msgid "KVS region %s is already configured. Cannot reconfigure." -msgstr "A região KVS %s já está configurada. Não é possível reconfigurar." - -#, python-format -msgid "Key Value Store not configured: %s" -msgstr "Armazenamento do Valor da Chave não configurado: %s" - -#, python-format -msgid "LDAP %s create" -msgstr "Criação de LDAP %s" - -#, python-format -msgid "LDAP %s delete" -msgstr "Exclusão de LDAP %s" - -#, python-format -msgid "LDAP %s update" -msgstr "Atualização de LDAP %s" - -msgid "" -"Length of transformable resource id > 64, which is max allowed characters" -msgstr "" -"O comprimento do recurso transformável id > 64, que é o máximo de caracteres " -"permitidos" - -#, python-format -msgid "" -"Local section in mapping %(mapping_id)s refers to a remote match that " -"doesn't exist (e.g. {0} in a local section)." -msgstr "" -"A seção local no mapeamento %(mapping_id)s refere-se a uma correspondência " -"remota que não existe (por exemplo, '{0}' em uma seção local)." 
- -#, python-format -msgid "Lock Timeout occurred for key, %(target)s" -msgstr "Ocorreu um tempo limite de bloqueio para a chave, %(target)s" - -#, python-format -msgid "Lock key must match target key: %(lock)s != %(target)s" -msgstr "" -"A chave de bloqueio deve corresponder à chave de destino: %(lock)s !=" -"%(target)s" - -#, python-format -msgid "Malformed endpoint URL (%(endpoint)s), see ERROR log for details." -msgstr "" -"URL de terminal mal-formada (%(endpoint)s), consulte o log ERROR para obter " -"detalhes." - -msgid "Marker could not be found" -msgstr "O marcador não pôde ser encontrado" - -#, python-format -msgid "Max hierarchy depth reached for %s branch." -msgstr "Profundidade máx. de hierarquia atingida para a ramificação %s." - -#, python-format -msgid "Maximum lock attempts on %s occurred." -msgstr "Máximo de tentativas de bloqueio em %s ocorreu." - -#, python-format -msgid "Member %(member)s is already a member of group %(group)s" -msgstr "O membro %(member)s já é membro do grupo %(group)s" - -#, python-format -msgid "Method not callable: %s" -msgstr "O método não pode ser chamado: %s" - -msgid "Missing entity ID from environment" -msgstr "ID da entidade ausente a partir do ambiente" - -msgid "" -"Modifying \"redelegation_count\" upon redelegation is forbidden. Omitting " -"this parameter is advised." -msgstr "" -"A modificação de \"redelegation_count\" na nova delegação é proibida. É " -"recomendado omitir este parâmetro." - -msgid "Multiple domains are not supported" -msgstr "Múltiplos domínios não são suportados" - -msgid "Must be called within an active lock context." -msgstr "Deve ser chamado dentro de um contexto de bloqueio ativo." - -msgid "Must specify either domain or project" -msgstr "Deve especificar o domínio ou projeto" - -msgid "Name field is required and cannot be empty" -msgstr "O campo Nome é obrigatório e não pode ficar vazio" - -msgid "Neither Project Domain ID nor Project Domain Name was provided." 
-msgstr "" -"Nem o ID do Domínio do Projeto nem o Nome do Domínio do Projeto foi " -"fornecido." - -msgid "" -"No Authorization headers found, cannot proceed with OAuth related calls, if " -"running under HTTPd or Apache, ensure WSGIPassAuthorization is set to On." -msgstr "" -"Nenhum cabeçalho de autorização foi localizado; não é possível continuar com " -"chamadas relacionadas OAuth. Se estiver executando sob HTTPd ou Apache, " -"certifique-se de que WSGIPassAuthorization esteja configurado para Ligado." - -msgid "No authenticated user" -msgstr "Nenhum usuário autenticado" - -msgid "" -"No encryption keys found; run keystone-manage fernet_setup to bootstrap one." -msgstr "" -"Nenhuma chave de criptografia localizada; execute keystone-manage " -"fernet_setup para autoinicializar uma." - -msgid "No options specified" -msgstr "Nenhuma opção especificada" - -#, python-format -msgid "No policy is associated with endpoint %(endpoint_id)s." -msgstr "Nenhuma política associada ao terminal %(endpoint_id)s." - -#, python-format -msgid "No remaining uses for trust: %(trust_id)s" -msgstr "Nenhum uso restante para confiança: %(trust_id)s" - -msgid "No token in the request" -msgstr "Não existe token na solicitação." 
- -msgid "Non-default domain is not supported" -msgstr "O domínio não padrão não é suportado" - -msgid "One of the trust agents is disabled or deleted" -msgstr "Um dos agentes de confiança está desativado ou excluído" - -#, python-format -msgid "" -"Option %(option)s found with no group specified while checking domain " -"configuration request" -msgstr "" -"Opção %(option)s localizada sem grupo especificado durante a verificação de " -"solicitação de configuração de domínio" - -#, python-format -msgid "" -"Option %(option)s in group %(group)s is not supported for domain specific " -"configurations" -msgstr "" -"A opção %(option)s no grupo %(group)s não é suportada para configurações " -"específicas de domínio" - -#, python-format -msgid "Project (%s)" -msgstr "Projeto (%s)" - -#, python-format -msgid "Project ID not found: %(t_id)s" -msgstr "ID de projeto não encontrado: %(t_id)s" - -msgid "Project field is required and cannot be empty." -msgstr "O campo projeto é necessário e não pode ficar vazio." - -#, python-format -msgid "Project is disabled: %s" -msgstr "O projeto está desativado: %s" - -msgid "Project name cannot contain reserved characters." -msgstr "O nome do projeto não pode conter caracteres reservados." 
- -msgid "Query string is not UTF-8 encoded" -msgstr "A sequência de consulta não está codificada em UTF-8 " - -#, python-format -msgid "" -"Reading the default for option %(option)s in group %(group)s is not supported" -msgstr "" -"Não é suportado ler o padrão para a opção %(option)s no grupo %(group)s" - -msgid "Redelegation allowed for delegated by trust only" -msgstr "Nova delegação permitida para delegado somente pelo fiador" - -#, python-format -msgid "" -"Remaining redelegation depth of %(redelegation_depth)d out of allowed range " -"of [0..%(max_count)d]" -msgstr "" -"Profundidade da nova delegação restante do %(redelegation_depth)d fora do " -"intervalo permitido de [0..%(max_count)d]" - -msgid "" -"Remove admin_crud_extension from the paste pipeline, the admin_crud " -"extension is now always available. Updatethe [pipeline:admin_api] section in " -"keystone-paste.ini accordingly, as it will be removed in the O release." -msgstr "" -"Remova a admin_crud_extension do pipeline de colagem, já que a extensão " -"admin_crud agora está sempre disponível. Atualize a seção [pipeline:" -"admin_api] no keystone-paste.ini de acordo, já que ela será removida da " -"liberação O." - -msgid "" -"Remove endpoint_filter_extension from the paste pipeline, the endpoint " -"filter extension is now always available. Update the [pipeline:api_v3] " -"section in keystone-paste.ini accordingly as it will be removed in the O " -"release." -msgstr "" -"Remova a endpoint_filter_extension do pipeline de colagem, já que a extensão " -"de filtro de terminal agora está sempre está disponível. Atualize a seção " -"[pipeline:api_v3] no keystone-paste.ini de acordo, já que ela será removida " -"da liberação O." - -msgid "" -"Remove federation_extension from the paste pipeline, the federation " -"extension is now always available. Update the [pipeline:api_v3] section in " -"keystone-paste.ini accordingly, as it will be removed in the O release." 
-msgstr "" -"Remova a federation_extension do pipeline de colagem, já que a extensão de " -"federação agora está sempre está disponível. Atualize a seção [pipeline:" -"api_v3] no keystone-paste.ini de acordo, já que ela será removida da " -"liberação O." - -msgid "" -"Remove oauth1_extension from the paste pipeline, the oauth1 extension is now " -"always available. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"Remova oauth1_extension do pipeline de colagem, já que a extensão oauth1 " -"agora está sempre está disponível. Atualize a seção [pipeline:api_v3] no " -"keystone-paste.ini de acordo, já que ela será removida da liberação O." - -msgid "" -"Remove revoke_extension from the paste pipeline, the revoke extension is now " -"always available. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"Remova revoke_extension do pipeline de colagem, já que a extensão de " -"revogação agora está sempre está disponível. Atualize a seção [pipeline:" -"api_v3] no keystone-paste.ini de acordo, já que ela será removida da " -"liberação O." - -msgid "" -"Remove simple_cert from the paste pipeline, the PKI and PKIz token providers " -"are now deprecated and simple_cert was only used insupport of these token " -"providers. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"Remova simple_cert do pipeline de colagem, já que os provedores PKI e PKIz " -"estão agora descontinuados e simple_cert era usado somente em suporte a " -"esses provedores de token. Atualize a seção [pipeline:api_v3] no keystone-" -"paste.ini de acordo, já que ela será removida da liberação O." - -msgid "" -"Remove user_crud_extension from the paste pipeline, the user_crud extension " -"is now always available. 
Updatethe [pipeline:public_api] section in keystone-" -"paste.ini accordingly, as it will be removed in the O release." -msgstr "" -"Remova a user_crud_extension do pipeline de colagem, já que a extensão " -"user_crud agora está sempre disponível. Atualize a seção [pipeline:" -"public_api] no keystone-paste.ini de acordo, já que ela será removida da " -"liberação O." - -msgid "Request Token does not have an authorizing user id" -msgstr "O Token de Solicitação não possui um ID de usuário autorizado" - -#, python-format -msgid "" -"Request attribute %(attribute)s must be less than or equal to %(size)i. The " -"server could not comply with the request because the attribute size is " -"invalid (too large). The client is assumed to be in error." -msgstr "" -"O atributo de solicitação %(attribute)s deve ser menor ou igual a %(size)i. " -"O servidor não pôde atender à solicitação porque o tamanho do atributo é " -"inválido (muito grande). Supõe-se que o cliente está em erro." - -msgid "Request must have an origin query parameter" -msgstr "A solicitação deve ter um parâmetro de consulta de origem" - -msgid "Request token is expired" -msgstr "O token de solicitação expirou" - -msgid "Request token not found" -msgstr "Token de solicitação não encontrado" - -msgid "Requested expiration time is more than redelegated trust can provide" -msgstr "" -"Prazo de expiração solicitado é maior do que a confiança delegada novamente " -"pode fornecer" - -#, python-format -msgid "" -"Requested redelegation depth of %(requested_count)d is greater than allowed " -"%(max_count)d" -msgstr "" -"Profundidade da nova delegação solicitada de %(requested_count)d é maior que " -"a %(max_count)d permitida" - -msgid "" -"Running keystone via eventlet is deprecated as of Kilo in favor of running " -"in a WSGI server (e.g. mod_wsgi). Support for keystone under eventlet will " -"be removed in the \"M\"-Release." 
-msgstr "" -"Executar o keystone via eventlet foi descontinuado a partir do Kilo a favor " -"de executar em um servidor WSGI (por exemplo, mod_wsgi). Suporte para o " -"keystone sob eventlet será removido no \"M\"-Release." - -msgid "Scoping to both domain and project is not allowed" -msgstr "A definição de escopo para o domínio e o projeto não é permitida" - -msgid "Scoping to both domain and trust is not allowed" -msgstr "A definição de escopo para o domínio e confiança não é permitida" - -msgid "Scoping to both project and trust is not allowed" -msgstr "A definição de escopo para o projeto e a confiança não é permitida" - -#, python-format -msgid "Service Provider %(sp)s is disabled" -msgstr "O Provedor de Serviços %(sp)s está desativado" - -msgid "Some of requested roles are not in redelegated trust" -msgstr "Algumas funções solicitadas não estão na confiança da nova delegação" - -msgid "Specify a domain or project, not both" -msgstr "Especifique um domínio ou projeto, não ambos" - -msgid "Specify a user or group, not both" -msgstr "Especifique um usuário ou grupo, não ambos" - -msgid "Specify one of domain or project" -msgstr "Especifique um domínio ou projeto" - -msgid "Specify one of user or group" -msgstr "Especifique um usuário ou grupo" - -#, python-format -msgid "" -"String length exceeded.The length of string '%(string)s' exceeded the limit " -"of column %(type)s(CHAR(%(length)d))." -msgstr "" -"Comprimento de sequência excedido. O comprimento de sequência '%(string)s' " -"excedeu o limite da coluna %(type)s(CHAR(%(length)d))." - -msgid "Tenant name cannot contain reserved characters." -msgstr "O nome do locatário não pode conter caracteres reservados." - -#, python-format -msgid "" -"The %s extension has been moved into keystone core and as such its " -"migrations are maintained by the main keystone database control. 
Use the " -"command: keystone-manage db_sync" -msgstr "" -"A extensão %s foi movida para o núcleo do keystone e, com isso, suas " -"migrações são mantidas pelo controle de banco de dados keystone principal. " -"Use o comando: keystone-manage db_sync" - -msgid "" -"The 'expires_at' must not be before now. The server could not comply with " -"the request since it is either malformed or otherwise incorrect. The client " -"is assumed to be in error." -msgstr "" -"O 'expires_at' não deve ser anterior a agora. O servidor não pôde atender à " -"solicitação porque ela está malformada ou de outra maneira incorreta. Supõe-" -"se que o cliente está em erro erro." - -msgid "The --all option cannot be used with the --domain-name option" -msgstr "A opção --all não pode ser usada com a opção --domain-name" - -#, python-format -msgid "The Keystone configuration file %(config_file)s could not be found." -msgstr "" -"O arquivo de configuração do Keystone %(config_file)s não pôde ser " -"localizado." - -#, python-format -msgid "" -"The Keystone domain-specific configuration has specified more than one SQL " -"driver (only one is permitted): %(source)s." -msgstr "" -"A configuração específica de domínio Keystone especificou mais de um driver " -"SQL (somente um é permitido): %(source)s." - -msgid "The action you have requested has not been implemented." -msgstr "A ação que você solicitou não foi implementada." - -msgid "The authenticated user should match the trustor." -msgstr "O usuário autenticado deve corresponder ao fideicomitente." - -msgid "" -"The certificates you requested are not available. It is likely that this " -"server does not use PKI tokens otherwise this is the result of " -"misconfiguration." -msgstr "" -"Os certificados que você solicitou não estão disponíveis. É provável que " -"esse servidor não utiliza tokens PKI, caso contrário, este é o resultado de " -"configuração incorreta." - -msgid "The configured token provider does not support bind authentication." 
-msgstr "O provedor de token configurado não suporta autenticação de ligação." - -msgid "The creation of projects acting as domains is not allowed in v2." -msgstr "A criação de projetos agindo como domínios não é permitida na v2." - -#, python-format -msgid "" -"The password length must be less than or equal to %(size)i. The server could " -"not comply with the request because the password is invalid." -msgstr "" -"O comprimento da senha deve ser menor ou igual a %(size)i. O servidor não " -"pôde atender à solicitação porque a senha é inválida." - -msgid "The request you have made requires authentication." -msgstr "A solicitação que você fez requer autenticação." - -msgid "The resource could not be found." -msgstr "O recurso não pôde ser localizado." - -msgid "" -"The revoke call must not have both domain_id and project_id. This is a bug " -"in the Keystone server. The current request is aborted." -msgstr "" -"A chamada de revogação não deve ter domain_id e project_id. Esse é um erro " -"no servidor do Keystone. A solicitação atual foi interrompida." - -msgid "The service you have requested is no longer available on this server." -msgstr "O serviço que você solicitou não está mais disponível neste servidor." - -#, python-format -msgid "" -"The specified parent region %(parent_region_id)s would create a circular " -"region hierarchy." -msgstr "" -"A região pai especificada %(parent_region_id)s criaria uma hierarquia de " -"região circular." 
- -#, python-format -msgid "" -"The value of group %(group)s specified in the config should be a dictionary " -"of options" -msgstr "" -"O valor do grupo %(group)s especificado na configuração deverá ser um " -"dicionário de opções" - -msgid "There should not be any non-oauth parameters" -msgstr "Não deve haver nenhum parâmetro não oauth" - -#, python-format -msgid "This is not a recognized Fernet payload version: %s" -msgstr "Esta não é uma versão de carga útil do Fernet reconhecida: %s" - -#, python-format -msgid "This is not a recognized Fernet token %s" -msgstr "Este não é um token Fernet %s reconhecido" - -msgid "" -"Timestamp not in expected format. The server could not comply with the " -"request since it is either malformed or otherwise incorrect. The client is " -"assumed to be in error." -msgstr "" -"O registro de data e hora não está no formato especificado. O servidor não " -"pôde atender à solicitação porque ela está mal formada ou de outra maneira " -"incorreta. Supõe-se que o cliente está em erro." - -#, python-format -msgid "" -"To get a more detailed information on this error, re-run this command for " -"the specific domain, i.e.: keystone-manage domain_config_upload --domain-" -"name %s" -msgstr "" -"Para obter informações mais detalhadas sobre este erro, execute novamente " -"este comando para o domínio específico, ou seja: keystone-manage " -"domain_config_upload --domain-name %s" - -msgid "Token belongs to another user" -msgstr "O token pertence a outro usuário" - -msgid "Token does not belong to specified tenant." -msgstr "O token não pertence ao locatário especificado." - -msgid "Token version is unrecognizable or unsupported." -msgstr "A versão de Token é irreconhecida ou não suportada" - -msgid "Trustee has no delegated roles." -msgstr "O fiduciário não possui funções delegadas." - -msgid "Trustor is disabled." -msgstr "O fideicomitente está desativado." 
- -#, python-format -msgid "" -"Trying to update group %(group)s, so that, and only that, group must be " -"specified in the config" -msgstr "" -"Tentando atualizar o grupo %(group)s de modo que, e apenas que, o grupo deve " -"ser especificado na configuração" - -#, python-format -msgid "" -"Trying to update option %(option)s in group %(group)s, but config provided " -"contains option %(option_other)s instead" -msgstr "" -"Tentando atualizar a opção %(option)s no grupo %(group)s, mas a configuração " -"fornecida contém %(option_other)s " - -#, python-format -msgid "" -"Trying to update option %(option)s in group %(group)s, so that, and only " -"that, option must be specified in the config" -msgstr "" -"Tentando atualizar a opção %(option)s no grupo %(group)s, de modo que, e " -"apenas que, a opção deve ser especificada na configuração" - -msgid "" -"Unable to access the keystone database, please check it is configured " -"correctly." -msgstr "" -"Não é possível acessar o banco de dados keystone, verifique se ele está " -"configurado corretamente." - -#, python-format -msgid "Unable to consume trust %(trust_id)s, unable to acquire lock." -msgstr "" -"Não é possível consumir a confiança %(trust_id)s, não é possível adquirir o " -"bloqueio." - -#, python-format -msgid "" -"Unable to delete region %(region_id)s because it or its child regions have " -"associated endpoints." -msgstr "" -"Não é possível excluir a região %(region_id)s porque uma ou mais de suas " -"regiões filhas possuem terminais associados." 
- -msgid "Unable to downgrade schema" -msgstr "Não é possível fazer downgrade do esquema" - -#, python-format -msgid "Unable to find valid groups while using mapping %(mapping_id)s" -msgstr "" -"Não é possível localizar os grupos válidos ao utilizar o mapeamento " -"%(mapping_id)s" - -#, python-format -msgid "Unable to locate domain config directory: %s" -msgstr "Não é possível localizar o diretório de configuração de domínio: %s" - -#, python-format -msgid "Unable to lookup user %s" -msgstr "Não é possível consultar o usuário %s" - -#, python-format -msgid "" -"Unable to reconcile identity attribute %(attribute)s as it has conflicting " -"values %(new)s and %(old)s" -msgstr "" -"Não é possível reconciliar o atributo de identidade %(attribute)s, pois ele " -"possui valores conflitantes %(new)s e %(old)s" - -#, python-format -msgid "" -"Unable to sign SAML assertion. It is likely that this server does not have " -"xmlsec1 installed, or this is the result of misconfiguration. Reason " -"%(reason)s" -msgstr "" -"Não é possível assinar asserção SAML. Provavelmente esse servidor não possui " -"o xmlsec1 instalado, ou isso é o resultado de uma configuração incorreta. " -"Motivo %(reason)s" - -msgid "Unable to sign token." -msgstr "Não é possível assinar o token." 
- -#, python-format -msgid "Unexpected assignment type encountered, %s" -msgstr "Tipo de designação inesperado encontrado, %s" - -#, python-format -msgid "" -"Unexpected combination of grant attributes - User: %(user_id)s, Group: " -"%(group_id)s, Project: %(project_id)s, Domain: %(domain_id)s" -msgstr "" -"Combinação inesperada de atributos de concessão – Usuário: %(user_id)s, " -"Grupo: %(group_id)s, Projeto: %(project_id)s, Domínio: %(domain_id)s" - -#, python-format -msgid "Unexpected status requested for JSON Home response, %s" -msgstr "Status inesperado solicitado para resposta JSON Home, %s" - -msgid "Unknown Target" -msgstr "Alvo Desconhecido" - -#, python-format -msgid "Unknown domain '%(name)s' specified by --domain-name" -msgstr "Domínio desconhecido '%(name)s' especificado pelo --domain-name" - -#, python-format -msgid "Unknown token version %s" -msgstr "Versão de token desconhecida %s" - -#, python-format -msgid "Unregistered dependency: %(name)s for %(targets)s" -msgstr "Dependência não registrada: %(name)s para %(targets)s" - -msgid "Update of `domain_id` is not allowed." -msgstr "Atualização de `domain_id` não é permitida." - -msgid "Update of `is_domain` is not allowed." -msgstr "Atualização de `is_domain` não é permitida." - -msgid "Update of `parent_id` is not allowed." -msgstr "Atualização de ‘parent_id’ não é permitida." - -msgid "Update of domain_id is only allowed for root projects." -msgstr "A atualização de domain_id é permitida somente para projetos raízes." - -msgid "Update of domain_id of projects acting as domains is not allowed." -msgstr "" -"Não é permitido atualizar domain_id de projetos que agem como domínios." 
- -msgid "Use a project scoped token when attempting to create a SAML assertion" -msgstr "" -"Use um token com escopo definido do projeto ao tentar criar uma asserção SAML" - -msgid "" -"Use of the identity driver config to automatically configure the same " -"assignment driver has been deprecated, in the \"O\" release, the assignment " -"driver will need to be expicitly configured if different than the default " -"(SQL)." -msgstr "" -"O uso da configuração do driver de identidade para configurar " -"automaticamente o mesmo driver de designação foi descontinuado. Na liberação " -"\"O\", o driver de designação precisará ser configurado explicitamente caso " -"seja diferente do padrão (SQL)." - -#, python-format -msgid "User %(u_id)s is unauthorized for tenant %(t_id)s" -msgstr "O usuário %(u_id)s não está autorizado para o locatário %(t_id)s" - -#, python-format -msgid "User %(user_id)s has no access to domain %(domain_id)s" -msgstr "O usuário %(user_id)s não tem acesso ao domínio %(domain_id)s" - -#, python-format -msgid "User %(user_id)s has no access to project %(project_id)s" -msgstr "O usuário %(user_id)s não tem acesso ao projeto %(project_id)s" - -#, python-format -msgid "User %(user_id)s is already a member of group %(group_id)s" -msgstr "O usuário %(user_id)s já é membro do grupo %(group_id)s" - -#, python-format -msgid "User '%(user_id)s' not found in group '%(group_id)s'" -msgstr "Usuário '%(user_id)s' não localizado no grupo '%(group_id)s'" - -msgid "User IDs do not match" -msgstr "O ID de usuário não corresponde" - -msgid "" -"User auth cannot be built due to missing either user id, or user name with " -"domain id, or user name with domain name." -msgstr "" -"A autenticação do usuário não pode ser construída porque está faltando o ID " -"ou o nome do usuário com o ID do domínio ou o nome do usuário com o nome do " -"domínio." 
- -#, python-format -msgid "User is disabled: %s" -msgstr "O usuário está desativado: %s" - -msgid "User is not a member of the requested project" -msgstr "O usuário não é membro do projeto solicitado" - -msgid "User is not a trustee." -msgstr "Usuário não é um fiduciário." - -msgid "User not found" -msgstr "Usuário não localizado" - -msgid "User not valid for tenant." -msgstr "O usuário não é válido para o locatário." - -msgid "User roles not supported: tenant_id required" -msgstr "Papéis de usuários não suportados: necessário tenant_id" - -#, python-format -msgid "User type %s not supported" -msgstr "Tipo de usuário %s não suportado" - -msgid "You are not authorized to perform the requested action." -msgstr "Você não está autorizado à realizar a ação solicitada." - -#, python-format -msgid "You are not authorized to perform the requested action: %(action)s" -msgstr "Você não está autorizado a executar a ação solicitada: %(action)s" - -msgid "" -"You have tried to create a resource using the admin token. As this token is " -"not within a domain you must explicitly include a domain for this resource " -"to belong to." -msgstr "" -"Você tentou criar um recurso usando o token de administração. Como esse " -"token não está dentro de um domínio, deve-se incluir explicitamente um " -"domínio ao qual esse recurso possa pertencer." - -msgid "`key_mangler` functions must be callable." -msgstr "Funções `key_mangler` devem ser chamáveis." 
- -msgid "`key_mangler` option must be a function reference" -msgstr "A opção `key_mangler` deve ser uma referência de função" - -msgid "any options" -msgstr "quaisquer opções" - -msgid "auth_type is not Negotiate" -msgstr "auth_type não é Negotiate" - -msgid "authorizing user does not have role required" -msgstr "O usuário autorizado não possui a função necessária" - -#, python-format -msgid "cannot create a project in a branch containing a disabled project: %s" -msgstr "" -"Não é possível criar um projeto em uma ramificação que contém um projeto " -"desativado: %s" - -#, python-format -msgid "" -"cannot delete an enabled project acting as a domain. Please disable the " -"project %s first." -msgstr "" -"Não é possível excluir um projeto ativado que age como um domínio. Desative " -"o projeto %s primeiro." - -#, python-format -msgid "group %(group)s" -msgstr "grupo %(group)s" - -msgid "" -"idp_contact_type must be one of: [technical, other, support, administrative " -"or billing." -msgstr "" -"idp_contact_type deve ser uma dessas opções: [técnico, outro, suporte, " -"administrativo ou faturamento." - -#, python-format -msgid "invalid date format %s" -msgstr "formato de data inválido %s" - -#, python-format -msgid "" -"it is not permitted to have two projects acting as domains with the same " -"name: %s" -msgstr "" -"Não é permitido ter dois projetos agindo como domínios com o mesmo nome: %s" - -#, python-format -msgid "" -"it is not permitted to have two projects within a domain with the same " -"name : %s" -msgstr "" -"Não é permitido ter dois projetos dentro de um domínio com o mesmo nome: %s" - -msgid "only root projects are allowed to act as domains." -msgstr "Somente projetos raízes são permitidos para agirem como domínios. 
" - -#, python-format -msgid "option %(option)s in group %(group)s" -msgstr "opção %(option)s no grupo %(group)s" - -msgid "provided consumer key does not match stored consumer key" -msgstr "" -"A chave de consumidor fornecida não confere com a chave de consumidor " -"armazenada" - -msgid "provided request key does not match stored request key" -msgstr "" -"A chave de solicitação fornecida não confere com a chave de solicitação " -"armazenada" - -msgid "provided verifier does not match stored verifier" -msgstr "O verificador fornecido não confere com o verificador armazenado" - -msgid "remaining_uses must be a positive integer or null." -msgstr "remaining_uses deve ser um número inteiro positivo ou nulo." - -msgid "remaining_uses must not be set if redelegation is allowed" -msgstr "" -"remaining_uses não deverá ser definido se a nova delegação for permitida" - -#, python-format -msgid "" -"request to update group %(group)s, but config provided contains group " -"%(group_other)s instead" -msgstr "" -"solicitação para atualizar o grupo %(group)s, mas a configuração fornecida " -"contém o grupo %(group_other)s" - -msgid "rescope a scoped token" -msgstr "Defina novamente um escopo de um token com escopo" - -#, python-format -msgid "role %s is not defined" -msgstr "O papel %s não foi definido" - -msgid "scope.project.id must be specified if include_subtree is also specified" -msgstr "" -"scope.project.id deverá ser especificado se include_subtree também for " -"especificado" - -#, python-format -msgid "tls_cacertdir %s not found or is not a directory" -msgstr "tls_cacertdir %s não encontrado ou não é um diretório" - -#, python-format -msgid "tls_cacertfile %s not found or is not a file" -msgstr "tls_cacertfile %s não encontrado ou não é um arquivo" - -#, python-format -msgid "token reference must be a KeystoneToken type, got: %s" -msgstr "A referência de token deve ser um tipo KeystoneToken, obteve: %s" - -msgid "" -"update of domain_id is deprecated as of Mitaka and 
will be removed in O." -msgstr "" -"A atualização de domain_id foi descontinuada a partir do Mitaka e será " -"removida na liberação O." - -#, python-format -msgid "" -"validated expected to find %(param_name)r in function signature for " -"%(func_name)r." -msgstr "" -"O validado esperava localizar %(param_name)r na assinatura da função para " -"%(func_name)r." diff --git a/keystone-moon/keystone/locale/ru/LC_MESSAGES/keystone-log-critical.po b/keystone-moon/keystone/locale/ru/LC_MESSAGES/keystone-log-critical.po deleted file mode 100644 index b60e4349..00000000 --- a/keystone-moon/keystone/locale/ru/LC_MESSAGES/keystone-log-critical.po +++ /dev/null @@ -1,27 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# OpenStack Infra , 2015. #zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2014-08-31 03:19+0000\n" -"Last-Translator: openstackjenkins \n" -"Language: ru\n" -"Plural-Forms: nplurals=4; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n" -"%10<=4 && (n%100<12 || n%100>14) ? 1 : n%10==0 || (n%10>=5 && n%10<=9) || (n" -"%100>=11 && n%100<=14)? 2 : 3);\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Russian\n" - -#, python-format -msgid "Unable to open template file %s" -msgstr "Не удается открыть файл шаблона %s" diff --git a/keystone-moon/keystone/locale/ru/LC_MESSAGES/keystone.po b/keystone-moon/keystone/locale/ru/LC_MESSAGES/keystone.po deleted file mode 100644 index 205a3e53..00000000 --- a/keystone-moon/keystone/locale/ru/LC_MESSAGES/keystone.po +++ /dev/null @@ -1,1603 +0,0 @@ -# Translations template for keystone. 
-# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# kogamatranslator49 , 2015 -# sher , 2013 -# sher , 2013 -# Lucas Palm , 2015. #zanata -# OpenStack Infra , 2015. #zanata -# Grigory Mokhin , 2016. #zanata -# Lucas Palm , 2016. #zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2016-03-18 10:16+0000\n" -"Last-Translator: Grigory Mokhin \n" -"Language: ru\n" -"Plural-Forms: nplurals=4; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n" -"%10<=4 && (n%100<12 || n%100>14) ? 1 : n%10==0 || (n%10>=5 && n%10<=9) || (n" -"%100>=11 && n%100<=14)? 2 : 3);\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Russian\n" - -#, python-format -msgid "%(detail)s" -msgstr "%(detail)s" - -#, python-format -msgid "%(driver)s is not supported driver version" -msgstr "Версия драйвера %(driver)s не поддерживается" - -#, python-format -msgid "" -"%(entity)s name cannot contain the following reserved characters: %(chars)s" -msgstr "" -"Имя %(entity)s не может содержать следующие зарезервированные символы: " -"%(chars)s" - -#, python-format -msgid "" -"%(event)s is not a valid notification event, must be one of: %(actions)s" -msgstr "" -"%(event)s не является допустимым событием уведомления, требуется одно из " -"значений: %(actions)s" - -#, python-format -msgid "%(host)s is not a trusted dashboard host" -msgstr "%(host)s не является надежным хостом сводных панелей" - -#, python-format -msgid "%(message)s %(amendment)s" -msgstr "%(message)s %(amendment)s" - -#, python-format -msgid "" -"%(mod_name)s doesn't provide database migrations. 
The migration repository " -"path at %(path)s doesn't exist or isn't a directory." -msgstr "" -"%(mod_name)s не обеспечивает перенос баз данных. Путь к хранилищу миграции " -"%(path)s не существует или не является каталогом." - -#, python-format -msgid "%(prior_role_id)s does not imply %(implied_role_id)s" -msgstr "%(prior_role_id)s не подразумевает %(implied_role_id)s" - -#, python-format -msgid "%(property_name)s cannot be less than %(min_length)s characters." -msgstr "%(property_name)s не может быть короче %(min_length)s символов." - -#, python-format -msgid "%(property_name)s is not a %(display_expected_type)s" -msgstr "%(property_name)s не принадлежит к типу %(display_expected_type)s" - -#, python-format -msgid "%(property_name)s should not be greater than %(max_length)s characters." -msgstr "%(property_name)s не должен быть длинее %(max_length)s символов." - -#, python-format -msgid "%(role_id)s cannot be an implied roles" -msgstr "%(role_id)s не может быть подразумеваемой ролью" - -#, python-format -msgid "%s cannot be empty." -msgstr "%s не может быть пуст." - -#, python-format -msgid "%s extension does not exist." -msgstr "Расширение %s не существует" - -#, python-format -msgid "%s field is required and cannot be empty" -msgstr "Поле %s является обязательным и не может быть пустым" - -#, python-format -msgid "%s field(s) cannot be empty" -msgstr "Поле %s не может быть пустым" - -#, python-format -msgid "" -"%s for the LDAP identity backend has been deprecated in the Mitaka release " -"in favor of read-only identity LDAP access. It will be removed in the \"O\" " -"release." -msgstr "" -"%s для системы идентификации LDAP устарело Mitaka, вместо него используется " -"идентификация LDAP с доступом только для чтения. Эта функция будет удалена в " -"выпуске \"O\"." 
- -msgid "(Disable insecure_debug mode to suppress these details.)" -msgstr "(Выключите режим insecure_debug, чтобы не показывать эти подробности.)" - -msgid "--all option cannot be mixed with other options" -msgstr "опцию --all нельзя указывать вместе с другими опциями" - -msgid "A project-scoped token is required to produce a service catalog." -msgstr "Для создания каталога службы необходим маркер уровня проекта." - -msgid "Access token is expired" -msgstr "Срок действия ключа доступа истек" - -msgid "Access token not found" -msgstr "Ключ доступа не найден" - -msgid "Additional authentications steps required." -msgstr "Требуются дополнительные действия для идентификации." - -msgid "An unexpected error occurred when retrieving domain configs" -msgstr "Возникла непредвиденная ошибка при получении конфигураций доменов" - -#, python-format -msgid "An unexpected error occurred when trying to store %s" -msgstr "При попытке сохранить %s произошла непредвиденная ошибка" - -msgid "An unexpected error prevented the server from fulfilling your request." -msgstr "Из-за непредвиденной ошибки ваш запрос не был выполнен сервером." - -#, python-format -msgid "" -"An unexpected error prevented the server from fulfilling your request: " -"%(exception)s" -msgstr "" -"Из-за непредвиденной ошибки ваш запрос не был выполнен сервером: " -"%(exception)s" - -msgid "An unhandled exception has occurred: Could not find metadata." -msgstr "" -"Возникла необработанная исключительная ситуация: не удалось найти метаданные." - -msgid "At least one option must be provided" -msgstr "Необходимо указать хотя бы одну опцию" - -msgid "At least one option must be provided, use either --all or --domain-name" -msgstr "" -"Должен быть указан хотя бы один параметр. Укажите --all или --domain-name" - -msgid "At least one role should be specified." -msgstr "Необходимо указать по крайней мере одну роль." 
- -#, python-format -msgid "" -"Attempted automatic driver selection for assignment based upon " -"[identity]\\driver option failed since driver %s is not found. Set " -"[assignment]/driver to a valid driver in keystone config." -msgstr "" -"Не удалось автоматически выбрать драйвер на основе опции [identity]\\driver, " -"так как драйвер %s не найден. Укажите требуемый драйвер в [assignment]/" -"driver в конфигурации keystone." - -msgid "Attempted to authenticate with an unsupported method." -msgstr "Попытка идентификации с использованием неподдерживаемого метода." - -msgid "" -"Attempting to use OS-FEDERATION token with V2 Identity Service, use V3 " -"Authentication" -msgstr "" -"Попытка использовать маркер OS-FEDERATION со службой идентификации версии 2. " -"Следует использовать идентификацию версии 3" - -msgid "Authentication plugin error." -msgstr "Ошибка модуля идентификации." - -#, python-format -msgid "" -"Backend `%(backend)s` is not a valid memcached backend. Valid backends: " -"%(backend_list)s" -msgstr "" -"Базовая система `%(backend)s` не является допустимой базовой системой в кэше " -"памяти. Допустимые базовые системы: %(backend_list)s" - -msgid "Cannot authorize a request token with a token issued via delegation." -msgstr "" -"Предоставить права доступа маркеру запроса с маркером, выданным посредством " -"делегирования, невозможно." 
- -#, python-format -msgid "Cannot change %(option_name)s %(attr)s" -msgstr "Невозможно изменить %(option_name)s %(attr)s" - -msgid "Cannot change Domain ID" -msgstr "Невозможно изменить ИД домена" - -msgid "Cannot change user ID" -msgstr "Невозможно изменить ИД пользователя" - -msgid "Cannot change user name" -msgstr "Невозможно изменить имя пользователя" - -#, python-format -msgid "Cannot create an endpoint with an invalid URL: %(url)s" -msgstr "Не удается создать конечную точку с помощью недопустимого URL: %(url)s" - -#, python-format -msgid "Cannot create project with parent: %(project_id)s" -msgstr "Не удается создать проект с родительским объектом: %(project_id)s" - -#, python-format -msgid "" -"Cannot create project, since it specifies its owner as domain %(domain_id)s, " -"but specifies a parent in a different domain (%(parent_domain_id)s)." -msgstr "" -"Не удается создать проект, так как его владелец указан как домен " -"%(domain_id)s, но его родительский объект задан в другом домене " -"(%(parent_domain_id)s)." - -#, python-format -msgid "" -"Cannot create project, since its parent (%(domain_id)s) is acting as a " -"domain, but project's specified parent_id (%(parent_id)s) does not match " -"this domain_id." -msgstr "" -"Не удается создать проект, так как его родительский элемент (%(domain_id)s) " -"работает в качестве домена, но parent_id (%(parent_id)s), указанный для " -"проекта, не соответствует данному domain_id." - -msgid "Cannot delete a domain that is enabled, please disable it first." -msgstr "Невозможно удалить включенный домен, сначала выключите его." - -#, python-format -msgid "" -"Cannot delete project %(project_id)s since its subtree contains enabled " -"projects." -msgstr "" -"Невозможно удалить проект %(project_id)s, так как его поддерево содержит " -"включенные проекты" - -#, python-format -msgid "" -"Cannot delete the project %s since it is not a leaf in the hierarchy. Use " -"the cascade option if you want to delete a whole subtree." 
-msgstr "" -"Невозможно удалить проект %s, так как он не является конечным объектом в " -"структуре. Используйте каскадную опцию для удаления всего поддерева." - -#, python-format -msgid "" -"Cannot disable project %(project_id)s since its subtree contains enabled " -"projects." -msgstr "" -"Нельзя отключить проект %(project_id)s, так как его поддерево содержит " -"включенные проекты" - -#, python-format -msgid "Cannot enable project %s since it has disabled parents" -msgstr "" -"Не удается включить проект %s, так как у него отключены родительские объекты" - -msgid "Cannot list assignments sourced from groups and filtered by user ID." -msgstr "" -"Не удается показать список присвоений, полученных из групп и отфильтрованных " -"по ИД пользователя." - -msgid "Cannot list request tokens with a token issued via delegation." -msgstr "" -"Показать список маркеров запросов с маркером, выданным посредством " -"делегирования, невозможно." - -#, python-format -msgid "Cannot open certificate %(cert_file)s. Reason: %(reason)s" -msgstr "Не удалось открыть сертификат %(cert_file)s. Причина: %(reason)s" - -#, python-format -msgid "Cannot remove role that has not been granted, %s" -msgstr "Удалить роль, которая не была предоставлена, нельзя: %s" - -msgid "" -"Cannot truncate a driver call without hints list as first parameter after " -"self " -msgstr "" -"Невозможно отсечь вызов драйвера без списка подсказок в качестве первого " -"параметра после самого себя " - -msgid "Cannot update domain_id of a project that has children." -msgstr "" -"Не разрешено обновлять domain_id для проекта, у которого есть дочерние " -"объекты." - -msgid "" -"Cannot use parents_as_list and parents_as_ids query params at the same time." -msgstr "" -"Нельзя использовать параметры запроса parents_as_list и parents_as_ids " -"одновременно." - -msgid "" -"Cannot use subtree_as_list and subtree_as_ids query params at the same time." 
-msgstr "" -"Нельзя использовать параметры запроса subtree_as_list и subtree_as_ids " -"одновременно." - -msgid "Cascade update is only allowed for enabled attribute." -msgstr "Каскадное обновление разрешено только для включенных атрибутов." - -msgid "" -"Combining effective and group filter will always result in an empty list." -msgstr "" -"Сочетание действующего фильтра и фильтра группы всегда дает пустой список." - -msgid "" -"Combining effective, domain and inherited filters will always result in an " -"empty list." -msgstr "" -"Сочетание действующего фильтра, фильтра домена и унаследованного фильтра " -"всегда дает пустой список." - -#, python-format -msgid "Config API entity at /domains/%s/config" -msgstr "Настроить элемент API в /domains/%s/config" - -#, python-format -msgid "Conflict occurred attempting to store %(type)s - %(details)s" -msgstr "При попытке сохранить %(type)s возник конфликт - %(details)s" - -#, python-format -msgid "Conflicting region IDs specified: \"%(url_id)s\" != \"%(ref_id)s\"" -msgstr "Указаны конфликтующие ИД регионов: \"%(url_id)s\" != \"%(ref_id)s\"" - -msgid "Consumer not found" -msgstr "Приемник не найден" - -#, python-format -msgid "" -"Could not change immutable attribute(s) '%(attributes)s' in target %(target)s" -msgstr "" -"Изменить постоянный атрибут '%(attributes)s' в цели %(target)s невозможно" - -#, python-format -msgid "" -"Could not determine Identity Provider ID. The configuration option " -"%(issuer_attribute)s was not found in the request environment." -msgstr "" -"Не удалось определить ИД поставщика идентификации. Опция конфигурации " -"%(issuer_attribute)s не найдена в среде запроса." 
- -#, python-format -msgid "" -"Could not find %(group_or_option)s in domain configuration for domain " -"%(domain_id)s" -msgstr "" -"Не найден пользователь/группа %(group_or_option)s в конфигурации домена " -"%(domain_id)s" - -#, python-format -msgid "Could not find Endpoint Group: %(endpoint_group_id)s" -msgstr "Не найдена группа конечных точек: %(endpoint_group_id)s" - -msgid "Could not find Identity Provider identifier in environment" -msgstr "Не удалось найти идентификатор поставщика идентификаторов в среде" - -#, python-format -msgid "Could not find Identity Provider: %(idp_id)s" -msgstr "Поставщик идентификаторов %(idp_id)s не найден" - -#, python-format -msgid "Could not find Service Provider: %(sp_id)s" -msgstr "Не удалось найти поставщик служб %(sp_id)s" - -#, python-format -msgid "Could not find credential: %(credential_id)s" -msgstr "Идентификационные данные %(credential_id)s не найдены" - -#, python-format -msgid "Could not find domain: %(domain_id)s" -msgstr "Домен %(domain_id)s не найден" - -#, python-format -msgid "Could not find endpoint: %(endpoint_id)s" -msgstr "Конечная точка %(endpoint_id)s не найдена" - -#, python-format -msgid "" -"Could not find federated protocol %(protocol_id)s for Identity Provider: " -"%(idp_id)s" -msgstr "" -"Объединенный протокол %(protocol_id)s для поставщика идентификаторов " -"%(idp_id)s не найден" - -#, python-format -msgid "Could not find group: %(group_id)s" -msgstr "Группа %(group_id)s не найдена" - -#, python-format -msgid "Could not find mapping: %(mapping_id)s" -msgstr "Отображение %(mapping_id)s не найдено" - -msgid "Could not find policy association" -msgstr "Не найдена связь стратегии" - -#, python-format -msgid "Could not find policy: %(policy_id)s" -msgstr "Стратегия %(policy_id)s не найдена" - -#, python-format -msgid "Could not find project: %(project_id)s" -msgstr "Проект %(project_id)s не найден" - -#, python-format -msgid "Could not find region: %(region_id)s" -msgstr "Регион %(region_id)s не 
найден" - -#, python-format -msgid "" -"Could not find role assignment with role: %(role_id)s, user or group: " -"%(actor_id)s, project or domain: %(target_id)s" -msgstr "" -"Не найдено присвоение роли %(role_id)s, пользователь/группа: %(actor_id)s, " -"проект/домен: %(target_id)s" - -#, python-format -msgid "Could not find role: %(role_id)s" -msgstr "Роль %(role_id)s не найдена" - -#, python-format -msgid "Could not find service: %(service_id)s" -msgstr "Служба %(service_id)s не найдена" - -#, python-format -msgid "Could not find token: %(token_id)s" -msgstr "Ключ %(token_id)s не найден" - -#, python-format -msgid "Could not find trust: %(trust_id)s" -msgstr "Группа доверия %(trust_id)s не найдена" - -#, python-format -msgid "Could not find user: %(user_id)s" -msgstr "Пользователь %(user_id)s не найден" - -#, python-format -msgid "Could not find version: %(version)s" -msgstr "Версия %(version)s не найдена" - -#, python-format -msgid "Could not find: %(target)s" -msgstr "%(target)s не найдена" - -msgid "" -"Could not map any federated user properties to identity values. Check debug " -"logs or the mapping used for additional details." -msgstr "" -"Не удается связать объединенные свойства пользователя с идентификаторами. " -"Дополнительные сведения о связывании приведены в протоколе отладки." - -msgid "" -"Could not map user while setting ephemeral user identity. Either mapping " -"rules must specify user id/name or REMOTE_USER environment variable must be " -"set." -msgstr "" -"Не удалось привязать пользователя во время настройки временного " -"идентификатора пользователя. Правила привязка должны указывать имя/ИД " -"пользователя, либо должна быть задана переменная среды REMOTE_USER." 
- -msgid "Could not validate the access token" -msgstr "Не удалось проверить ключ доступа" - -msgid "Credential belongs to another user" -msgstr "Разрешение принадлежит другому пользователю" - -msgid "Credential signature mismatch" -msgstr "Несовпадение подписи идентификационных данных" - -#, python-format -msgid "" -"Direct import of auth plugin %(name)r is deprecated as of Liberty in favor " -"of its entrypoint from %(namespace)r and may be removed in N." -msgstr "" -"Прямой импорт модуля идентификации %(name)r устарел в Liberty и может быть " -"удален в выпуске N. Вместо этого используется его точка входа из " -"%(namespace)r." - -#, python-format -msgid "" -"Direct import of driver %(name)r is deprecated as of Liberty in favor of its " -"entrypoint from %(namespace)r and may be removed in N." -msgstr "" -"Прямой импорт драйвера %(name)r устарел в Liberty и может быть удален в " -"выпуске N. Вместо этого используется его точка входа из %(namespace)r." - -msgid "" -"Disabling an entity where the 'enable' attribute is ignored by configuration." -msgstr "" -"Отключение сущности, при котором атрибут 'enable' в конфигурации " -"игнорируется." - -#, python-format -msgid "Domain (%s)" -msgstr "Домен (%s)" - -#, python-format -msgid "Domain cannot be named %s" -msgstr "Домену нельзя присвоить имя %s" - -#, python-format -msgid "Domain cannot have ID %s" -msgstr "Домен не может иметь идентификатор %s" - -#, python-format -msgid "Domain is disabled: %s" -msgstr "Домен отключен: %s" - -msgid "Domain name cannot contain reserved characters." -msgstr "Имя домена не может содержать зарезервированные символы." - -msgid "Domain scoped token is not supported" -msgstr "Маркер, область которого - домен, не поддерживается" - -msgid "Domain specific roles are not supported in the V8 role driver" -msgstr "Особые роли домена не поддерживаются в драйвере ролей V8" - -#, python-format -msgid "" -"Domain: %(domain)s already has a configuration defined - ignoring file: " -"%(file)s." 
-msgstr "" -"У домена %(domain)s уже определена конфигурация - файл пропущен: %(file)s." - -msgid "Duplicate Entry" -msgstr "Дубликат записи" - -#, python-format -msgid "Duplicate ID, %s." -msgstr "Повторяющийся идентификатор, %s." - -#, python-format -msgid "Duplicate entry: %s" -msgstr "Повторяющаяся запись: %s" - -#, python-format -msgid "Duplicate name, %s." -msgstr "Повторяющееся имя, %s." - -#, python-format -msgid "Duplicate remote ID: %s" -msgstr "Повторяющийся удаленный ИД: %s" - -msgid "EC2 access key not found." -msgstr "Ключ доступа EC2 не найден." - -msgid "EC2 signature not supplied." -msgstr "Не указана подпись EC2." - -msgid "" -"Either --bootstrap-password argument or OS_BOOTSTRAP_PASSWORD must be set." -msgstr "" -"Необходимо указать аргумент --bootstrap-password или OS_BOOTSTRAP_PASSWORD." - -msgid "Enabled field must be a boolean" -msgstr "Активное поле должно быть булевским значением" - -msgid "Enabled field should be a boolean" -msgstr "Активное поле должно быть булевским значением" - -#, python-format -msgid "Endpoint %(endpoint_id)s not found in project %(project_id)s" -msgstr "Конечная точка %(endpoint_id)s не найдена в проекте %(project_id)s" - -msgid "Endpoint Group Project Association not found" -msgstr "Не найдена связь проекта группы конечных точек" - -msgid "Ensure configuration option idp_entity_id is set." -msgstr "Убедитесь, что указан параметр конфигурации idp_entity_id." - -msgid "Ensure configuration option idp_sso_endpoint is set." -msgstr "Убедитесь, что указан параметр конфигурации idp_sso_endpoint." - -#, python-format -msgid "" -"Error parsing configuration file for domain: %(domain)s, file: %(file)s." -msgstr "" -"Ошибка анализа файла конфигурации для домена %(domain)s, файл: %(file)s." 
- -#, python-format -msgid "Error while opening file %(path)s: %(err)s" -msgstr "Ошибка при открытии файла %(path)s: %(err)s" - -#, python-format -msgid "Error while parsing line: '%(line)s': %(err)s" -msgstr "Ошибка при анализе строки: '%(line)s': %(err)s" - -#, python-format -msgid "Error while parsing rules %(path)s: %(err)s" -msgstr "Ошибка при анализе правил %(path)s: %(err)s" - -#, python-format -msgid "Error while reading metadata file, %(reason)s" -msgstr "Ошибка чтения файла метаданных: %(reason)s" - -#, python-format -msgid "" -"Exceeded attempts to register domain %(domain)s to use the SQL driver, the " -"last domain that appears to have had it is %(last_domain)s, giving up" -msgstr "" -"Превышено число попыток регистрации домена %(domain)s для использования " -"драйвера SQL. Последний домен, для которого это было сделано - " -"%(last_domain)s. Больше попыток не будет" - -#, python-format -msgid "Expected dict or list: %s" -msgstr "Ожидается dict или list: %s" - -msgid "" -"Expected signing certificates are not available on the server. Please check " -"Keystone configuration." -msgstr "" -"Ожидаемые сертификаты подписания недоступны на сервере. Рекомендуется " -"проверить конфигурацию Keystone." - -#, python-format -msgid "" -"Expecting to find %(attribute)s in %(target)s - the server could not comply " -"with the request since it is either malformed or otherwise incorrect. The " -"client is assumed to be in error." -msgstr "" -"Ожидается %(attribute)s в %(target)s - серверу не удалось удовлетворить " -"запрос, поскольку его формат является неверным, либо запрос некорректен по " -"другой причине. Предположительно, клиент находится в состоянии ошибки." 
- -#, python-format -msgid "Failed to start the %(name)s server" -msgstr "Не удалось запустить сервер %(name)s" - -msgid "Failed to validate token" -msgstr "Проверить маркер не удалось" - -msgid "Federation token is expired" -msgstr "Срок действия ключа объединения истек" - -#, python-format -msgid "" -"Field \"remaining_uses\" is set to %(value)s while it must not be set in " -"order to redelegate a trust" -msgstr "" -"Полю \"remaining_uses\" присвоено значение %(value)s, хотя поле не может " -"быть задано для изменения делегирования группы доверия" - -msgid "Found invalid token: scoped to both project and domain." -msgstr "" -"Обнаружен недопустимый маркер: он относится и к уровню проекта, и к уровню " -"домена." - -#, python-format -msgid "Group %s not found in config" -msgstr "Группа %s не найдена в конфигурации" - -#, python-format -msgid "Group %(group)s is not supported for domain specific configurations" -msgstr "" -"Группа %(group)s не поддерживается для определенных конфигураций домена" - -#, python-format -msgid "" -"Group %(group_id)s returned by mapping %(mapping_id)s was not found in the " -"backend." -msgstr "" -"Группа %(group_id)s, возвращенная преобразованием %(mapping_id)s, не найдена " -"на базовом сервере." - -#, python-format -msgid "" -"Group membership across backend boundaries is not allowed, group in question " -"is %(group_id)s, user is %(user_id)s" -msgstr "" -"Членство в группе не может распространяться через границы базовых систем, " -"группа под вопросом - %(group_id)s, пользователь - %(user_id)s" - -#, python-format -msgid "ID attribute %(id_attr)s not found in LDAP object %(dn)s" -msgstr "Атрибут ИД %(id_attr)s не найден в объекте LDAP %(dn)s" - -#, python-format -msgid "Identity Provider %(idp)s is disabled" -msgstr "Поставщик идентификаторов %(idp)s отключен" - -msgid "" -"Incoming identity provider identifier not included among the accepted " -"identifiers." 
-msgstr "" -"Входящий идентификатор поставщика идентификаторов не включен в принятые " -"идентификаторы." - -msgid "Invalid EC2 signature." -msgstr "Недопустимая подпись EC2." - -#, python-format -msgid "Invalid LDAP TLS certs option: %(option)s. Choose one of: %(options)s" -msgstr "" -"Недопустимая опция certs TLS LDAP: %(option)s. Выберите одно из следующих " -"значений: %(options)s" - -#, python-format -msgid "Invalid LDAP TLS_AVAIL option: %s. TLS not available" -msgstr "Недопустимая опция TLS_AVAIL LDAP: %s. TLS недоступен" - -#, python-format -msgid "Invalid LDAP deref option: %(option)s. Choose one of: %(options)s" -msgstr "" -"Недопустимая опция deref LDAP: %(option)s. Выберите одно из следующих " -"значений: %(options)s" - -#, python-format -msgid "Invalid LDAP scope: %(scope)s. Choose one of: %(options)s" -msgstr "" -"Недопустимая область LDAP: %(scope)s. Выберите одно из следующих значений: " -"%(options)s" - -msgid "Invalid TLS / LDAPS combination" -msgstr "Недопустимое сочетание TLS/LDAPS" - -#, python-format -msgid "Invalid audit info data type: %(data)s (%(type)s)" -msgstr "Недопустимый тип данных в информации контроля: %(data)s (%(type)s)" - -msgid "Invalid blob in credential" -msgstr "Недопустимый большой двоичный объект в разрешении" - -#, python-format -msgid "" -"Invalid domain name: %(domain)s found in config file name: %(file)s - " -"ignoring this file." -msgstr "" -"Обнаружено недопустимое имя домена %(domain)s в файле конфигурации %(file)s " -"- файл пропущен." - -#, python-format -msgid "Invalid domain specific configuration: %(reason)s" -msgstr "Недопустимая конфигурация для домена: %(reason)s" - -#, python-format -msgid "Invalid input for field '%(path)s'. The value is '%(value)s'." -msgstr "Недопустимый ввод для поля '%(path)s'. Значение - '%(value)s'." 
- -msgid "Invalid limit value" -msgstr "Недопустимое значение ограничения" - -#, python-format -msgid "" -"Invalid mix of entities for policy association - only Endpoint, Service or " -"Region+Service allowed. Request was - Endpoint: %(endpoint_id)s, Service: " -"%(service_id)s, Region: %(region_id)s" -msgstr "" -"Недопустимое смешение сущностей для связывания стратегии. Только Конечная " -"точка, Служба и Регион+Служба разрешены. В запросе было: Конечная точка " -"%(endpoint_id)s, Служба %(service_id)s, Регион %(region_id)s" - -#, python-format -msgid "" -"Invalid rule: %(identity_value)s. Both 'groups' and 'domain' keywords must " -"be specified." -msgstr "" -"Недопустимое правило: %(identity_value)s. Ключевые слова 'groups' и 'domain' " -"должны быть указаны." - -msgid "Invalid signature" -msgstr "Недопустимая подпись" - -msgid "Invalid user / password" -msgstr "Недопустимый пользователь / пароль" - -msgid "Invalid username or TOTP passcode" -msgstr "Недопустимое имя пользователя или пароль TOTP" - -msgid "Invalid username or password" -msgstr "Недопустимое имя пользователя или пароль" - -#, python-format -msgid "KVS region %s is already configured. Cannot reconfigure." -msgstr "Регион KVS %s уже настроен. Изменение конфигурации невозможно." - -#, python-format -msgid "Key Value Store not configured: %s" -msgstr "Хранилище значений ключей не настроено: %s" - -#, python-format -msgid "LDAP %s create" -msgstr "LDAP %s создание" - -#, python-format -msgid "LDAP %s delete" -msgstr "LDAP %s удаление" - -#, python-format -msgid "LDAP %s update" -msgstr "LDAP %s обновление" - -msgid "" -"Length of transformable resource id > 64, which is max allowed characters" -msgstr "" -"Длина ИД преобразуемого ресурса > 64 символов, то есть превышает максимально " -"допустимую" - -#, python-format -msgid "" -"Local section in mapping %(mapping_id)s refers to a remote match that " -"doesn't exist (e.g. {0} in a local section)." 
-msgstr "" -"Локальный раздел в преобразовании %(mapping_id)s указывает на удаленное " -"совпадение, которое не существует (например, {0} в локальном разделе)." - -#, python-format -msgid "Lock Timeout occurred for key, %(target)s" -msgstr "Наступил тайм-аут блокировки для ключа, %(target)s" - -#, python-format -msgid "Lock key must match target key: %(lock)s != %(target)s" -msgstr "" -"Блокировка должна соответствовать целевому ключу: %(lock)s != %(target)s" - -#, python-format -msgid "Malformed endpoint URL (%(endpoint)s), see ERROR log for details." -msgstr "" -"Неверный формат URL конечной точки (%(endpoint)s), подробную информацию см. " -"в протоколе ОШИБОК." - -msgid "Marker could not be found" -msgstr "Не удалось найти маркер" - -#, python-format -msgid "Max hierarchy depth reached for %s branch." -msgstr "Для ветви %s достигнута максимальная глубина иерархии." - -#, python-format -msgid "Maximum lock attempts on %s occurred." -msgstr "Выполнено максимальное число попыток блокировки в %s." - -#, python-format -msgid "Member %(member)s is already a member of group %(group)s" -msgstr "Элемент %(member)s уже является участником группы %(group)s" - -#, python-format -msgid "Method not callable: %s" -msgstr "Вызов метода невозможен: %s" - -msgid "Missing entity ID from environment" -msgstr "В среде отсутствует ИД сущности" - -msgid "" -"Modifying \"redelegation_count\" upon redelegation is forbidden. Omitting " -"this parameter is advised." -msgstr "" -"Изменение параметра \"redelegation_count\" во время изменения делегирования " -"запрещено. Возможен пропуск этого параметра." - -msgid "Multiple domains are not supported" -msgstr "Множественные домены не поддерживаются" - -msgid "Must be called within an active lock context." -msgstr "Требуется вызов в контексте активной блокировки." 
- -msgid "Must specify either domain or project" -msgstr "Необходимо указать домен или проект" - -msgid "Name field is required and cannot be empty" -msgstr "Поле имени является обязательным и не может быть пустым" - -msgid "Neither Project Domain ID nor Project Domain Name was provided." -msgstr "Не указаны ни ИД домена проекта, ни имя домена проекта." - -msgid "" -"No Authorization headers found, cannot proceed with OAuth related calls, if " -"running under HTTPd or Apache, ensure WSGIPassAuthorization is set to On." -msgstr "" -"Не найдены заголовки предоставления доступа - вызовы, связанные с OAuth, " -"невозможны при выполнении под управлением HTTPd или Apache. Убедитесь, что " -"параметру WSGIPassAuthorization присвоено значение On." - -msgid "No authenticated user" -msgstr "Нет идентифицированного пользователя" - -msgid "" -"No encryption keys found; run keystone-manage fernet_setup to bootstrap one." -msgstr "" -"Не найдены ключи шифрования. Выполните команду keystone-manage fernet_setup, " -"чтобы создать ключ." - -msgid "No options specified" -msgstr "Параметры не указаны" - -#, python-format -msgid "No policy is associated with endpoint %(endpoint_id)s." -msgstr "С конечной точкой %(endpoint_id)s не связано ни одной стратегии." 
- -#, python-format -msgid "No remaining uses for trust: %(trust_id)s" -msgstr "Вариантов использования группы доверия %(trust_id)s не осталось" - -msgid "No token in the request" -msgstr "В запросе отсутствует маркер" - -msgid "Non-default domain is not supported" -msgstr "Домен, отличный от применяемого по умолчанию, не поддерживается" - -msgid "One of the trust agents is disabled or deleted" -msgstr "Один из доверенных агентов отключен или удален" - -#, python-format -msgid "" -"Option %(option)s found with no group specified while checking domain " -"configuration request" -msgstr "" -"Обнаружен параметр %(option)s без указанной группы во время проверки запроса " -"на настройку домена" - -#, python-format -msgid "" -"Option %(option)s in group %(group)s is not supported for domain specific " -"configurations" -msgstr "" -"Параметр %(option)s в группе %(group)s не поддерживается для определенных " -"конфигураций домена" - -#, python-format -msgid "Project (%s)" -msgstr "Проект (%s)" - -#, python-format -msgid "Project ID not found: %(t_id)s" -msgstr "Не найден ИД проекта: %(t_id)s" - -msgid "Project field is required and cannot be empty." -msgstr "Поле проекта является обязательным и не может быть пустым." - -#, python-format -msgid "Project is disabled: %s" -msgstr "Проект отключен: %s" - -msgid "Project name cannot contain reserved characters." -msgstr "Имя проекта не может содержать зарезервированные символы." 
- -msgid "Query string is not UTF-8 encoded" -msgstr "Строка запроса указана в кодировке, отличной от UTF-8" - -#, python-format -msgid "" -"Reading the default for option %(option)s in group %(group)s is not supported" -msgstr "" -"Чтение значения по умолчанию для параметра %(option)s в группе %(group)s не " -"поддерживается" - -msgid "Redelegation allowed for delegated by trust only" -msgstr "Изменение делегирования разрешено только для доверенного пользователя" - -#, python-format -msgid "" -"Remaining redelegation depth of %(redelegation_depth)d out of allowed range " -"of [0..%(max_count)d]" -msgstr "" -"Оставшаяся глубина изменения делегирования %(redelegation_depth)d выходит за " -"пределы разрешенного диапазона [0..%(max_count)d]" - -msgid "" -"Remove admin_crud_extension from the paste pipeline, the admin_crud " -"extension is now always available. Updatethe [pipeline:admin_api] section in " -"keystone-paste.ini accordingly, as it will be removed in the O release." -msgstr "" -"Удалите admin_crud_extension из конвейера вставки, расширение admin_crud " -"теперь доступно всегда. Обновите раздел [pipeline:admin_api] в файле " -"keystone-paste.ini соответственно, так как он будет удален в выпуске O." - -msgid "" -"Remove endpoint_filter_extension from the paste pipeline, the endpoint " -"filter extension is now always available. Update the [pipeline:api_v3] " -"section in keystone-paste.ini accordingly as it will be removed in the O " -"release." -msgstr "" -"Удалите endpoint_filter_extension из конвейера вставки, расширение фильтра " -"конечной точки теперь доступно всегда. Обновите раздел [pipeline:api_v3] в " -"файле keystone-paste.ini соответственно, так как он будет удален в выпуске O." - -msgid "" -"Remove federation_extension from the paste pipeline, the federation " -"extension is now always available. Update the [pipeline:api_v3] section in " -"keystone-paste.ini accordingly, as it will be removed in the O release." 
-msgstr "" -"Удалите federation_extension из конвейера вставки, расширение " -"объединения теперь доступно всегда. Обновите раздел [pipeline:api_v3] в " -"файле keystone-paste.ini соответственно, так как он будет удален в выпуске O." - -msgid "" -"Remove oauth1_extension from the paste pipeline, the oauth1 extension is now " -"always available. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"Удалите oauth1_extension из конвейера вставки, расширение oauth1 " -"теперь доступно всегда. Обновите раздел [pipeline:api_v3] в файле keystone-" -"paste.ini соответственно, так как он будет удален в выпуске O." - -msgid "" -"Remove revoke_extension from the paste pipeline, the revoke extension is now " -"always available. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"Удалите revoke_extension из конвейера вставки, расширение отзыва " -"теперь доступно всегда. Обновите раздел [pipeline:api_v3] в файле keystone-" -"paste.ini соответственно, так как он будет удален в выпуске O." - -msgid "" -"Remove simple_cert from the paste pipeline, the PKI and PKIz token providers " -"are now deprecated and simple_cert was only used insupport of these token " -"providers. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"Удалите simple_cert из конвейера вставки, теперь поставщики ключей PKI и " -"PKIz устарели, а simple_cert использовался только для поддержки этих " -"поставщиков. Обновите раздел [pipeline:api_v3] в файле keystone-paste.ini " -"соответственно, так как он будет удален в выпуске O." - -msgid "" -"Remove user_crud_extension from the paste pipeline, the user_crud extension " -"is now always available. Updatethe [pipeline:public_api] section in keystone-" -"paste.ini accordingly, as it will be removed in the O release." 
-msgstr "" -"Удалите user_crud_extension из конвейера вставки, расширение user_crud " -"теперь доступно всегда. Обновите раздел [pipeline:public_api] в файле " -"keystone-paste.ini соответственно, так как он будет удален в выпуске O." - -msgid "Request Token does not have an authorizing user id" -msgstr "" -"Маркер запроса не содержит ИД пользователя для предоставления прав доступа" - -#, python-format -msgid "" -"Request attribute %(attribute)s must be less than or equal to %(size)i. The " -"server could not comply with the request because the attribute size is " -"invalid (too large). The client is assumed to be in error." -msgstr "" -"Атрибут запроса %(attribute)s не может быть больше %(size)i. Серверу не " -"удалось удовлетворить запрос, поскольку размер атрибута является " -"недопустимым (слишком большой). Предположительно, клиент находится в " -"состоянии ошибки." - -msgid "Request must have an origin query parameter" -msgstr "Запрос должен содержать параметр origin" - -msgid "Request token is expired" -msgstr "Срок действия маркера запроса истек" - -msgid "Request token not found" -msgstr "Маркер запроса не найден" - -msgid "Requested expiration time is more than redelegated trust can provide" -msgstr "" -"Запрошенное время истечения срока действия превышает значение, которое может " -"указать доверенный пользователь" - -#, python-format -msgid "" -"Requested redelegation depth of %(requested_count)d is greater than allowed " -"%(max_count)d" -msgstr "" -"Запрошенная глубина изменения делегирования %(requested_count)d превышает " -"разрешенную %(max_count)d" - -msgid "" -"Running keystone via eventlet is deprecated as of Kilo in favor of running " -"in a WSGI server (e.g. mod_wsgi). Support for keystone under eventlet will " -"be removed in the \"M\"-Release." -msgstr "" -"Выполнение Keystone через библиотеку eventlet устарело начиная с выпуска " -"Kilo. Следует выполнять на сервере WSGI (например, mod_wsgi). 
Поддержка " -"keystone в библиотеке eventlet будет убрана в выпуске \"M\"." - -msgid "Scoping to both domain and project is not allowed" -msgstr "Назначать и домен, и проект в качестве области нельзя" - -msgid "Scoping to both domain and trust is not allowed" -msgstr "Назначать и домен, и группу доверия в качестве области нельзя" - -msgid "Scoping to both project and trust is not allowed" -msgstr "Назначать и проект, и группу доверия в качестве области нельзя" - -#, python-format -msgid "Service Provider %(sp)s is disabled" -msgstr "Поставщик службы %(sp)s отключен" - -msgid "Some of requested roles are not in redelegated trust" -msgstr "" -"Некоторые из запрошенных ролей не относятся к доверенному пользователю с " -"измененными полномочиями" - -msgid "Specify a domain or project, not both" -msgstr "Укажите домен или проект, но не то и другое" - -msgid "Specify a user or group, not both" -msgstr "Укажите пользователя или группу, но не то и другое" - -msgid "Specify one of domain or project" -msgstr "Укажите один домен или проект" - -msgid "Specify one of user or group" -msgstr "Укажите одного пользователя или группу" - -#, python-format -msgid "" -"String length exceeded.The length of string '%(string)s' exceeded the limit " -"of column %(type)s(CHAR(%(length)d))." -msgstr "" -"Превышена длина строки. Длина строки '%(string)s' превышает ограничение " -"столбца %(type)s(CHAR(%(length)d))." - -msgid "Tenant name cannot contain reserved characters." -msgstr "Имя арендатора не может содержать зарезервированные символы." - -#, python-format -msgid "" -"The %s extension has been moved into keystone core and as such its " -"migrations are maintained by the main keystone database control. Use the " -"command: keystone-manage db_sync" -msgstr "" -"Расширение %s было перемещено в ядро keystone, и его перенос поддерживается " -"основной системой управления базы данных keystone. 
Используйте команду: " -"keystone-manage db_sync" - -msgid "" -"The 'expires_at' must not be before now. The server could not comply with " -"the request since it is either malformed or otherwise incorrect. The client " -"is assumed to be in error." -msgstr "" -"Значение параметра 'expires_at' не должно быть меньше настоящего времени. " -"Серверу не удалось исполнить запрос, так как он поврежден или неправильно " -"сформирован. Предположительно, клиент находится в состоянии ошибки." - -msgid "The --all option cannot be used with the --domain-name option" -msgstr "Параметр --all нельзя указывать вместе с параметром --domain-name" - -#, python-format -msgid "The Keystone configuration file %(config_file)s could not be found." -msgstr "Не удалось найти файл конфигурации Keystone %(config_file)s." - -#, python-format -msgid "" -"The Keystone domain-specific configuration has specified more than one SQL " -"driver (only one is permitted): %(source)s." -msgstr "" -"В конфигурации для домена Keystone указано несколько драйверов SQL (допустим " -"только один): %(source)s." - -msgid "The action you have requested has not been implemented." -msgstr "Запрошенное действие не реализовано." - -msgid "The authenticated user should match the trustor." -msgstr "Идентифицированный пользователь должен соответствовать доверителю." - -msgid "" -"The certificates you requested are not available. It is likely that this " -"server does not use PKI tokens otherwise this is the result of " -"misconfiguration." -msgstr "" -"Запрошенные сертификаты недоступны. Вероятно, данный сервер не использует " -"маркеры PKI, в противном случае, это является следствием ошибки в " -"конфигурации." - -msgid "The configured token provider does not support bind authentication." -msgstr "Настроенный модуль маркера не поддерживает идентификацию привязки." - -msgid "The creation of projects acting as domains is not allowed in v2." -msgstr "Создание проектов, работающих в качестве доменов, не разрешено в v2." 
- -#, python-format -msgid "" -"The password length must be less than or equal to %(size)i. The server could " -"not comply with the request because the password is invalid." -msgstr "" -"Длина пароля не должна превышать %(size)i. Сервер не может выполнить запрос, " -"поскольку пароль недопустим." - -msgid "The request you have made requires authentication." -msgstr "Выданный запрос требует идентификации." - -msgid "The resource could not be found." -msgstr "Ресурс не найден." - -msgid "" -"The revoke call must not have both domain_id and project_id. This is a bug " -"in the Keystone server. The current request is aborted." -msgstr "" -"В вызове revoke должны быть указаны domain_id и project_id. Это ошибка в " -"коде сервера Keystone. Текущий запрос прерван." - -msgid "The service you have requested is no longer available on this server." -msgstr "Запрошенная служба более не доступна на данном сервере." - -#, python-format -msgid "" -"The specified parent region %(parent_region_id)s would create a circular " -"region hierarchy." -msgstr "" -"Заданная родительская область %(parent_region_id)s создаст круговую " -"структуру области." - -#, python-format -msgid "" -"The value of group %(group)s specified in the config should be a dictionary " -"of options" -msgstr "" -"Значение группы %(group)s, указанное в конфигурации, должно быть словарем " -"параметров" - -msgid "There should not be any non-oauth parameters" -msgstr "Не допускаются параметры, отличные от oauth" - -#, python-format -msgid "This is not a recognized Fernet payload version: %s" -msgstr "Это не распознанная версия полезной нагрузки Fernet: %s" - -#, python-format -msgid "This is not a recognized Fernet token %s" -msgstr "Это не маркер Fernet: %s" - -msgid "" -"Timestamp not in expected format. The server could not comply with the " -"request since it is either malformed or otherwise incorrect. The client is " -"assumed to be in error." -msgstr "" -"Метка в неожиданном формате. 
Сервер не может выполнить запрос, поскольку он " -"либо искажен или неправилен. Клиент, как предполагается, является ошибочным." - -#, python-format -msgid "" -"To get a more detailed information on this error, re-run this command for " -"the specific domain, i.e.: keystone-manage domain_config_upload --domain-" -"name %s" -msgstr "" -"Для получения дополнительной информации об этой ошибке еще раз выполните эту " -"команду для конкретного домена. Пример: keystone-manage domain_config_upload " -"--domain-name %s" - -msgid "Token belongs to another user" -msgstr "Маркер принадлежит другому пользователю" - -msgid "Token does not belong to specified tenant." -msgstr "Маркер не принадлежит указанному арендатору." - -msgid "Token version is unrecognizable or unsupported." -msgstr "Версия маркера не распознана либо не поддерживается." - -msgid "Trustee has no delegated roles." -msgstr "У доверенного лица нет делегированных ролей." - -msgid "Trustor is disabled." -msgstr "Доверитель отключен." - -#, python-format -msgid "" -"Trying to update group %(group)s, so that, and only that, group must be " -"specified in the config" -msgstr "" -"Изменение группы %(group)s, чтобы группа должна была указываться только в " -"конфигурации" - -#, python-format -msgid "" -"Trying to update option %(option)s in group %(group)s, but config provided " -"contains option %(option_other)s instead" -msgstr "" -"Изменение параметра %(option)s в группе %(group)s, однако переданная " -"конфигурация содержит параметр %(option_other)s вместо него" - -#, python-format -msgid "" -"Trying to update option %(option)s in group %(group)s, so that, and only " -"that, option must be specified in the config" -msgstr "" -"Изменение параметра %(option)s в группе %(group)s, чтобы параметр должен был " -"указываться только в конфигурации" - -msgid "" -"Unable to access the keystone database, please check it is configured " -"correctly." -msgstr "" -"Нет доступа к базе данных Keystone. 
Убедитесь, что она настроена правильно." - -#, python-format -msgid "Unable to consume trust %(trust_id)s, unable to acquire lock." -msgstr "Принять группу доверия %(trust_id)s и захватить блокировку невозможно." - -#, python-format -msgid "" -"Unable to delete region %(region_id)s because it or its child regions have " -"associated endpoints." -msgstr "" -"Не удалось удалить регион %(region_id)s: регион или его дочерние регионы " -"имеют связанные конечные точки." - -msgid "Unable to downgrade schema" -msgstr "Не удается понизить версию схемы" - -#, python-format -msgid "Unable to find valid groups while using mapping %(mapping_id)s" -msgstr "" -"Невозможно найти допустимые группы при использовании преобразования " -"%(mapping_id)s" - -#, python-format -msgid "Unable to locate domain config directory: %s" -msgstr "Не удалось найти каталог конфигурации домена: %s" - -#, python-format -msgid "Unable to lookup user %s" -msgstr "Найти пользователя %s невозможно" - -#, python-format -msgid "" -"Unable to reconcile identity attribute %(attribute)s as it has conflicting " -"values %(new)s and %(old)s" -msgstr "" -"Согласовать атрибут идентификатора, %(attribute)s, невозможно, поскольку он " -"содержит конфликтующие значения %(new)s и %(old)s" - -#, python-format -msgid "" -"Unable to sign SAML assertion. It is likely that this server does not have " -"xmlsec1 installed, or this is the result of misconfiguration. Reason " -"%(reason)s" -msgstr "" -"Не удалось подписать утверждение SAML. Вероятно, на этом сервере не " -"установлена программа xmlsec1 или это результат неправильной настройки. " -"Причина: %(reason)s" - -msgid "Unable to sign token." -msgstr "Подписать маркер невозможно." 
- -#, python-format -msgid "Unexpected assignment type encountered, %s" -msgstr "Обнаружен непредвиденный тип назначения, %s" - -#, python-format -msgid "" -"Unexpected combination of grant attributes - User: %(user_id)s, Group: " -"%(group_id)s, Project: %(project_id)s, Domain: %(domain_id)s" -msgstr "" -"Непредвиденная комбинация атрибутов предоставления доступа - пользователь: " -"%(user_id)s, группа: %(group_id)s, проект: %(project_id)s, домен: " -"%(domain_id)s" - -#, python-format -msgid "Unexpected status requested for JSON Home response, %s" -msgstr "Запрошено неожиданное состояние для ответа JSON Home, %s" - -msgid "Unknown Target" -msgstr "Неизвестный целевой объект" - -#, python-format -msgid "Unknown domain '%(name)s' specified by --domain-name" -msgstr "В опции --domain-name указано неизвестное имя домена '%(name)s'" - -#, python-format -msgid "Unknown token version %s" -msgstr "Неизвестная версия маркера %s" - -#, python-format -msgid "Unregistered dependency: %(name)s for %(targets)s" -msgstr "Незарегистрированная зависимость %(name)s для %(targets)s" - -msgid "Update of `domain_id` is not allowed." -msgstr "Обновление `domain_id` не разрешено." - -msgid "Update of `is_domain` is not allowed." -msgstr "Обновление `is_domain` не разрешено." - -msgid "Update of `parent_id` is not allowed." -msgstr "Обновление `parent_id` не разрешено." - -msgid "Update of domain_id is only allowed for root projects." -msgstr "Обновление domain_id разрешено только для корневых проектов." - -msgid "Update of domain_id of projects acting as domains is not allowed." -msgstr "" -"Не разрешено обновлять domain_id для проектов, работающих в качестве доменов." 
- -msgid "Use a project scoped token when attempting to create a SAML assertion" -msgstr "Использовать локальный ключ проекта при создании утверждения SAML" - -msgid "" -"Use of the identity driver config to automatically configure the same " -"assignment driver has been deprecated, in the \"O\" release, the assignment " -"driver will need to be expicitly configured if different than the default " -"(SQL)." -msgstr "" -"Использование конфигурации драйвера идентификатора для автоматической " -"настройки такого же драйвера присвоения устарело. В выпуске \"O\" драйвер " -"присвоения должен будет настраиваться явным образом, если он не совпадает с " -"драйвером по умолчанию (SQL)." - -#, python-format -msgid "User %(u_id)s is unauthorized for tenant %(t_id)s" -msgstr "У пользователя %(u_id)s нет доступа к арендатору %(t_id)s" - -#, python-format -msgid "User %(user_id)s has no access to domain %(domain_id)s" -msgstr "У пользователя %(user_id)s нет доступа к домену %(domain_id)s" - -#, python-format -msgid "User %(user_id)s has no access to project %(project_id)s" -msgstr "У пользователя %(user_id)s нет доступа к проекту %(project_id)s" - -#, python-format -msgid "User %(user_id)s is already a member of group %(group_id)s" -msgstr "Пользователь %(user_id)s уже является участником группы %(group_id)s" - -#, python-format -msgid "User '%(user_id)s' not found in group '%(group_id)s'" -msgstr "Пользователь '%(user_id)s' не найден в группе '%(group_id)s'" - -msgid "User IDs do not match" -msgstr "ИД пользователей не совпадают" - -msgid "" -"User auth cannot be built due to missing either user id, or user name with " -"domain id, or user name with domain name." -msgstr "" -"Не удалось скомпоновать идентификацию пользователя, так как отсутствует ИД " -"пользователя, имя пользователя с ИД домена либо имя пользователя с именем " -"домена." 
- -#, python-format -msgid "User is disabled: %s" -msgstr "Пользователь отключен: %s" - -msgid "User is not a member of the requested project" -msgstr "Пользователь не является участником запрошенного проекта" - -msgid "User is not a trustee." -msgstr "Пользователь не является доверенным лицом." - -msgid "User not found" -msgstr "Пользователь не найден" - -msgid "User not valid for tenant." -msgstr "Недопустимый пользователь для арендатора." - -msgid "User roles not supported: tenant_id required" -msgstr "Роли пользователей не поддерживаются, требуется tenant_id" - -#, python-format -msgid "User type %s not supported" -msgstr "Тип пользователя %s не поддерживается" - -msgid "You are not authorized to perform the requested action." -msgstr "У вас нет прав на выполнение запрашиваемого действия." - -#, python-format -msgid "You are not authorized to perform the requested action: %(action)s" -msgstr "У вас нет прав на выполнение запрошенного действия: %(action)s" - -msgid "" -"You have tried to create a resource using the admin token. As this token is " -"not within a domain you must explicitly include a domain for this resource " -"to belong to." -msgstr "" -"Попытка создания ресурса с помощью административного маркера. Так как этот " -"маркер не принадлежит домену, необходимо явно указать домен, которому будет " -"принадлежать ресурс." - -msgid "`key_mangler` functions must be callable." -msgstr "Функции `key_mangler` должны быть доступны для вызова." 
- -msgid "`key_mangler` option must be a function reference" -msgstr "Опция `key_mangler` должна быть ссылкой на функцию" - -msgid "any options" -msgstr "любые параметры" - -msgid "auth_type is not Negotiate" -msgstr "auth_type отличен от Negotiate" - -msgid "authorizing user does not have role required" -msgstr "" -"пользователю, предоставляющему права доступа, не присвоена требуемая роль" - -#, python-format -msgid "cannot create a project in a branch containing a disabled project: %s" -msgstr "Нельзя создать проект в ветви, содержащей отключенный проект: %s" - -#, python-format -msgid "" -"cannot delete an enabled project acting as a domain. Please disable the " -"project %s first." -msgstr "" -"Невозможно удалить включенный проект, работающий как домен. Сначала " -"выключите проект %s." - -#, python-format -msgid "group %(group)s" -msgstr "группа %(group)s" - -msgid "" -"idp_contact_type must be one of: [technical, other, support, administrative " -"or billing." -msgstr "" -"Значение idp_contact_type должно быть одним из следующих: technical, other, " -"support, administrative или billing." - -#, python-format -msgid "invalid date format %s" -msgstr "Недопустимый формат даты %s" - -#, python-format -msgid "" -"it is not permitted to have two projects acting as domains with the same " -"name: %s" -msgstr "" -"Не разрешено использовать два проекта в качестве доменов с одинаковым " -"именем: %s" - -#, python-format -msgid "" -"it is not permitted to have two projects within a domain with the same " -"name : %s" -msgstr "" -"Не разрешено использовать два проекта в одном домене с одинаковыми именами: " -"%s" - -msgid "only root projects are allowed to act as domains." -msgstr "Только корневые проекты могут работать в качестве доменов." 
- -#, python-format -msgid "option %(option)s in group %(group)s" -msgstr "параметр %(option)s в группе %(group)s" - -msgid "provided consumer key does not match stored consumer key" -msgstr "переданный ключ приемника не совпадает с сохраненным" - -msgid "provided request key does not match stored request key" -msgstr "переданный ключ запроса не совпадает с сохраненным" - -msgid "provided verifier does not match stored verifier" -msgstr "переданная функция проверки не совпадает с сохраненной" - -msgid "remaining_uses must be a positive integer or null." -msgstr "" -"Значение remaining_uses должно быть положительным целым числом или равным " -"нулю." - -msgid "remaining_uses must not be set if redelegation is allowed" -msgstr "" -"Если включено изменение делегирования, параметр remaining_uses не должен " -"быть задан" - -#, python-format -msgid "" -"request to update group %(group)s, but config provided contains group " -"%(group_other)s instead" -msgstr "" -"запрос на изменение группы %(group)s, однако переданная конфигурация " -"содержит группу %(group_other)s вместо нее" - -msgid "rescope a scoped token" -msgstr "Изменить область помещенного в область ключа" - -#, python-format -msgid "role %s is not defined" -msgstr "роль %s не определена" - -msgid "scope.project.id must be specified if include_subtree is also specified" -msgstr "scope.project.id необходимо указать, если указан include_subtree" - -#, python-format -msgid "tls_cacertdir %s not found or is not a directory" -msgstr "tls_cacertdir %s не найден или не является каталогом" - -#, python-format -msgid "tls_cacertfile %s not found or is not a file" -msgstr "tls_cacertfile %s не найден или не является файлом" - -#, python-format -msgid "token reference must be a KeystoneToken type, got: %s" -msgstr "Ссылка на маркер должна относиться к типу KeystoneToken, а получено %s" - -msgid "" -"update of domain_id is deprecated as of Mitaka and will be removed in O." 
-msgstr "обновление domain_id устарело в Mitaka и будет удалено в O." - -#, python-format -msgid "" -"validated expected to find %(param_name)r in function signature for " -"%(func_name)r." -msgstr "" -"ожидалось найти проверенный параметр %(param_name)r в подписи функции " -"%(func_name)r." diff --git a/keystone-moon/keystone/locale/tr_TR/LC_MESSAGES/keystone-log-critical.po b/keystone-moon/keystone/locale/tr_TR/LC_MESSAGES/keystone-log-critical.po deleted file mode 100644 index 2dc7345d..00000000 --- a/keystone-moon/keystone/locale/tr_TR/LC_MESSAGES/keystone-log-critical.po +++ /dev/null @@ -1,25 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# OpenStack Infra , 2015. #zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2015-08-04 01:49+0000\n" -"Last-Translator: İşbaran Akçayır \n" -"Language: tr-TR\n" -"Plural-Forms: nplurals=1; plural=0;\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Turkish (Turkey)\n" - -#, python-format -msgid "Unable to open template file %s" -msgstr "%s şablon dosyası açılamıyor" diff --git a/keystone-moon/keystone/locale/tr_TR/LC_MESSAGES/keystone-log-error.po b/keystone-moon/keystone/locale/tr_TR/LC_MESSAGES/keystone-log-error.po deleted file mode 100644 index 18bc9fa2..00000000 --- a/keystone-moon/keystone/locale/tr_TR/LC_MESSAGES/keystone-log-error.po +++ /dev/null @@ -1,148 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# OpenStack Infra , 2015. 
#zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2015-08-04 01:50+0000\n" -"Last-Translator: İşbaran Akçayır \n" -"Language: tr-TR\n" -"Plural-Forms: nplurals=1; plural=0;\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Turkish (Turkey)\n" - -msgid "Cannot retrieve Authorization headers" -msgstr "Yetkilendirme başlıkları alınamıyor" - -#, python-format -msgid "" -"Circular reference or a repeated entry found in projects hierarchy - " -"%(project_id)s." -msgstr "" -"Proje sıra düzeninde çember başvuru ya da tekrar eden girdi bulundu - " -"%(project_id)s." - -#, python-format -msgid "" -"Circular reference or a repeated entry found in region tree - %(region_id)s." -msgstr "" -"Bölge ağacında çember başvuru ya da tekrar eden girdi bulundu - " -"%(region_id)s." - -#, python-format -msgid "" -"Circular reference or a repeated entry found projects hierarchy - " -"%(project_id)s." -msgstr "" -"Proje sıra düzeninde çember başvuru ya da tekrar eden girdi bulundu - " -"%(project_id)s." - -#, python-format -msgid "Could not bind to %(host)s:%(port)s" -msgstr "%(host)s:%(port)s adresine bağlanılamadı" - -#, python-format -msgid "" -"Either [fernet_tokens] key_repository does not exist or Keystone does not " -"have sufficient permission to access it: %s" -msgstr "" -"[fernet_tokents] key_repository mevcut değil ya da Keystone erişmek için " -"yeterli izine sahip değil: %s" - -msgid "" -"Error setting up the debug environment. Verify that the option --debug-url " -"has the format : and that a debugger processes is listening on " -"that port." -msgstr "" -"Hata ayıklama ortamının ayarlanmasında hata. 
--debug-url seçeneğinin " -": biçimine sahip olduğunu ve bu bağlantı " -"noktasında hata ayıklama sürecinin dinlediğini doğrulayın." - -msgid "Failed to construct notifier" -msgstr "Bildirici inşa etme başarısız" - -msgid "" -"Failed to create [fernet_tokens] key_repository: either it already exists or " -"you don't have sufficient permissions to create it" -msgstr "" -"[fernet_tokens] key_repository oluşturulamıyor: ya zaten mevcut ya da " -"oluşturmak için yeterli izniniz yok" - -#, python-format -msgid "Failed to remove file %(file_path)r: %(error)s" -msgstr "%(file_path)r dosyası silinemedi: %(error)s" - -#, python-format -msgid "Failed to send %(action)s %(event_type)s notification" -msgstr "%(action)s %(event_type)s bildirimi gönderilemedi" - -#, python-format -msgid "Failed to send %(res_id)s %(event_type)s notification" -msgstr "%(res_id)s %(event_type)s bildirimi gönderilemedi" - -msgid "Failed to validate token" -msgstr "Jeton doğrulama başarısız" - -#, python-format -msgid "Malformed endpoint %(url)s - unknown key %(keyerror)s" -msgstr "Kusurlu bitiş noktası %(url)s - bilinmeyen anahtar %(keyerror)s" - -#, python-format -msgid "" -"Malformed endpoint %s - incomplete format (are you missing a type notifier ?)" -msgstr "" -"Kusurlu bitiş noktası %s - tamamlanmamış biçim (bir tür bildiriciniz eksik " -"olabilir mi ?)" - -#, python-format -msgid "" -"Malformed endpoint '%(url)s'. The following type error occurred during " -"string substitution: %(typeerror)s" -msgstr "" -"Kusurlu bitiş noktası '%(url)s'. Karakter dizisi yer değiştirme sırasında şu " -"tür hatası oluştu: %(typeerror)s" - -#, python-format -msgid "Malformed endpoint - %(url)r is not a string" -msgstr "Kusurlu bitiş noktası - %(url)r bir karakter dizisi değil" - -#, python-format -msgid "" -"Reinitializing revocation list due to error in loading revocation list from " -"backend. Expected `list` type got `%(type)s`. 
Old revocation list data: " -"%(list)r" -msgstr "" -"Arka uçtan feshetme listesi yüklemedeki hata sebebiyle fesih listesi yeniden " -"ilklendiriliyor. `list` beklendi `%(type)s` alındı. Eski fesih listesi " -"verisi: %(list)r" - -msgid "Server error" -msgstr "Sunucu hatası" - -#, python-format -msgid "Unable to convert Keystone user or group ID. Error: %s" -msgstr "Keystone kullanıcı veya grup kimliği dönüştürülemiyor. Hata: %s" - -msgid "Unable to sign token" -msgstr "Jeton imzalanamıyor" - -#, python-format -msgid "Unexpected error or malformed token determining token expiry: %s" -msgstr "Jeton sona erme belirlemede beklenmeyen hata veya kusurlu jeton: %s" - -#, python-format -msgid "" -"Unexpected results in response for domain config - %(count)s responses, " -"first option is %(option)s, expected option %(expected)s" -msgstr "" -"Alan yapılandırması yanıtında beklenmedik sonuçlar - %(count)s yanıt, ilk " -"seçenek %(option)s, beklenen seçenek %(expected)s" diff --git a/keystone-moon/keystone/locale/tr_TR/LC_MESSAGES/keystone-log-info.po b/keystone-moon/keystone/locale/tr_TR/LC_MESSAGES/keystone-log-info.po deleted file mode 100644 index a3451130..00000000 --- a/keystone-moon/keystone/locale/tr_TR/LC_MESSAGES/keystone-log-info.po +++ /dev/null @@ -1,131 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# OpenStack Infra , 2015. 
#zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 8.0.1.dev11\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/keystone\n" -"POT-Creation-Date: 2015-11-05 06:13+0000\n" -"PO-Revision-Date: 2015-08-04 01:49+0000\n" -"Last-Translator: İşbaran Akçayır \n" -"Language-Team: Turkish (Turkey)\n" -"Language: tr-TR\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Plural-Forms: nplurals=1; plural=0;\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.1\n" - -#, python-format -msgid "" -"\"expires_at\" has conflicting values %(existing)s and %(new)s. Will use " -"the earliest value." -msgstr "" -"\"expires_at\" çatışan değerlere sahip %(existing)s ve %(new)s. İlk değer " -"kullanılacak." - -#, python-format -msgid "Adding proxy '%(proxy)s' to KVS %(name)s." -msgstr "'%(proxy)s' vekili KVS %(name)s'e ekleniyor." - -#, python-format -msgid "Couldn't verify unknown bind: {%(bind_type)s: %(identifier)s}" -msgstr "Bilinmeyen bağ doğrulanamıyor: {%(bind_type)s: %(identifier)s}" - -#, python-format -msgid "Created a new key: %s" -msgstr "Yeni bir anahtar oluşturuldu: %s" - -#, python-format -msgid "Creating the default role %s because it does not exist." -msgstr "Varsayılan rol %s oluşturuluyor çünkü mevcut değil." - -#, python-format -msgid "Creating the default role %s failed because it was already created" -msgstr "Varsayılan rol %s oluşturma başarısız çünkü zaten oluşturulmuş" - -#, python-format -msgid "Current primary key is: %s" -msgstr "Mevcut birincil anahtar: %s" - -#, python-format -msgid "" -"Fernet token created with length of %d characters, which exceeds 255 " -"characters" -msgstr "" -"Fernet jetonu %d karakter uzunluğunda oluşturuldu, bu 255 karakteri geçiyor" - -#, python-format -msgid "KVS region %s key_mangler disabled." -msgstr "KVS bölgesi %s key_mangler kapalı." 
- -msgid "Kerberos bind authentication successful" -msgstr "Kerberos bağ kimlik doğrulama başarılı" - -msgid "Kerberos credentials do not match those in bind" -msgstr "Kerberos kimlik bilgileri bağda olanlarla eşleşmiyor" - -msgid "Kerberos credentials required and not present" -msgstr "Kerberos kimlik bilgileri gerekli ve mevcut değil" - -msgid "Key repository is already initialized; aborting." -msgstr "Anahtar deposu zaten ilklendirilmiş; iptal ediliyor." - -#, python-format -msgid "Named bind mode %s not in bind information" -msgstr "Adlandırılmış bağlama kipi %s bağlama bilgisinde değil" - -#, python-format -msgid "Next primary key will be: %s" -msgstr "Sonraki birincil anahtar şu olacak: %s" - -msgid "No bind information present in token" -msgstr "Jetonda bağlama bilgisi yok" - -#, python-format -msgid "Promoted key 0 to be the primary: %s" -msgstr "Anahtar 0 birincil anahtarlığa yükseltildi: %s" - -#, python-format -msgid "" -"Received the following notification: service %(service)s, resource_type: " -"%(resource_type)s, operation %(operation)s payload %(payload)s" -msgstr "" -"Şu bildirim alındı: servis %(service)s, kaynak_türü: %(resource_type)s, " -"işlem %(operation)s faydalı yük %(payload)s" - -#, python-format -msgid "Running command - %s" -msgstr "Komut çalıştırılıyor - %s" - -#, python-format -msgid "Starting %(arg0)s on %(host)s:%(port)s" -msgstr "%(host)s:%(port)s üzerinde %(arg0)s başlatılıyor" - -#, python-format -msgid "Starting key rotation with %(count)s key files: %(list)s" -msgstr "Anahtar dönüşümü %(count)s anahtar dosyasıyla başlatılıyor: %(list)s" - -#, python-format -msgid "Total expired tokens removed: %d" -msgstr "Toplam süresi dolmuş jetonlar kaldırıldı: %d" - -#, python-format -msgid "Using %(func)s as KVS region %(name)s key_mangler" -msgstr "%(func)s KVS bölgesi %(name)s key_mangler olarak kullanılıyor" - -#, python-format -msgid "Using default dogpile sha1_mangle_key as KVS region %s key_mangler" -msgstr "" -"Varsayılan dogpile 
sha1_mangle_key KVS bölgesi %s key_mangler olarak " -"kullanılıyor" - -msgid "" -"[fernet_tokens] key_repository does not appear to exist; attempting to " -"create it" -msgstr "" -"[fernet_tokens] key_repository var gibi görünmüyor; oluşturmaya çalışılıyor" diff --git a/keystone-moon/keystone/locale/tr_TR/LC_MESSAGES/keystone-log-warning.po b/keystone-moon/keystone/locale/tr_TR/LC_MESSAGES/keystone-log-warning.po deleted file mode 100644 index 9d1cd41a..00000000 --- a/keystone-moon/keystone/locale/tr_TR/LC_MESSAGES/keystone-log-warning.po +++ /dev/null @@ -1,238 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# OpenStack Infra , 2015. #zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2015-09-03 12:54+0000\n" -"Last-Translator: openstackjenkins \n" -"Language: tr-TR\n" -"Plural-Forms: nplurals=1; plural=0;\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Turkish (Turkey)\n" - -#, python-format -msgid "%s is not a dogpile.proxy.ProxyBackend" -msgstr "%s dogpile.proxy.ProxyBackend değil" - -#, python-format -msgid "Authorization failed. %(exception)s from %(remote_addr)s" -msgstr "Yetkilendirme başarısız. %(remote_addr)s den %(exception)s" - -#, python-format -msgid "" -"Endpoint %(endpoint_id)s referenced in association for policy %(policy_id)s " -"not found." -msgstr "" -"%(policy_id)s ile ilişkisi için başvurulan bitiş noktası %(endpoint_id)s " -"bulunamadı." 
- -msgid "Failed to invoke ``openssl version``, assuming is v1.0 or newer" -msgstr "" -"``openssl version`` çalıştırılamadı, v1.0 ya da daha yeni olarak varsayılıyor" - -#, python-format -msgid "" -"Found multiple domains being mapped to a driver that does not support that " -"(e.g. LDAP) - Domain ID: %(domain)s, Default Driver: %(driver)s" -msgstr "" -"Bunu desteklemeyen bir sürücüye eşleştirilen birden fazla alan bulundu (örn. " -"LDAP) - Alan ID: %(domain)s, Varsayılan Sürücü: %(driver)s" - -#, python-format -msgid "" -"Found what looks like an incorrectly constructed config option substitution " -"reference - domain: %(domain)s, group: %(group)s, option: %(option)s, value: " -"%(value)s." -msgstr "" -"Düzgün inşa edilmemiş yapılandırma seçeneği yer değiştirme referansına " -"benzeyen bir şey bulundu - alan: %(domain)s, grup: %(group)s, seçenek: " -"%(option)s, değer: %(value)s." - -#, python-format -msgid "" -"Found what looks like an unmatched config option substitution reference - " -"domain: %(domain)s, group: %(group)s, option: %(option)s, value: %(value)s. " -"Perhaps the config option to which it refers has yet to be added?" -msgstr "" -"Eşleşmemiş yapılandırma seçeneği yer değiştirme referansı gibi görünen bir " -"şey bulundu - alan: %(domain)s, grup: %(group)s, seçenek: %(option)s, değer: " -"%(value)s. Belki başvurduğu yapılandırma seçeneği henüz eklenmemiştir?" - -#, python-format -msgid "Ignoring file (%s) while scanning domain config directory" -msgstr "Alan yapılandırma dizini taranırken dosya (%s) atlanıyor" - -msgid "Ignoring user name" -msgstr "Kullanıcı adı atlanıyor" - -#, python-format -msgid "" -"Invalid additional attribute mapping: \"%s\". Format must be " -":" -msgstr "" -"Geçersiz ek öznitelik eşleştirmesi: \"%s\". 
Biçim :" -" olmalı" - -#, python-format -msgid "Invalid domain name (%s) found in config file name" -msgstr "Yapılandırma dosyası isminde geçersiz alan adı (%s) bulundu" - -msgid "" -"It is recommended to only use the base key-value-store implementation for " -"the token driver for testing purposes. Please use 'memcache' or 'sql' " -"instead." -msgstr "" -"Jeton sürücüsü için temel anahtar-değer-depolama uygulamasının yalnızca test " -"amaçlı kullanımı önerilir. Lütfen 'memcache' ya da 'sql' kullanın." - -#, python-format -msgid "KVS lock released (timeout reached) for: %s" -msgstr "KVS kilidi kaldırıldı (zaman aşımına uğradı): %s" - -msgid "" -"LDAP Server does not support paging. Disable paging in keystone.conf to " -"avoid this message." -msgstr "" -"LDAP Sunucu sayfalamayı desteklemiyor. Bu iletiyi almamak için sayfalamayı " -"keystone.conf'da kapatın." - -msgid "No domain information specified as part of list request" -msgstr "Listeleme isteğinin parçası olarak alan bilgisi belirtilmedi" - -#, python-format -msgid "" -"Policy %(policy_id)s referenced in association for endpoint %(endpoint_id)s " -"not found." -msgstr "" -"%(endpoint_id)s bitiş noktası için ilişkisi için başvurulan %(policy_id)s " -"ilkesi bulunamadı." - -msgid "RBAC: Bypassing authorization" -msgstr "RBAC: Yetkilendirme baypas ediliyor" - -msgid "RBAC: Invalid token" -msgstr "RBAC: Geçersiz jeton" - -msgid "RBAC: Invalid user data in token" -msgstr "RBAC: Jetonda geçersiz kullanıcı verisi" - -#, python-format -msgid "" -"Removing `%s` from revocation list due to invalid expires data in revocation " -"list." -msgstr "" -"feshetme listesindeki geçersiz sona erme tarihi verisi sebebiyle `%s` " -"feshetme listesinden kaldırılıyor." - -#, python-format -msgid "Token `%s` is expired, not adding to the revocation list." -msgstr "`%s` jetonunun süresi dolmuş, feshetme listesine eklenmiyor." - -#, python-format -msgid "Truncating user password to %d characters." 
-msgstr "Kullanıcı parolası %d karaktere kırpılıyor." - -#, python-format -msgid "Unable to add user %(user)s to %(tenant)s." -msgstr "Kullanıcı %(user)s %(tenant)s'e eklenemiyor." - -#, python-format -msgid "" -"Unable to change the ownership of [fernet_tokens] key_repository without a " -"keystone user ID and keystone group ID both being provided: %s" -msgstr "" -"Hem keystone kullanıcı kimliği hem keystone grup kimliği verilmeden " -"[fernet_tokens] key_repository sahipliği değiştirilemiyor: %s" - -#, python-format -msgid "" -"Unable to change the ownership of the new key without a keystone user ID and " -"keystone group ID both being provided: %s" -msgstr "" -"Hem keystone kullanıcı kimliği hem keystone grup kimliği verilmeden yeni " -"anahtarın sahipliği değiştirilemiyor: %s" - -#, python-format -msgid "Unable to locate domain config directory: %s" -msgstr "Alan yapılandırma dizini bulunamadı: %s" - -#, python-format -msgid "Unable to remove user %(user)s from %(tenant)s." -msgstr "Kullanıcı %(user)s %(tenant)s'den çıkarılamadı." - -#, python-format -msgid "" -"Unsupported policy association found - Policy %(policy_id)s, Endpoint " -"%(endpoint_id)s, Service %(service_id)s, Region %(region_id)s, " -msgstr "" -"Desteklenmeyen ilke ilişkilendirmesi bulundu - İlke %(policy_id)s, Bitiş " -"noktası %(endpoint_id)s, Servis %(service_id)s, Bölge %(region_id)s, " - -#, python-format -msgid "" -"User %(user_id)s doesn't have access to default project %(project_id)s. The " -"token will be unscoped rather than scoped to the project." -msgstr "" -"%(user_id)s kullanıcısı varsayılan proje %(project_id)s erişimine sahip " -"değil. Jeton projeye kapsamsız olacak, kapsamlı değil." - -#, python-format -msgid "" -"User %(user_id)s's default project %(project_id)s is disabled. The token " -"will be unscoped rather than scoped to the project." -msgstr "" -"%(user_id)s kullanıcısının varsayılan projesi %(project_id)s kapalı. Jeton " -"projeye kapsamsız olacak, kapsamlı değil." 
- -#, python-format -msgid "" -"User %(user_id)s's default project %(project_id)s not found. The token will " -"be unscoped rather than scoped to the project." -msgstr "" -"%(user_id)s kullanıcısının varsayılan projesi %(project_id)s bulunamadı. " -"Jeton projeye kapsamsız olacak, kapsamlı değil." - -#, python-format -msgid "" -"When deleting entries for %(search_base)s, could not delete nonexistent " -"entries %(entries)s%(dots)s" -msgstr "" -"%(search_base)s için girdiler silinirken, mevcut olmayan girdiler %(entries)s" -"%(dots)s silinemedi" - -#, python-format -msgid "[fernet_tokens] key_repository is world readable: %s" -msgstr "[fernet_tokens] key_repository herkesçe okunabilir: %s" - -msgid "" -"[fernet_tokens] max_active_keys must be at least 1 to maintain a primary key." -msgstr "" -"[fernet_tokens] max_active_keys bir birincil anahtarı korumak için en az 1 " -"olmalı." - -#, python-format -msgid "" -"`token_api.%s` is deprecated as of Juno in favor of utilizing methods on " -"`token_provider_api` and may be removed in Kilo." -msgstr "" -"`token_provider_api` üzerindeki yöntemlerden faydalanmak için `token_api.%s` " -"Juno'dan sonra tercih edilmeyecek ve Kilo'da kaldırılabilir." - -msgid "keystone-manage pki_setup is not recommended for production use." -msgstr "keystone-manage pki_setup üretimde kullanmak için tavsiye edilmez." - -msgid "keystone-manage ssl_setup is not recommended for production use." -msgstr "keystone-manage ssl_setup üretimde kullanmak için tavsiye edilmez." - -msgid "missing exception kwargs (programmer error)" -msgstr "istisna kwargs eksik (programcı hatası)" diff --git a/keystone-moon/keystone/locale/tr_TR/LC_MESSAGES/keystone.po b/keystone-moon/keystone/locale/tr_TR/LC_MESSAGES/keystone.po deleted file mode 100644 index 91bc5d15..00000000 --- a/keystone-moon/keystone/locale/tr_TR/LC_MESSAGES/keystone.po +++ /dev/null @@ -1,1158 +0,0 @@ -# Translations template for keystone. 
-# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# Alper Çiftçi , 2015 -# Andreas Jaeger , 2015 -# catborise , 2013 -# catborise , 2013 -# OpenStack Infra , 2015. #zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2015-09-03 12:54+0000\n" -"Last-Translator: openstackjenkins \n" -"Language: tr-TR\n" -"Plural-Forms: nplurals=1; plural=0;\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Turkish (Turkey)\n" - -#, python-format -msgid "%(detail)s" -msgstr "%(detail)s" - -#, python-format -msgid "" -"%(event)s is not a valid notification event, must be one of: %(actions)s" -msgstr "" -"%(event)s geçerli bir bilgilendirme olayı değil, şunlardan biri olmalı: " -"%(actions)s" - -#, python-format -msgid "%(host)s is not a trusted dashboard host" -msgstr "%(host)s güvenilir bir gösterge paneli istemcisi değil" - -#, python-format -msgid "%(message)s %(amendment)s" -msgstr "%(message)s %(amendment)s" - -#, python-format -msgid "" -"%(mod_name)s doesn't provide database migrations. The migration repository " -"path at %(path)s doesn't exist or isn't a directory." -msgstr "" -"%(mod_name)s veri tabanı göçü sağlamıyor. %(path)s yolundaki göç deposu yolu " -"mevcut değil ya da bir dizin değil." - -#, python-format -msgid "%(property_name)s cannot be less than %(min_length)s characters." -msgstr "%(property_name)s %(min_length)s karakterden az olamaz." - -#, python-format -msgid "%(property_name)s is not a %(display_expected_type)s" -msgstr "%(property_name)s bir %(display_expected_type)s değil" - -#, python-format -msgid "%(property_name)s should not be greater than %(max_length)s characters." 
-msgstr "%(property_name)s %(max_length)s karakterden büyük olmamalı." - -#, python-format -msgid "%s cannot be empty." -msgstr "%s boş olamaz." - -#, python-format -msgid "%s extension does not exist." -msgstr "%s eklentisi mevcut değil." - -#, python-format -msgid "%s field is required and cannot be empty" -msgstr "%s alanı gerekli ve boş olamaz" - -#, python-format -msgid "%s field(s) cannot be empty" -msgstr "%s alan(lar)ı boş olamaz" - -msgid "--all option cannot be mixed with other options" -msgstr "--all seçeneği diğer seçeneklerle birleştirilemez" - -msgid "A project-scoped token is required to produce a service catalog." -msgstr "Servis kataloğu oluşturmak için proje-kapsamlı bir jeton gerekli." - -msgid "Access token is expired" -msgstr "Erişim jetonunun süresi dolmuş" - -msgid "Access token not found" -msgstr "Erişim jetonu bulunamadı" - -msgid "Additional authentications steps required." -msgstr "Ek kimlik doğrulama adımları gerekli." - -msgid "An unexpected error occurred when retrieving domain configs" -msgstr "Alan yapılandırmaları alınırken beklenmedik hata oluştu" - -#, python-format -msgid "An unexpected error occurred when trying to store %s" -msgstr "%s depolanırken beklenmedik bir hata oluştu" - -msgid "An unexpected error prevented the server from fulfilling your request." -msgstr "Beklenmedik bir hata sunucunun isteğinizi tamamlamasını engelledi." - -#, python-format -msgid "" -"An unexpected error prevented the server from fulfilling your request: " -"%(exception)s" -msgstr "" -"Beklenmedik bir hata sunucunun isteğinizi tamamlamasını engelledi: " -"%(exception)s" - -msgid "An unhandled exception has occurred: Could not find metadata." -msgstr "Ele alınmayan istisna oluştu: Metadata bulunamadı." 
- -msgid "At least one option must be provided" -msgstr "En az bir seçenek sağlanmalıdır" - -msgid "At least one option must be provided, use either --all or --domain-name" -msgstr "En az bir seçenek sağlanmalıdır, ya --all ya da --domain-name kullanın" - -msgid "At least one role should be specified." -msgstr "En az bir kural belirtilmeli." - -msgid "Attempted to authenticate with an unsupported method." -msgstr "Desteklenmeyen yöntem ile doğrulama girişiminde bulunuldu." - -msgid "" -"Attempting to use OS-FEDERATION token with V2 Identity Service, use V3 " -"Authentication" -msgstr "" -"OS-FEDERATION jetonu V2 Kimlik Servisi ile kullanılmaya çalışılıyor, V3 " -"Kimlik Doğrulama kullanın" - -msgid "Authentication plugin error." -msgstr "Kimlik doğrulama eklenti hatası." - -#, python-format -msgid "" -"Backend `%(backend)s` is not a valid memcached backend. Valid backends: " -"%(backend_list)s" -msgstr "" -"Arka uç `%(backend)s` geçerli bir memcached arka ucu değil. Geçerli arka " -"uçlar: %(backend_list)s" - -msgid "Cannot authorize a request token with a token issued via delegation." -msgstr "Vekil ile sağlanan bir jeton ile istek yetkilendirilemez." - -#, python-format -msgid "Cannot change %(option_name)s %(attr)s" -msgstr "%(option_name)s %(attr)s değiştirilemiyor" - -msgid "Cannot change Domain ID" -msgstr "Alan ID'si değiştirilemez" - -msgid "Cannot change user ID" -msgstr "Kullanıcı ID'si değiştirilemiyor" - -msgid "Cannot change user name" -msgstr "Kullanıcı adı değiştirilemiyor" - -#, python-format -msgid "Cannot create an endpoint with an invalid URL: %(url)s" -msgstr "%(url)s geçersiz URL' si ile bir bitiş noktası yaratılamıyor" - -#, python-format -msgid "Cannot create project with parent: %(project_id)s" -msgstr "Üst proje %(project_id)s ye sahip proje oluşturulamıyor" - -msgid "Cannot list request tokens with a token issued via delegation." -msgstr "Vekalet ile sağlanan bir jeton ile istek jetonları listelenemez." 
- -#, python-format -msgid "Cannot open certificate %(cert_file)s. Reason: %(reason)s" -msgstr "Sertifika %(cert_file)s açılamıyor. Sebep: %(reason)s" - -#, python-format -msgid "Cannot remove role that has not been granted, %s" -msgstr "Verilmemiş rol silinemez, %s" - -msgid "" -"Cannot truncate a driver call without hints list as first parameter after " -"self " -msgstr "" -"self'den sonra ilk parametre olarak ipucu listesi verilmeden bir sürücü " -"çağrısı kırpılamıyor " - -msgid "" -"Cannot use parents_as_list and parents_as_ids query params at the same time." -msgstr "" -"parents_as_list ve parents_as_ids sorgu parametreleri aynı anda kullanılamaz." - -msgid "" -"Cannot use subtree_as_list and subtree_as_ids query params at the same time." -msgstr "" -"subtree_as_list ve subtree_as_ids sorgu parametreleri aynı anda kullanılamaz." - -msgid "" -"Combining effective and group filter will always result in an empty list." -msgstr "" -"Efektif ve grup filtresini birleştirmek her zaman boş bir listeye yol açar." - -msgid "" -"Combining effective, domain and inherited filters will always result in an " -"empty list." -msgstr "" -"Efektif, alan ve miras filtrelerin birleştirilmesi her zaman boş bir listeye " -"yol açar." 
- -#, python-format -msgid "Conflict occurred attempting to store %(type)s - %(details)s" -msgstr "%(type)s depolanırken çatışma oluştu- %(details)s" - -#, python-format -msgid "Conflicting region IDs specified: \"%(url_id)s\" != \"%(ref_id)s\"" -msgstr "Çatışan bölge kimlikleri belirtildi: \"%(url_id)s\" != \"%(ref_id)s\"" - -msgid "Consumer not found" -msgstr "Tüketici bulunamadı" - -#, python-format -msgid "" -"Could not change immutable attribute(s) '%(attributes)s' in target %(target)s" -msgstr "" -"%(target)s hedefindeki değişmez öznitelik(ler) '%(attributes)s' " -"değiştirilemiyor" - -#, python-format -msgid "" -"Could not find %(group_or_option)s in domain configuration for domain " -"%(domain_id)s" -msgstr "" -"%(domain_id)s alanı için alan yapılandırmasında %(group_or_option)s " -"bulunamadı" - -#, python-format -msgid "Could not find Endpoint Group: %(endpoint_group_id)s" -msgstr "Bitişnoktası Grubu bulunamadı: %(endpoint_group_id)s" - -msgid "Could not find Identity Provider identifier in environment" -msgstr "Kimlik Sağlayıcı tanımlayıcısı ortamda bulunamıyor" - -#, python-format -msgid "Could not find Identity Provider: %(idp_id)s" -msgstr "Kimlik Sağlayıcı bulunamadı: %(idp_id)s" - -#, python-format -msgid "Could not find Service Provider: %(sp_id)s" -msgstr "Servis Sağlayıcı bulunamadı: %(sp_id)s" - -#, python-format -msgid "Could not find credential: %(credential_id)s" -msgstr "Kimlik bilgisi bulunamadı: %(credential_id)s" - -#, python-format -msgid "Could not find domain: %(domain_id)s" -msgstr "Alan bulunamadı: %(domain_id)s" - -#, python-format -msgid "Could not find endpoint: %(endpoint_id)s" -msgstr "Bitiş noktası bulunamadı: %(endpoint_id)s" - -#, python-format -msgid "" -"Could not find federated protocol %(protocol_id)s for Identity Provider: " -"%(idp_id)s" -msgstr "" -"Kimlik Sağlayıcı: %(idp_id)s için birleşmiş iletişim kuralı %(protocol_id)s " -"bulunamadı" - -#, python-format -msgid "Could not find group: %(group_id)s" -msgstr "Grup 
bulunamadı: %(group_id)s" - -#, python-format -msgid "Could not find mapping: %(mapping_id)s" -msgstr "Eşleştirme bulunamadı: %(mapping_id)s" - -msgid "Could not find policy association" -msgstr "İlke ilişkilendirme bulunamadı" - -#, python-format -msgid "Could not find policy: %(policy_id)s" -msgstr "İlke bulunamadı: %(policy_id)s" - -#, python-format -msgid "Could not find project: %(project_id)s" -msgstr "Proje bulunamadı: %(project_id)s" - -#, python-format -msgid "Could not find region: %(region_id)s" -msgstr "Bölge bulunamadı: %(region_id)s" - -#, python-format -msgid "" -"Could not find role assignment with role: %(role_id)s, user or group: " -"%(actor_id)s, project or domain: %(target_id)s" -msgstr "" -"Rol: %(role_id)s, kullanıcı veya grup: %(actor_id)s, proje veya alan: " -"%(target_id)s ile rol ataması bulunamadı" - -#, python-format -msgid "Could not find role: %(role_id)s" -msgstr "Rol bulunamadı: %(role_id)s" - -#, python-format -msgid "Could not find service: %(service_id)s" -msgstr "Servis bulunamadı: %(service_id)s" - -#, python-format -msgid "Could not find token: %(token_id)s" -msgstr "Jeton bulunamadı: %(token_id)s" - -#, python-format -msgid "Could not find trust: %(trust_id)s" -msgstr "Güven bulunamadı: %(trust_id)s" - -#, python-format -msgid "Could not find user: %(user_id)s" -msgstr "Kullanıcı bulunamadı: %(user_id)s" - -#, python-format -msgid "Could not find version: %(version)s" -msgstr "Sürüm bulunamadı: %(version)s" - -#, python-format -msgid "Could not find: %(target)s" -msgstr "Bulunamadı: %(target)s" - -msgid "Could not validate the access token" -msgstr "Erişim jetonu doğrulanamadı" - -msgid "Credential belongs to another user" -msgstr "Kimlik bilgisi başka bir kullanıcıya ait" - -msgid "" -"Disabling an entity where the 'enable' attribute is ignored by configuration." -msgstr "" -"'enable' özniteliği yapılandırma tarafından göz ardı edilen bir öğe " -"kapatılıyor." 
- -#, python-format -msgid "Domain (%s)" -msgstr "Alan (%s)" - -#, python-format -msgid "Domain cannot be named %s" -msgstr "Alan %s olarak adlandırılamaz" - -#, python-format -msgid "Domain cannot have ID %s" -msgstr "Alan %s ID'sine sahip olamaz" - -#, python-format -msgid "Domain is disabled: %s" -msgstr "Alan kapalı: %s" - -msgid "Domain scoped token is not supported" -msgstr "Alan kapsamlı jeton desteklenmiyor" - -#, python-format -msgid "" -"Domain: %(domain)s already has a configuration defined - ignoring file: " -"%(file)s." -msgstr "" -"Alan: %(domain)s zaten tanımlanmış bir yapılandırmaya sahip - dosya " -"atlanıyor: %(file)s." - -msgid "Duplicate Entry" -msgstr "Kopya Girdi" - -#, python-format -msgid "Duplicate ID, %s." -msgstr "Kopya ID, %s" - -#, python-format -msgid "Duplicate name, %s." -msgstr "Kopya isim, %s." - -msgid "Enabled field must be a boolean" -msgstr "Etkin alan bool olmalı" - -msgid "Enabled field should be a boolean" -msgstr "Etkin alan bool olmalı" - -#, python-format -msgid "Endpoint %(endpoint_id)s not found in project %(project_id)s" -msgstr "Bitiş noktası %(endpoint_id)s %(project_id)s projesinde bulunamadı" - -msgid "Endpoint Group Project Association not found" -msgstr "Bitiş Noktası Grup Proje İlişkisi bulunamadı" - -msgid "Ensure configuration option idp_entity_id is set." -msgstr "idp_entity_id yapılandırma seçeneğinin ayarlandığına emin olun." - -msgid "Ensure configuration option idp_sso_endpoint is set." -msgstr "idp_sso_endpoint yapılandırma seçeneğinin ayarlandığına emin olun." - -#, python-format -msgid "" -"Error parsing configuration file for domain: %(domain)s, file: %(file)s." -msgstr "" -"Alan: %(domain)s için yapılandırma dosyası ayrıştırılırken hata, dosya: " -"%(file)s." 
- -#, python-format -msgid "Error while opening file %(path)s: %(err)s" -msgstr "Dosya açılırken hata %(path)s: %(err)s" - -#, python-format -msgid "Error while parsing line: '%(line)s': %(err)s" -msgstr "Satır ayrıştırılırken hata: '%(line)s': %(err)s" - -#, python-format -msgid "Error while parsing rules %(path)s: %(err)s" -msgstr "Kurallar ayrıştırılırken hata %(path)s: %(err)s" - -#, python-format -msgid "Error while reading metadata file, %(reason)s" -msgstr "Metadata dosyası okunurken hata, %(reason)s" - -#, python-format -msgid "Expected dict or list: %s" -msgstr "Sözlük ya da liste beklendi: %s" - -msgid "" -"Expected signing certificates are not available on the server. Please check " -"Keystone configuration." -msgstr "" -"Beklenen imzalama sertifikaları sunucuda kullanılabilir değil. Lütfen " -"Keystone yapılandırmasını kontrol edin." - -#, python-format -msgid "" -"Expecting to find %(attribute)s in %(target)s - the server could not comply " -"with the request since it is either malformed or otherwise incorrect. The " -"client is assumed to be in error." -msgstr "" -"%(target)s içinde %(attribute)s bulunması bekleniyordu - sunucu talebi " -"yerine getiremedi çünkü ya istek kusurluydu ya da geçersizdi. İstemcinin " -"hatalı olduğu varsayılıyor." - -#, python-format -msgid "Failed to start the %(name)s server" -msgstr "%(name)s sunucusu başlatılamadı" - -msgid "Failed to validate token" -msgstr "Jeton doğrulama başarısız" - -msgid "Federation token is expired" -msgstr "Federasyon jetonunun süresi dolmuş" - -#, python-format -msgid "" -"Field \"remaining_uses\" is set to %(value)s while it must not be set in " -"order to redelegate a trust" -msgstr "" -"\"remaining_uses\" alanı %(value)s olarak ayarlanmış, bir güvene tekrar " -"yetki vermek için böyle ayarlanmamalı" - -msgid "Found invalid token: scoped to both project and domain." -msgstr "Geçersiz jeton bulundu: hem proje hem alana kapsanmış." 
- -#, python-format -msgid "Group %(group)s is not supported for domain specific configurations" -msgstr "%(group)s grubu alana özel yapılandırmalar için desteklenmiyor" - -#, python-format -msgid "" -"Group %(group_id)s returned by mapping %(mapping_id)s was not found in the " -"backend." -msgstr "" -"%(mapping_id)s eşleştirmesi tarafından döndürülen %(group_id)s grubu arka " -"uçta bulunamadı." - -#, python-format -msgid "" -"Group membership across backend boundaries is not allowed, group in question " -"is %(group_id)s, user is %(user_id)s" -msgstr "" -"Arka uç sınırları arasında grup üyeliğine izin verilmez, sorudaki grup " -"%(group_id)s, kullanıcı ise %(user_id)s" - -#, python-format -msgid "ID attribute %(id_attr)s not found in LDAP object %(dn)s" -msgstr "ID özniteliği %(id_attr)s %(dn)s LDAP nesnesinde bulunamadı" - -#, python-format -msgid "Identity Provider %(idp)s is disabled" -msgstr "Kimlik Sağlayıcı %(idp)s kapalı" - -msgid "" -"Incoming identity provider identifier not included among the accepted " -"identifiers." -msgstr "" -"Gelen kimlik sağlayıcı tanımlayıcısı kabul edilen tanımlayıcılar arasında " -"yok." - -#, python-format -msgid "Invalid LDAP TLS certs option: %(option)s. Choose one of: %(options)s" -msgstr "" -"Geçersiz LDAP TLS sertifika seçeneği: %(option)s. Şunlardan birini seçin: " -"%(options)s" - -#, python-format -msgid "Invalid LDAP TLS_AVAIL option: %s. TLS not available" -msgstr "Geçersiz LDAP TLS_AVAIL seçeneği: %s. TLS kullanılabilir değil" - -#, python-format -msgid "Invalid LDAP deref option: %(option)s. Choose one of: %(options)s" -msgstr "" -"Geçersiz LDAP referans kaldırma seçeneği: %(option)s. Şunlardan birini " -"seçin: %(options)s" - -#, python-format -msgid "Invalid LDAP scope: %(scope)s. Choose one of: %(options)s" -msgstr "Geçersiz LDAP kapsamı: %(scope)s. 
Şunlardan birini seçin: %(options)s" - -msgid "Invalid TLS / LDAPS combination" -msgstr "Geçersiz TLS / LDAPS kombinasyonu" - -#, python-format -msgid "Invalid audit info data type: %(data)s (%(type)s)" -msgstr "Geçersiz denetim bilgisi veri türü: %(data)s (%(type)s)" - -msgid "Invalid blob in credential" -msgstr "Kimlik bilgisinde geçersiz düğüm" - -#, python-format -msgid "" -"Invalid domain name: %(domain)s found in config file name: %(file)s - " -"ignoring this file." -msgstr "" -"Yapılandırma dosyası isminde: %(file)s geçersiz alan adı: %(domain)s bulundu " -"- bu dosya atlanıyor." - -#, python-format -msgid "Invalid domain specific configuration: %(reason)s" -msgstr "Geçersiz alana özel yapılandırma: %(reason)s" - -#, python-format -msgid "Invalid input for field '%(path)s'. The value is '%(value)s'." -msgstr "'%(path)s' alanı için geçersiz girdi. Değer '%(value)s'." - -msgid "Invalid limit value" -msgstr "Geçersiz sınır değeri" - -#, python-format -msgid "" -"Invalid mix of entities for policy association - only Endpoint, Service or " -"Region+Service allowed. Request was - Endpoint: %(endpoint_id)s, Service: " -"%(service_id)s, Region: %(region_id)s" -msgstr "" -"İlke ilişkilendirmeleri için geçersiz öğe karışımı - yalnızca Bitişnoktası, " -"Servis veya Bölge+Servise izin verilir. İstek şuydu Bitişnoktası: " -"%(endpoint_id)s, Servis: %(service_id)s, Bölge: %(region_id)s" - -#, python-format -msgid "" -"Invalid rule: %(identity_value)s. Both 'groups' and 'domain' keywords must " -"be specified." -msgstr "" -"Geçersiz kural: %(identity_value)s. Hem 'gruplar' hem 'alan' anahtar " -"kelimeleri belirtilmeli." - -msgid "Invalid signature" -msgstr "Geçersiz imza" - -msgid "Invalid user / password" -msgstr "Geçersiz kullanıcı / parola" - -msgid "Invalid username or password" -msgstr "Geçersiz kullanıcı adı ve parola" - -#, python-format -msgid "KVS region %s is already configured. Cannot reconfigure." -msgstr "KVS bölgesi %s zaten yapılandırılmış. 
Yeniden yapılandırılamıyor." - -#, python-format -msgid "Key Value Store not configured: %s" -msgstr "Anahtar Değer Deposu yapılandırılmamış: %s" - -#, python-format -msgid "LDAP %s create" -msgstr "LDAP %s oluştur" - -#, python-format -msgid "LDAP %s delete" -msgstr "LDAP %s sil" - -#, python-format -msgid "LDAP %s update" -msgstr "LDAP %s güncelle" - -#, python-format -msgid "Lock Timeout occurred for key, %(target)s" -msgstr "Anahtar için Kilit Zaman Aşımı oluştu, %(target)s" - -#, python-format -msgid "Lock key must match target key: %(lock)s != %(target)s" -msgstr "Kilit anahtarı hedef anahtarla eşleşmeli: %(lock)s != %(target)s" - -#, python-format -msgid "Malformed endpoint URL (%(endpoint)s), see ERROR log for details." -msgstr "" -"Kusurlu bitiş noktası URL'si (%(endpoint)s), detaylar için HATA kaydına " -"bakın." - -msgid "Marker could not be found" -msgstr "İşaretçi bulunamadı" - -#, python-format -msgid "Maximum lock attempts on %s occurred." -msgstr "%s üzerinde azami kilit girişimi yapıldı." - -#, python-format -msgid "Member %(member)s is already a member of group %(group)s" -msgstr "Üye %(member)s zaten %(group)s grubunun üyesi" - -#, python-format -msgid "Method not callable: %s" -msgstr "Metod çağrılabilir değil: %s" - -msgid "Missing entity ID from environment" -msgstr "Öğe kimliği ortamdan eksik" - -msgid "" -"Modifying \"redelegation_count\" upon redelegation is forbidden. Omitting " -"this parameter is advised." -msgstr "" -"Tekrar yetkilendirme üzerine \"redelegation_count\" değiştirmeye izin " -"verilmez. Tavsiye edildiği gibi bu parametre atlanıyor." - -msgid "Multiple domains are not supported" -msgstr "Birden çok alan desteklenmiyor" - -msgid "Must be called within an active lock context." -msgstr "Etkin kilik içeriği içinde çağrılmalı." 
- -msgid "Must specify either domain or project" -msgstr "Alan ya da projeden biri belirtilmelidir" - -msgid "Name field is required and cannot be empty" -msgstr "İsim alanı gerekli ve boş olamaz" - -msgid "" -"No Authorization headers found, cannot proceed with OAuth related calls, if " -"running under HTTPd or Apache, ensure WSGIPassAuthorization is set to On." -msgstr "" -"Yetkilendirme başlıkları bulunamadı, OAuth ile ilişkili çağrılarla devam " -"edilemez, HTTPd veya Apache altında çalışıyorsanız, WSGIPassAuthorization " -"ayarını açtığınızdan emin olun." - -msgid "No authenticated user" -msgstr "Kimlik denetimi yapılmamış kullanıcı" - -msgid "" -"No encryption keys found; run keystone-manage fernet_setup to bootstrap one." -msgstr "" -"Şifreleme anahtarları bulundu; birini yükletmek için keystone-manage " -"fernet_setup çalıştırın." - -msgid "No options specified" -msgstr "Hiçbir seçenek belirtilmedi" - -#, python-format -msgid "No policy is associated with endpoint %(endpoint_id)s." -msgstr "Hiçbir ilke %(endpoint_id)s bitiş noktasıyla ilişkilendirilmemiş." 
- -#, python-format -msgid "No remaining uses for trust: %(trust_id)s" -msgstr "Güven için kalan kullanım alanı yok: %(trust_id)s" - -msgid "Non-default domain is not supported" -msgstr "Varsayılan olmayan alan desteklenmiyor" - -msgid "One of the trust agents is disabled or deleted" -msgstr "Güven ajanlarından biri kapalı ya da silinmiş" - -#, python-format -msgid "" -"Option %(option)s found with no group specified while checking domain " -"configuration request" -msgstr "" -"%(option)s seçeneği alan yapılandırma isteği kontrol edilirken hiçbir grup " -"belirtilmemiş şekilde bulundu" - -#, python-format -msgid "" -"Option %(option)s in group %(group)s is not supported for domain specific " -"configurations" -msgstr "" -"%(group)s grubundaki %(option)s seçeneği alana özel yapılandırmalarda " -"desteklenmiyor" - -#, python-format -msgid "Project (%s)" -msgstr "Proje (%s)" - -#, python-format -msgid "Project ID not found: %(t_id)s" -msgstr "Proje kimliği bulunamadı: %(t_id)s" - -msgid "Project field is required and cannot be empty." -msgstr "Proje alanı gerekli ve boş olamaz." - -#, python-format -msgid "Project is disabled: %s" -msgstr "Proje kapalı: %s" - -msgid "Redelegation allowed for delegated by trust only" -msgstr "" -"Tekrar yetki vermeye yalnızca güven tarafından yetki verilenler için izin " -"verilir" - -#, python-format -msgid "" -"Remaining redelegation depth of %(redelegation_depth)d out of allowed range " -"of [0..%(max_count)d]" -msgstr "" -"izin verilen [0..%(max_count)d] aralığı içinden %(redelegation_depth)d izin " -"verilen tekrar yetki verme derinliği" - -msgid "Request Token does not have an authorizing user id" -msgstr "İstek Jetonu yetki veren bir kullanıcı id'sine sahip değil" - -#, python-format -msgid "" -"Request attribute %(attribute)s must be less than or equal to %(size)i. The " -"server could not comply with the request because the attribute size is " -"invalid (too large). The client is assumed to be in error." 
-msgstr "" -"İstek özniteliği %(attribute)s %(size)i boyutuna eşit ya da daha küçük " -"olmalı. Sunucu talebi yerine getiremedi çünkü öznitelik boyutu geçersiz (çok " -"büyük). İstemcinin hata durumunda olduğu varsayılıyor." - -msgid "Request must have an origin query parameter" -msgstr "İstek bir başlangıç noktası sorgu parametresine sahip olmalı" - -msgid "Request token is expired" -msgstr "İstek jetonunun süresi dolmuş" - -msgid "Request token not found" -msgstr "İstek jetonu bulunamadı" - -msgid "Requested expiration time is more than redelegated trust can provide" -msgstr "" -"İstenen zaman bitim süresi tekrar yetkilendirilen güvenin " -"sağlayabileceğinden fazla" - -#, python-format -msgid "" -"Requested redelegation depth of %(requested_count)d is greater than allowed " -"%(max_count)d" -msgstr "" -"%(requested_count)d istenen tekrar yetki verme derinliği izin verilen " -"%(max_count)d den fazla" - -msgid "" -"Running keystone via eventlet is deprecated as of Kilo in favor of running " -"in a WSGI server (e.g. mod_wsgi). Support for keystone under eventlet will " -"be removed in the \"M\"-Release." -msgstr "" -"Bir WSGI sunucuda (örn. mod_wsgi) çalıştırmak adına, keystone'nin eventlet " -"ile çalıştırılması Kilo'dan sonra desteklenmiyor. Eventlet altında keystone " -"desteği \"M\"-Sürümünde kaldırılacak." 
- -msgid "Scoping to both domain and project is not allowed" -msgstr "Hem alan hem projeye kapsamlamaya izin verilmez" - -msgid "Scoping to both domain and trust is not allowed" -msgstr "Hem alan hem güvene kapsamlamaya izin verilmez" - -msgid "Scoping to both project and trust is not allowed" -msgstr "Hem proje hem güvene kapsamlamaya izin verilmez" - -#, python-format -msgid "Service Provider %(sp)s is disabled" -msgstr "Servis Sağlayıcı %(sp)s kapalı" - -msgid "Some of requested roles are not in redelegated trust" -msgstr "İstenen rollerin bazıları tekrar yetki verilen güven içinde değil" - -msgid "Specify a domain or project, not both" -msgstr "Bir alan ya da proje belirtin, ya da her ikisini" - -msgid "Specify a user or group, not both" -msgstr "Bir kullanıcı ya da grup belirtin, ikisini birden değil" - -msgid "Specify one of domain or project" -msgstr "Alandan ya da projeden birini belirtin" - -msgid "Specify one of user or group" -msgstr "Kullanıcı ya da grup belirtin" - -#, python-format -msgid "" -"String length exceeded.The length of string '%(string)s' exceeded the limit " -"of column %(type)s(CHAR(%(length)d))." -msgstr "" -"Karakter dizisi uzunluğu aşıldı. '%(string)s' karakter dizisiz uzunluğu " -"%(type)s(CHAR(%(length)d)) sütunu sınırını aşıyor." - -msgid "" -"The 'expires_at' must not be before now. The server could not comply with " -"the request since it is either malformed or otherwise incorrect. The client " -"is assumed to be in error." -msgstr "" -"'expires_at' şu andan önce olmamalı. Sunucu talebi yerine getiremedi çünkü " -"istek ya kusurlu ya da geçersiz. İstemcinin hata durumunda olduğu " -"varsayılıyor." - -msgid "The --all option cannot be used with the --domain-name option" -msgstr "--all seçeneği --domain-name seçeneğiyle kullanılamaz" - -#, python-format -msgid "The Keystone configuration file %(config_file)s could not be found." -msgstr "Keystone yapılandırma dosyası %(config_file)s bulunamadı." 
- -#, python-format -msgid "" -"The Keystone domain-specific configuration has specified more than one SQL " -"driver (only one is permitted): %(source)s." -msgstr "" -"Keystone alana özel yapılandırması birden fazla SQL sürücüsü belirtti " -"(yalnızca birine izin verilir): %(source)s." - -msgid "The action you have requested has not been implemented." -msgstr "İstediğiniz eylem uygulanmamış." - -msgid "The authenticated user should match the trustor." -msgstr "Yetkilendirilen kullanıcı güven verenle eşleşmeli." - -msgid "" -"The certificates you requested are not available. It is likely that this " -"server does not use PKI tokens otherwise this is the result of " -"misconfiguration." -msgstr "" -"İstediğiniz sertifikalar kullanılabilir değil. Bu sunucu muhtemelen PKI " -"jetonlarını kullanmıyor ya da bu bir yanlış yapılandırmanın sonucu." - -#, python-format -msgid "" -"The password length must be less than or equal to %(size)i. The server could " -"not comply with the request because the password is invalid." -msgstr "" -"Parola uzunluğu %(size)i ye eşit ya da daha küçük olmalı. Sunucu talebe " -"cevap veremedi çünkü parola geçersiz." - -msgid "The request you have made requires authentication." -msgstr "Yaptığınız istek kimlik doğrulama gerektiriyor." - -msgid "The resource could not be found." -msgstr "Kaynak bulunamadı." - -msgid "" -"The revoke call must not have both domain_id and project_id. This is a bug " -"in the Keystone server. The current request is aborted." -msgstr "" -"İptal etme çağrısı hem domain_id hem project_id'ye sahip olmamalı. Bu " -"Keystone sunucudaki bir hata. Mevcut istek iptal edildi." - -msgid "The service you have requested is no longer available on this server." -msgstr "İstediğiniz servis artık bu sunucu üzerinde kullanılabilir değil." - -#, python-format -msgid "" -"The specified parent region %(parent_region_id)s would create a circular " -"region hierarchy." 
-msgstr "" -"Belirtilen üst bölge %(parent_region_id)s dairesel bölge sıralı dizisi " -"oluştururdu." - -#, python-format -msgid "" -"The value of group %(group)s specified in the config should be a dictionary " -"of options" -msgstr "" -"Yapılandırmada belirtilen %(group)s grubunun değeri seçenekler sözlüğü olmalı" - -msgid "There should not be any non-oauth parameters" -msgstr "Herhangi bir non-oauth parametresi olmamalı" - -#, python-format -msgid "This is not a recognized Fernet payload version: %s" -msgstr "Bu bilinen bir Fernet faydalı yük sürümü değil: %s" - -msgid "" -"Timestamp not in expected format. The server could not comply with the " -"request since it is either malformed or otherwise incorrect. The client is " -"assumed to be in error." -msgstr "" -"Zaman damgası beklenen biçimde değil. Sunucu talebi yerine getiremedi çünkü " -"istek ya kusurlu ya da geçersiz. İstemcinin hata durumunda olduğu " -"varsayılıyor." - -#, python-format -msgid "" -"To get a more detailed information on this error, re-run this command for " -"the specific domain, i.e.: keystone-manage domain_config_upload --domain-" -"name %s" -msgstr "" -"Bu hatayla ilgili daha detaylı bilgi almak için, bu komutu belirtilen alan " -"için tekrar çalıştırın, örn.: keystone-manage domain_config_upload --domain-" -"name %s" - -msgid "Token belongs to another user" -msgstr "Jeton başka bir kullanıcıya ait" - -msgid "Token does not belong to specified tenant." -msgstr "Jeton belirtilen kiracıya ait değil." - -msgid "Trustee has no delegated roles." -msgstr "Yedieminin emanet edilen kuralları yok." - -msgid "Trustor is disabled." -msgstr "Güven kurucu kapalı." 
- -#, python-format -msgid "" -"Trying to update group %(group)s, so that, and only that, group must be " -"specified in the config" -msgstr "" -"%(group)s grubu güncellenmeye çalışılıyor, böylece yapılandırmada yalnızca " -"grup belirtilmeli" - -#, python-format -msgid "" -"Trying to update option %(option)s in group %(group)s, but config provided " -"contains option %(option_other)s instead" -msgstr "" -"%(group)s grubundaki %(option)s seçeneği güncellenmeye çalışılıyor, ama " -"sağlanan yapılandırma %(option_other)s seçeneğini içeriyor" - -#, python-format -msgid "" -"Trying to update option %(option)s in group %(group)s, so that, and only " -"that, option must be specified in the config" -msgstr "" -"%(group)s grubundaki %(option)s seçeneği güncellenmeye çalışıldı, böylece, " -"yapılandırmada yalnızca bu seçenek belirtilmeli" - -msgid "" -"Unable to access the keystone database, please check it is configured " -"correctly." -msgstr "" -"Keystone veri tabanına erişilemiyor, lütfen doğru yapılandırıldığından emin " -"olun." - -#, python-format -msgid "Unable to consume trust %(trust_id)s, unable to acquire lock." -msgstr "%(trust_id)s güveni tüketilemedi, kilit elde edilemiyor." - -#, python-format -msgid "" -"Unable to delete region %(region_id)s because it or its child regions have " -"associated endpoints." -msgstr "" -"Bölge %(region_id)s silinemedi çünkü kendisi ya da alt bölgelerinin " -"ilişkilendirilmiş bitiş noktaları var." 
- -#, python-format -msgid "Unable to find valid groups while using mapping %(mapping_id)s" -msgstr "Eşleştirme %(mapping_id)s kullanırken geçerli gruplar bulunamadı" - -#, python-format -msgid "Unable to locate domain config directory: %s" -msgstr "Alan yapılandırma dizini bulunamıyor: %s" - -#, python-format -msgid "Unable to lookup user %s" -msgstr "%s kullanıcısı aranamadı" - -#, python-format -msgid "" -"Unable to reconcile identity attribute %(attribute)s as it has conflicting " -"values %(new)s and %(old)s" -msgstr "" -"Kimlik özniteliği %(attribute)s bağdaştırılamıyor çünkü çatışan değerleri " -"var %(new)s ve %(old)s" - -#, python-format -msgid "" -"Unable to sign SAML assertion. It is likely that this server does not have " -"xmlsec1 installed, or this is the result of misconfiguration. Reason " -"%(reason)s" -msgstr "" -"SAML ifadesi imzalanamıyor. Muhtemelen bu sunucuda xmlsec1 kurulu değil, " -"veya bu bir yanlış yapılandırmanın sonucu. Sebep %(reason)s" - -msgid "Unable to sign token." -msgstr "Jeton imzalanamıyor." 
- -#, python-format -msgid "Unexpected assignment type encountered, %s" -msgstr "Beklenmedik atama türüyle karşılaşıldı, %s" - -#, python-format -msgid "" -"Unexpected combination of grant attributes - User: %(user_id)s, Group: " -"%(group_id)s, Project: %(project_id)s, Domain: %(domain_id)s" -msgstr "" -"İzin özniteliklerinin beklenmedik katışımı - Kullanıcı: %(user_id)s, Grup: " -"%(group_id)s, Proje: %(project_id)s, Alan: %(domain_id)s" - -#, python-format -msgid "Unexpected status requested for JSON Home response, %s" -msgstr "JSON Home yanıtı için beklenmedik durum istendi, %s" - -msgid "Unknown Target" -msgstr "Bilinmeyen Hedef" - -#, python-format -msgid "Unknown domain '%(name)s' specified by --domain-name" -msgstr "--domain-name ile bilinmeyen alan '%(name)s' belirtilmiş" - -#, python-format -msgid "Unknown token version %s" -msgstr "Bilinmeyen jeton sürümü %s" - -#, python-format -msgid "Unregistered dependency: %(name)s for %(targets)s" -msgstr "Kaydı silinmiş bağımlılık: %(targets)s için %(name)s" - -msgid "Update of `parent_id` is not allowed." -msgstr "`parent_id` güncellemesine izin verilmiyor." 
- -msgid "Use a project scoped token when attempting to create a SAML assertion" -msgstr "" -"SAML iddiası oluşturma girişimi sırasında proje kapsamlı bir jeton kullan" - -#, python-format -msgid "User %(u_id)s is unauthorized for tenant %(t_id)s" -msgstr "%(u_id)s kullanıcısı %(t_id)s kiracısı için yetkilendirilmemiş" - -#, python-format -msgid "User %(user_id)s has no access to domain %(domain_id)s" -msgstr "%(user_id)s kullanıcısının %(domain_id)s alanına erişimi yok" - -#, python-format -msgid "User %(user_id)s has no access to project %(project_id)s" -msgstr "%(user_id)s kullanıcısının %(project_id)s projesine erişimi yok" - -#, python-format -msgid "User %(user_id)s is already a member of group %(group_id)s" -msgstr "Kullanıcı %(user_id)s zaten %(group_id)s grubu üyesi" - -#, python-format -msgid "User '%(user_id)s' not found in group '%(group_id)s'" -msgstr "Kullanıcı '%(user_id)s' '%(group_id)s' grubunda bulunamadı" - -msgid "User IDs do not match" -msgstr "Kullanıcı ID leri uyuşmuyor" - -#, python-format -msgid "User is disabled: %s" -msgstr "Kullanıcı kapalı: %s" - -msgid "User is not a member of the requested project" -msgstr "Kullanıcı istenen projenin üyesi değil" - -msgid "User is not a trustee." -msgstr "Kullanıcı güvenilir değil." - -msgid "User not found" -msgstr "Kullanıcı bulunamadı" - -msgid "User roles not supported: tenant_id required" -msgstr "Kullanıcı rolleri desteklenmiyor: tenant_id gerekli" - -#, python-format -msgid "User type %s not supported" -msgstr "Kullanıcı türü %s desteklenmiyor" - -msgid "You are not authorized to perform the requested action." -msgstr "İstenen eylemi gerçekleştirmek için yetkili değilsiniz." - -#, python-format -msgid "You are not authorized to perform the requested action: %(action)s" -msgstr "İstenen eylemi gerçekleştirmek için yetkili değilsiniz: %(action)s" - -msgid "`key_mangler` functions must be callable." -msgstr "`key_mangler` fonksiyonları çağrılabilir olmalı." 
- -msgid "`key_mangler` option must be a function reference" -msgstr "`key_mangler` seçeneği fonksiyon referansı olmalı" - -msgid "any options" -msgstr "herhangi bir seçenek" - -msgid "auth_type is not Negotiate" -msgstr "auth_type Negotiate değil" - -msgid "authorizing user does not have role required" -msgstr "yetkilendiren kullanıcı gerekli role sahip değil" - -#, python-format -msgid "cannot create a project in a branch containing a disabled project: %s" -msgstr "kapalı bir proje içeren bir alt grupta proje oluşturulamaz: %s" - -#, python-format -msgid "group %(group)s" -msgstr "grup %(group)s" - -msgid "" -"idp_contact_type must be one of: [technical, other, support, administrative " -"or billing." -msgstr "" -"idp_contact_type şunlardan biri olmalı: [teknik, diğer, destek, idari veya " -"faturalama." - -#, python-format -msgid "invalid date format %s" -msgstr "geçersiz tarih biçimi %s" - -#, python-format -msgid "option %(option)s in group %(group)s" -msgstr "%(group)s grubundaki %(option)s seçeneği" - -msgid "provided consumer key does not match stored consumer key" -msgstr "sağlanan tüketici anahtarı depolanan tüketici anahtarıyla eşleşmiyor" - -msgid "provided request key does not match stored request key" -msgstr "sağlanan istek anahtarı depolanan istek anahtarıyla eşleşmiyor" - -msgid "provided verifier does not match stored verifier" -msgstr "sağlanan doğrulayıcı depolanan doğrulayıcı ile eşleşmiyor" - -msgid "remaining_uses must be a positive integer or null." -msgstr "remaining_uses pozitif bir değer ya da null olmalı." 
- -msgid "remaining_uses must not be set if redelegation is allowed" -msgstr "tekrar yetkilendirmeye izin veriliyorsa remaining_uses ayarlanmamalı" - -#, python-format -msgid "" -"request to update group %(group)s, but config provided contains group " -"%(group_other)s instead" -msgstr "" -"%(group)s grubunu güncelleme isteği, ama sağlanan yapılandırma " -"%(group_other)s grubunu içeriyor" - -msgid "rescope a scoped token" -msgstr "kapsamlı bir jeton tekrar kapsamlandı" - -#, python-format -msgid "tls_cacertdir %s not found or is not a directory" -msgstr "tls_cacertdir %s bulunamadı ya da bir dizin" - -#, python-format -msgid "tls_cacertfile %s not found or is not a file" -msgstr "tls_cacertfile %s bulunamadı ya da bir dosya değil" - -#, python-format -msgid "token reference must be a KeystoneToken type, got: %s" -msgstr "jeton referansı bir KeystoneToken türünde olmalı, alınan: %s" diff --git a/keystone-moon/keystone/locale/vi_VN/LC_MESSAGES/keystone-log-info.po b/keystone-moon/keystone/locale/vi_VN/LC_MESSAGES/keystone-log-info.po deleted file mode 100644 index bcb9ab4e..00000000 --- a/keystone-moon/keystone/locale/vi_VN/LC_MESSAGES/keystone-log-info.po +++ /dev/null @@ -1,211 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. 
-# -# Translators: -msgid "" -msgstr "" -"Project-Id-Version: Keystone\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/keystone\n" -"POT-Creation-Date: 2015-03-09 06:03+0000\n" -"PO-Revision-Date: 2015-03-07 04:31+0000\n" -"Last-Translator: openstackjenkins \n" -"Language-Team: Vietnamese (Viet Nam) (http://www.transifex.com/projects/p/" -"keystone/language/vi_VN/)\n" -"Language: vi_VN\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: Babel 1.3\n" -"Plural-Forms: nplurals=1; plural=0;\n" - -#: keystone/assignment/core.py:250 -#, python-format -msgid "Creating the default role %s because it does not exist." -msgstr "" - -#: keystone/assignment/core.py:258 -#, python-format -msgid "Creating the default role %s failed because it was already created" -msgstr "" - -#: keystone/auth/controllers.py:64 -msgid "Loading auth-plugins by class-name is deprecated." -msgstr "" - -#: keystone/auth/controllers.py:106 -#, python-format -msgid "" -"\"expires_at\" has conflicting values %(existing)s and %(new)s. Will use " -"the earliest value." 
-msgstr "" - -#: keystone/common/openssl.py:81 -#, python-format -msgid "Running command - %s" -msgstr "" - -#: keystone/common/wsgi.py:79 -msgid "No bind information present in token" -msgstr "" - -#: keystone/common/wsgi.py:83 -#, python-format -msgid "Named bind mode %s not in bind information" -msgstr "" - -#: keystone/common/wsgi.py:90 -msgid "Kerberos credentials required and not present" -msgstr "" - -#: keystone/common/wsgi.py:94 -msgid "Kerberos credentials do not match those in bind" -msgstr "" - -#: keystone/common/wsgi.py:98 -msgid "Kerberos bind authentication successful" -msgstr "" - -#: keystone/common/wsgi.py:105 -#, python-format -msgid "Couldn't verify unknown bind: {%(bind_type)s: %(identifier)s}" -msgstr "" - -#: keystone/common/environment/eventlet_server.py:103 -#, python-format -msgid "Starting %(arg0)s on %(host)s:%(port)s" -msgstr "" - -#: keystone/common/kvs/core.py:138 -#, python-format -msgid "Adding proxy '%(proxy)s' to KVS %(name)s." -msgstr "" - -#: keystone/common/kvs/core.py:188 -#, python-format -msgid "Using %(func)s as KVS region %(name)s key_mangler" -msgstr "" - -#: keystone/common/kvs/core.py:200 -#, python-format -msgid "Using default dogpile sha1_mangle_key as KVS region %s key_mangler" -msgstr "" - -#: keystone/common/kvs/core.py:210 -#, python-format -msgid "KVS region %s key_mangler disabled." 
-msgstr "" - -#: keystone/contrib/example/core.py:64 keystone/contrib/example/core.py:73 -#, python-format -msgid "" -"Received the following notification: service %(service)s, resource_type: " -"%(resource_type)s, operation %(operation)s payload %(payload)s" -msgstr "" - -#: keystone/openstack/common/eventlet_backdoor.py:146 -#, python-format -msgid "Eventlet backdoor listening on %(port)s for process %(pid)d" -msgstr "Eventlet backdoor lắng nghe trên %(port)s đối với tiến trình %(pid)d" - -#: keystone/openstack/common/service.py:173 -#, python-format -msgid "Caught %s, exiting" -msgstr "Bắt %s, thoát" - -#: keystone/openstack/common/service.py:231 -msgid "Parent process has died unexpectedly, exiting" -msgstr "Tiến trình cha bị chết đột ngột, thoát" - -#: keystone/openstack/common/service.py:262 -#, python-format -msgid "Child caught %s, exiting" -msgstr "Tiến trình con bắt %s, thoát" - -#: keystone/openstack/common/service.py:301 -msgid "Forking too fast, sleeping" -msgstr "Tạo tiến trình con quá nhanh, nghỉ" - -#: keystone/openstack/common/service.py:320 -#, python-format -msgid "Started child %d" -msgstr "Tiến trình con đã được khởi động %d " - -#: keystone/openstack/common/service.py:330 -#, python-format -msgid "Starting %d workers" -msgstr "Khởi động %d động cơ" - -#: keystone/openstack/common/service.py:347 -#, python-format -msgid "Child %(pid)d killed by signal %(sig)d" -msgstr "Tiến trình con %(pid)d bị huỷ bởi tín hiệu %(sig)d" - -#: keystone/openstack/common/service.py:351 -#, python-format -msgid "Child %(pid)s exited with status %(code)d" -msgstr "Tiến trình con %(pid)s đã thiaast với trạng thái %(code)d" - -#: keystone/openstack/common/service.py:390 -#, python-format -msgid "Caught %s, stopping children" -msgstr "Bắt %s, đang dừng tiến trình con" - -#: keystone/openstack/common/service.py:399 -msgid "Wait called after thread killed. Cleaning up." 
-msgstr "" - -#: keystone/openstack/common/service.py:415 -#, python-format -msgid "Waiting on %d children to exit" -msgstr "Chờ đợi %d tiến trình con để thoát " - -#: keystone/token/persistence/backends/sql.py:279 -#, python-format -msgid "Total expired tokens removed: %d" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:72 -msgid "" -"[fernet_tokens] key_repository does not appear to exist; attempting to " -"create it" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:130 -#, python-format -msgid "Created a new key: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:143 -msgid "Key repository is already initialized; aborting." -msgstr "" - -#: keystone/token/providers/fernet/utils.py:179 -#, python-format -msgid "Starting key rotation with %(count)s key files: %(list)s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:185 -#, python-format -msgid "Current primary key is: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:187 -#, python-format -msgid "Next primary key will be: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:197 -#, python-format -msgid "Promoted key 0 to be the primary: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:213 -#, python-format -msgid "Excess keys to purge: %s" -msgstr "" - -#: keystone/token/providers/fernet/utils.py:237 -#, python-format -msgid "Loaded %(count)s encryption keys from: %(dir)s" -msgstr "" diff --git a/keystone-moon/keystone/locale/zh_CN/LC_MESSAGES/keystone-log-critical.po b/keystone-moon/keystone/locale/zh_CN/LC_MESSAGES/keystone-log-critical.po deleted file mode 100644 index d645e82c..00000000 --- a/keystone-moon/keystone/locale/zh_CN/LC_MESSAGES/keystone-log-critical.po +++ /dev/null @@ -1,25 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# Linda , 2016. 
#zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2014-08-31 03:19+0000\n" -"Last-Translator: openstackjenkins \n" -"Language: zh-CN\n" -"Plural-Forms: nplurals=1; plural=0;\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Chinese (China)\n" - -#, python-format -msgid "Unable to open template file %s" -msgstr "无法打开模板文件 %s" diff --git a/keystone-moon/keystone/locale/zh_CN/LC_MESSAGES/keystone-log-error.po b/keystone-moon/keystone/locale/zh_CN/LC_MESSAGES/keystone-log-error.po deleted file mode 100644 index b3df3b82..00000000 --- a/keystone-moon/keystone/locale/zh_CN/LC_MESSAGES/keystone-log-error.po +++ /dev/null @@ -1,140 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# Xiao Xi LIU , 2014 -# 刘俊朋 , 2015 -# OpenStack Infra , 2015. #zanata -# Andreas Jaeger , 2016. #zanata -# Gaoxiao Zhu , 2016. #zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2016-03-15 10:40+0000\n" -"Last-Translator: Andreas Jaeger \n" -"Language: zh-CN\n" -"Plural-Forms: nplurals=1; plural=0;\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Chinese (China)\n" - -msgid "Cannot retrieve Authorization headers" -msgstr "无法获取认证头信息" - -#, python-format -msgid "" -"Circular reference or a repeated entry found in projects hierarchy - " -"%(project_id)s." 
-msgstr "在项目树-%(project_id)s 中发现循环引用或重复项。" - -#, python-format -msgid "" -"Circular reference or a repeated entry found in region tree - %(region_id)s." -msgstr "在域树- %(region_id)s 中发现循环引用或重复项。" - -#, python-format -msgid "" -"Circular reference or a repeated entry found projects hierarchy - " -"%(project_id)s." -msgstr "在项目树-%(project_id)s 中发现循环引用或重复项。" - -#, python-format -msgid "Could not bind to %(host)s:%(port)s" -msgstr "无法绑定至 %(host)s:%(port)s" - -#, python-format -msgid "" -"Either [fernet_tokens] key_repository does not exist or Keystone does not " -"have sufficient permission to access it: %s" -msgstr "[fernet_tokens] 键仓库不存在或者ketystone没有足够的权限去访问它: %s。" - -msgid "" -"Error setting up the debug environment. Verify that the option --debug-url " -"has the format : and that a debugger processes is listening on " -"that port." -msgstr "" -"设置调试环境出错。请确保选项--debug-url 的格式是这样的: ,和确保" -"有一个调试进程正在监听那个端口" - -#, python-format -msgid "Error when signing assertion, reason: %(reason)s%(output)s" -msgstr "对断言进行签名时出错,原因:%(reason)s%(output)s" - -msgid "Failed to construct notifier" -msgstr "构造通知器失败" - -msgid "" -"Failed to create [fernet_tokens] key_repository: either it already exists or " -"you don't have sufficient permissions to create it" -msgstr "创建[Fernet_tokens] 键仓库失败:它已存在或你没有足够的权限去创建它。" - -msgid "Failed to create the default domain." 
-msgstr "无法创建默认域。" - -#, python-format -msgid "Failed to remove file %(file_path)r: %(error)s" -msgstr "无法删除文件%(file_path)r: %(error)s" - -#, python-format -msgid "Failed to send %(action)s %(event_type)s notification" -msgstr "发送 %(action)s %(event_type)s 通知失败" - -#, python-format -msgid "Failed to send %(res_id)s %(event_type)s notification" -msgstr "发送%(res_id)s %(event_type)s 通知失败" - -msgid "Failed to validate token" -msgstr "token验证失败" - -#, python-format -msgid "Malformed endpoint %(url)s - unknown key %(keyerror)s" -msgstr "端点 %(url)s 的格式不正确 - 键 %(keyerror)s 未知" - -#, python-format -msgid "" -"Malformed endpoint %s - incomplete format (are you missing a type notifier ?)" -msgstr "端点 %s 的格式不完整 - (是否缺少了类型通告者?)" - -#, python-format -msgid "" -"Malformed endpoint '%(url)s'. The following type error occurred during " -"string substitution: %(typeerror)s" -msgstr "" -"端点 '%(url)s' 的格式不正确。在字符串替换时发生以下类型错误:%(typeerror)s" - -#, python-format -msgid "Malformed endpoint - %(url)r is not a string" -msgstr "端点 - %(url)r 不是一个字符串" - -#, python-format -msgid "" -"Reinitializing revocation list due to error in loading revocation list from " -"backend. Expected `list` type got `%(type)s`. 
Old revocation list data: " -"%(list)r" -msgstr "" -"由于从后端加载撤销列表出现错误,重新初始化撤销列表。期望“列表”类型是 `" -"%(type)s`。旧的撤销列表数据是: %(list)r" - -msgid "Server error" -msgstr "服务器报错" - -msgid "Unable to sign token" -msgstr "无法签名令牌" - -#, python-format -msgid "Unexpected error or malformed token determining token expiry: %s" -msgstr "决策令牌预计超期时间 :%s 时,出现未知错误或变形的令牌" - -#, python-format -msgid "" -"Unexpected results in response for domain config - %(count)s responses, " -"first option is %(option)s, expected option %(expected)s" -msgstr "" -"针对域配置- %(count)s 结果,响应中出现不是预期结果,第一参数是%(option)s,期" -"望参数是 %(expected)s 。" diff --git a/keystone-moon/keystone/locale/zh_CN/LC_MESSAGES/keystone-log-info.po b/keystone-moon/keystone/locale/zh_CN/LC_MESSAGES/keystone-log-info.po deleted file mode 100644 index 8a756333..00000000 --- a/keystone-moon/keystone/locale/zh_CN/LC_MESSAGES/keystone-log-info.po +++ /dev/null @@ -1,83 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# Xiao Xi LIU , 2014 -# OpenStack Infra , 2015. #zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 8.0.1.dev11\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/keystone\n" -"POT-Creation-Date: 2015-11-05 06:13+0000\n" -"PO-Revision-Date: 2015-08-01 06:26+0000\n" -"Last-Translator: openstackjenkins \n" -"Language-Team: Chinese (China)\n" -"Language: zh-CN\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Plural-Forms: nplurals=1; plural=0;\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.1\n" - -#, python-format -msgid "" -"\"expires_at\" has conflicting values %(existing)s and %(new)s. Will use " -"the earliest value." -msgstr "" -"\"expires_at\" 被赋予矛盾的值: %(existing)s 和 %(new)s。将采用时间上较早的那" -"个值。" - -#, python-format -msgid "Adding proxy '%(proxy)s' to KVS %(name)s." 
-msgstr "正在将代理'%(proxy)s'加入KVS %(name)s 中。" - -#, python-format -msgid "Couldn't verify unknown bind: {%(bind_type)s: %(identifier)s}" -msgstr "不能验证未知绑定: {%(bind_type)s: %(identifier)s}" - -#, python-format -msgid "Creating the default role %s because it does not exist." -msgstr "正在创建默认角色%s,因为它之前不存在。" - -#, python-format -msgid "KVS region %s key_mangler disabled." -msgstr "KVS域 %s 的key_mangler处理函数被禁用。" - -msgid "Kerberos bind authentication successful" -msgstr "Kerberos绑定认证成功" - -msgid "Kerberos credentials do not match those in bind" -msgstr "在绑定中没有匹配的Kerberos凭证" - -msgid "Kerberos credentials required and not present" -msgstr "没有所需的Kerberos凭证" - -#, python-format -msgid "Named bind mode %s not in bind information" -msgstr "在绑定信息中没有命名绑定模式%s" - -msgid "No bind information present in token" -msgstr "令牌中暂无绑定信息" - -#, python-format -msgid "Running command - %s" -msgstr "正在运行命令 - %s" - -#, python-format -msgid "Starting %(arg0)s on %(host)s:%(port)s" -msgstr "正在 %(host)s:%(port)s 上启动 %(arg0)s" - -#, python-format -msgid "Total expired tokens removed: %d" -msgstr "被移除的失效令牌总数:%d" - -#, python-format -msgid "Using %(func)s as KVS region %(name)s key_mangler" -msgstr "使用 %(func)s 作为KVS域 %(name)s 的key_mangler处理函数" - -#, python-format -msgid "Using default dogpile sha1_mangle_key as KVS region %s key_mangler" -msgstr "" -"使用默认的dogpile sha1_mangle_key函数作为KVS域 %s 的key_mangler处理函数" diff --git a/keystone-moon/keystone/locale/zh_CN/LC_MESSAGES/keystone.po b/keystone-moon/keystone/locale/zh_CN/LC_MESSAGES/keystone.po deleted file mode 100644 index c20b31f0..00000000 --- a/keystone-moon/keystone/locale/zh_CN/LC_MESSAGES/keystone.po +++ /dev/null @@ -1,1454 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# Zhong Chaoliang , 2013 -# Dongliang Yu , 2013 -# Lee Yao , 2013 -# Lee Yao , 2013 -# Zhong Chaoliang , 2013 -# 颜海峰 , 2014 -# Linda , 2016. 
#zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2016-04-27 05:34+0000\n" -"Last-Translator: Linda \n" -"Language: zh-CN\n" -"Plural-Forms: nplurals=1; plural=0;\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Chinese (China)\n" - -#, python-format -msgid "%(detail)s" -msgstr "%(detail)s" - -#, python-format -msgid "%(driver)s is not supported driver version" -msgstr "%(driver)s 不是受支持的驱动程序版本" - -#, python-format -msgid "" -"%(entity)s name cannot contain the following reserved characters: %(chars)s" -msgstr "%(entity)s 名称不能包含以下保留字符:%(chars)s" - -#, python-format -msgid "" -"%(event)s is not a valid notification event, must be one of: %(actions)s" -msgstr "%(event)s 不是有效通知事件,必须是下列其中一项:%(actions)s" - -#, python-format -msgid "%(host)s is not a trusted dashboard host" -msgstr "%(host)s 不是可信的仪表板主机" - -#, python-format -msgid "%(message)s %(amendment)s" -msgstr "%(message)s %(amendment)s" - -#, python-format -msgid "" -"%(mod_name)s doesn't provide database migrations. The migration repository " -"path at %(path)s doesn't exist or isn't a directory." -msgstr "" -"%(mod_name)s 未提供数据库迁移。%(path)s 处的迁移存储库路径不存在或者不是目" -"录。" - -#, python-format -msgid "%(prior_role_id)s does not imply %(implied_role_id)s" -msgstr "%(prior_role_id)s 并未暗示 %(implied_role_id)s" - -#, python-format -msgid "%(property_name)s cannot be less than %(min_length)s characters." -msgstr "%(property_name)s 不能少于 %(min_length)s 个字符。" - -#, python-format -msgid "%(property_name)s is not a %(display_expected_type)s" -msgstr "%(property_name)s 不在 %(display_expected_type)s 之中" - -#, python-format -msgid "%(property_name)s should not be greater than %(max_length)s characters." 
-msgstr "%(property_name)s 不应该超过 %(max_length)s 个字符。" - -#, python-format -msgid "%(role_id)s cannot be an implied roles" -msgstr "%(role_id)s 不能是暗示角色" - -#, python-format -msgid "%s cannot be empty." -msgstr "%s 不能为空。" - -#, python-format -msgid "%s extension does not exist." -msgstr "%s 扩展不存在。" - -#, python-format -msgid "%s field is required and cannot be empty" -msgstr "%s 字段是必填字段,不能为空" - -#, python-format -msgid "%s field(s) cannot be empty" -msgstr "%s 字段不能为空" - -#, python-format -msgid "" -"%s for the LDAP identity backend has been deprecated in the Mitaka release " -"in favor of read-only identity LDAP access. It will be removed in the \"O\" " -"release." -msgstr "" -"在 Mitaka 发行版中,已不推荐使用 LDAP 身份后端的 %s (以支持只读身份 LDAP 访" -"问)。它将在“O”发行版中移除。" - -msgid "(Disable insecure_debug mode to suppress these details.)" -msgstr "(禁用 insecure_debug 方式以避免这些详细信息。)" - -msgid "--all option cannot be mixed with other options" -msgstr "--all 选项不能与其他选项一起使用" - -msgid "A project-scoped token is required to produce a service catalog." -msgstr "产生服务目录时需要项目范围的令牌。" - -msgid "Access token is expired" -msgstr "访问令牌已过期" - -msgid "Access token not found" -msgstr "找不到访问令牌" - -msgid "Additional authentications steps required." -msgstr "需要额外的认证步骤。" - -msgid "An unexpected error occurred when retrieving domain configs" -msgstr "检索域配置时发生意外错误" - -#, python-format -msgid "An unexpected error occurred when trying to store %s" -msgstr "尝试存储 %s 时发生意外错误" - -msgid "An unexpected error prevented the server from fulfilling your request." -msgstr "意外错误阻止了服务器完成您的请求。" - -#, python-format -msgid "" -"An unexpected error prevented the server from fulfilling your request: " -"%(exception)s" -msgstr "意外错误阻止了服务器完成您的请求:%(exception)s" - -msgid "An unhandled exception has occurred: Could not find metadata." 
-msgstr "发送了无法处理的异常:找不到元数据。" - -msgid "At least one option must be provided" -msgstr "必须至少提供一个选项" - -msgid "At least one option must be provided, use either --all or --domain-name" -msgstr "必须至少提供一个选项,请使用 --all 或 --domain-name" - -msgid "At least one role should be specified." -msgstr "应该至少指定一个角色。" - -#, python-format -msgid "" -"Attempted automatic driver selection for assignment based upon " -"[identity]\\driver option failed since driver %s is not found. Set " -"[assignment]/driver to a valid driver in keystone config." -msgstr "" -"尝试根据 [identity]\\driver 选项为分配自动选择驱动程序失败,因为找不到驱动程" -"序 %s。请在 keystone 配置中将 [assignment]/driver 设置为有效驱动程序。" - -msgid "Attempted to authenticate with an unsupported method." -msgstr "尝试使用不受支持的方法进行验证。" - -msgid "" -"Attempting to use OS-FEDERATION token with V2 Identity Service, use V3 " -"Authentication" -msgstr "正在尝试将 OS-FEDERATION 令牌与 V2 身份服务配合使用,请使用 V3 认证" - -msgid "Authentication plugin error." -msgstr "认证插件错误。" - -#, python-format -msgid "" -"Backend `%(backend)s` is not a valid memcached backend. Valid backends: " -"%(backend_list)s" -msgstr "后端“%(backend)s”不是有效的 memcached 后端。有效后端:%(backend_list)s" - -msgid "Cannot authorize a request token with a token issued via delegation." -msgstr "无法对带有通过代理发出的令牌的请求令牌授权。" - -#, python-format -msgid "Cannot change %(option_name)s %(attr)s" -msgstr "无法更改 %(option_name)s %(attr)s" - -msgid "Cannot change Domain ID" -msgstr "无法更改域标识" - -msgid "Cannot change user ID" -msgstr "无法更改用户标识" - -msgid "Cannot change user name" -msgstr "无法更改用户名" - -#, python-format -msgid "Cannot create an endpoint with an invalid URL: %(url)s" -msgstr "无法创建具有无效 URL %(url)s 的端点" - -#, python-format -msgid "Cannot create project with parent: %(project_id)s" -msgstr "无法创建具有父代的项目:%(project_id)s" - -#, python-format -msgid "" -"Cannot create project, since it specifies its owner as domain %(domain_id)s, " -"but specifies a parent in a different domain (%(parent_domain_id)s)." 
-msgstr "" -"无法创建项目,因为它将其所有者指定为域 %(domain_id)s,但在另一个域 " -"(%(parent_domain_id)s) 中指定了父代。" - -#, python-format -msgid "" -"Cannot create project, since its parent (%(domain_id)s) is acting as a " -"domain, but project's specified parent_id (%(parent_id)s) does not match " -"this domain_id." -msgstr "" -"无法创建项目,因为其父代 (%(domain_id)s) 正充当域,但该项目的指定 parent_id " -"(%(parent_id)s) 与此 domain_id 不匹配。" - -msgid "Cannot delete a domain that is enabled, please disable it first." -msgstr "无法删除已启用的域,请先禁用该域。" - -#, python-format -msgid "" -"Cannot delete project %(project_id)s since its subtree contains enabled " -"projects." -msgstr "无法删除项目 %(project_id)s,因为其子树包含已启用的项目。" - -#, python-format -msgid "" -"Cannot delete the project %s since it is not a leaf in the hierarchy. Use " -"the cascade option if you want to delete a whole subtree." -msgstr "" -"无法删除项目 %s,因为它不是该层次结构中的支叶。如果要删除整个子树,请使用级联" -"选项。" - -#, python-format -msgid "" -"Cannot disable project %(project_id)s since its subtree contains enabled " -"projects." -msgstr "无法禁用项目 %(project_id)s,因为它的子树包含已启用的项目。" - -#, python-format -msgid "Cannot enable project %s since it has disabled parents" -msgstr "无法启用项目 %s,因为它具有已禁用的父代" - -msgid "Cannot list assignments sourced from groups and filtered by user ID." -msgstr "无法列示源自若干组并按用户标识过滤的分配。" - -msgid "Cannot list request tokens with a token issued via delegation." -msgstr "无法列示带有通过代理发出的令牌的请求令牌。" - -#, python-format -msgid "Cannot open certificate %(cert_file)s. Reason: %(reason)s" -msgstr "无法打开证书 %(cert_file)s。原因:%(reason)s" - -#, python-format -msgid "Cannot remove role that has not been granted, %s" -msgstr "无法除去尚未授予的角色 %s" - -msgid "" -"Cannot truncate a driver call without hints list as first parameter after " -"self " -msgstr "" -"在没有将 hints list 用作 self 后面的第一个参数的情况下,无法截断驱动程序调用" - -msgid "Cannot update domain_id of a project that has children." -msgstr "无法更新具有子代的项目的 domain_id。" - -msgid "" -"Cannot use parents_as_list and parents_as_ids query params at the same time." 
-msgstr "无法同时使用 parents_as_list 和 parents_as_ids 查询参数。" - -msgid "" -"Cannot use subtree_as_list and subtree_as_ids query params at the same time." -msgstr "无法同时使用 subtree_as_list 和 subtree_as_ids 查询参数。" - -msgid "Cascade update is only allowed for enabled attribute." -msgstr "只允许对已启用的属性执行级联更新。" - -msgid "" -"Combining effective and group filter will always result in an empty list." -msgstr "将有效过滤器与组过滤器进行组合将始终产生空列表。" - -msgid "" -"Combining effective, domain and inherited filters will always result in an " -"empty list." -msgstr "将有效过滤器、域过滤器和继承的过滤器进行组合将始终产生空列表。" - -#, python-format -msgid "Config API entity at /domains/%s/config" -msgstr "在 /domains/%s/config 处配置 API 实体" - -#, python-format -msgid "Conflict occurred attempting to store %(type)s - %(details)s" -msgstr "尝试存储 %(type)s 时发生冲突 - %(details)s" - -#, python-format -msgid "Conflicting region IDs specified: \"%(url_id)s\" != \"%(ref_id)s\"" -msgstr "指定的区域标识有冲突:“%(url_id)s”不等于“%(ref_id)s”" - -msgid "Consumer not found" -msgstr "找不到用户" - -#, python-format -msgid "" -"Could not change immutable attribute(s) '%(attributes)s' in target %(target)s" -msgstr "无法更改目标 %(target)s 中的不可变属性 %(attributes)s " - -#, python-format -msgid "" -"Could not determine Identity Provider ID. The configuration option " -"%(issuer_attribute)s was not found in the request environment." 
-msgstr "" -"未能确定身份提供者标识。在请求环境中找不到配置选项 %(issuer_attribute)s。" - -#, python-format -msgid "" -"Could not find %(group_or_option)s in domain configuration for domain " -"%(domain_id)s" -msgstr "在以下域的域配置中找不到 %(group_or_option)s:%(domain_id)s" - -#, python-format -msgid "Could not find Endpoint Group: %(endpoint_group_id)s" -msgstr "找不到端点组:%(endpoint_group_id)s" - -msgid "Could not find Identity Provider identifier in environment" -msgstr "为在环境中找到“身份提供者”标识" - -#, python-format -msgid "Could not find Identity Provider: %(idp_id)s" -msgstr "找不到身份提供者:%(idp_id)s" - -#, python-format -msgid "Could not find Service Provider: %(sp_id)s" -msgstr "找不到服务提供程序:%(sp_id)s" - -#, python-format -msgid "Could not find credential: %(credential_id)s" -msgstr "找不到凭证:%(credential_id)s" - -#, python-format -msgid "Could not find domain: %(domain_id)s" -msgstr "找不到域:%(domain_id)s" - -#, python-format -msgid "Could not find endpoint: %(endpoint_id)s" -msgstr "找不到端点:%(endpoint_id)s" - -#, python-format -msgid "" -"Could not find federated protocol %(protocol_id)s for Identity Provider: " -"%(idp_id)s" -msgstr "找不到身份提供者 %(idp_id)s 的联合协议 %(protocol_id)s " - -#, python-format -msgid "Could not find group: %(group_id)s" -msgstr "找不到组:%(group_id)s" - -#, python-format -msgid "Could not find mapping: %(mapping_id)s" -msgstr "找不到映射:%(mapping_id)s" - -msgid "Could not find policy association" -msgstr "找不到策略关联" - -#, python-format -msgid "Could not find policy: %(policy_id)s" -msgstr "找不到策略:%(policy_id)s" - -#, python-format -msgid "Could not find project: %(project_id)s" -msgstr "找不到项目:%(project_id)s" - -#, python-format -msgid "Could not find region: %(region_id)s" -msgstr "找不到区域:%(region_id)s" - -#, python-format -msgid "" -"Could not find role assignment with role: %(role_id)s, user or group: " -"%(actor_id)s, project or domain: %(target_id)s" -msgstr "" -"找不到角色分配,角色为 %(role_id)s,用户或组为 %(actor_id)s,项目或域为 " -"%(target_id)s" - -#, python-format -msgid "Could not find role: %(role_id)s" -msgstr 
"找不到角色:%(role_id)s" - -#, python-format -msgid "Could not find service: %(service_id)s" -msgstr "找不到服务:%(service_id)s" - -#, python-format -msgid "Could not find token: %(token_id)s" -msgstr "找不到令牌:%(token_id)s" - -#, python-format -msgid "Could not find trust: %(trust_id)s" -msgstr "找不到信任:%(trust_id)s" - -#, python-format -msgid "Could not find user: %(user_id)s" -msgstr "找不到用户:%(user_id)s" - -#, python-format -msgid "Could not find version: %(version)s" -msgstr "找不到版本:%(version)s" - -#, python-format -msgid "Could not find: %(target)s" -msgstr "找不到 %(target)s" - -msgid "" -"Could not map any federated user properties to identity values. Check debug " -"logs or the mapping used for additional details." -msgstr "" -"无法将任何联合用户属性映射至身份值。请检查调试日志或所使用的映射以获取其他详" -"细信息。" - -msgid "" -"Could not map user while setting ephemeral user identity. Either mapping " -"rules must specify user id/name or REMOTE_USER environment variable must be " -"set." -msgstr "" -"设置临时用户身份时未能映射用户。映射规则必须指定用户标识/用户名,或者必须设" -"置 REMOTE_USER 环境变量。" - -msgid "Could not validate the access token" -msgstr "未能验证访问令牌" - -msgid "Credential belongs to another user" -msgstr "凭证属于另一用户" - -msgid "Credential signature mismatch" -msgstr "凭据签名不匹配" - -#, python-format -msgid "" -"Direct import of auth plugin %(name)r is deprecated as of Liberty in favor " -"of its entrypoint from %(namespace)r and may be removed in N." -msgstr "" -"自 Liberty 开始,已不推荐直接导入认证插件 %(name)r(为了支持它在 " -"%(namespace)r 中的入口点),并且可能在 N 中移除。" - -#, python-format -msgid "" -"Direct import of driver %(name)r is deprecated as of Liberty in favor of its " -"entrypoint from %(namespace)r and may be removed in N." -msgstr "" -"自 Liberty 开始,已不推荐直接导入驱动程序 %(name)r(为了支持它在 " -"%(namespace)r 中的入口点),并且可能在 N 中移除。" - -msgid "" -"Disabling an entity where the 'enable' attribute is ignored by configuration." 
-msgstr "正在禁用实体,在此情况下,配置已忽略“enable”属性。" - -#, python-format -msgid "Domain (%s)" -msgstr "域 (%s)" - -#, python-format -msgid "Domain cannot be named %s" -msgstr "无法将域命名为 %s" - -#, python-format -msgid "Domain cannot have ID %s" -msgstr "域不能具有标识 %s" - -#, python-format -msgid "Domain is disabled: %s" -msgstr "域已禁用:%s" - -msgid "Domain name cannot contain reserved characters." -msgstr "域名不能包含保留字符。" - -msgid "Domain scoped token is not supported" -msgstr "作用域限定的令牌不受支持" - -msgid "Domain specific roles are not supported in the V8 role driver" -msgstr "V8 角色驱动程序中不支持特定于域的角色" - -#, python-format -msgid "" -"Domain: %(domain)s already has a configuration defined - ignoring file: " -"%(file)s." -msgstr "域 %(domain)s 已定义配置 - 忽略以下文件:%(file)s。" - -msgid "Duplicate Entry" -msgstr "重复条目" - -#, python-format -msgid "Duplicate ID, %s." -msgstr "标识 %s 重复。" - -#, python-format -msgid "Duplicate entry: %s" -msgstr "重复条目:%s" - -#, python-format -msgid "Duplicate name, %s." -msgstr "名称 %s 重复。" - -#, python-format -msgid "Duplicate remote ID: %s" -msgstr "重复远程标识:%s" - -msgid "EC2 access key not found." -msgstr "找不到 EC2 访问密钥。" - -msgid "EC2 signature not supplied." -msgstr "未提供 EC2 签名。" - -msgid "" -"Either --bootstrap-password argument or OS_BOOTSTRAP_PASSWORD must be set." -msgstr "必须设置 --bootstrap-password 自变量或 OS_BOOTSTRAP_PASSWORD。" - -msgid "Enabled field must be a boolean" -msgstr "已启用的字段必须为布尔值" - -msgid "Enabled field should be a boolean" -msgstr "已启用的字段应该为布尔值" - -#, python-format -msgid "Endpoint %(endpoint_id)s not found in project %(project_id)s" -msgstr "在项目 %(project_id)s 中找不到端点 %(endpoint_id)s" - -msgid "Endpoint Group Project Association not found" -msgstr "找不到端点组项目关联" - -msgid "Ensure configuration option idp_entity_id is set." -msgstr "请确保设置了配置选项 idp_entity_id。" - -msgid "Ensure configuration option idp_sso_endpoint is set." -msgstr "请确保设置了配置选项 idp_sso_endpoint。" - -#, python-format -msgid "" -"Error parsing configuration file for domain: %(domain)s, file: %(file)s." 
-msgstr "解析域 %(domain)s 的配置文件时出错,文件为 %(file)s。" - -#, python-format -msgid "Error while opening file %(path)s: %(err)s" -msgstr "打开文件 %(path)s 时出错:%(err)s" - -#, python-format -msgid "Error while parsing line: '%(line)s': %(err)s" -msgstr "解析行“%(line)s”时出错:%(err)s" - -#, python-format -msgid "Error while parsing rules %(path)s: %(err)s" -msgstr "解析规则 %(path)s 时出错:%(err)s" - -#, python-format -msgid "Error while reading metadata file, %(reason)s" -msgstr "读取元数据文件时出错,原因为 %(reason)s" - -#, python-format -msgid "" -"Exceeded attempts to register domain %(domain)s to use the SQL driver, the " -"last domain that appears to have had it is %(last_domain)s, giving up" -msgstr "" -"注册域 %(domain)s 以使用 SQL 驱动程序的尝试次数已超出限制,显示为进行此尝试的" -"最后一个域为 %(last_domain)s,正在放弃" - -#, python-format -msgid "Expected dict or list: %s" -msgstr "期望的字典或者列表:%s" - -msgid "" -"Expected signing certificates are not available on the server. Please check " -"Keystone configuration." -msgstr "在服务器上,期望的签名证书不可用。请检查 Keystone 配置。" - -#, python-format -msgid "" -"Expecting to find %(attribute)s in %(target)s - the server could not comply " -"with the request since it is either malformed or otherwise incorrect. The " -"client is assumed to be in error." -msgstr "" -"期望在 %(target)s 中找到 %(attribute)s - 服务器未能遵照请求,因为它的格式或者" -"其他方面不正确。客户机被认为发生错误。" - -#, python-format -msgid "Failed to start the %(name)s server" -msgstr "未能启动 %(name)s 服务器" - -msgid "Failed to validate token" -msgstr "验证令牌失败" - -msgid "Federation token is expired" -msgstr "联合令牌已到期" - -#, python-format -msgid "" -"Field \"remaining_uses\" is set to %(value)s while it must not be set in " -"order to redelegate a trust" -msgstr "" -"字段“remaining_uses”已设置为 %(value)s,尽管为了重新委派信任,不能设置该字段" - -msgid "Found invalid token: scoped to both project and domain." 
-msgstr "发现无效令牌:范围同时为项目和域。" - -#, python-format -msgid "Group %s not found in config" -msgstr "在配置中找不到组 %s。" - -#, python-format -msgid "Group %(group)s is not supported for domain specific configurations" -msgstr "特定于域的配置不支持组 %(group)s" - -#, python-format -msgid "" -"Group %(group_id)s returned by mapping %(mapping_id)s was not found in the " -"backend." -msgstr "在后端中,找不到由映射 %(mapping_id)s 返回的组 %(group_id)s。" - -#, python-format -msgid "" -"Group membership across backend boundaries is not allowed, group in question " -"is %(group_id)s, user is %(user_id)s" -msgstr "" -"不允许使用跨后端边界的组成员资格,所提到的组为%(group_id)s,用户为 " -"%(user_id)s" - -#, python-format -msgid "ID attribute %(id_attr)s not found in LDAP object %(dn)s" -msgstr "未在 LDAP 对象 %(dn)s 中找到标识属性 %(id_attr)s" - -#, python-format -msgid "Identity Provider %(idp)s is disabled" -msgstr "身份提供者 %(idp)s 已禁用" - -msgid "" -"Incoming identity provider identifier not included among the accepted " -"identifiers." -msgstr "新的“身份提供者”标识未包含在已接受的标识中。" - -msgid "Invalid EC2 signature." -msgstr "无效 EC2 签名。" - -#, python-format -msgid "Invalid LDAP TLS certs option: %(option)s. Choose one of: %(options)s" -msgstr "LDAP TLS 证书选项 %(option)s 无效。请选择下列其中一项:%(options)s" - -#, python-format -msgid "Invalid LDAP TLS_AVAIL option: %s. TLS not available" -msgstr "无效的 LDAP TLS_AVAIL 选项:%s。TLS 不可用" - -#, python-format -msgid "Invalid LDAP deref option: %(option)s. Choose one of: %(options)s" -msgstr "LDAP deref 选项 %(option)s 无效。请选择下列其中一项:%(options)s" - -#, python-format -msgid "Invalid LDAP scope: %(scope)s. 
Choose one of: %(options)s" -msgstr "无效的 LDAP 作用域:%(scope)s。请选择下列其中一项:%(options)s" - -msgid "Invalid TLS / LDAPS combination" -msgstr "无效的 TLS / LDAPS 组合" - -#, python-format -msgid "Invalid audit info data type: %(data)s (%(type)s)" -msgstr "无效审计信息数据类型:%(data)s (%(type)s)" - -msgid "Invalid blob in credential" -msgstr "凭证中的 BLOB 无效" - -#, python-format -msgid "" -"Invalid domain name: %(domain)s found in config file name: %(file)s - " -"ignoring this file." -msgstr "在配置文件名 %(file)s 中找到的域名 %(domain)s 无效 - 忽略此文件。" - -#, python-format -msgid "Invalid domain specific configuration: %(reason)s" -msgstr "特定于域的配置无效:%(reason)s" - -#, python-format -msgid "Invalid input for field '%(path)s'. The value is '%(value)s'." -msgstr "对字段“%(path)s”的输入无效。该值为“%(value)s”。" - -msgid "Invalid limit value" -msgstr "限制值无效" - -#, python-format -msgid "" -"Invalid mix of entities for policy association - only Endpoint, Service or " -"Region+Service allowed. Request was - Endpoint: %(endpoint_id)s, Service: " -"%(service_id)s, Region: %(region_id)s" -msgstr "" -"用于策略关联的实体混合无效 - 仅允许“端点”、“服务”或“区域 + 服务”。请求为 - 端" -"点:%(endpoint_id)s,服务:%(service_id)s,区域:%(region_id)s" - -#, python-format -msgid "" -"Invalid rule: %(identity_value)s. Both 'groups' and 'domain' keywords must " -"be specified." -msgstr "规则 %(identity_value)s 无效。必须同时指定关键字“groups”和“domain”。" - -msgid "Invalid signature" -msgstr "签名无效" - -msgid "Invalid user / password" -msgstr "用户/密码无效" - -msgid "Invalid username or TOTP passcode" -msgstr "无效用户名或 TOTP 密码" - -msgid "Invalid username or password" -msgstr "无效用户名或密码" - -#, python-format -msgid "KVS region %s is already configured. Cannot reconfigure." 
-msgstr "已配置 KVS 区域 %s。无法重新配置。" - -#, python-format -msgid "Key Value Store not configured: %s" -msgstr "未配置键值存储:%s" - -#, python-format -msgid "LDAP %s create" -msgstr "LDAP %s 创建" - -#, python-format -msgid "LDAP %s delete" -msgstr "LDAP %s 删除" - -#, python-format -msgid "LDAP %s update" -msgstr "LDAP %s 更新" - -msgid "" -"Length of transformable resource id > 64, which is max allowed characters" -msgstr "可变换资源标识的长度超过 64 个字符(允许的最大字符数)" - -#, python-format -msgid "" -"Local section in mapping %(mapping_id)s refers to a remote match that " -"doesn't exist (e.g. {0} in a local section)." -msgstr "" -"映射 %(mapping_id)s 中的本地节引用不存在的远程匹配(例如,本地节中的 " -"'{0}')。" - -#, python-format -msgid "Lock Timeout occurred for key, %(target)s" -msgstr "对于键 %(target)s,发生锁定超时" - -#, python-format -msgid "Lock key must match target key: %(lock)s != %(target)s" -msgstr "锁定键必须与目标键匹配:%(lock)s != %(target)s" - -#, python-format -msgid "Malformed endpoint URL (%(endpoint)s), see ERROR log for details." -msgstr "端点 URL (%(endpoint)s) 的格式不正确,请查看错误日志获取详细信息。" - -msgid "Marker could not be found" -msgstr "找不到标记符" - -#, python-format -msgid "Max hierarchy depth reached for %s branch." -msgstr "已达到 %s 分支的最大层深度。" - -#, python-format -msgid "Maximum lock attempts on %s occurred." -msgstr "已达到对 %s 的最大锁定尝试次数。" - -#, python-format -msgid "Member %(member)s is already a member of group %(group)s" -msgstr "成员 %(member)s 已属于组 %(group)s" - -#, python-format -msgid "Method not callable: %s" -msgstr "方法不可调用:%s" - -msgid "Missing entity ID from environment" -msgstr "环境中缺少实体标识" - -msgid "" -"Modifying \"redelegation_count\" upon redelegation is forbidden. Omitting " -"this parameter is advised." -msgstr "禁止修改针对重新授权的“redelegation_count”。建议省略此参数。" - -msgid "Multiple domains are not supported" -msgstr "多个域不受支持" - -msgid "Must be called within an active lock context." 
-msgstr "必须在处于活动状态的锁定上下文内调用。" - -msgid "Must specify either domain or project" -msgstr "必须指定域或项目" - -msgid "Name field is required and cannot be empty" -msgstr "名称字段是必填字段,不能为空" - -msgid "Neither Project Domain ID nor Project Domain Name was provided." -msgstr "既未提供项目域标识,也未提供项目域名。" - -msgid "" -"No Authorization headers found, cannot proceed with OAuth related calls, if " -"running under HTTPd or Apache, ensure WSGIPassAuthorization is set to On." -msgstr "" -"找不到任何授权头,无法继续进行与 OAuth 相关的调用,如果是通过 HTTP 或 Apache " -"运行,请确保 WSGIPassAuthorization 设置为开启。" - -msgid "No authenticated user" -msgstr "不存在任何已认证的用户" - -msgid "" -"No encryption keys found; run keystone-manage fernet_setup to bootstrap one." -msgstr "" -"找不到任何加密密钥;请针对引导程序 1 运行 keystone-manage fernet_setup。" - -msgid "No options specified" -msgstr "未指定选项" - -#, python-format -msgid "No policy is associated with endpoint %(endpoint_id)s." -msgstr "没有任何策略与端点 %(endpoint_id)s 关联。" - -#, python-format -msgid "No remaining uses for trust: %(trust_id)s" -msgstr "对于信任 %(trust_id)s,不存在其余使用" - -msgid "No token in the request" -msgstr "请求中没有令牌。" - -msgid "Non-default domain is not supported" -msgstr "非缺省域不受支持" - -msgid "One of the trust agents is disabled or deleted" -msgstr "其中一个信任代理已禁用或删除" - -#, python-format -msgid "" -"Option %(option)s found with no group specified while checking domain " -"configuration request" -msgstr "在检查域配置请求时,找到选项 %(option)s,但未指定任何组" - -#, python-format -msgid "" -"Option %(option)s in group %(group)s is not supported for domain specific " -"configurations" -msgstr "特定于域的配置不支持组 %(group)s 中的选项 %(option)s" - -#, python-format -msgid "Project (%s)" -msgstr "项目 (%s)" - -#, python-format -msgid "Project ID not found: %(t_id)s" -msgstr "找不到项目标识:%(t_id)s" - -msgid "Project field is required and cannot be empty." -msgstr "项目字段是必填字段,不能为空。" - -#, python-format -msgid "Project is disabled: %s" -msgstr "项目已禁用:%s" - -msgid "Project name cannot contain reserved characters." 
-msgstr "项目名称不能包含保留字符。" - -msgid "Query string is not UTF-8 encoded" -msgstr "查询字符串不是采用 UTF-8 编码" - -#, python-format -msgid "" -"Reading the default for option %(option)s in group %(group)s is not supported" -msgstr "系统不支持读取组 %(group)s 中的选项 %(option)s 的缺省值。" - -msgid "Redelegation allowed for delegated by trust only" -msgstr "仅允许对“委派者”信任进行重新委派" - -#, python-format -msgid "" -"Remaining redelegation depth of %(redelegation_depth)d out of allowed range " -"of [0..%(max_count)d]" -msgstr "" -"其余重新委派深度 %(redelegation_depth)d 超出允许的范围 [0..%(max_count)d]" - -msgid "" -"Remove admin_crud_extension from the paste pipeline, the admin_crud " -"extension is now always available. Updatethe [pipeline:admin_api] section in " -"keystone-paste.ini accordingly, as it will be removed in the O release." -msgstr "" -"从粘贴管道移除 admin_crud_extension,admin_crud 扩展现在始终可用。对 " -"keystone-paste.ini 中的 [pipeline:admin_api] 节进行相应更新,因为它将会在 O " -"发行版中移除。" - -msgid "" -"Remove endpoint_filter_extension from the paste pipeline, the endpoint " -"filter extension is now always available. Update the [pipeline:api_v3] " -"section in keystone-paste.ini accordingly as it will be removed in the O " -"release." -msgstr "" -"从粘贴管道移除 endpoint_filter_extension,端点过滤器扩展现在始终可用。对 " -"keystone-paste.ini 中的 [pipeline:api_v3] 节进行相应更新,因为它将会在 O 发" -"行版中移除。" - -msgid "" -"Remove federation_extension from the paste pipeline, the federation " -"extension is now always available. Update the [pipeline:api_v3] section in " -"keystone-paste.ini accordingly, as it will be removed in the O release." -msgstr "" -"从粘贴管道移除 federation_extension,联合扩展现在始终可用。对 keystone-paste." -"ini 中的 [pipeline:api_v3] 节进行相应更新,因为它将会在 O 发行版中移除。" - -msgid "" -"Remove oauth1_extension from the paste pipeline, the oauth1 extension is now " -"always available. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"从粘贴管道移除 oauth1_extension,oauth1 扩展现在始终可用。对 keystone-paste." 
-"ini 中的 [pipeline:api_v3] 节进行相应更新,因为它将会在 O 发行版中移除。" - -msgid "" -"Remove revoke_extension from the paste pipeline, the revoke extension is now " -"always available. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"从粘贴管道移除 revoke_extension,撤销扩展现在始终可用。对 keystone-paste.ini " -"中的 [pipeline:api_v3] 节进行相应更新,因为它将会在 O 发行版中移除。" - -msgid "" -"Remove simple_cert from the paste pipeline, the PKI and PKIz token providers " -"are now deprecated and simple_cert was only used insupport of these token " -"providers. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"从粘贴管道移除 simple_cert,现在已不推荐使用 PKI 和 PKIz 令牌,simple_cert 仅" -"用于支持这些令牌提供程序。对 keystone-paste.ini 中的 [pipeline:api_v3] 节进行" -"相应更新,因为它将会在 O 发行版中移除。" - -msgid "" -"Remove user_crud_extension from the paste pipeline, the user_crud extension " -"is now always available. Updatethe [pipeline:public_api] section in keystone-" -"paste.ini accordingly, as it will be removed in the O release." -msgstr "" -"从粘贴管道移除 user_crud_extension,user_crud 扩展现在始终可用。对 keystone-" -"paste.ini 中的 [pipeline:admin_api] 节进行相应更新,因为它将会在 O 发行版中移" -"除。" - -msgid "Request Token does not have an authorizing user id" -msgstr "请求令牌没有授权用户标识" - -#, python-format -msgid "" -"Request attribute %(attribute)s must be less than or equal to %(size)i. The " -"server could not comply with the request because the attribute size is " -"invalid (too large). The client is assumed to be in error." 
-msgstr "" -"请求属性 %(attribute)s 必须小于或等于 %(size)i。服务器未能遵照请求,因为属性" -"大小无效(太大)。客户机被认为发生错误。" - -msgid "Request must have an origin query parameter" -msgstr "请求必须具有源查询参数" - -msgid "Request token is expired" -msgstr "请求令牌已过期" - -msgid "Request token not found" -msgstr "找不到请求令牌" - -msgid "Requested expiration time is more than redelegated trust can provide" -msgstr "请求的到期时间超过重新委派的信任可提供的到期时间" - -#, python-format -msgid "" -"Requested redelegation depth of %(requested_count)d is greater than allowed " -"%(max_count)d" -msgstr "请求的重新委派深度 %(requested_count)d 超过允许的 %(max_count)d" - -msgid "" -"Running keystone via eventlet is deprecated as of Kilo in favor of running " -"in a WSGI server (e.g. mod_wsgi). Support for keystone under eventlet will " -"be removed in the \"M\"-Release." -msgstr "" -"自 Kilo 开始,建议不要通过 eventlet 运行 keystone,改为在 WSGI 服务器(例如 " -"mod_wsgi)中运行。在“M”发行版中,将移除对在 eventlet 下运行 keystone 的支持。" - -msgid "Scoping to both domain and project is not allowed" -msgstr "不允许同时将作用域限定到域和项目" - -msgid "Scoping to both domain and trust is not allowed" -msgstr "不允许同时将作用域限定到域和信任" - -msgid "Scoping to both project and trust is not allowed" -msgstr "不允许同时将作用域限定到项目和信任" - -#, python-format -msgid "Service Provider %(sp)s is disabled" -msgstr "服务提供程序 %(sp)s 已禁用" - -msgid "Some of requested roles are not in redelegated trust" -msgstr "某些所请求角色未在重新委派的信任中" - -msgid "Specify a domain or project, not both" -msgstr "请指定域或项目,但不是同时指定这两者" - -msgid "Specify a user or group, not both" -msgstr "请指定用户或组,但不是同时指定这两者" - -msgid "Specify one of domain or project" -msgstr "请指定其中一个域或项目" - -msgid "Specify one of user or group" -msgstr "请指定其中一个用户或组" - -#, python-format -msgid "" -"String length exceeded.The length of string '%(string)s' exceeded the limit " -"of column %(type)s(CHAR(%(length)d))." -msgstr "" -"字符串长度过长。字符串“%(string)s”的长度超过列限制 %(type)s(字符" -"(%(length)d))。" - -msgid "Tenant name cannot contain reserved characters." 
-msgstr "租户名称不能包含保留字符。" - -#, python-format -msgid "" -"The %s extension has been moved into keystone core and as such its " -"migrations are maintained by the main keystone database control. Use the " -"command: keystone-manage db_sync" -msgstr "" -"%s 扩展已移至 keystone 核心,因此,其迁移由主 keystone 数据库控件维护。使用以" -"下命令:keystone-manage db_sync" - -msgid "" -"The 'expires_at' must not be before now. The server could not comply with " -"the request since it is either malformed or otherwise incorrect. The client " -"is assumed to be in error." -msgstr "" -"“expires_at”不得早于现在。服务器未能遵从请求,因为它的格式不正确,或者其他方" -"面不正确。客户机被认为发生错误。" - -msgid "The --all option cannot be used with the --domain-name option" -msgstr "--all 选项不能与 --domain-name 选项配合使用" - -#, python-format -msgid "The Keystone configuration file %(config_file)s could not be found." -msgstr "找不到 Keystone 配置文件 %(config_file)s。" - -#, python-format -msgid "" -"The Keystone domain-specific configuration has specified more than one SQL " -"driver (only one is permitted): %(source)s." -msgstr "" -"特定于 Keystone 域的配置指定了多个 SQL 驱动程序(仅允许指定一个):" -"%(source)s。" - -msgid "The action you have requested has not been implemented." -msgstr "暂未执行您请求的操作。" - -msgid "The authenticated user should match the trustor." -msgstr "认证用户应匹配信任者。" - -msgid "" -"The certificates you requested are not available. It is likely that this " -"server does not use PKI tokens otherwise this is the result of " -"misconfiguration." -msgstr "请求的证书不可用。可能此服务器未使用 PKI 令牌,或者这是因为配置错误。" - -msgid "The configured token provider does not support bind authentication." -msgstr "所配置的令牌提供程序不支持绑定认证。" - -msgid "The creation of projects acting as domains is not allowed in v2." -msgstr "不允许在 V2 中创建充当域的项目。" - -#, python-format -msgid "" -"The password length must be less than or equal to %(size)i. The server could " -"not comply with the request because the password is invalid." -msgstr "密码长度必须小于或等于 %(size)i。服务器未能遵照请求,因为密码无效。" - -msgid "The request you have made requires authentication." 
-msgstr "您的请求需要先授权。" - -msgid "The resource could not be found." -msgstr "找不到该资源。" - -msgid "" -"The revoke call must not have both domain_id and project_id. This is a bug " -"in the Keystone server. The current request is aborted." -msgstr "" -"撤销调用不能同时具有 domain_id 和 project_id。这是 Keystone 服务器中的错误。" -"当前请求已异常中止。" - -msgid "The service you have requested is no longer available on this server." -msgstr "在此服务器上,已请求的服务不再可用。" - -#, python-format -msgid "" -"The specified parent region %(parent_region_id)s would create a circular " -"region hierarchy." -msgstr "指定的父区域 %(parent_region_id)s 将创建循环区域层次结构。" - -#, python-format -msgid "" -"The value of group %(group)s specified in the config should be a dictionary " -"of options" -msgstr "在配置中指定的组 %(group)s 的值应该是选项的字典" - -msgid "There should not be any non-oauth parameters" -msgstr "不应该存在任何非 oauth 参数" - -#, python-format -msgid "This is not a recognized Fernet payload version: %s" -msgstr "这不是可识别的 Fernet 有效内容版本:%s" - -#, python-format -msgid "This is not a recognized Fernet token %s" -msgstr "这不是可识别的 Fernet 令牌 %s" - -msgid "" -"Timestamp not in expected format. The server could not comply with the " -"request since it is either malformed or otherwise incorrect. The client is " -"assumed to be in error." -msgstr "" -"时间戳记未采用所需格式。服务器未能遵照请求,因为它的格式或者其他方面不正确。" -"客户机被认为发生错误。" - -#, python-format -msgid "" -"To get a more detailed information on this error, re-run this command for " -"the specific domain, i.e.: keystone-manage domain_config_upload --domain-" -"name %s" -msgstr "" -"要获取有关此错误的更详细信息,请针对特定域重新运行此命令,即:keystone-" -"manage domain_config_upload --domain-name %s" - -msgid "Token belongs to another user" -msgstr "令牌属于另一用户" - -msgid "Token does not belong to specified tenant." -msgstr "令牌不属于指定的租户。" - -msgid "Token version is unrecognizable or unsupported." -msgstr "令牌版本不可识别或者不受支持。" - -msgid "Trustee has no delegated roles." -msgstr "托管人没有委派的角色。" - -msgid "Trustor is disabled." 
-msgstr "Trustor 已禁用" - -#, python-format -msgid "" -"Trying to update group %(group)s, so that, and only that, group must be " -"specified in the config" -msgstr "正在尝试更新组 %(group)s,因此仅存在以下要求:必须在配置中指定组" - -#, python-format -msgid "" -"Trying to update option %(option)s in group %(group)s, but config provided " -"contains option %(option_other)s instead" -msgstr "" -"正在尝试更新组 %(group)s 中的选项 %(option)s,但所提供配置反而包含选项 " -"%(option_other)s" - -#, python-format -msgid "" -"Trying to update option %(option)s in group %(group)s, so that, and only " -"that, option must be specified in the config" -msgstr "" -"正在尝试更新组 %(group)s 中的选项 %(option)s,因此仅存在以下要求:必须在配置" -"中指定选项" - -msgid "" -"Unable to access the keystone database, please check it is configured " -"correctly." -msgstr "无法访问 keystone 数据库,请检查它是否正确配置。" - -#, python-format -msgid "Unable to consume trust %(trust_id)s, unable to acquire lock." -msgstr "无法使用信任 %(trust_id)s,无法获取锁定。" - -#, python-format -msgid "" -"Unable to delete region %(region_id)s because it or its child regions have " -"associated endpoints." -msgstr "无法删除区域 %(region_id)s,因为它或它的子区域具有关联的端点。" - -msgid "Unable to downgrade schema" -msgstr "无法对模式进行降级" - -#, python-format -msgid "Unable to find valid groups while using mapping %(mapping_id)s" -msgstr "使用映射 %(mapping_id)s 时,找不到有效组" - -#, python-format -msgid "Unable to locate domain config directory: %s" -msgstr "找不到域配置目录:%s" - -#, python-format -msgid "Unable to lookup user %s" -msgstr "无法查找用户 %s" - -#, python-format -msgid "" -"Unable to reconcile identity attribute %(attribute)s as it has conflicting " -"values %(new)s and %(old)s" -msgstr "无法协调身份属性 %(attribute)s,因为它具有冲突值 %(new)s 和 %(old)s" - -#, python-format -msgid "" -"Unable to sign SAML assertion. It is likely that this server does not have " -"xmlsec1 installed, or this is the result of misconfiguration. Reason " -"%(reason)s" -msgstr "" -"无法对 SAML 断言进行签名。此服务器可能未安装 xmlsec1,或者这可能是由于配置错" -"误导致的。原因 %(reason)s" - -msgid "Unable to sign token." 
-msgstr "无法对令牌进行签名。" - -#, python-format -msgid "Unexpected assignment type encountered, %s" -msgstr "遇到意外的指派类型 %s" - -#, python-format -msgid "" -"Unexpected combination of grant attributes - User: %(user_id)s, Group: " -"%(group_id)s, Project: %(project_id)s, Domain: %(domain_id)s" -msgstr "" -"存在以下 grant 属性的意外组合 - 用户 %(user_id)s、组 %(group_id)s、项目 " -"%(project_id)s 和域 %(domain_id)s" - -#, python-format -msgid "Unexpected status requested for JSON Home response, %s" -msgstr "请求 JSON 主页响应时处于意外状态,%s" - -msgid "Unknown Target" -msgstr "目标未知" - -#, python-format -msgid "Unknown domain '%(name)s' specified by --domain-name" -msgstr "--domain-name 指定的“%(name)s”是未知域" - -#, python-format -msgid "Unknown token version %s" -msgstr "令牌版本 %s 未知" - -#, python-format -msgid "Unregistered dependency: %(name)s for %(targets)s" -msgstr "已撤销注册 %(targets)s 的依赖关系 %(name)s" - -msgid "Update of `domain_id` is not allowed." -msgstr "不允许更新“domain_id”。" - -msgid "Update of `is_domain` is not allowed." -msgstr "不允许更新“is_domain”。" - -msgid "Update of `parent_id` is not allowed." -msgstr "不允许更新“parent_id”。" - -msgid "Update of domain_id is only allowed for root projects." -msgstr "只允许更新根项目的 domain_id。" - -msgid "Update of domain_id of projects acting as domains is not allowed." -msgstr "不允许更新充当域的项目的 domain_id。" - -msgid "Use a project scoped token when attempting to create a SAML assertion" -msgstr "尝试创建 SAML 断言时,请使用项目范围的令牌" - -msgid "" -"Use of the identity driver config to automatically configure the same " -"assignment driver has been deprecated, in the \"O\" release, the assignment " -"driver will need to be expicitly configured if different than the default " -"(SQL)." 
-msgstr "" -"已不推荐使用标识驱动程序配置来自动配置同一分配驱动程序,在“O”发行版中,如果不" -"同于缺省值 (SQL),那么需要显式配置分配驱动程序。" - -#, python-format -msgid "User %(u_id)s is unauthorized for tenant %(t_id)s" -msgstr "用户 %(u_id)s 没有授权给租户 %(t_id)s" - -#, python-format -msgid "User %(user_id)s has no access to domain %(domain_id)s" -msgstr "用户 %(user_id)s 没有访问域 %(domain_id)s 的权限" - -#, python-format -msgid "User %(user_id)s has no access to project %(project_id)s" -msgstr "用户 %(user_id)s 没有访问项目 %(project_id)s 的权限" - -#, python-format -msgid "User %(user_id)s is already a member of group %(group_id)s" -msgstr "用户 %(user_id)s 已是组 %(group_id)s 的成员" - -#, python-format -msgid "User '%(user_id)s' not found in group '%(group_id)s'" -msgstr "在组“%(group_id)s”中找不到用户“%(user_id)s”" - -msgid "User IDs do not match" -msgstr "用户 ID 不匹配" - -msgid "" -"User auth cannot be built due to missing either user id, or user name with " -"domain id, or user name with domain name." -msgstr "" -"由于缺少用户标识、具有域的用户名或者具有域名的用户名,因此无法构建用户认证。" - -#, python-format -msgid "User is disabled: %s" -msgstr "用户已禁用:%s" - -msgid "User is not a member of the requested project" -msgstr "用户不是所请求项目的成员" - -msgid "User is not a trustee." -msgstr "用户不是受托人。" - -msgid "User not found" -msgstr "找不到用户" - -msgid "User not valid for tenant." -msgstr "用户做为租户是无效的。" - -msgid "User roles not supported: tenant_id required" -msgstr "用户角色不受支持:需要 tenant_id" - -#, python-format -msgid "User type %s not supported" -msgstr "用户类型 %s 不受支持" - -msgid "You are not authorized to perform the requested action." -msgstr "您无权执行请求的操作。" - -#, python-format -msgid "You are not authorized to perform the requested action: %(action)s" -msgstr "您无权执行请求的操作:%(action)s" - -msgid "" -"You have tried to create a resource using the admin token. As this token is " -"not within a domain you must explicitly include a domain for this resource " -"to belong to." -msgstr "" -"您已尝试使用管理员令牌创建资源。因为此令牌不在域中,所以您必须显式添加域以使" -"此资源成为其成员。" - -msgid "`key_mangler` functions must be callable." 
-msgstr "“key_mangler”函数必须可调用。" - -msgid "`key_mangler` option must be a function reference" -msgstr "“key_mangler”选项必须为函数引用" - -msgid "any options" -msgstr "任何选项" - -msgid "auth_type is not Negotiate" -msgstr "auth_type 不是“Negotiate”" - -msgid "authorizing user does not have role required" -msgstr "授权用户没有必需的角色" - -#, python-format -msgid "cannot create a project in a branch containing a disabled project: %s" -msgstr "无法在包含已禁用项目的分支中创建项目:%s" - -#, python-format -msgid "" -"cannot delete an enabled project acting as a domain. Please disable the " -"project %s first." -msgstr "无法删除充当域的已启用项目。请先禁用项目 %s。" - -#, python-format -msgid "group %(group)s" -msgstr "组 %(group)s" - -msgid "" -"idp_contact_type must be one of: [technical, other, support, administrative " -"or billing." -msgstr "" -"idp_contact_type 必须是下列其中一项:technical、other、support、" -"administrative 或 billing。" - -#, python-format -msgid "invalid date format %s" -msgstr "日期格式 %s 无效" - -#, python-format -msgid "" -"it is not permitted to have two projects acting as domains with the same " -"name: %s" -msgstr "不允许两个同名项目充当域:%s" - -#, python-format -msgid "" -"it is not permitted to have two projects within a domain with the same " -"name : %s" -msgstr "不允许一个域的两个项目具有相同名称:%s" - -msgid "only root projects are allowed to act as domains." -msgstr "只允许根项目充当域。" - -#, python-format -msgid "option %(option)s in group %(group)s" -msgstr "组 %(group)s 中的选项 %(option)s" - -msgid "provided consumer key does not match stored consumer key" -msgstr "提供的用户密钥与存储的用户密钥不匹配" - -msgid "provided request key does not match stored request key" -msgstr "提供的请求密钥与存储的请求密钥不匹配" - -msgid "provided verifier does not match stored verifier" -msgstr "提供的验证器与存储的验证器不匹配" - -msgid "remaining_uses must be a positive integer or null." 
-msgstr "remaining_uses 必须为正整数或 Null。" - -msgid "remaining_uses must not be set if redelegation is allowed" -msgstr "如果允许重新委派,那么不能设置 remaining_uses" - -#, python-format -msgid "" -"request to update group %(group)s, but config provided contains group " -"%(group_other)s instead" -msgstr "请求更新组 %(group)s,但所提供的配置反而包含组 %(group_other)s" - -msgid "rescope a scoped token" -msgstr "请重新确定带范围的令牌的范围" - -#, python-format -msgid "role %s is not defined" -msgstr "未定义角色 %s" - -msgid "scope.project.id must be specified if include_subtree is also specified" -msgstr "如果还指定了 include_subtree,那么必须指定 scope.project.id" - -#, python-format -msgid "tls_cacertdir %s not found or is not a directory" -msgstr "tls_cacertdir %s 未找到或者不是一个目录" - -#, python-format -msgid "tls_cacertfile %s not found or is not a file" -msgstr "tls_cacertfile %s 未找到或者不是一个文件" - -#, python-format -msgid "token reference must be a KeystoneToken type, got: %s" -msgstr "令牌引用必须为 KeystoneToken 类型,但收到:%s" - -msgid "" -"update of domain_id is deprecated as of Mitaka and will be removed in O." -msgstr "从 Mitaka 开始,已不推荐更新 domain_id,它将在 O 发行版中移除。" - -#, python-format -msgid "" -"validated expected to find %(param_name)r in function signature for " -"%(func_name)r." -msgstr "已验证期望在 %(func_name)r 的函数签名中查找 %(param_name)r" diff --git a/keystone-moon/keystone/locale/zh_TW/LC_MESSAGES/keystone-log-critical.po b/keystone-moon/keystone/locale/zh_TW/LC_MESSAGES/keystone-log-critical.po deleted file mode 100644 index 3c4e36e8..00000000 --- a/keystone-moon/keystone/locale/zh_TW/LC_MESSAGES/keystone-log-critical.po +++ /dev/null @@ -1,25 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# Jennifer , 2016. 
#zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2014-08-31 03:19+0000\n" -"Last-Translator: openstackjenkins \n" -"Language: zh-TW\n" -"Plural-Forms: nplurals=1; plural=0;\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Chinese (Taiwan)\n" - -#, python-format -msgid "Unable to open template file %s" -msgstr "無法開啟範本檔 %s" diff --git a/keystone-moon/keystone/locale/zh_TW/LC_MESSAGES/keystone.po b/keystone-moon/keystone/locale/zh_TW/LC_MESSAGES/keystone.po deleted file mode 100644 index 3f4a798e..00000000 --- a/keystone-moon/keystone/locale/zh_TW/LC_MESSAGES/keystone.po +++ /dev/null @@ -1,1455 +0,0 @@ -# Translations template for keystone. -# Copyright (C) 2015 OpenStack Foundation -# This file is distributed under the same license as the keystone project. -# -# Translators: -# Jennifer , 2016. 
#zanata -msgid "" -msgstr "" -"Project-Id-Version: keystone 9.0.1.dev10\n" -"Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" -"POT-Creation-Date: 2016-05-03 20:05+0000\n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"PO-Revision-Date: 2016-04-19 03:26+0000\n" -"Last-Translator: Jennifer \n" -"Language: zh-TW\n" -"Plural-Forms: nplurals=1; plural=0;\n" -"Generated-By: Babel 2.0\n" -"X-Generator: Zanata 3.7.3\n" -"Language-Team: Chinese (Taiwan)\n" - -#, python-format -msgid "%(detail)s" -msgstr "%(detail)s" - -#, python-format -msgid "%(driver)s is not supported driver version" -msgstr "%(driver)s 不是受支援的驅動程式版本" - -#, python-format -msgid "" -"%(entity)s name cannot contain the following reserved characters: %(chars)s" -msgstr "%(entity)s 名稱不能包含下列保留字元:%(chars)s" - -#, python-format -msgid "" -"%(event)s is not a valid notification event, must be one of: %(actions)s" -msgstr "%(event)s 不是有效的通知事件,必須是下列其中一個:%(actions)s" - -#, python-format -msgid "%(host)s is not a trusted dashboard host" -msgstr "%(host)s 不是授信儀表板主機" - -#, python-format -msgid "%(message)s %(amendment)s" -msgstr "%(message)s %(amendment)s" - -#, python-format -msgid "" -"%(mod_name)s doesn't provide database migrations. The migration repository " -"path at %(path)s doesn't exist or isn't a directory." -msgstr "" -"%(mod_name)s 未提供資料庫移轉。%(path)s 處的移轉儲存庫路徑不存在或者不是目" -"錄。" - -#, python-format -msgid "%(prior_role_id)s does not imply %(implied_role_id)s" -msgstr "%(prior_role_id)s 不暗示 %(implied_role_id)s" - -#, python-format -msgid "%(property_name)s cannot be less than %(min_length)s characters." -msgstr "%(property_name)s 不能少於 %(min_length)s 個字元。" - -#, python-format -msgid "%(property_name)s is not a %(display_expected_type)s" -msgstr "%(property_name)s 不是 %(display_expected_type)s" - -#, python-format -msgid "%(property_name)s should not be greater than %(max_length)s characters." 
-msgstr "%(property_name)s 不應超過 %(max_length)s 個字元。" - -#, python-format -msgid "%(role_id)s cannot be an implied roles" -msgstr "%(role_id)s 不能是隱含角色" - -#, python-format -msgid "%s cannot be empty." -msgstr "%s 不能是空的。" - -#, python-format -msgid "%s extension does not exist." -msgstr "%s 延伸不存在。" - -#, python-format -msgid "%s field is required and cannot be empty" -msgstr "%s 欄位是必要欄位,因此不能是空的" - -#, python-format -msgid "%s field(s) cannot be empty" -msgstr "%s 欄位不能是空的" - -#, python-format -msgid "" -"%s for the LDAP identity backend has been deprecated in the Mitaka release " -"in favor of read-only identity LDAP access. It will be removed in the \"O\" " -"release." -msgstr "" -"LDAP 身分後端的 %s 在 Mitaka 版本中已遭到淘汰,以支援唯讀身分 LDAP 存取。它將" -"在 \"O\" 版本中予以移除。" - -msgid "(Disable insecure_debug mode to suppress these details.)" -msgstr "(停用 insecure_debug 模式,以暫停這些詳細資料。)" - -msgid "--all option cannot be mixed with other options" -msgstr "--all 選項不能與其他選項混合" - -msgid "A project-scoped token is required to produce a service catalog." -msgstr "需要專案範圍的記號來產生服務型錄。" - -msgid "Access token is expired" -msgstr "存取記號已過期" - -msgid "Access token not found" -msgstr "找不到存取記號" - -msgid "Additional authentications steps required." -msgstr "需要其他鑑別步驟。" - -msgid "An unexpected error occurred when retrieving domain configs" -msgstr "擷取網域配置時發生非預期的錯誤" - -#, python-format -msgid "An unexpected error occurred when trying to store %s" -msgstr "嘗試儲存 %s 時發生非預期的錯誤" - -msgid "An unexpected error prevented the server from fulfilling your request." -msgstr "發生非預期的錯誤,造成伺服器無法履行要求。" - -#, python-format -msgid "" -"An unexpected error prevented the server from fulfilling your request: " -"%(exception)s" -msgstr "發生非預期的錯誤,造成伺服器無法履行要求:%(exception)s" - -msgid "An unhandled exception has occurred: Could not find metadata." 
-msgstr "發生未處理的異常狀況:找不到 meta 資料。" - -msgid "At least one option must be provided" -msgstr "必須提供至少一個選項" - -msgid "At least one option must be provided, use either --all or --domain-name" -msgstr "必須提供至少一個選項,請使用 --all 或 --domain-name" - -msgid "At least one role should be specified." -msgstr "應該至少指定一個角色。" - -#, python-format -msgid "" -"Attempted automatic driver selection for assignment based upon " -"[identity]\\driver option failed since driver %s is not found. Set " -"[assignment]/driver to a valid driver in keystone config." -msgstr "" -"針對基於 [identity]\\driver 選項的指派,嘗試自動選取驅動程式失敗,因為找不到" -"驅動程式 %s。請在 Keystone 配置中,將 [assignment]/driver 設為有效的驅動程" -"式。" - -msgid "Attempted to authenticate with an unsupported method." -msgstr "已嘗試使用不支援的方法進行鑑別。" - -msgid "" -"Attempting to use OS-FEDERATION token with V2 Identity Service, use V3 " -"Authentication" -msgstr "" -"正在嘗試使用具有第 2 版身分服務的 OS-FEDERATION 記號,請使用第 3 版鑑別" - -msgid "Authentication plugin error." -msgstr "鑑別外掛程式錯誤。" - -#, python-format -msgid "" -"Backend `%(backend)s` is not a valid memcached backend. Valid backends: " -"%(backend_list)s" -msgstr "" -"後端 `%(backend)s` 不是有效的 Memcached 後端。有效後端:%(backend_list)s" - -msgid "Cannot authorize a request token with a token issued via delegation." -msgstr "無法對含有透過委派發出之記號的要求記號進行授權。" - -#, python-format -msgid "Cannot change %(option_name)s %(attr)s" -msgstr "無法變更 %(option_name)s %(attr)s" - -msgid "Cannot change Domain ID" -msgstr "無法變更網域 ID" - -msgid "Cannot change user ID" -msgstr "無法變更使用者 ID" - -msgid "Cannot change user name" -msgstr "無法變更使用者名稱" - -#, python-format -msgid "Cannot create an endpoint with an invalid URL: %(url)s" -msgstr "無法使用無效 URL %(url)s 來建立端點" - -#, python-format -msgid "Cannot create project with parent: %(project_id)s" -msgstr "無法建立具有母項的專案:%(project_id)s" - -#, python-format -msgid "" -"Cannot create project, since it specifies its owner as domain %(domain_id)s, " -"but specifies a parent in a different domain (%(parent_domain_id)s)." 
-msgstr "" -"無法建立專案,因為它指定自己的擁有者作為網域 %(domain_id)s,但卻指定了位於不" -"同網域 (%(parent_domain_id)s) 中的母項。" - -#, python-format -msgid "" -"Cannot create project, since its parent (%(domain_id)s) is acting as a " -"domain, but project's specified parent_id (%(parent_id)s) does not match " -"this domain_id." -msgstr "" -"無法建立專案,因為它的母項 (%(domain_id)s) 正在充當網域,但專案的指定 " -"parent_id (%(parent_id)s) 與此 domain_id 不符。" - -msgid "Cannot delete a domain that is enabled, please disable it first." -msgstr "無法刪除已啟用的網域,請先停用該網域。" - -#, python-format -msgid "" -"Cannot delete project %(project_id)s since its subtree contains enabled " -"projects." -msgstr "無法刪除專案 %(project_id)s,因為它的子樹狀結構包含已啟用的專案。" - -#, python-format -msgid "" -"Cannot delete the project %s since it is not a leaf in the hierarchy. Use " -"the cascade option if you want to delete a whole subtree." -msgstr "" -"無法刪除專案 %s,因為它不是階層中的葉節點。如果要刪除整個子樹狀結構,請使用重" -"疊顯示選項。" - -#, python-format -msgid "" -"Cannot disable project %(project_id)s since its subtree contains enabled " -"projects." -msgstr "無法停用專案 %(project_id)s,因為它的子樹狀結構包含已啟用的專案。" - -#, python-format -msgid "Cannot enable project %s since it has disabled parents" -msgstr "無法啟用專案 %s,因為它具有已停用的母項" - -msgid "Cannot list assignments sourced from groups and filtered by user ID." -msgstr "無法列出由群組提供且依使用者 ID 進行過濾的指派。" - -msgid "Cannot list request tokens with a token issued via delegation." -msgstr "無法列出含有透過委派發出之記號的要求記號。" - -#, python-format -msgid "Cannot open certificate %(cert_file)s. Reason: %(reason)s" -msgstr "無法開啟憑證 %(cert_file)s。原因:%(reason)s" - -#, python-format -msgid "Cannot remove role that has not been granted, %s" -msgstr "無法移除尚未授權的角色,%s" - -msgid "" -"Cannot truncate a driver call without hints list as first parameter after " -"self " -msgstr "如果提示清單不是 self 後面的第一個參數,則無法截斷驅動程式呼叫" - -msgid "Cannot update domain_id of a project that has children." -msgstr "無法更新包含子項之專案的 domain_id。" - -msgid "" -"Cannot use parents_as_list and parents_as_ids query params at the same time." 
-msgstr "無法同時使用 parents_as_list 與 parents_as_ids 查詢參數。" - -msgid "" -"Cannot use subtree_as_list and subtree_as_ids query params at the same time." -msgstr "無法同時使用 subtree_as_list 與 subtree_as_ids 查詢參數。" - -msgid "Cascade update is only allowed for enabled attribute." -msgstr "只容許對已啟用的屬性進行重疊顯示更新。" - -msgid "" -"Combining effective and group filter will always result in an empty list." -msgstr "結合作用中的過濾器和群組過濾器將一律導致空清單。" - -msgid "" -"Combining effective, domain and inherited filters will always result in an " -"empty list." -msgstr "結合作用中的過濾器、網域過濾器及繼承的過濾器將一律導致空清單。" - -#, python-format -msgid "Config API entity at /domains/%s/config" -msgstr "在 /domains/%s/config 處配置 API 實體" - -#, python-format -msgid "Conflict occurred attempting to store %(type)s - %(details)s" -msgstr "嘗試儲存 %(type)s 時發生衝突 - %(details)s" - -#, python-format -msgid "Conflicting region IDs specified: \"%(url_id)s\" != \"%(ref_id)s\"" -msgstr "指定了相衝突的區域 ID:\"%(url_id)s\" != \"%(ref_id)s\"" - -msgid "Consumer not found" -msgstr "找不到消費者" - -#, python-format -msgid "" -"Could not change immutable attribute(s) '%(attributes)s' in target %(target)s" -msgstr "無法變更目標 %(target)s 中固定不變的屬性 '%(attributes)s'" - -#, python-format -msgid "" -"Could not determine Identity Provider ID. The configuration option " -"%(issuer_attribute)s was not found in the request environment." 
-msgstr "" -"無法判定身分提供者 ID。在要求環境中,找不到配置選項 %(issuer_attribute)s。" - -#, python-format -msgid "" -"Could not find %(group_or_option)s in domain configuration for domain " -"%(domain_id)s" -msgstr "在下列網域的網域配置中找不到 %(group_or_option)s:%(domain_id)s" - -#, python-format -msgid "Could not find Endpoint Group: %(endpoint_group_id)s" -msgstr "找不到端點群組:%(endpoint_group_id)s" - -msgid "Could not find Identity Provider identifier in environment" -msgstr "在環境中找不到身分提供者 ID" - -#, python-format -msgid "Could not find Identity Provider: %(idp_id)s" -msgstr "找不到身分提供者:%(idp_id)s" - -#, python-format -msgid "Could not find Service Provider: %(sp_id)s" -msgstr "找不到服務提供者:%(sp_id)s" - -#, python-format -msgid "Could not find credential: %(credential_id)s" -msgstr "找不到認證:%(credential_id)s" - -#, python-format -msgid "Could not find domain: %(domain_id)s" -msgstr "找不到網域:%(domain_id)s" - -#, python-format -msgid "Could not find endpoint: %(endpoint_id)s" -msgstr "找不到端點:%(endpoint_id)s" - -#, python-format -msgid "" -"Could not find federated protocol %(protocol_id)s for Identity Provider: " -"%(idp_id)s" -msgstr "找不到下列身分提供者的聯合通訊協定 %(protocol_id)s:%(idp_id)s" - -#, python-format -msgid "Could not find group: %(group_id)s" -msgstr "找不到群組:%(group_id)s" - -#, python-format -msgid "Could not find mapping: %(mapping_id)s" -msgstr "找不到對映:%(mapping_id)s" - -msgid "Could not find policy association" -msgstr "找不到原則關聯" - -#, python-format -msgid "Could not find policy: %(policy_id)s" -msgstr "找不到原則:%(policy_id)s" - -#, python-format -msgid "Could not find project: %(project_id)s" -msgstr "找不到專案:%(project_id)s" - -#, python-format -msgid "Could not find region: %(region_id)s" -msgstr "找不到區域:%(region_id)s" - -#, python-format -msgid "" -"Could not find role assignment with role: %(role_id)s, user or group: " -"%(actor_id)s, project or domain: %(target_id)s" -msgstr "" -"找不到具有角色 %(role_id)s、使用者或群組 %(actor_id)s、專案或網域 " -"%(target_id)s 的角色指派" - -#, python-format -msgid "Could not find role: %(role_id)s" 
-msgstr "找不到角色:%(role_id)s" - -#, python-format -msgid "Could not find service: %(service_id)s" -msgstr "找不到服務:%(service_id)s" - -#, python-format -msgid "Could not find token: %(token_id)s" -msgstr "找不到記號:%(token_id)s" - -#, python-format -msgid "Could not find trust: %(trust_id)s" -msgstr "找不到信任:%(trust_id)s" - -#, python-format -msgid "Could not find user: %(user_id)s" -msgstr "找不到使用者:%(user_id)s" - -#, python-format -msgid "Could not find version: %(version)s" -msgstr "找不到版本:%(version)s" - -#, python-format -msgid "Could not find: %(target)s" -msgstr "找不到:%(target)s" - -msgid "" -"Could not map any federated user properties to identity values. Check debug " -"logs or the mapping used for additional details." -msgstr "" -"無法將任何聯合使用者內容對映至身分值。如需其他詳細資料,請檢查除錯日誌或使用" -"的對映。" - -msgid "" -"Could not map user while setting ephemeral user identity. Either mapping " -"rules must specify user id/name or REMOTE_USER environment variable must be " -"set." -msgstr "" -"設定暫時使用者身分時,無法對映使用者。對映規則必須指定使用者 ID/名稱,或者必" -"須設定 REMOTE_USER 環境變數。" - -msgid "Could not validate the access token" -msgstr "無法驗證存取記號" - -msgid "Credential belongs to another user" -msgstr "認證屬於另一個使用者" - -msgid "Credential signature mismatch" -msgstr "認證簽章不符" - -#, python-format -msgid "" -"Direct import of auth plugin %(name)r is deprecated as of Liberty in favor " -"of its entrypoint from %(namespace)r and may be removed in N." -msgstr "" -"不建議直接匯入鑑別外掛程式 %(name)r,因為 Liberty 支援它在 %(namespace)r 中的" -"進入點且可能在 N 中予以移除。" - -#, python-format -msgid "" -"Direct import of driver %(name)r is deprecated as of Liberty in favor of its " -"entrypoint from %(namespace)r and may be removed in N." -msgstr "" -"不建議直接匯入驅動程式 %(name)r,因為 Liberty 支援它在 %(namespace)r 中的進入" -"點且可能在 N 中予以移除。" - -msgid "" -"Disabling an entity where the 'enable' attribute is ignored by configuration." 
-msgstr "正在停用配置已忽略其 'enable' 屬性的實體。" - -#, python-format -msgid "Domain (%s)" -msgstr "網域 (%s)" - -#, python-format -msgid "Domain cannot be named %s" -msgstr "無法將網域命名為 %s" - -#, python-format -msgid "Domain cannot have ID %s" -msgstr "網域不能具有 ID %s" - -#, python-format -msgid "Domain is disabled: %s" -msgstr "已停用網域:%s" - -msgid "Domain name cannot contain reserved characters." -msgstr "網域名稱不能包含保留字元。" - -msgid "Domain scoped token is not supported" -msgstr "不支援網域範圍的記號" - -msgid "Domain specific roles are not supported in the V8 role driver" -msgstr "網域專屬角色在第 8 版角色驅動程式中不受支援" - -#, python-format -msgid "" -"Domain: %(domain)s already has a configuration defined - ignoring file: " -"%(file)s." -msgstr "網域 %(domain)s 已定義配置 - 正在忽略檔案 %(file)s。" - -msgid "Duplicate Entry" -msgstr "重複的項目" - -#, python-format -msgid "Duplicate ID, %s." -msgstr "重複的 ID,%s。" - -#, python-format -msgid "Duplicate entry: %s" -msgstr "重複的項目:%s" - -#, python-format -msgid "Duplicate name, %s." -msgstr "重複的名稱,%s。" - -#, python-format -msgid "Duplicate remote ID: %s" -msgstr "重複的遠端 ID:%s" - -msgid "EC2 access key not found." -msgstr "找不到 EC2 存取金鑰。" - -msgid "EC2 signature not supplied." -msgstr "未提供 EC2 簽章。" - -msgid "" -"Either --bootstrap-password argument or OS_BOOTSTRAP_PASSWORD must be set." -msgstr "必須設定 --bootstrap-password 引數或 OS_BOOTSTRAP_PASSWORD。" - -msgid "Enabled field must be a boolean" -msgstr "「已啟用」欄位必須是布林值" - -msgid "Enabled field should be a boolean" -msgstr "「已啟用」欄位應該是布林值" - -#, python-format -msgid "Endpoint %(endpoint_id)s not found in project %(project_id)s" -msgstr "在專案 %(project_id)s 中找不到端點 %(endpoint_id)s" - -msgid "Endpoint Group Project Association not found" -msgstr "找不到端點群組專案關聯" - -msgid "Ensure configuration option idp_entity_id is set." -msgstr "請確保已設定配置選項 idp_entity_id。" - -msgid "Ensure configuration option idp_sso_endpoint is set." 
-msgstr "請確保已設定配置選項 idp_sso_endpoint。" - -#, python-format -msgid "" -"Error parsing configuration file for domain: %(domain)s, file: %(file)s." -msgstr "剖析網域 %(domain)s 的配置檔時發生錯誤,檔案:%(file)s。" - -#, python-format -msgid "Error while opening file %(path)s: %(err)s" -msgstr "開啟檔案 %(path)s 時發生錯誤:%(err)s" - -#, python-format -msgid "Error while parsing line: '%(line)s': %(err)s" -msgstr "剖析行 '%(line)s' 時發生錯誤:%(err)s" - -#, python-format -msgid "Error while parsing rules %(path)s: %(err)s" -msgstr "剖析規則 %(path)s 時發生錯誤:%(err)s" - -#, python-format -msgid "Error while reading metadata file, %(reason)s" -msgstr "讀取 meta 資料檔時發生錯誤,%(reason)s" - -#, python-format -msgid "" -"Exceeded attempts to register domain %(domain)s to use the SQL driver, the " -"last domain that appears to have had it is %(last_domain)s, giving up" -msgstr "" -"已超過嘗試登錄網域 %(domain)s 以使用 SQL 驅動程式的次數,似乎已經具有它的最後" -"一個網域是 %(last_domain)s,將放棄" - -#, python-format -msgid "Expected dict or list: %s" -msgstr "預期字典或清單:%s" - -msgid "" -"Expected signing certificates are not available on the server. Please check " -"Keystone configuration." -msgstr "在伺服器上,無法使用預期的簽署憑證。請檢查 Keystone 配置。" - -#, python-format -msgid "" -"Expecting to find %(attribute)s in %(target)s - the server could not comply " -"with the request since it is either malformed or otherwise incorrect. The " -"client is assumed to be in error." -msgstr "" -"預期在 %(target)s 中找到 %(attribute)s - 伺服器無法遵守要求,因為它的形態異" -"常,或者在其他方面發生錯誤。系統會假定用戶端處於錯誤狀態。" - -#, python-format -msgid "Failed to start the %(name)s server" -msgstr "無法啟動 %(name)s 伺服器" - -msgid "Failed to validate token" -msgstr "無法驗證記號" - -msgid "Federation token is expired" -msgstr "聯合記號過期" - -#, python-format -msgid "" -"Field \"remaining_uses\" is set to %(value)s while it must not be set in " -"order to redelegate a trust" -msgstr "" -"欄位 \"remaining_uses\" 設定為 %(value)s,但為了重新委派信任,不得設定該欄位" - -msgid "Found invalid token: scoped to both project and domain." 
-msgstr "找到無效記號:已將範圍限定為專案及網域。" - -#, python-format -msgid "Group %s not found in config" -msgstr "在配置中找不到群組 %s" - -#, python-format -msgid "Group %(group)s is not supported for domain specific configurations" -msgstr "網域專屬配置不支援群組 %(group)s" - -#, python-format -msgid "" -"Group %(group_id)s returned by mapping %(mapping_id)s was not found in the " -"backend." -msgstr "在後端找不到對映 %(mapping_id)s 所傳回的群組 %(group_id)s。" - -#, python-format -msgid "" -"Group membership across backend boundaries is not allowed, group in question " -"is %(group_id)s, user is %(user_id)s" -msgstr "" -"不容許跨後端界限的群組成員資格,有問題的群組為 %(group_id)s,使用者為 " -"%(user_id)s" - -#, python-format -msgid "ID attribute %(id_attr)s not found in LDAP object %(dn)s" -msgstr "在 LDAP 物件 %(dn)s 中找不到 ID 屬性 %(id_attr)s" - -#, python-format -msgid "Identity Provider %(idp)s is disabled" -msgstr "已停用身分提供者 %(idp)s" - -msgid "" -"Incoming identity provider identifier not included among the accepted " -"identifiers." -msgstr "送入的身分提供者 ID 未包括在接受的 ID 中。" - -msgid "Invalid EC2 signature." -msgstr "無效的 EC2 簽章。" - -#, python-format -msgid "Invalid LDAP TLS certs option: %(option)s. Choose one of: %(options)s" -msgstr "無效的 LDAP TLS 憑證選項:%(option)s。請選擇下列其中一個:%(options)s" - -#, python-format -msgid "Invalid LDAP TLS_AVAIL option: %s. TLS not available" -msgstr "無效的 LDAP TLS_AVAIL 選項:%s。無法使用 TLS" - -#, python-format -msgid "Invalid LDAP deref option: %(option)s. Choose one of: %(options)s" -msgstr "無效的 LDAP deref 選項:%(option)s。請選擇下列其中一個:%(options)s" - -#, python-format -msgid "Invalid LDAP scope: %(scope)s. 
Choose one of: %(options)s" -msgstr "無效的 LDAP 範圍:%(scope)s。請選擇下列其中一個:%(options)s" - -msgid "Invalid TLS / LDAPS combination" -msgstr "無效的 TLS/LDAPS 組合" - -#, python-format -msgid "Invalid audit info data type: %(data)s (%(type)s)" -msgstr "審核資訊資料類型無效:%(data)s (%(type)s)" - -msgid "Invalid blob in credential" -msgstr "認證中的二進位大型物件無效" - -#, python-format -msgid "" -"Invalid domain name: %(domain)s found in config file name: %(file)s - " -"ignoring this file." -msgstr "" -"在配置檔名稱 %(file)s 中找到的網域名稱 %(domain)s 無效 - 正在忽略此檔案。" - -#, python-format -msgid "Invalid domain specific configuration: %(reason)s" -msgstr "網域專屬配置無效:%(reason)s" - -#, python-format -msgid "Invalid input for field '%(path)s'. The value is '%(value)s'." -msgstr "欄位 '%(path)s' 的輸入無效。值為 '%(value)s'。" - -msgid "Invalid limit value" -msgstr "無效的限制值" - -#, python-format -msgid "" -"Invalid mix of entities for policy association - only Endpoint, Service or " -"Region+Service allowed. Request was - Endpoint: %(endpoint_id)s, Service: " -"%(service_id)s, Region: %(region_id)s" -msgstr "" -"原則關聯的混合實體無效 - 僅容許「端點」、「服務」或「區域+服務」。要求為 -" -"「端點」:%(endpoint_id)s,「服務」:%(service_id)s,「區域」:%(region_id)s" - -#, python-format -msgid "" -"Invalid rule: %(identity_value)s. Both 'groups' and 'domain' keywords must " -"be specified." -msgstr "規則 %(identity_value)s 無效。必須指定 'groups' 及 'domain' 關鍵字。" - -msgid "Invalid signature" -msgstr "無效的簽章" - -msgid "Invalid user / password" -msgstr "無效的使用者/密碼" - -msgid "Invalid username or TOTP passcode" -msgstr "使用者名稱或 TOTP 密碼無效" - -msgid "Invalid username or password" -msgstr "使用者名稱或密碼無效" - -#, python-format -msgid "KVS region %s is already configured. Cannot reconfigure." 
-msgstr "KVS 區域 %s 已配置。無法重新配置。" - -#, python-format -msgid "Key Value Store not configured: %s" -msgstr "未配置「金鑰值儲存庫」:%s" - -#, python-format -msgid "LDAP %s create" -msgstr "LDAP %s 建立" - -#, python-format -msgid "LDAP %s delete" -msgstr "LDAP %s 刪除" - -#, python-format -msgid "LDAP %s update" -msgstr "LDAP %s 更新" - -msgid "" -"Length of transformable resource id > 64, which is max allowed characters" -msgstr "可轉換資源 ID 的長度大於 64(這是所容許的字元數目上限)" - -#, python-format -msgid "" -"Local section in mapping %(mapping_id)s refers to a remote match that " -"doesn't exist (e.g. {0} in a local section)." -msgstr "" -"對映 %(mapping_id)s 中的本端區段參照了一個不存在的遠端相符項(例如,本端區段" -"中的 '{0}')。" - -#, python-format -msgid "Lock Timeout occurred for key, %(target)s" -msgstr "金鑰 %(target)s 發生「鎖定逾時」" - -#, python-format -msgid "Lock key must match target key: %(lock)s != %(target)s" -msgstr "鎖定金鑰必須與目標金鑰相符:%(lock)s != %(target)s" - -#, python-format -msgid "Malformed endpoint URL (%(endpoint)s), see ERROR log for details." -msgstr "端點 URL (%(endpoint)s) 的形態異常,請參閱錯誤日誌以取得詳細資料。" - -msgid "Marker could not be found" -msgstr "找不到標記" - -#, python-format -msgid "Max hierarchy depth reached for %s branch." -msgstr "已達到 %s 分支的階層深度上限。" - -#, python-format -msgid "Maximum lock attempts on %s occurred." -msgstr "已達到 %s 的鎖定嘗試次數上限。" - -#, python-format -msgid "Member %(member)s is already a member of group %(group)s" -msgstr "成員 %(member)s 已是群組 %(group)s 的成員" - -#, python-format -msgid "Method not callable: %s" -msgstr "方法不可呼叫:%s" - -msgid "Missing entity ID from environment" -msgstr "環境中遺漏了實體 ID" - -msgid "" -"Modifying \"redelegation_count\" upon redelegation is forbidden. Omitting " -"this parameter is advised." -msgstr "禁止在重新委派時修改 \"redelegation_count\"。建議省略此參數。" - -msgid "Multiple domains are not supported" -msgstr "不支援多個網域" - -msgid "Must be called within an active lock context." 
-msgstr "必須在作用中鎖定環境定義內予以呼叫。" - -msgid "Must specify either domain or project" -msgstr "必須指定網域或專案" - -msgid "Name field is required and cannot be empty" -msgstr "「名稱」欄位是必要欄位,因此不能是空的" - -msgid "Neither Project Domain ID nor Project Domain Name was provided." -msgstr "既未提供「專案網域 ID」,也未提供「專案網域名稱」。" - -msgid "" -"No Authorization headers found, cannot proceed with OAuth related calls, if " -"running under HTTPd or Apache, ensure WSGIPassAuthorization is set to On." -msgstr "" -"找不到授權標頭,無法繼續進行 OAuth 相關呼叫,如果在 HTTPd 或 Apache 下執行," -"請確保 WSGIPassAuthorization 設定為 On。" - -msgid "No authenticated user" -msgstr "沒有已鑑別的使用者" - -msgid "" -"No encryption keys found; run keystone-manage fernet_setup to bootstrap one." -msgstr "找不到加密金鑰;請執行 keystone-manage fernet_setup 以引導一個。" - -msgid "No options specified" -msgstr "未指定選項" - -#, python-format -msgid "No policy is associated with endpoint %(endpoint_id)s." -msgstr "沒有原則與端點 %(endpoint_id)s 相關聯。" - -#, python-format -msgid "No remaining uses for trust: %(trust_id)s" -msgstr "沒有信任 %(trust_id)s 的剩餘使用情形" - -msgid "No token in the request" -msgstr "要求中沒有記號" - -msgid "Non-default domain is not supported" -msgstr "不支援非預設網域" - -msgid "One of the trust agents is disabled or deleted" -msgstr "已停用或刪除其中一個信任代理程式" - -#, python-format -msgid "" -"Option %(option)s found with no group specified while checking domain " -"configuration request" -msgstr "檢查網域配置要求時,發現選項 %(option)s 未指定任何群組" - -#, python-format -msgid "" -"Option %(option)s in group %(group)s is not supported for domain specific " -"configurations" -msgstr "網域專屬配置不支援群組 %(group)s 中的選項 %(option)s" - -#, python-format -msgid "Project (%s)" -msgstr "專案 (%s)" - -#, python-format -msgid "Project ID not found: %(t_id)s" -msgstr "找不到專案 ID:%(t_id)s" - -msgid "Project field is required and cannot be empty." -msgstr "「專案」欄位是必要的,因此不能是空的。" - -#, python-format -msgid "Project is disabled: %s" -msgstr "已停用專案:%s" - -msgid "Project name cannot contain reserved characters." 
-msgstr "專案名稱不能包含保留字元。" - -msgid "Query string is not UTF-8 encoded" -msgstr "查詢字串未使用 UTF-8 進行編碼" - -#, python-format -msgid "" -"Reading the default for option %(option)s in group %(group)s is not supported" -msgstr "不支援讀取群組 %(group)s 中選項 %(option)s 的預設值" - -msgid "Redelegation allowed for delegated by trust only" -msgstr "僅委派為信任時,才容許重新委派" - -#, python-format -msgid "" -"Remaining redelegation depth of %(redelegation_depth)d out of allowed range " -"of [0..%(max_count)d]" -msgstr "" -"剩餘的重新委派深度 %(redelegation_depth)d 超出容許的範圍 [0..%(max_count)d]" - -msgid "" -"Remove admin_crud_extension from the paste pipeline, the admin_crud " -"extension is now always available. Updatethe [pipeline:admin_api] section in " -"keystone-paste.ini accordingly, as it will be removed in the O release." -msgstr "" -"從貼上 Pipeline 中移除 admin_crud_extension,admin_crud 延伸現在將一律可用。" -"相應地更新 keystone-paste.ini 中的 [pipeline:admin_api] 區段,因為它在 O 版本" -"中將予以移除。" - -msgid "" -"Remove endpoint_filter_extension from the paste pipeline, the endpoint " -"filter extension is now always available. Update the [pipeline:api_v3] " -"section in keystone-paste.ini accordingly as it will be removed in the O " -"release." -msgstr "" -"從貼上 Pipeline 中移除 endpoint_filter_extension,端點過濾器延伸現在將一律可" -"用。相應地更新 keystone-paste.ini 中的 [pipeline:api_v3] 區段,因為它在 O 版" -"本中將予以移除。" - -msgid "" -"Remove federation_extension from the paste pipeline, the federation " -"extension is now always available. Update the [pipeline:api_v3] section in " -"keystone-paste.ini accordingly, as it will be removed in the O release." -msgstr "" -"從貼上 Pipeline 中移除 federation_extension,聯合延伸現在將一律可用。相應地更" -"新 keystone-paste.ini 中的 [pipeline:api_v3] 區段,因為它在 O 版本中將予以移" -"除。" - -msgid "" -"Remove oauth1_extension from the paste pipeline, the oauth1 extension is now " -"always available. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." 
-msgstr "" -"從貼上 Pipeline 中移除 oauth1_extension,oauth1 延伸現在將一律可用。相應地更" -"新 keystone-paste.ini 中的 [pipeline:api_v3] 區段,因為它在 O 版本中將予以移" -"除。" - -msgid "" -"Remove revoke_extension from the paste pipeline, the revoke extension is now " -"always available. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"從貼上 Pipeline 中移除 revoke_extension,撤銷延伸現在將一律可用。相應地更新 " -"keystone-paste.ini 中的 [pipeline:api_v3] 區段,因為它在 O 版本中將予以移除。" - -msgid "" -"Remove simple_cert from the paste pipeline, the PKI and PKIz token providers " -"are now deprecated and simple_cert was only used insupport of these token " -"providers. Update the [pipeline:api_v3] section in keystone-paste.ini " -"accordingly, as it will be removed in the O release." -msgstr "" -"從貼上 Pipeline 中移除 simple_cert,PKI 和 PKIz 記號提供者現在已遭到淘汰,並" -"且使用 simple_cert 的目的只是為了支援這些記號提供者。相應地更新 keystone-" -"paste.ini 中的 [pipeline:api_v3] 區段,因為它在 O 版本中將予以移除。" - -msgid "" -"Remove user_crud_extension from the paste pipeline, the user_crud extension " -"is now always available. Updatethe [pipeline:public_api] section in keystone-" -"paste.ini accordingly, as it will be removed in the O release." -msgstr "" -"從貼上 Pipeline 中移除 user_crud_extension,user_crud 延伸現在將一律可用。相" -"應地更新 keystone-paste.ini 中的 [pipeline:public_api] 區段,因為它在 O 版本" -"中將予以移除。" - -msgid "Request Token does not have an authorizing user id" -msgstr "要求記號不具有授權使用者 ID" - -#, python-format -msgid "" -"Request attribute %(attribute)s must be less than or equal to %(size)i. The " -"server could not comply with the request because the attribute size is " -"invalid (too large). The client is assumed to be in error." 
-msgstr "" -"要求屬性 %(attribute)s 必須小於或等於 %(size)i。伺服器無法遵守要求,因為屬性" -"大小無效(太大)。系統會假定用戶端處於錯誤狀態。" - -msgid "Request must have an origin query parameter" -msgstr "要求必須具有原始查詢參數" - -msgid "Request token is expired" -msgstr "要求記號已過期" - -msgid "Request token not found" -msgstr "找不到要求記號" - -msgid "Requested expiration time is more than redelegated trust can provide" -msgstr "所要求的有效期限超過重新委派之信任可提供的有效期限" - -#, python-format -msgid "" -"Requested redelegation depth of %(requested_count)d is greater than allowed " -"%(max_count)d" -msgstr "所要求的重新委派深度 %(requested_count)d 大於容許的 %(max_count)d" - -msgid "" -"Running keystone via eventlet is deprecated as of Kilo in favor of running " -"in a WSGI server (e.g. mod_wsgi). Support for keystone under eventlet will " -"be removed in the \"M\"-Release." -msgstr "" -"透過 eventlet 執行 Keystone 這一做法已遭到淘汰,因為 Kilo 支援在 WSGI 伺服器" -"(例如,mod_wsgi)中執行 Keystone。將在 \"M\" 版本中移除對在 eventlet 下執行 " -"Keystone 的支援。" - -msgid "Scoping to both domain and project is not allowed" -msgstr "不容許將範圍同時設定為網域及專案" - -msgid "Scoping to both domain and trust is not allowed" -msgstr "不容許將範圍同時設定為網域及信任" - -msgid "Scoping to both project and trust is not allowed" -msgstr "不容許將範圍同時設定為專案及信任" - -#, python-format -msgid "Service Provider %(sp)s is disabled" -msgstr "已停用服務提供者 %(sp)s" - -msgid "Some of requested roles are not in redelegated trust" -msgstr "所要求的部分角色不在重新委派的信任中" - -msgid "Specify a domain or project, not both" -msgstr "指定網域或專案,但不能同時指定這兩者" - -msgid "Specify a user or group, not both" -msgstr "指定使用者或群組,但不能同時指定這兩者" - -msgid "Specify one of domain or project" -msgstr "指定網域或專案" - -msgid "Specify one of user or group" -msgstr "指定使用者或群組" - -#, python-format -msgid "" -"String length exceeded.The length of string '%(string)s' exceeded the limit " -"of column %(type)s(CHAR(%(length)d))." -msgstr "" -"已超出字串長度。字串 '%(string)s' 的長度已超出直欄 %(type)s 的限制 " -"(CHAR(%(length)d))。" - -msgid "Tenant name cannot contain reserved characters." 
-msgstr "承租人名稱不能包含保留字元。" - -#, python-format -msgid "" -"The %s extension has been moved into keystone core and as such its " -"migrations are maintained by the main keystone database control. Use the " -"command: keystone-manage db_sync" -msgstr "" -"%s 延伸已移到 Keystone 核心內,因此它的移轉將由主要 Keystone 資料庫控制進行維" -"護。請使用指令:keystone-manage db_sync" - -msgid "" -"The 'expires_at' must not be before now. The server could not comply with " -"the request since it is either malformed or otherwise incorrect. The client " -"is assumed to be in error." -msgstr "" -"'expires_at' 不得早於現在。伺服器無法遵守要求,因為它的形態異常,或者在其他方" -"面發生錯誤。系統會假定用戶端處於錯誤狀態。" - -msgid "The --all option cannot be used with the --domain-name option" -msgstr "--all 選項不能與 --domain-name 選項搭配使用" - -#, python-format -msgid "The Keystone configuration file %(config_file)s could not be found." -msgstr "找不到 Keystone 配置檔 %(config_file)s。" - -#, python-format -msgid "" -"The Keystone domain-specific configuration has specified more than one SQL " -"driver (only one is permitted): %(source)s." -msgstr "" -"Keystone 網域專屬配置指定了多個 SQL 驅動程式(僅允許一個):%(source)s。" - -msgid "The action you have requested has not been implemented." -msgstr "尚未實作所要求的動作。" - -msgid "The authenticated user should match the trustor." -msgstr "已鑑別使用者應該與委託人相符。" - -msgid "" -"The certificates you requested are not available. It is likely that this " -"server does not use PKI tokens otherwise this is the result of " -"misconfiguration." -msgstr "" -"無法使用所要求的憑證。可能是此伺服器沒有使用 PKI 記號,否則,這是配置錯誤的結" -"果。" - -msgid "The configured token provider does not support bind authentication." -msgstr "所配置的記號提供者不支援連結鑑別。" - -msgid "The creation of projects acting as domains is not allowed in v2." -msgstr "在第 2 版中,不容許建立專案以充當網域。" - -#, python-format -msgid "" -"The password length must be less than or equal to %(size)i. The server could " -"not comply with the request because the password is invalid." 
-msgstr "密碼長度必須小於或等於 %(size)i。伺服器無法遵守要求,因為密碼無效。" - -msgid "The request you have made requires authentication." -msgstr "您發出的要求需要鑑別。" - -msgid "The resource could not be found." -msgstr "找不到資源。" - -msgid "" -"The revoke call must not have both domain_id and project_id. This is a bug " -"in the Keystone server. The current request is aborted." -msgstr "" -"撤銷呼叫不得同時具有 domain_id 和 project_id。這是 Keystone 伺服器中的錯誤。" -"已中斷現行要求。" - -msgid "The service you have requested is no longer available on this server." -msgstr "在此伺服器上,無法再使用所要求的服務。" - -#, python-format -msgid "" -"The specified parent region %(parent_region_id)s would create a circular " -"region hierarchy." -msgstr "指定的母項區域 %(parent_region_id)s 會建立循環區域階層。" - -#, python-format -msgid "" -"The value of group %(group)s specified in the config should be a dictionary " -"of options" -msgstr "在配置中指定之群組 %(group)s 的值應該為選項字典" - -msgid "There should not be any non-oauth parameters" -msgstr "不應該具有任何 non-oauth 參數" - -#, python-format -msgid "This is not a recognized Fernet payload version: %s" -msgstr "這不是已辨識的 Fernet 內容版本:%s" - -#, python-format -msgid "This is not a recognized Fernet token %s" -msgstr "這不是已辨識的 Fernet 記號 %s" - -msgid "" -"Timestamp not in expected format. The server could not comply with the " -"request since it is either malformed or otherwise incorrect. The client is " -"assumed to be in error." -msgstr "" -"時間戳記的格式不符合預期。伺服器無法遵守要求,因為它的形態異常,或者在其他方" -"面發生錯誤。系統會假定用戶端處於錯誤狀態。" - -#, python-format -msgid "" -"To get a more detailed information on this error, re-run this command for " -"the specific domain, i.e.: keystone-manage domain_config_upload --domain-" -"name %s" -msgstr "" -"如果要取得此錯誤的更詳細資訊,請針對特定的網域重新執行此指令,例如:keystone-" -"manage domain_config_upload --domain-name %s" - -msgid "Token belongs to another user" -msgstr "記號屬於另一個使用者" - -msgid "Token does not belong to specified tenant." -msgstr "記號不屬於所指定的承租人。" - -msgid "Token version is unrecognizable or unsupported." 
-msgstr "無法辨識或不支援記號版本。" - -msgid "Trustee has no delegated roles." -msgstr "受託人沒有委派的角色。" - -msgid "Trustor is disabled." -msgstr "委託人已停用。" - -#, python-format -msgid "" -"Trying to update group %(group)s, so that, and only that, group must be " -"specified in the config" -msgstr "" -"正在嘗試更新群組 %(group)s,因此必須在配置中指定該群組且必須僅指定該群組" - -#, python-format -msgid "" -"Trying to update option %(option)s in group %(group)s, but config provided " -"contains option %(option_other)s instead" -msgstr "" -"正在嘗試更新群組 %(group)s 中的選項 %(option)s,但提供的配置卻包含選項 " -"%(option_other)s" - -#, python-format -msgid "" -"Trying to update option %(option)s in group %(group)s, so that, and only " -"that, option must be specified in the config" -msgstr "" -"正在嘗試更新群組 %(group)s 中的選項 %(option)s,因此必須在配置中指定該選項且" -"必須僅指定該選項" - -msgid "" -"Unable to access the keystone database, please check it is configured " -"correctly." -msgstr "無法存取 Keystone 資料庫,請檢查它是否已正確配置。" - -#, python-format -msgid "Unable to consume trust %(trust_id)s, unable to acquire lock." -msgstr "無法耗用信任 %(trust_id)s,無法獲得鎖定。" - -#, python-format -msgid "" -"Unable to delete region %(region_id)s because it or its child regions have " -"associated endpoints." -msgstr "無法刪除區域 %(region_id)s,因為此區域或其子區域具有相關聯的端點。" - -msgid "Unable to downgrade schema" -msgstr "無法將綱目降級" - -#, python-format -msgid "Unable to find valid groups while using mapping %(mapping_id)s" -msgstr "使用對映 %(mapping_id)s 時找不到有效的群組" - -#, python-format -msgid "Unable to locate domain config directory: %s" -msgstr "找不到網域配置目錄:%s" - -#, python-format -msgid "Unable to lookup user %s" -msgstr "無法查閱使用者 %s" - -#, python-format -msgid "" -"Unable to reconcile identity attribute %(attribute)s as it has conflicting " -"values %(new)s and %(old)s" -msgstr "" -"無法核對身分屬性 %(attribute)s,因為該屬性具有衝突的值 %(new)s 和 %(old)s" - -#, python-format -msgid "" -"Unable to sign SAML assertion. It is likely that this server does not have " -"xmlsec1 installed, or this is the result of misconfiguration. 
Reason " -"%(reason)s" -msgstr "" -"無法簽署 SAML 主張。此伺服器可能未安裝 xmlsec1,或者這是配置錯誤的結果。原" -"因:%(reason)s" - -msgid "Unable to sign token." -msgstr "無法簽署記號。" - -#, python-format -msgid "Unexpected assignment type encountered, %s" -msgstr "發現非預期的指派類型,%s" - -#, python-format -msgid "" -"Unexpected combination of grant attributes - User: %(user_id)s, Group: " -"%(group_id)s, Project: %(project_id)s, Domain: %(domain_id)s" -msgstr "" -"非預期的授權屬性組合 - 使用者:%(user_id)s,群組:%(group_id)s,專案:" -"%(project_id)s,網域:%(domain_id)s" - -#, python-format -msgid "Unexpected status requested for JSON Home response, %s" -msgstr "針對「JSON 起始目錄」回應要求了非預期狀態,%s" - -msgid "Unknown Target" -msgstr "不明的目標" - -#, python-format -msgid "Unknown domain '%(name)s' specified by --domain-name" -msgstr "由 --domain-name 指定的網域 '%(name)s' 不明" - -#, python-format -msgid "Unknown token version %s" -msgstr "不明的記號版本 %s" - -#, python-format -msgid "Unregistered dependency: %(name)s for %(targets)s" -msgstr "已取消登錄 %(targets)s 的相依關係:%(name)s" - -msgid "Update of `domain_id` is not allowed." -msgstr "不容許更新 'domain_id'。" - -msgid "Update of `is_domain` is not allowed." -msgstr "不容許更新 `is_domain`。" - -msgid "Update of `parent_id` is not allowed." -msgstr "不容許更新 'parent_id'。" - -msgid "Update of domain_id is only allowed for root projects." -msgstr "只容許更新根專案的 domain_id。" - -msgid "Update of domain_id of projects acting as domains is not allowed." -msgstr "不容許更新正在充當網域之專案的 domain_id。" - -msgid "Use a project scoped token when attempting to create a SAML assertion" -msgstr "嘗試建立 SAML 主張時,使用專案範圍的記號" - -msgid "" -"Use of the identity driver config to automatically configure the same " -"assignment driver has been deprecated, in the \"O\" release, the assignment " -"driver will need to be expicitly configured if different than the default " -"(SQL)." 
-msgstr "" -"不建議使用身分驅動程式配置來自動配置相同的指派驅動程式,在 \"O\" 版本中,如果" -"指派驅動程式與預設值 (SQL) 不同,則需要明確配置指派驅動程式。" - -#, python-format -msgid "User %(u_id)s is unauthorized for tenant %(t_id)s" -msgstr "使用者 %(u_id)s 未獲承租人 %(t_id)s 的授權" - -#, python-format -msgid "User %(user_id)s has no access to domain %(domain_id)s" -msgstr "使用者 %(user_id)s 無權存取網域 %(domain_id)s" - -#, python-format -msgid "User %(user_id)s has no access to project %(project_id)s" -msgstr "使用者 %(user_id)s 無權存取專案 %(project_id)s" - -#, python-format -msgid "User %(user_id)s is already a member of group %(group_id)s" -msgstr "使用者 %(user_id)s 已是群組 %(group_id)s 的成員" - -#, python-format -msgid "User '%(user_id)s' not found in group '%(group_id)s'" -msgstr "在群組 '%(group_id)s' 中找不到使用者 '%(user_id)s'" - -msgid "User IDs do not match" -msgstr "使用者 ID 不符" - -msgid "" -"User auth cannot be built due to missing either user id, or user name with " -"domain id, or user name with domain name." -msgstr "" -"無法建置使用者鑑別,因為遺漏了使用者 ID、具有網域 ID 的使用者名稱或具有網域名" -"稱的使用者名稱。" - -#, python-format -msgid "User is disabled: %s" -msgstr "已停用使用者:%s" - -msgid "User is not a member of the requested project" -msgstr "使用者並不隸屬於所要求的專案" - -msgid "User is not a trustee." -msgstr "使用者不是受託人。" - -msgid "User not found" -msgstr "找不到使用者" - -msgid "User not valid for tenant." -msgstr "使用者不是有效的承租人。" - -msgid "User roles not supported: tenant_id required" -msgstr "使用者角色不受支援:需要 tenant_id" - -#, python-format -msgid "User type %s not supported" -msgstr "使用者類型 %s 不受支援" - -msgid "You are not authorized to perform the requested action." -msgstr "您未獲授權來執行所要求的動作。" - -#, python-format -msgid "You are not authorized to perform the requested action: %(action)s" -msgstr "您未獲授權來執行所要求的動作:%(action)s" - -msgid "" -"You have tried to create a resource using the admin token. As this token is " -"not within a domain you must explicitly include a domain for this resource " -"to belong to." 
-msgstr "" -"您已嘗試使用管理者記號建立資源。因為此記號不在網域內,所以您必須明確包含某個" -"網域,以讓此資源屬於該網域。" - -msgid "`key_mangler` functions must be callable." -msgstr "`key_mangler` 函數必須可呼叫。" - -msgid "`key_mangler` option must be a function reference" -msgstr "`key_mangler` 選項必須是函數參照" - -msgid "any options" -msgstr "任何選項" - -msgid "auth_type is not Negotiate" -msgstr "auth_type 不是 Negotiate" - -msgid "authorizing user does not have role required" -msgstr "授權使用者不具有必要的角色" - -#, python-format -msgid "cannot create a project in a branch containing a disabled project: %s" -msgstr "無法在包含已停用專案的分支中建立專案:%s" - -#, python-format -msgid "" -"cannot delete an enabled project acting as a domain. Please disable the " -"project %s first." -msgstr "無法刪除已啟用且正在充當網域的專案。請先停用專案 %s。" - -#, python-format -msgid "group %(group)s" -msgstr "群組 %(group)s" - -msgid "" -"idp_contact_type must be one of: [technical, other, support, administrative " -"or billing." -msgstr "idp_contact_type 必須是下列其中一個:技術、其他、支援、管理或計費。" - -#, python-format -msgid "invalid date format %s" -msgstr "無效的日期格式 %s" - -#, python-format -msgid "" -"it is not permitted to have two projects acting as domains with the same " -"name: %s" -msgstr "不允許包含兩個具有相同名稱且充當網域的專案:%s" - -#, python-format -msgid "" -"it is not permitted to have two projects within a domain with the same " -"name : %s" -msgstr "在一個網域內,不允許包含兩個具有相同名稱的專案:%s" - -msgid "only root projects are allowed to act as domains." -msgstr "只容許根專案充當網域。" - -#, python-format -msgid "option %(option)s in group %(group)s" -msgstr "群組 %(group)s 中的選項 %(option)s" - -msgid "provided consumer key does not match stored consumer key" -msgstr "所提供的消費者金鑰與儲存的消費者金鑰不符" - -msgid "provided request key does not match stored request key" -msgstr "所提供的要求金鑰與儲存的要求金鑰不符" - -msgid "provided verifier does not match stored verifier" -msgstr "所提供的驗證器與儲存的驗證器不符" - -msgid "remaining_uses must be a positive integer or null." 
-msgstr "remaining_uses 必須是正整數或空值。" - -msgid "remaining_uses must not be set if redelegation is allowed" -msgstr "如果容許重新委派,則不得設定 remaining_uses" - -#, python-format -msgid "" -"request to update group %(group)s, but config provided contains group " -"%(group_other)s instead" -msgstr "要求更新群組 %(group)s,但提供的配置卻包含群組 %(group_other)s" - -msgid "rescope a scoped token" -msgstr "重新劃定已限定範圍之記號的範圍" - -#, python-format -msgid "role %s is not defined" -msgstr "未定義角色 %s" - -msgid "scope.project.id must be specified if include_subtree is also specified" -msgstr "如果也指定了 include_subtree,則必須指定 scope.project.id" - -#, python-format -msgid "tls_cacertdir %s not found or is not a directory" -msgstr "tls_cacertdir %s 找不到,或者不是目錄" - -#, python-format -msgid "tls_cacertfile %s not found or is not a file" -msgstr "tls_cacertfile %s 找不到,或者不是檔案" - -#, python-format -msgid "token reference must be a KeystoneToken type, got: %s" -msgstr "記號參照必須是 KeystoneToken 類型,但卻取得:%s" - -msgid "" -"update of domain_id is deprecated as of Mitaka and will be removed in O." -msgstr "不建議更新 domain_id,因為 Mitaka 將在 O 版本中予以移除。" - -#, python-format -msgid "" -"validated expected to find %(param_name)r in function signature for " -"%(func_name)r." -msgstr "在 %(func_name)r 的函數簽章中,驗證預期尋找 %(param_name)r。" diff --git a/keystone-moon/keystone/middleware/__init__.py b/keystone-moon/keystone/middleware/__init__.py deleted file mode 100644 index 4325d946..00000000 --- a/keystone-moon/keystone/middleware/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.middleware.auth import * # noqa -from keystone.middleware.core import * # noqa diff --git a/keystone-moon/keystone/middleware/auth.py b/keystone-moon/keystone/middleware/auth.py deleted file mode 100644 index cc7d0ecc..00000000 --- a/keystone-moon/keystone/middleware/auth.py +++ /dev/null @@ -1,222 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_config import cfg -from oslo_context import context as oslo_context -from oslo_log import log -from oslo_log import versionutils - -from keystone.common import authorization -from keystone.common import tokenless_auth -from keystone.common import wsgi -from keystone import exception -from keystone.federation import constants as federation_constants -from keystone.federation import utils -from keystone.i18n import _, _LI, _LW -from keystone.middleware import core -from keystone.models import token_model -from keystone.token.providers import common - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - -__all__ = ('AuthContextMiddleware',) - - -class AuthContextMiddleware(wsgi.Middleware): - """Build the authentication context from the request auth token.""" - - def _build_auth_context(self, request): - - # NOTE(gyee): token takes precedence over SSL client certificates. - # This will preserve backward compatibility with the existing - # behavior. 
Tokenless authorization with X.509 SSL client - # certificate is effectively disabled if no trusted issuers are - # provided. - - token_id = None - if core.AUTH_TOKEN_HEADER in request.headers: - token_id = request.headers[core.AUTH_TOKEN_HEADER].strip() - - is_admin = request.environ.get(core.CONTEXT_ENV, {}).get('is_admin', - False) - if is_admin: - # NOTE(gyee): no need to proceed any further as we already know - # this is an admin request. - auth_context = {} - return auth_context, token_id, is_admin - - if token_id: - # In this case the client sent in a token. - auth_context, is_admin = self._build_token_auth_context( - request, token_id) - return auth_context, token_id, is_admin - - # No token, maybe the client presented an X.509 certificate. - - if self._validate_trusted_issuer(request.environ): - auth_context = self._build_tokenless_auth_context( - request.environ) - return auth_context, None, False - - LOG.debug('There is either no auth token in the request or ' - 'the certificate issuer is not trusted. No auth ' - 'context will be set.') - - return None, None, False - - def _build_token_auth_context(self, request, token_id): - if CONF.admin_token and token_id == CONF.admin_token: - versionutils.report_deprecated_feature( - LOG, - _LW('build_auth_context middleware checking for the admin ' - 'token is deprecated as of the Mitaka release and will be ' - 'removed in the O release. 
If your deployment requires ' - 'use of the admin token, update keystone-paste.ini so ' - 'that admin_token_auth is before build_auth_context in ' - 'the paste pipelines, otherwise remove the ' - 'admin_token_auth middleware from the paste pipelines.')) - return {}, True - - context = {'token_id': token_id} - context['environment'] = request.environ - - try: - token_ref = token_model.KeystoneToken( - token_id=token_id, - token_data=self.token_provider_api.validate_token(token_id)) - # TODO(gyee): validate_token_bind should really be its own - # middleware - wsgi.validate_token_bind(context, token_ref) - return authorization.token_to_auth_context(token_ref), False - except exception.TokenNotFound: - LOG.warning(_LW('RBAC: Invalid token')) - raise exception.Unauthorized() - - def _build_tokenless_auth_context(self, env): - """Build the authentication context. - - The context is built from the attributes provided in the env, - such as certificate and scope attributes. - """ - tokenless_helper = tokenless_auth.TokenlessAuthHelper(env) - - (domain_id, project_id, trust_ref, unscoped) = ( - tokenless_helper.get_scope()) - user_ref = tokenless_helper.get_mapped_user( - project_id, - domain_id) - - # NOTE(gyee): if it is an ephemeral user, the - # given X.509 SSL client cert does not need to map to - # an existing user. - if user_ref['type'] == utils.UserType.EPHEMERAL: - auth_context = {} - auth_context['group_ids'] = user_ref['group_ids'] - auth_context[federation_constants.IDENTITY_PROVIDER] = ( - user_ref[federation_constants.IDENTITY_PROVIDER]) - auth_context[federation_constants.PROTOCOL] = ( - user_ref[federation_constants.PROTOCOL]) - if domain_id and project_id: - msg = _('Scoping to both domain and project is not allowed') - raise ValueError(msg) - if domain_id: - auth_context['domain_id'] = domain_id - if project_id: - auth_context['project_id'] = project_id - auth_context['roles'] = user_ref['roles'] - else: - # it's the local user, so token data is needed. 
- token_helper = common.V3TokenDataHelper() - token_data = token_helper.get_token_data( - user_id=user_ref['id'], - method_names=[CONF.tokenless_auth.protocol], - domain_id=domain_id, - project_id=project_id) - - auth_context = {'user_id': user_ref['id']} - auth_context['is_delegated_auth'] = False - if domain_id: - auth_context['domain_id'] = domain_id - if project_id: - auth_context['project_id'] = project_id - auth_context['roles'] = [role['name'] for role - in token_data['token']['roles']] - return auth_context - - def _validate_trusted_issuer(self, env): - """To further filter the certificates that are trusted. - - If the config option 'trusted_issuer' is absent or does - not contain the trusted issuer DN, no certificates - will be allowed in tokenless authorization. - - :param env: The env contains the client issuer's attributes - :type env: dict - :returns: True if client_issuer is trusted; otherwise False - """ - if not CONF.tokenless_auth.trusted_issuer: - return False - - client_issuer = env.get(CONF.tokenless_auth.issuer_attribute) - if not client_issuer: - msg = _LI('Cannot find client issuer in env by the ' - 'issuer attribute - %s.') - LOG.info(msg, CONF.tokenless_auth.issuer_attribute) - return False - - if client_issuer in CONF.tokenless_auth.trusted_issuer: - return True - - msg = _LI('The client issuer %(client_issuer)s does not match with ' - 'the trusted issuer %(trusted_issuer)s') - LOG.info( - msg, {'client_issuer': client_issuer, - 'trusted_issuer': CONF.tokenless_auth.trusted_issuer}) - - return False - - def process_request(self, request): - - # The request context stores itself in thread-local memory for logging. 
- request_context = oslo_context.RequestContext( - request_id=request.environ.get('openstack.request_id')) - - if authorization.AUTH_CONTEXT_ENV in request.environ: - msg = _LW('Auth context already exists in the request ' - 'environment; it will be used for authorization ' - 'instead of creating a new one.') - LOG.warning(msg) - return - - auth_context, token_id, is_admin = self._build_auth_context(request) - - request_context.auth_token = token_id - request_context.is_admin = is_admin - - if auth_context is None: - # The client didn't send any auth info, so don't set auth context. - return - - # The attributes of request_context are put into the logs. This is a - # common pattern for all the OpenStack services. In all the other - # projects these are IDs, so set the attributes to IDs here rather than - # the name. - request_context.user = auth_context.get('user_id') - request_context.tenant = auth_context.get('project_id') - request_context.domain = auth_context.get('domain_id') - request_context.user_domain = auth_context.get('user_domain_id') - request_context.project_domain = auth_context.get('project_domain_id') - request_context.update_store() - - LOG.debug('RBAC: auth_context: %s', auth_context) - request.environ[authorization.AUTH_CONTEXT_ENV] = auth_context diff --git a/keystone-moon/keystone/middleware/core.py b/keystone-moon/keystone/middleware/core.py deleted file mode 100644 index 245b9e67..00000000 --- a/keystone-moon/keystone/middleware/core.py +++ /dev/null @@ -1,138 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_config import cfg -from oslo_log import log -from oslo_serialization import jsonutils - -from keystone.common import wsgi -from keystone import exception -from keystone.i18n import _LW - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - -# Header used to transmit the auth token -AUTH_TOKEN_HEADER = 'X-Auth-Token' - - -# Header used to transmit the subject token -SUBJECT_TOKEN_HEADER = 'X-Subject-Token' - - -# Environment variable used to pass the request context -CONTEXT_ENV = wsgi.CONTEXT_ENV - - -# Environment variable used to pass the request params -PARAMS_ENV = wsgi.PARAMS_ENV - - -class TokenAuthMiddleware(wsgi.Middleware): - def process_request(self, request): - token = request.headers.get(AUTH_TOKEN_HEADER) - context = request.environ.get(CONTEXT_ENV, {}) - context['token_id'] = token - if SUBJECT_TOKEN_HEADER in request.headers: - context['subject_token_id'] = request.headers[SUBJECT_TOKEN_HEADER] - request.environ[CONTEXT_ENV] = context - - -class AdminTokenAuthMiddleware(wsgi.Middleware): - """A trivial filter that checks for a pre-defined admin token. - - Sets 'is_admin' to true in the context, expected to be checked by - methods that are admin-only. 
- - """ - - def __init__(self, application): - super(AdminTokenAuthMiddleware, self).__init__(application) - LOG.warning(_LW("The admin_token_auth middleware presents a security " - "risk and should be removed from the " - "[pipeline:api_v3], [pipeline:admin_api], and " - "[pipeline:public_api] sections of your paste ini " - "file.")) - - def process_request(self, request): - token = request.headers.get(AUTH_TOKEN_HEADER) - context = request.environ.get(CONTEXT_ENV, {}) - context['is_admin'] = CONF.admin_token and (token == CONF.admin_token) - request.environ[CONTEXT_ENV] = context - - -class JsonBodyMiddleware(wsgi.Middleware): - """Middleware to allow method arguments to be passed as serialized JSON. - - Accepting arguments as JSON is useful for accepting data that may be more - complex than simple primitives. - - Filters out the parameters `self`, `context` and anything beginning with - an underscore. - - """ - - def process_request(self, request): - # Abort early if we don't have any work to do - params_json = request.body - if not params_json: - return - - # Reject unrecognized content types. 
Empty string indicates - # the client did not explicitly set the header - if request.content_type not in ('application/json', ''): - e = exception.ValidationError(attribute='application/json', - target='Content-Type header') - return wsgi.render_exception(e, request=request) - - params_parsed = {} - try: - params_parsed = jsonutils.loads(params_json) - except ValueError: - e = exception.ValidationError(attribute='valid JSON', - target='request body') - return wsgi.render_exception(e, request=request) - finally: - if not params_parsed: - params_parsed = {} - - if not isinstance(params_parsed, dict): - e = exception.ValidationError(attribute='valid JSON object', - target='request body') - return wsgi.render_exception(e, request=request) - - params = {} - for k, v in params_parsed.items(): - if k in ('self', 'context'): - continue - if k.startswith('_'): - continue - params[k] = v - - request.environ[PARAMS_ENV] = params - - -class NormalizingFilter(wsgi.Middleware): - """Middleware filter to handle URL normalization.""" - - def process_request(self, request): - """Normalizes URLs.""" - # Removes a trailing slash from the given path, if any. - if (len(request.environ['PATH_INFO']) > 1 and - request.environ['PATH_INFO'][-1] == '/'): - request.environ['PATH_INFO'] = request.environ['PATH_INFO'][:-1] - # Rewrites path to root if no path is given. - elif not request.environ['PATH_INFO']: - request.environ['PATH_INFO'] = '/' diff --git a/keystone-moon/keystone/middleware/ec2_token.py b/keystone-moon/keystone/middleware/ec2_token.py deleted file mode 100644 index 771b74f8..00000000 --- a/keystone-moon/keystone/middleware/ec2_token.py +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Starting point for routing EC2 requests. - -The EC2 Token Middleware has been deprecated as of Juno. It has been moved into -keystonemiddleware, `keystonemiddleware.ec2_token`. - -""" - -from keystonemiddleware import ec2_token - -from keystone.openstack.common import versionutils - - -class EC2Token(ec2_token.EC2Token): - - @versionutils.deprecated( - versionutils.deprecated.JUNO, - in_favor_of='keystonemiddleware.ec2_token.EC2Token', - remove_in=+2, - what='keystone.middleware.ec2_token.EC2Token') - def __init__(self, *args, **kwargs): - super(EC2Token, self).__init__(*args, **kwargs) - - -filter_factory = ec2_token.filter_factory -app_factory = ec2_token.app_factory -keystone_ec2_opts = ec2_token.keystone_ec2_opts diff --git a/keystone-moon/keystone/models/__init__.py b/keystone-moon/keystone/models/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/models/revoke_model.py b/keystone-moon/keystone/models/revoke_model.py deleted file mode 100644 index 0fc3e628..00000000 --- a/keystone-moon/keystone/models/revoke_model.py +++ /dev/null @@ -1,373 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_utils import timeutils -from six.moves import map - -from keystone.common import utils - - -# The set of attributes common between the RevokeEvent -# and the dictionaries created from the token Data. -_NAMES = ['trust_id', - 'consumer_id', - 'access_token_id', - 'audit_id', - 'audit_chain_id', - 'expires_at', - 'domain_id', - 'project_id', - 'user_id', - 'role_id'] - - -# Additional arguments for creating a RevokeEvent -_EVENT_ARGS = ['issued_before', 'revoked_at'] - -# Names of attributes in the RevocationEvent, including "virtual" attributes. -# Virtual attributes are those added based on other values. -_EVENT_NAMES = _NAMES + ['domain_scope_id'] - -# Values that will be in the token data but not in the event. -# These will compared with event values that have different names. -# For example: both trustor_id and trustee_id are compared against user_id -_TOKEN_KEYS = ['identity_domain_id', - 'assignment_domain_id', - 'issued_at', - 'trustor_id', - 'trustee_id'] - -# Alternative names to be checked in token for every field in -# revoke tree. -ALTERNATIVES = { - 'user_id': ['user_id', 'trustor_id', 'trustee_id'], - 'domain_id': ['identity_domain_id', 'assignment_domain_id'], - # For a domain-scoped token, the domain is in assignment_domain_id. 
- 'domain_scope_id': ['assignment_domain_id', ], -} - - -REVOKE_KEYS = _NAMES + _EVENT_ARGS - - -def blank_token_data(issued_at): - token_data = dict() - for name in _NAMES: - token_data[name] = None - for name in _TOKEN_KEYS: - token_data[name] = None - # required field - token_data['issued_at'] = issued_at - return token_data - - -class RevokeEvent(object): - def __init__(self, **kwargs): - for k in REVOKE_KEYS: - v = kwargs.get(k) - setattr(self, k, v) - - if self.domain_id and self.expires_at: - # This is revoking a domain-scoped token. - self.domain_scope_id = self.domain_id - self.domain_id = None - else: - # This is revoking all tokens for a domain. - self.domain_scope_id = None - - if self.expires_at is not None: - # Trim off the expiration time because MySQL timestamps are only - # accurate to the second. - self.expires_at = self.expires_at.replace(microsecond=0) - - if self.revoked_at is None: - self.revoked_at = timeutils.utcnow() - if self.issued_before is None: - self.issued_before = self.revoked_at - - def to_dict(self): - keys = ['user_id', - 'role_id', - 'domain_id', - 'domain_scope_id', - 'project_id', - 'audit_id', - 'audit_chain_id', - ] - event = {key: self.__dict__[key] for key in keys - if self.__dict__[key] is not None} - if self.trust_id is not None: - event['OS-TRUST:trust_id'] = self.trust_id - if self.consumer_id is not None: - event['OS-OAUTH1:consumer_id'] = self.consumer_id - if self.consumer_id is not None: - event['OS-OAUTH1:access_token_id'] = self.access_token_id - if self.expires_at is not None: - event['expires_at'] = utils.isotime(self.expires_at) - if self.issued_before is not None: - event['issued_before'] = utils.isotime(self.issued_before, - subsecond=True) - return event - - def key_for_name(self, name): - return "%s=%s" % (name, getattr(self, name) or '*') - - -def attr_keys(event): - return list(map(event.key_for_name, _EVENT_NAMES)) - - -class RevokeTree(object): - """Fast Revocation Checking Tree Structure - - The Tree 
is an index to quickly match tokens against events. - Each node is a hashtable of key=value combinations from revocation events. - The - - """ - - def __init__(self, revoke_events=None): - self.revoke_map = dict() - self.add_events(revoke_events) - - def add_event(self, event): - """Updates the tree based on a revocation event. - - Creates any necessary internal nodes in the tree corresponding to the - fields of the revocation event. The leaf node will always be set to - the latest 'issued_before' for events that are otherwise identical. - - :param: Event to add to the tree - - :returns: the event that was passed in. - - """ - revoke_map = self.revoke_map - for key in attr_keys(event): - revoke_map = revoke_map.setdefault(key, {}) - revoke_map['issued_before'] = max( - event.issued_before, revoke_map.get( - 'issued_before', event.issued_before)) - return event - - def remove_event(self, event): - """Update the tree based on the removal of a Revocation Event - - Removes empty nodes from the tree from the leaf back to the root. - - If multiple events trace the same path, but have different - 'issued_before' values, only the last is ever stored in the tree. 
- So only an exact match on 'issued_before' ever triggers a removal - - :param: Event to remove from the tree - - """ - stack = [] - revoke_map = self.revoke_map - for name in _EVENT_NAMES: - key = event.key_for_name(name) - nxt = revoke_map.get(key) - if nxt is None: - break - stack.append((revoke_map, key, nxt)) - revoke_map = nxt - else: - if event.issued_before == revoke_map['issued_before']: - revoke_map.pop('issued_before') - for parent, key, child in reversed(stack): - if not any(child): - del parent[key] - - def add_events(self, revoke_events): - return list(map(self.add_event, revoke_events or [])) - - @staticmethod - def _next_level_keys(name, token_data): - """Generate keys based on current field name and token data - - Generate all keys to look for in the next iteration of revocation - event tree traversal. - """ - yield '*' - if name == 'role_id': - # Roles are very special since a token has a list of them. - # If the revocation event matches any one of them, - # revoke the token. - for role_id in token_data.get('roles', []): - yield role_id - else: - # For other fields we try to get any branch that concur - # with any alternative field in the token. - for alt_name in ALTERNATIVES.get(name, [name]): - yield token_data[alt_name] - - def _search(self, revoke_map, names, token_data): - """Search for revocation event by token_data - - Traverse the revocation events tree looking for event matching token - data issued after the token. - """ - if not names: - # The last (leaf) level is checked in a special way because we - # verify issued_at field differently. 
- try: - return revoke_map['issued_before'] >= token_data['issued_at'] - except KeyError: - return False - - name, remaining_names = names[0], names[1:] - - for key in self._next_level_keys(name, token_data): - subtree = revoke_map.get('%s=%s' % (name, key)) - if subtree and self._search(subtree, remaining_names, token_data): - return True - - # If we made it out of the loop then no element in revocation tree - # corresponds to our token and it is good. - return False - - def is_revoked(self, token_data): - """Check if a token matches the revocation event - - Compare the values for each level of the tree with the values from - the token, accounting for attributes that have alternative - keys, and for wildcard matches. - if there is a match, continue down the tree. - if there is no match, exit early. - - token_data is a map based on a flattened view of token. - The required fields are: - - 'expires_at','user_id', 'project_id', 'identity_domain_id', - 'assignment_domain_id', 'trust_id', 'trustor_id', 'trustee_id' - 'consumer_id', 'access_token_id' - - """ - return self._search(self.revoke_map, _EVENT_NAMES, token_data) - - -def build_token_values_v2(access, default_domain_id): - token_data = access['token'] - - token_expires_at = timeutils.parse_isotime(token_data['expires']) - - # Trim off the microseconds because the revocation event only has - # expirations accurate to the second. 
- token_expires_at = token_expires_at.replace(microsecond=0) - - token_values = { - 'expires_at': timeutils.normalize_time(token_expires_at), - 'issued_at': timeutils.normalize_time( - timeutils.parse_isotime(token_data['issued_at'])), - 'audit_id': token_data.get('audit_ids', [None])[0], - 'audit_chain_id': token_data.get('audit_ids', [None])[-1], - } - - token_values['user_id'] = access.get('user', {}).get('id') - - project = token_data.get('tenant') - if project is not None: - token_values['project_id'] = project['id'] - else: - token_values['project_id'] = None - - token_values['identity_domain_id'] = default_domain_id - token_values['assignment_domain_id'] = default_domain_id - - trust = token_data.get('trust') - if trust is None: - token_values['trust_id'] = None - token_values['trustor_id'] = None - token_values['trustee_id'] = None - else: - token_values['trust_id'] = trust['id'] - token_values['trustor_id'] = trust['trustor_id'] - token_values['trustee_id'] = trust['trustee_id'] - - token_values['consumer_id'] = None - token_values['access_token_id'] = None - - role_list = [] - # Roles are by ID in metadata and by name in the user section - roles = access.get('metadata', {}).get('roles', []) - for role in roles: - role_list.append(role) - token_values['roles'] = role_list - return token_values - - -def build_token_values(token_data): - - token_expires_at = timeutils.parse_isotime(token_data['expires_at']) - - # Trim off the microseconds because the revocation event only has - # expirations accurate to the second. 
- token_expires_at = token_expires_at.replace(microsecond=0) - - token_values = { - 'expires_at': timeutils.normalize_time(token_expires_at), - 'issued_at': timeutils.normalize_time( - timeutils.parse_isotime(token_data['issued_at'])), - 'audit_id': token_data.get('audit_ids', [None])[0], - 'audit_chain_id': token_data.get('audit_ids', [None])[-1], - } - - user = token_data.get('user') - if user is not None: - token_values['user_id'] = user['id'] - # Federated users do not have a domain, be defensive and get the user - # domain set to None in the federated user case. - token_values['identity_domain_id'] = user.get('domain', {}).get('id') - else: - token_values['user_id'] = None - token_values['identity_domain_id'] = None - - project = token_data.get('project', token_data.get('tenant')) - if project is not None: - token_values['project_id'] = project['id'] - # The domain_id of projects acting as domains is None - token_values['assignment_domain_id'] = ( - project['domain']['id'] if project['domain'] else None) - else: - token_values['project_id'] = None - - domain = token_data.get('domain') - if domain is not None: - token_values['assignment_domain_id'] = domain['id'] - else: - token_values['assignment_domain_id'] = None - - role_list = [] - roles = token_data.get('roles') - if roles is not None: - for role in roles: - role_list.append(role['id']) - token_values['roles'] = role_list - - trust = token_data.get('OS-TRUST:trust') - if trust is None: - token_values['trust_id'] = None - token_values['trustor_id'] = None - token_values['trustee_id'] = None - else: - token_values['trust_id'] = trust['id'] - token_values['trustor_id'] = trust['trustor_user']['id'] - token_values['trustee_id'] = trust['trustee_user']['id'] - - oauth1 = token_data.get('OS-OAUTH1') - if oauth1 is None: - token_values['consumer_id'] = None - token_values['access_token_id'] = None - else: - token_values['consumer_id'] = oauth1['consumer_id'] - token_values['access_token_id'] = 
oauth1['access_token_id'] - return token_values diff --git a/keystone-moon/keystone/models/token_model.py b/keystone-moon/keystone/models/token_model.py deleted file mode 100644 index 32e6b365..00000000 --- a/keystone-moon/keystone/models/token_model.py +++ /dev/null @@ -1,339 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Unified in-memory token model.""" - -from keystoneclient.common import cms -from oslo_config import cfg -from oslo_utils import reflection -from oslo_utils import timeutils -import six - -from keystone import exception -from keystone.federation import constants -from keystone.i18n import _ - -CONF = cfg.CONF -# supported token versions -V2 = 'v2.0' -V3 = 'v3.0' -VERSIONS = frozenset([V2, V3]) - - -def _parse_and_normalize_time(time_data): - if isinstance(time_data, six.string_types): - time_data = timeutils.parse_isotime(time_data) - return timeutils.normalize_time(time_data) - - -class KeystoneToken(dict): - """An in-memory representation that unifies v2 and v3 tokens.""" - - # TODO(morganfainberg): Align this in-memory representation with the - # objects in keystoneclient. This object should be eventually updated - # to be the source of token data with the ability to emit any version - # of the token instead of only consuming the token dict and providing - # property accessors for the underlying data. 
- - def __init__(self, token_id, token_data): - self.token_data = token_data - if 'access' in token_data: - super(KeystoneToken, self).__init__(**token_data['access']) - self.version = V2 - elif 'token' in token_data and 'methods' in token_data['token']: - super(KeystoneToken, self).__init__(**token_data['token']) - self.version = V3 - else: - raise exception.UnsupportedTokenVersionException() - self.token_id = token_id - self.short_id = cms.cms_hash_token(token_id, - mode=CONF.token.hash_algorithm) - - if self.project_scoped and self.domain_scoped: - raise exception.UnexpectedError(_('Found invalid token: scoped to ' - 'both project and domain.')) - - def __repr__(self): - desc = ('<%(type)s (audit_id=%(audit_id)s, ' - 'audit_chain_id=%(audit_chain_id)s) at %(loc)s>') - self_cls_name = reflection.get_class_name(self, - fully_qualified=False) - return desc % {'type': self_cls_name, - 'audit_id': self.audit_id, - 'audit_chain_id': self.audit_chain_id, - 'loc': hex(id(self))} - - @property - def expires(self): - if self.version is V3: - expires_at = self['expires_at'] - else: - expires_at = self['token']['expires'] - return _parse_and_normalize_time(expires_at) - - @property - def issued(self): - if self.version is V3: - issued_at = self['issued_at'] - else: - issued_at = self['token']['issued_at'] - return _parse_and_normalize_time(issued_at) - - @property - def audit_id(self): - if self.version is V3: - return self.get('audit_ids', [None])[0] - return self['token'].get('audit_ids', [None])[0] - - @property - def audit_chain_id(self): - if self.version is V3: - return self.get('audit_ids', [None])[-1] - return self['token'].get('audit_ids', [None])[-1] - - @property - def auth_token(self): - return self.token_id - - @property - def user_id(self): - return self['user']['id'] - - @property - def user_name(self): - return self['user']['name'] - - @property - def user_domain_name(self): - try: - if self.version == V3: - return self['user']['domain']['name'] - elif 
'user' in self: - return "Default" - except KeyError: # nosec - # Do not raise KeyError, raise UnexpectedError - pass - raise exception.UnexpectedError() - - @property - def user_domain_id(self): - try: - if self.version == V3: - return self['user']['domain']['id'] - elif 'user' in self: - return CONF.identity.default_domain_id - except KeyError: # nosec - # Do not raise KeyError, raise UnexpectedError - pass - raise exception.UnexpectedError() - - @property - def domain_id(self): - if self.version is V3: - try: - return self['domain']['id'] - except KeyError: - # Do not raise KeyError, raise UnexpectedError - raise exception.UnexpectedError() - # No domain scoped tokens in V2. - raise NotImplementedError() - - @property - def domain_name(self): - if self.version is V3: - try: - return self['domain']['name'] - except KeyError: - # Do not raise KeyError, raise UnexpectedError - raise exception.UnexpectedError() - # No domain scoped tokens in V2. - raise NotImplementedError() - - @property - def project_id(self): - try: - if self.version is V3: - return self['project']['id'] - else: - return self['token']['tenant']['id'] - except KeyError: - # Do not raise KeyError, raise UnexpectedError - raise exception.UnexpectedError() - - @property - def project_name(self): - try: - if self.version is V3: - return self['project']['name'] - else: - return self['token']['tenant']['name'] - except KeyError: - # Do not raise KeyError, raise UnexpectedError - raise exception.UnexpectedError() - - @property - def project_domain_id(self): - try: - if self.version is V3: - return self['project']['domain']['id'] - elif 'tenant' in self['token']: - return CONF.identity.default_domain_id - except KeyError: # nosec - # Do not raise KeyError, raise UnexpectedError - pass - - raise exception.UnexpectedError() - - @property - def project_domain_name(self): - try: - if self.version is V3: - return self['project']['domain']['name'] - if 'tenant' in self['token']: - return 'Default' - except 
KeyError: # nosec - # Do not raise KeyError, raise UnexpectedError - pass - - raise exception.UnexpectedError() - - @property - def project_scoped(self): - if self.version is V3: - return 'project' in self - else: - return 'tenant' in self['token'] - - @property - def domain_scoped(self): - if self.version is V3: - return 'domain' in self - return False - - @property - def scoped(self): - return self.project_scoped or self.domain_scoped - - @property - def trust_id(self): - if self.version is V3: - return self.get('OS-TRUST:trust', {}).get('id') - else: - return self.get('trust', {}).get('id') - - @property - def trust_scoped(self): - if self.version is V3: - return 'OS-TRUST:trust' in self - else: - return 'trust' in self - - @property - def trustee_user_id(self): - if self.version is V3: - return self.get( - 'OS-TRUST:trust', {}).get('trustee_user_id') - else: - return self.get('trust', {}).get('trustee_user_id') - - @property - def trustor_user_id(self): - if self.version is V3: - return self.get( - 'OS-TRUST:trust', {}).get('trustor_user_id') - else: - return self.get('trust', {}).get('trustor_user_id') - - @property - def trust_impersonation(self): - if self.version is V3: - return self.get('OS-TRUST:trust', {}).get('impersonation') - else: - return self.get('trust', {}).get('impersonation') - - @property - def oauth_scoped(self): - return 'OS-OAUTH1' in self - - @property - def oauth_access_token_id(self): - if self.version is V3 and self.oauth_scoped: - return self['OS-OAUTH1']['access_token_id'] - return None - - @property - def oauth_consumer_id(self): - if self.version is V3 and self.oauth_scoped: - return self['OS-OAUTH1']['consumer_id'] - return None - - @property - def role_ids(self): - if self.version is V3: - return [r['id'] for r in self.get('roles', [])] - else: - return self.get('metadata', {}).get('roles', []) - - @property - def role_names(self): - if self.version is V3: - return [r['name'] for r in self.get('roles', [])] - else: - return 
[r['name'] for r in self['user'].get('roles', [])] - - @property - def bind(self): - if self.version is V3: - return self.get('bind') - return self.get('token', {}).get('bind') - - @property - def is_federated_user(self): - try: - return (self.version is V3 and - constants.FEDERATION in self['user']) - except KeyError: - raise exception.UnexpectedError() - - @property - def federation_group_ids(self): - if self.is_federated_user: - if self.version is V3: - try: - groups = self['user'][constants.FEDERATION].get( - 'groups', []) - return [g['id'] for g in groups] - except KeyError: - raise exception.UnexpectedError() - return [] - - @property - def federation_idp_id(self): - if self.version is not V3 or not self.is_federated_user: - return None - return self['user'][constants.FEDERATION]['identity_provider']['id'] - - @property - def federation_protocol_id(self): - if self.version is V3 and self.is_federated_user: - return self['user'][constants.FEDERATION]['protocol']['id'] - return None - - @property - def metadata(self): - return self.get('metadata', {}) - - @property - def methods(self): - if self.version is V3: - return self.get('methods', []) - return [] diff --git a/keystone-moon/keystone/notifications.py b/keystone-moon/keystone/notifications.py deleted file mode 100644 index 30d1713c..00000000 --- a/keystone-moon/keystone/notifications.py +++ /dev/null @@ -1,741 +0,0 @@ -# Copyright 2013 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Notifications module for OpenStack Identity Service resources""" - -import collections -import functools -import inspect -import logging -import socket - -from oslo_config import cfg -from oslo_log import log -import oslo_messaging -from oslo_utils import reflection -import pycadf -from pycadf import cadftaxonomy as taxonomy -from pycadf import cadftype -from pycadf import credential -from pycadf import eventfactory -from pycadf import resource - -from keystone.i18n import _, _LE -from keystone.common import utils - - -notifier_opts = [ - cfg.StrOpt('default_publisher_id', - help='Default publisher_id for outgoing notifications'), - cfg.StrOpt('notification_format', default='basic', - choices=['basic', 'cadf'], - help='Define the notification format for Identity Service ' - 'events. A "basic" notification has information about ' - 'the resource being operated on. A "cadf" notification ' - 'has the same information, as well as information about ' - 'the initiator of the event.'), - cfg.MultiStrOpt('notification_opt_out', default=[], - help='Define the notification options to opt-out from. ' - 'The value expected is: ' - 'identity... This field ' - 'can be set multiple times in order to add more ' - 'notifications to opt-out from. For example:\n ' - 'notification_opt_out=identity.user.created\n ' - 'notification_opt_out=identity.authenticate.success'), -] - -config_section = None -list_opts = lambda: [(config_section, notifier_opts), ] - -LOG = log.getLogger(__name__) -# NOTE(gyee): actions that can be notified. One must update this list whenever -# a new action is supported. 
-_ACTIONS = collections.namedtuple( - 'NotificationActions', - 'created, deleted, disabled, updated, internal') -ACTIONS = _ACTIONS(created='created', deleted='deleted', disabled='disabled', - updated='updated', internal='internal') -"""The actions on resources.""" - -CADF_TYPE_MAP = { - 'group': taxonomy.SECURITY_GROUP, - 'project': taxonomy.SECURITY_PROJECT, - 'role': taxonomy.SECURITY_ROLE, - 'user': taxonomy.SECURITY_ACCOUNT_USER, - 'domain': taxonomy.SECURITY_DOMAIN, - 'region': taxonomy.SECURITY_REGION, - 'endpoint': taxonomy.SECURITY_ENDPOINT, - 'service': taxonomy.SECURITY_SERVICE, - 'policy': taxonomy.SECURITY_POLICY, - 'OS-TRUST:trust': taxonomy.SECURITY_TRUST, - 'OS-OAUTH1:access_token': taxonomy.SECURITY_CREDENTIAL, - 'OS-OAUTH1:request_token': taxonomy.SECURITY_CREDENTIAL, - 'OS-OAUTH1:consumer': taxonomy.SECURITY_ACCOUNT, -} - -SAML_AUDIT_TYPE = 'http://docs.oasis-open.org/security/saml/v2.0' -# resource types that can be notified -_SUBSCRIBERS = {} -_notifier = None -SERVICE = 'identity' - - -CONF = cfg.CONF -CONF.register_opts(notifier_opts) - -# NOTE(morganfainberg): Special case notifications that are only used -# internally for handling token persistence token deletions -INVALIDATE_USER_TOKEN_PERSISTENCE = 'invalidate_user_tokens' -INVALIDATE_USER_PROJECT_TOKEN_PERSISTENCE = 'invalidate_user_project_tokens' -INVALIDATE_USER_OAUTH_CONSUMER_TOKENS = 'invalidate_user_consumer_tokens' - - -class Audit(object): - """Namespace for audit notification functions. - - This is a namespace object to contain all of the direct notification - functions utilized for ``Manager`` methods. - """ - - @classmethod - def _emit(cls, operation, resource_type, resource_id, initiator, public, - actor_dict=None): - """Directly send an event notification. 
- - :param operation: one of the values from ACTIONS - :param resource_type: type of resource being affected - :param resource_id: ID of the resource affected - :param initiator: CADF representation of the user that created the - request - :param public: If True (default), the event will be sent to the - notifier API. If False, the event will only be sent via - notify_event_callbacks to in process listeners - :param actor_dict: dictionary of actor information in the event of - assignment notification - """ - # NOTE(stevemar): the _send_notification function is - # overloaded, it's used to register callbacks and to actually - # send the notification externally. Thus, we should check - # the desired notification format in the function instead - # of before it. - _send_notification( - operation, - resource_type, - resource_id, - actor_dict, - public=public) - - if CONF.notification_format == 'cadf' and public: - outcome = taxonomy.OUTCOME_SUCCESS - _create_cadf_payload(operation, resource_type, resource_id, - outcome, initiator) - - @classmethod - def created(cls, resource_type, resource_id, initiator=None, - public=True): - cls._emit(ACTIONS.created, resource_type, resource_id, initiator, - public) - - @classmethod - def updated(cls, resource_type, resource_id, initiator=None, - public=True): - cls._emit(ACTIONS.updated, resource_type, resource_id, initiator, - public) - - @classmethod - def disabled(cls, resource_type, resource_id, initiator=None, - public=True): - cls._emit(ACTIONS.disabled, resource_type, resource_id, initiator, - public) - - @classmethod - def deleted(cls, resource_type, resource_id, initiator=None, - public=True): - cls._emit(ACTIONS.deleted, resource_type, resource_id, initiator, - public) - - @classmethod - def added_to(cls, target_type, target_id, actor_type, actor_id, - initiator=None, public=True): - actor_dict = {'id': actor_id, - 'type': actor_type, - 'actor_operation': 'added'} - cls._emit(ACTIONS.updated, target_type, target_id, 
initiator, public, - actor_dict=actor_dict) - - @classmethod - def removed_from(cls, target_type, target_id, actor_type, actor_id, - initiator=None, public=True): - actor_dict = {'id': actor_id, - 'type': actor_type, - 'actor_operation': 'removed'} - cls._emit(ACTIONS.updated, target_type, target_id, initiator, public, - actor_dict=actor_dict) - - @classmethod - def internal(cls, resource_type, resource_id): - # NOTE(lbragstad): Internal notifications are never public and have - # never used the initiator variable, but the _emit() method expects - # them. Let's set them here but not expose them through the method - # signature - that way someone can not do something like send an - # internal notification publicly. - initiator = None - public = False - cls._emit(ACTIONS.internal, resource_type, resource_id, initiator, - public) - - -def _get_callback_info(callback): - """Return list containing callback's module and name. - - If the callback is a bound instance method also return the class name. - - :param callback: Function to call - :type callback: function - :returns: List containing parent module, (optional class,) function name - :rtype: list - """ - module_name = getattr(callback, '__module__', None) - func_name = callback.__name__ - if inspect.ismethod(callback): - class_name = reflection.get_class_name(callback.__self__, - fully_qualified=False) - return [module_name, class_name, func_name] - else: - return [module_name, func_name] - - -def register_event_callback(event, resource_type, callbacks): - """Register each callback with the event. 
- - :param event: Action being registered - :type event: keystone.notifications.ACTIONS - :param resource_type: Type of resource being operated on - :type resource_type: str - :param callbacks: Callback items to be registered with event - :type callbacks: list - :raises ValueError: If event is not a valid ACTION - :raises TypeError: If callback is not callable - """ - if event not in ACTIONS: - raise ValueError(_('%(event)s is not a valid notification event, must ' - 'be one of: %(actions)s') % - {'event': event, 'actions': ', '.join(ACTIONS)}) - - if not hasattr(callbacks, '__iter__'): - callbacks = [callbacks] - - for callback in callbacks: - if not callable(callback): - msg = _('Method not callable: %s') % callback - LOG.error(msg) - raise TypeError(msg) - _SUBSCRIBERS.setdefault(event, {}).setdefault(resource_type, set()) - _SUBSCRIBERS[event][resource_type].add(callback) - - if LOG.logger.getEffectiveLevel() <= logging.DEBUG: - # Do this only if its going to appear in the logs. - msg = 'Callback: `%(callback)s` subscribed to event `%(event)s`.' - callback_info = _get_callback_info(callback) - callback_str = '.'.join(i for i in callback_info if i is not None) - event_str = '.'.join(['identity', resource_type, event]) - LOG.debug(msg, {'callback': callback_str, 'event': event_str}) - - -def listener(cls): - """A class decorator to declare a class to be a notification listener. - - A notification listener must specify the event(s) it is interested in by - defining a ``event_callbacks`` attribute or property. ``event_callbacks`` - is a dictionary where the key is the type of event and the value is a - dictionary containing a mapping of resource types to callback(s). - - :data:`.ACTIONS` contains constants for the currently - supported events. There is currently no single place to find constants for - the resource types. 
- - Example:: - - @listener - class Something(object): - - def __init__(self): - self.event_callbacks = { - notifications.ACTIONS.created: { - 'user': self._user_created_callback, - }, - notifications.ACTIONS.deleted: { - 'project': [ - self._project_deleted_callback, - self._do_cleanup, - ] - }, - } - - """ - def init_wrapper(init): - @functools.wraps(init) - def __new_init__(self, *args, **kwargs): - init(self, *args, **kwargs) - _register_event_callbacks(self) - return __new_init__ - - def _register_event_callbacks(self): - for event, resource_types in self.event_callbacks.items(): - for resource_type, callbacks in resource_types.items(): - register_event_callback(event, resource_type, callbacks) - - cls.__init__ = init_wrapper(cls.__init__) - return cls - - -def notify_event_callbacks(service, resource_type, operation, payload): - """Sends a notification to registered extensions.""" - if operation in _SUBSCRIBERS: - if resource_type in _SUBSCRIBERS[operation]: - for cb in _SUBSCRIBERS[operation][resource_type]: - subst_dict = {'cb_name': cb.__name__, - 'service': service, - 'resource_type': resource_type, - 'operation': operation, - 'payload': payload} - LOG.debug('Invoking callback %(cb_name)s for event ' - '%(service)s %(resource_type)s %(operation)s for ' - '%(payload)s', subst_dict) - cb(service, resource_type, operation, payload) - - -def _get_notifier(): - """Return a notifier object. - - If _notifier is None it means that a notifier object has not been set. - If _notifier is False it means that a notifier has previously failed to - construct. - Otherwise it is a constructed Notifier object. 
- """ - global _notifier - - if _notifier is None: - host = CONF.default_publisher_id or socket.gethostname() - try: - transport = oslo_messaging.get_transport(CONF) - _notifier = oslo_messaging.Notifier(transport, - "identity.%s" % host) - except Exception: - LOG.exception(_LE("Failed to construct notifier")) - _notifier = False - - return _notifier - - -def clear_subscribers(): - """Empty subscribers dictionary. - - This effectively stops notifications since there will be no subscribers - to publish to. - """ - _SUBSCRIBERS.clear() - - -def reset_notifier(): - """Reset the notifications internal state. - - This is used only for testing purposes. - - """ - global _notifier - _notifier = None - - -def _create_cadf_payload(operation, resource_type, resource_id, - outcome, initiator): - """Prepare data for CADF audit notifier. - - Transform the arguments into content to be consumed by the function that - emits CADF events (_send_audit_notification). Specifically the - ``resource_type`` (role, user, etc) must be transformed into a CADF - keyword, such as: ``data/security/role``. The ``resource_id`` is added as a - top level value for the ``resource_info`` key. Lastly, the ``operation`` is - used to create the CADF ``action``, and the ``event_type`` name. - - As per the CADF specification, the ``action`` must start with create, - update, delete, etc... i.e.: created.user or deleted.role - - However the ``event_type`` is an OpenStack-ism that is typically of the - form project.resource.operation. 
i.e.: identity.project.updated - - :param operation: operation being performed (created, updated, or deleted) - :param resource_type: type of resource being operated on (role, user, etc) - :param resource_id: ID of resource being operated on - :param outcome: outcomes of the operation (SUCCESS, FAILURE, etc) - :param initiator: CADF representation of the user that created the request - """ - if resource_type not in CADF_TYPE_MAP: - target_uri = taxonomy.UNKNOWN - else: - target_uri = CADF_TYPE_MAP.get(resource_type) - target = resource.Resource(typeURI=target_uri, - id=resource_id) - - audit_kwargs = {'resource_info': resource_id} - cadf_action = '%s.%s' % (operation, resource_type) - event_type = '%s.%s.%s' % (SERVICE, resource_type, operation) - - _send_audit_notification(cadf_action, initiator, outcome, - target, event_type, **audit_kwargs) - - -def _send_notification(operation, resource_type, resource_id, actor_dict=None, - public=True): - """Send notification to inform observers about the affected resource. - - This method doesn't raise an exception when sending the notification fails. - - :param operation: operation being performed (created, updated, or deleted) - :param resource_type: type of resource being operated on - :param resource_id: ID of resource being operated on - :param actor_dict: a dictionary containing the actor's ID and type - :param public: if True (default), the event will be sent - to the notifier API. - if False, the event will only be sent via - notify_event_callbacks to in process listeners. - """ - payload = {'resource_info': resource_id} - - if actor_dict: - payload['actor_id'] = actor_dict['id'] - payload['actor_type'] = actor_dict['type'] - payload['actor_operation'] = actor_dict['actor_operation'] - - notify_event_callbacks(SERVICE, resource_type, operation, payload) - - # Only send this notification if the 'basic' format is used, otherwise - # let the CADF functions handle sending the notification. 
But we check - # here so as to not disrupt the notify_event_callbacks function. - if public and CONF.notification_format == 'basic': - notifier = _get_notifier() - if notifier: - context = {} - event_type = '%(service)s.%(resource_type)s.%(operation)s' % { - 'service': SERVICE, - 'resource_type': resource_type, - 'operation': operation} - if _check_notification_opt_out(event_type, outcome=None): - return - try: - notifier.info(context, event_type, payload) - except Exception: - LOG.exception(_LE( - 'Failed to send %(res_id)s %(event_type)s notification'), - {'res_id': resource_id, 'event_type': event_type}) - - -def _get_request_audit_info(context, user_id=None): - """Collect audit information about the request used for CADF. - - :param context: Request context - :param user_id: Optional user ID, alternatively collected from context - :returns: Auditing data about the request - :rtype: :class:`pycadf.Resource` - """ - remote_addr = None - http_user_agent = None - project_id = None - domain_id = None - - if context and 'environment' in context and context['environment']: - environment = context['environment'] - remote_addr = environment.get('REMOTE_ADDR') - http_user_agent = environment.get('HTTP_USER_AGENT') - if not user_id: - user_id = environment.get('KEYSTONE_AUTH_CONTEXT', - {}).get('user_id') - project_id = environment.get('KEYSTONE_AUTH_CONTEXT', - {}).get('project_id') - domain_id = environment.get('KEYSTONE_AUTH_CONTEXT', - {}).get('domain_id') - - host = pycadf.host.Host(address=remote_addr, agent=http_user_agent) - initiator = resource.Resource(typeURI=taxonomy.ACCOUNT_USER, host=host) - - if user_id: - initiator.user_id = user_id - initiator.id = utils.resource_uuid(user_id) - - if project_id: - initiator.project_id = project_id - if domain_id: - initiator.domain_id = domain_id - - return initiator - - -class CadfNotificationWrapper(object): - """Send CADF event notifications for various methods. - - This function is only used for Authentication events. 
Its ``action`` and - ``event_type`` are dictated below. - - - action: ``authenticate`` - - event_type: ``identity.authenticate`` - - Sends CADF notifications for events such as whether an authentication was - successful or not. - - :param operation: The authentication related action being performed - - """ - - def __init__(self, operation): - self.action = operation - self.event_type = '%s.%s' % (SERVICE, operation) - - def __call__(self, f): - @functools.wraps(f) - def wrapper(wrapped_self, context, user_id, *args, **kwargs): - """Always send a notification.""" - initiator = _get_request_audit_info(context, user_id) - target = resource.Resource(typeURI=taxonomy.ACCOUNT_USER) - try: - result = f(wrapped_self, context, user_id, *args, **kwargs) - except Exception: - # For authentication failure send a cadf event as well - _send_audit_notification(self.action, initiator, - taxonomy.OUTCOME_FAILURE, - target, self.event_type) - raise - else: - _send_audit_notification(self.action, initiator, - taxonomy.OUTCOME_SUCCESS, - target, self.event_type) - return result - - return wrapper - - -class CadfRoleAssignmentNotificationWrapper(object): - """Send CADF notifications for ``role_assignment`` methods. - - This function is only used for role assignment events. Its ``action`` and - ``event_type`` are dictated below. - - - action: ``created.role_assignment`` or ``deleted.role_assignment`` - - event_type: ``identity.role_assignment.created`` or - ``identity.role_assignment.deleted`` - - Sends a CADF notification if the wrapped method does not raise an - :class:`Exception` (such as :class:`keystone.exception.NotFound`). 
- - :param operation: one of the values from ACTIONS (created or deleted) - """ - - ROLE_ASSIGNMENT = 'role_assignment' - - def __init__(self, operation): - self.action = '%s.%s' % (operation, self.ROLE_ASSIGNMENT) - self.event_type = '%s.%s.%s' % (SERVICE, self.ROLE_ASSIGNMENT, - operation) - - def __call__(self, f): - @functools.wraps(f) - def wrapper(wrapped_self, role_id, *args, **kwargs): - """Send a notification if the wrapped callable is successful. - - NOTE(stevemar): The reason we go through checking kwargs - and args for possible target and actor values is because the - create_grant() (and delete_grant()) method are called - differently in various tests. - Using named arguments, i.e.:: - - create_grant(user_id=user['id'], domain_id=domain['id'], - role_id=role['id']) - - Or, using positional arguments, i.e.:: - - create_grant(role_id['id'], user['id'], None, - domain_id=domain['id'], None) - - Or, both, i.e.:: - - create_grant(role_id['id'], user_id=user['id'], - domain_id=domain['id']) - - Checking the values for kwargs is easy enough, since it comes - in as a dictionary - - The actual method signature is - - :: - - create_grant(role_id, user_id=None, group_id=None, - domain_id=None, project_id=None, - inherited_to_projects=False) - - So, if the values of actor or target are still None after - checking kwargs, we can check the positional arguments, - based on the method signature. 
- """ - call_args = inspect.getcallargs( - f, wrapped_self, role_id, *args, **kwargs) - inherited = call_args['inherited_to_projects'] - context = call_args['context'] - - initiator = _get_request_audit_info(context) - target = resource.Resource(typeURI=taxonomy.ACCOUNT_USER) - - audit_kwargs = {} - if call_args['project_id']: - audit_kwargs['project'] = call_args['project_id'] - elif call_args['domain_id']: - audit_kwargs['domain'] = call_args['domain_id'] - - if call_args['user_id']: - audit_kwargs['user'] = call_args['user_id'] - elif call_args['group_id']: - audit_kwargs['group'] = call_args['group_id'] - - audit_kwargs['inherited_to_projects'] = inherited - audit_kwargs['role'] = role_id - - try: - result = f(wrapped_self, role_id, *args, **kwargs) - except Exception: - _send_audit_notification(self.action, initiator, - taxonomy.OUTCOME_FAILURE, - target, self.event_type, - **audit_kwargs) - raise - else: - _send_audit_notification(self.action, initiator, - taxonomy.OUTCOME_SUCCESS, - target, self.event_type, - **audit_kwargs) - return result - - return wrapper - - -def send_saml_audit_notification(action, context, user_id, group_ids, - identity_provider, protocol, token_id, - outcome): - """Send notification to inform observers about SAML events. 
- - :param action: Action being audited - :type action: str - :param context: Current request context to collect request info from - :type context: dict - :param user_id: User ID from Keystone token - :type user_id: str - :param group_ids: List of Group IDs from Keystone token - :type group_ids: list - :param identity_provider: ID of the IdP from the Keystone token - :type identity_provider: str or None - :param protocol: Protocol ID for IdP from the Keystone token - :type protocol: str - :param token_id: audit_id from Keystone token - :type token_id: str or None - :param outcome: One of :class:`pycadf.cadftaxonomy` - :type outcome: str - """ - initiator = _get_request_audit_info(context) - target = resource.Resource(typeURI=taxonomy.ACCOUNT_USER) - audit_type = SAML_AUDIT_TYPE - user_id = user_id or taxonomy.UNKNOWN - token_id = token_id or taxonomy.UNKNOWN - group_ids = group_ids or [] - cred = credential.FederatedCredential(token=token_id, type=audit_type, - identity_provider=identity_provider, - user=user_id, groups=group_ids) - initiator.credential = cred - event_type = '%s.%s' % (SERVICE, action) - _send_audit_notification(action, initiator, outcome, target, event_type) - - -def _send_audit_notification(action, initiator, outcome, target, - event_type, **kwargs): - """Send CADF notification to inform observers about the affected resource. - - This method logs an exception when sending the notification fails. - - :param action: CADF action being audited (e.g., 'authenticate') - :param initiator: CADF resource representing the initiator - :param outcome: The CADF outcome (taxonomy.OUTCOME_PENDING, - taxonomy.OUTCOME_SUCCESS, taxonomy.OUTCOME_FAILURE) - :param target: CADF resource representing the target - :param event_type: An OpenStack-ism, typically this is the meter name that - Ceilometer uses to poll events. - :param kwargs: Any additional arguments passed in will be added as - key-value pairs to the CADF event. 
- - """ - if _check_notification_opt_out(event_type, outcome): - return - - event = eventfactory.EventFactory().new_event( - eventType=cadftype.EVENTTYPE_ACTIVITY, - outcome=outcome, - action=action, - initiator=initiator, - target=target, - observer=resource.Resource(typeURI=taxonomy.SERVICE_SECURITY)) - - for key, value in kwargs.items(): - setattr(event, key, value) - - context = {} - payload = event.as_dict() - notifier = _get_notifier() - - if notifier: - try: - notifier.info(context, event_type, payload) - except Exception: - # diaper defense: any exception that occurs while emitting the - # notification should not interfere with the API request - LOG.exception(_LE( - 'Failed to send %(action)s %(event_type)s notification'), - {'action': action, 'event_type': event_type}) - - -def _check_notification_opt_out(event_type, outcome): - """Check if a particular event_type has been opted-out of. - - This method checks to see if an event should be sent to the messaging - service. Any event specified in the opt-out list will not be transmitted. - - :param event_type: This is the meter name that Ceilometer uses to poll - events. For example: identity.user.created, or - identity.authenticate.success, or identity.role_assignment.created - :param outcome: The CADF outcome (taxonomy.OUTCOME_PENDING, - taxonomy.OUTCOME_SUCCESS, taxonomy.OUTCOME_FAILURE) - - """ - # NOTE(stevemar): Special handling for authenticate, we look at the outcome - # as well when evaluating. For authN events, event_type is just - # idenitity.authenticate, which isn't fine enough to provide any opt-out - # value, so we attach the outcome to re-create the meter name used in - # ceilometer. - if 'authenticate' in event_type: - event_type = event_type + "." 
+ outcome - - if event_type in CONF.notification_opt_out: - return True - - return False - - -emit_event = CadfNotificationWrapper - - -role_assignment = CadfRoleAssignmentNotificationWrapper diff --git a/keystone-moon/keystone/oauth1/__init__.py b/keystone-moon/keystone/oauth1/__init__.py deleted file mode 100644 index ea011f6b..00000000 --- a/keystone-moon/keystone/oauth1/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.oauth1.core import * # noqa diff --git a/keystone-moon/keystone/oauth1/backends/__init__.py b/keystone-moon/keystone/oauth1/backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/oauth1/backends/sql.py b/keystone-moon/keystone/oauth1/backends/sql.py deleted file mode 100644 index c5da7873..00000000 --- a/keystone-moon/keystone/oauth1/backends/sql.py +++ /dev/null @@ -1,258 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -import datetime -import random as _random -import uuid - -from oslo_serialization import jsonutils -from oslo_utils import timeutils - -from keystone.common import sql -from keystone.common import utils -from keystone import exception -from keystone.i18n import _ -from keystone.oauth1 import core - - -random = _random.SystemRandom() - - -class Consumer(sql.ModelBase, sql.DictBase): - __tablename__ = 'consumer' - attributes = ['id', 'description', 'secret'] - id = sql.Column(sql.String(64), primary_key=True, nullable=False) - description = sql.Column(sql.String(64), nullable=True) - secret = sql.Column(sql.String(64), nullable=False) - extra = sql.Column(sql.JsonBlob(), nullable=False) - - -class RequestToken(sql.ModelBase, sql.DictBase): - __tablename__ = 'request_token' - attributes = ['id', 'request_secret', - 'verifier', 'authorizing_user_id', 'requested_project_id', - 'role_ids', 'consumer_id', 'expires_at'] - id = sql.Column(sql.String(64), primary_key=True, nullable=False) - request_secret = sql.Column(sql.String(64), nullable=False) - verifier = sql.Column(sql.String(64), nullable=True) - authorizing_user_id = sql.Column(sql.String(64), nullable=True) - requested_project_id = sql.Column(sql.String(64), nullable=False) - role_ids = sql.Column(sql.Text(), nullable=True) - consumer_id = sql.Column(sql.String(64), sql.ForeignKey('consumer.id'), - nullable=False, index=True) - expires_at = sql.Column(sql.String(64), nullable=True) - - @classmethod - def from_dict(cls, user_dict): - return cls(**user_dict) - - def to_dict(self): - return dict(self.items()) - - -class AccessToken(sql.ModelBase, sql.DictBase): - __tablename__ = 'access_token' - attributes = ['id', 'access_secret', 'authorizing_user_id', - 'project_id', 'role_ids', 'consumer_id', - 'expires_at'] - id = sql.Column(sql.String(64), primary_key=True, nullable=False) - access_secret = 
sql.Column(sql.String(64), nullable=False) - authorizing_user_id = sql.Column(sql.String(64), nullable=False, - index=True) - project_id = sql.Column(sql.String(64), nullable=False) - role_ids = sql.Column(sql.Text(), nullable=False) - consumer_id = sql.Column(sql.String(64), sql.ForeignKey('consumer.id'), - nullable=False) - expires_at = sql.Column(sql.String(64), nullable=True) - - @classmethod - def from_dict(cls, user_dict): - return cls(**user_dict) - - def to_dict(self): - return dict(self.items()) - - -class OAuth1(core.Oauth1DriverV8): - def _get_consumer(self, session, consumer_id): - consumer_ref = session.query(Consumer).get(consumer_id) - if consumer_ref is None: - raise exception.NotFound(_('Consumer not found')) - return consumer_ref - - def get_consumer_with_secret(self, consumer_id): - with sql.session_for_read() as session: - consumer_ref = self._get_consumer(session, consumer_id) - return consumer_ref.to_dict() - - def get_consumer(self, consumer_id): - return core.filter_consumer( - self.get_consumer_with_secret(consumer_id)) - - def create_consumer(self, consumer_ref): - with sql.session_for_write() as session: - consumer = Consumer.from_dict(consumer_ref) - session.add(consumer) - return consumer.to_dict() - - def _delete_consumer(self, session, consumer_id): - consumer_ref = self._get_consumer(session, consumer_id) - session.delete(consumer_ref) - - def _delete_request_tokens(self, session, consumer_id): - q = session.query(RequestToken) - req_tokens = q.filter_by(consumer_id=consumer_id) - req_tokens_list = set([x.id for x in req_tokens]) - for token_id in req_tokens_list: - token_ref = self._get_request_token(session, token_id) - session.delete(token_ref) - - def _delete_access_tokens(self, session, consumer_id): - q = session.query(AccessToken) - acc_tokens = q.filter_by(consumer_id=consumer_id) - acc_tokens_list = set([x.id for x in acc_tokens]) - for token_id in acc_tokens_list: - token_ref = self._get_access_token(session, token_id) - 
session.delete(token_ref) - - def delete_consumer(self, consumer_id): - with sql.session_for_write() as session: - self._delete_request_tokens(session, consumer_id) - self._delete_access_tokens(session, consumer_id) - self._delete_consumer(session, consumer_id) - - def list_consumers(self): - with sql.session_for_read() as session: - cons = session.query(Consumer) - return [core.filter_consumer(x.to_dict()) for x in cons] - - def update_consumer(self, consumer_id, consumer_ref): - with sql.session_for_write() as session: - consumer = self._get_consumer(session, consumer_id) - old_consumer_dict = consumer.to_dict() - old_consumer_dict.update(consumer_ref) - new_consumer = Consumer.from_dict(old_consumer_dict) - consumer.description = new_consumer.description - consumer.extra = new_consumer.extra - return core.filter_consumer(consumer.to_dict()) - - def create_request_token(self, consumer_id, requested_project, - request_token_duration): - request_token_id = uuid.uuid4().hex - request_token_secret = uuid.uuid4().hex - expiry_date = None - if request_token_duration: - now = timeutils.utcnow() - future = now + datetime.timedelta(seconds=request_token_duration) - expiry_date = utils.isotime(future, subsecond=True) - - ref = {} - ref['id'] = request_token_id - ref['request_secret'] = request_token_secret - ref['verifier'] = None - ref['authorizing_user_id'] = None - ref['requested_project_id'] = requested_project - ref['role_ids'] = None - ref['consumer_id'] = consumer_id - ref['expires_at'] = expiry_date - with sql.session_for_write() as session: - token_ref = RequestToken.from_dict(ref) - session.add(token_ref) - return token_ref.to_dict() - - def _get_request_token(self, session, request_token_id): - token_ref = session.query(RequestToken).get(request_token_id) - if token_ref is None: - raise exception.NotFound(_('Request token not found')) - return token_ref - - def get_request_token(self, request_token_id): - with sql.session_for_read() as session: - token_ref = 
self._get_request_token(session, request_token_id) - return token_ref.to_dict() - - def authorize_request_token(self, request_token_id, user_id, - role_ids): - with sql.session_for_write() as session: - token_ref = self._get_request_token(session, request_token_id) - token_dict = token_ref.to_dict() - token_dict['authorizing_user_id'] = user_id - token_dict['verifier'] = ''.join(random.sample(core.VERIFIER_CHARS, - 8)) - token_dict['role_ids'] = jsonutils.dumps(role_ids) - - new_token = RequestToken.from_dict(token_dict) - for attr in RequestToken.attributes: - if (attr == 'authorizing_user_id' or attr == 'verifier' - or attr == 'role_ids'): - setattr(token_ref, attr, getattr(new_token, attr)) - - return token_ref.to_dict() - - def create_access_token(self, request_id, access_token_duration): - access_token_id = uuid.uuid4().hex - access_token_secret = uuid.uuid4().hex - with sql.session_for_write() as session: - req_token_ref = self._get_request_token(session, request_id) - token_dict = req_token_ref.to_dict() - - expiry_date = None - if access_token_duration: - now = timeutils.utcnow() - future = (now + - datetime.timedelta(seconds=access_token_duration)) - expiry_date = utils.isotime(future, subsecond=True) - - # add Access Token - ref = {} - ref['id'] = access_token_id - ref['access_secret'] = access_token_secret - ref['authorizing_user_id'] = token_dict['authorizing_user_id'] - ref['project_id'] = token_dict['requested_project_id'] - ref['role_ids'] = token_dict['role_ids'] - ref['consumer_id'] = token_dict['consumer_id'] - ref['expires_at'] = expiry_date - token_ref = AccessToken.from_dict(ref) - session.add(token_ref) - - # remove request token, it's been used - session.delete(req_token_ref) - - return token_ref.to_dict() - - def _get_access_token(self, session, access_token_id): - token_ref = session.query(AccessToken).get(access_token_id) - if token_ref is None: - raise exception.NotFound(_('Access token not found')) - return token_ref - - def 
get_access_token(self, access_token_id): - with sql.session_for_read() as session: - token_ref = self._get_access_token(session, access_token_id) - return token_ref.to_dict() - - def list_access_tokens(self, user_id): - with sql.session_for_read() as session: - q = session.query(AccessToken) - user_auths = q.filter_by(authorizing_user_id=user_id) - return [core.filter_token(x.to_dict()) for x in user_auths] - - def delete_access_token(self, user_id, access_token_id): - with sql.session_for_write() as session: - token_ref = self._get_access_token(session, access_token_id) - token_dict = token_ref.to_dict() - if token_dict['authorizing_user_id'] != user_id: - raise exception.Unauthorized(_('User IDs do not match')) - - session.delete(token_ref) diff --git a/keystone-moon/keystone/oauth1/controllers.py b/keystone-moon/keystone/oauth1/controllers.py deleted file mode 100644 index 489bb4c7..00000000 --- a/keystone-moon/keystone/oauth1/controllers.py +++ /dev/null @@ -1,409 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Extensions supporting OAuth1.""" - -from oslo_config import cfg -from oslo_serialization import jsonutils -from oslo_utils import timeutils - -from keystone.common import controller -from keystone.common import dependency -from keystone.common import utils -from keystone.common import validation -from keystone.common import wsgi -from keystone import exception -from keystone.i18n import _ -from keystone import notifications -from keystone.oauth1 import core as oauth1 -from keystone.oauth1 import schema -from keystone.oauth1 import validator - - -CONF = cfg.CONF - - -def _emit_user_oauth_consumer_token_invalidate(payload): - # This is a special case notification that expect the payload to be a dict - # containing the user_id and the consumer_id. This is so that the token - # provider can invalidate any tokens in the token persistence if - # token persistence is enabled - notifications.Audit.internal( - notifications.INVALIDATE_USER_OAUTH_CONSUMER_TOKENS, - payload, - ) - - -@dependency.requires('oauth_api', 'token_provider_api') -class ConsumerCrudV3(controller.V3Controller): - collection_name = 'consumers' - member_name = 'consumer' - - @classmethod - def base_url(cls, context, path=None): - """Construct a path and pass it to V3Controller.base_url method.""" - # NOTE(stevemar): Overriding path to /OS-OAUTH1/consumers so that - # V3Controller.base_url handles setting the self link correctly. 
- path = '/OS-OAUTH1/' + cls.collection_name - return controller.V3Controller.base_url(context, path=path) - - @controller.protected() - @validation.validated(schema.consumer_create, 'consumer') - def create_consumer(self, context, consumer): - ref = self._assign_unique_id(self._normalize_dict(consumer)) - initiator = notifications._get_request_audit_info(context) - consumer_ref = self.oauth_api.create_consumer(ref, initiator) - return ConsumerCrudV3.wrap_member(context, consumer_ref) - - @controller.protected() - @validation.validated(schema.consumer_update, 'consumer') - def update_consumer(self, context, consumer_id, consumer): - self._require_matching_id(consumer_id, consumer) - ref = self._normalize_dict(consumer) - initiator = notifications._get_request_audit_info(context) - ref = self.oauth_api.update_consumer(consumer_id, ref, initiator) - return ConsumerCrudV3.wrap_member(context, ref) - - @controller.protected() - def list_consumers(self, context): - ref = self.oauth_api.list_consumers() - return ConsumerCrudV3.wrap_collection(context, ref) - - @controller.protected() - def get_consumer(self, context, consumer_id): - ref = self.oauth_api.get_consumer(consumer_id) - return ConsumerCrudV3.wrap_member(context, ref) - - @controller.protected() - def delete_consumer(self, context, consumer_id): - user_token_ref = utils.get_token_ref(context) - payload = {'user_id': user_token_ref.user_id, - 'consumer_id': consumer_id} - _emit_user_oauth_consumer_token_invalidate(payload) - initiator = notifications._get_request_audit_info(context) - self.oauth_api.delete_consumer(consumer_id, initiator) - - -@dependency.requires('oauth_api') -class AccessTokenCrudV3(controller.V3Controller): - collection_name = 'access_tokens' - member_name = 'access_token' - - @classmethod - def _add_self_referential_link(cls, context, ref): - # NOTE(lwolf): overriding method to add proper path to self link - ref.setdefault('links', {}) - path = '/users/%(user_id)s/OS-OAUTH1/access_tokens' % 
{ - 'user_id': cls._get_user_id(ref) - } - ref['links']['self'] = cls.base_url(context, path) + '/' + ref['id'] - - @controller.protected() - def get_access_token(self, context, user_id, access_token_id): - access_token = self.oauth_api.get_access_token(access_token_id) - if access_token['authorizing_user_id'] != user_id: - raise exception.NotFound() - access_token = self._format_token_entity(context, access_token) - return AccessTokenCrudV3.wrap_member(context, access_token) - - @controller.protected() - def list_access_tokens(self, context, user_id): - auth_context = context.get('environment', - {}).get('KEYSTONE_AUTH_CONTEXT', {}) - if auth_context.get('is_delegated_auth'): - raise exception.Forbidden( - _('Cannot list request tokens' - ' with a token issued via delegation.')) - refs = self.oauth_api.list_access_tokens(user_id) - formatted_refs = ([self._format_token_entity(context, x) - for x in refs]) - return AccessTokenCrudV3.wrap_collection(context, formatted_refs) - - @controller.protected() - def delete_access_token(self, context, user_id, access_token_id): - access_token = self.oauth_api.get_access_token(access_token_id) - consumer_id = access_token['consumer_id'] - payload = {'user_id': user_id, 'consumer_id': consumer_id} - _emit_user_oauth_consumer_token_invalidate(payload) - initiator = notifications._get_request_audit_info(context) - return self.oauth_api.delete_access_token( - user_id, access_token_id, initiator) - - @staticmethod - def _get_user_id(entity): - return entity.get('authorizing_user_id', '') - - def _format_token_entity(self, context, entity): - - formatted_entity = entity.copy() - access_token_id = formatted_entity['id'] - user_id = self._get_user_id(formatted_entity) - if 'role_ids' in entity: - formatted_entity.pop('role_ids') - if 'access_secret' in entity: - formatted_entity.pop('access_secret') - - url = ('/users/%(user_id)s/OS-OAUTH1/access_tokens/%(access_token_id)s' - '/roles' % {'user_id': user_id, - 'access_token_id': 
access_token_id}) - - formatted_entity.setdefault('links', {}) - formatted_entity['links']['roles'] = (self.base_url(context, url)) - - return formatted_entity - - -@dependency.requires('oauth_api', 'role_api') -class AccessTokenRolesV3(controller.V3Controller): - collection_name = 'roles' - member_name = 'role' - - @controller.protected() - def list_access_token_roles(self, context, user_id, access_token_id): - access_token = self.oauth_api.get_access_token(access_token_id) - if access_token['authorizing_user_id'] != user_id: - raise exception.NotFound() - authed_role_ids = access_token['role_ids'] - authed_role_ids = jsonutils.loads(authed_role_ids) - refs = ([self._format_role_entity(x) for x in authed_role_ids]) - return AccessTokenRolesV3.wrap_collection(context, refs) - - @controller.protected() - def get_access_token_role(self, context, user_id, - access_token_id, role_id): - access_token = self.oauth_api.get_access_token(access_token_id) - if access_token['authorizing_user_id'] != user_id: - raise exception.Unauthorized(_('User IDs do not match')) - authed_role_ids = access_token['role_ids'] - authed_role_ids = jsonutils.loads(authed_role_ids) - for authed_role_id in authed_role_ids: - if authed_role_id == role_id: - role = self._format_role_entity(role_id) - return AccessTokenRolesV3.wrap_member(context, role) - raise exception.RoleNotFound(role_id=role_id) - - def _format_role_entity(self, role_id): - role = self.role_api.get_role(role_id) - formatted_entity = role.copy() - if 'description' in role: - formatted_entity.pop('description') - if 'enabled' in role: - formatted_entity.pop('enabled') - return formatted_entity - - -@dependency.requires('assignment_api', 'oauth_api', - 'resource_api', 'token_provider_api') -class OAuthControllerV3(controller.V3Controller): - collection_name = 'not_used' - member_name = 'not_used' - - def create_request_token(self, context): - headers = context['headers'] - oauth_headers = oauth1.get_oauth_headers(headers) - 
consumer_id = oauth_headers.get('oauth_consumer_key') - requested_project_id = headers.get('Requested-Project-Id') - - if not consumer_id: - raise exception.ValidationError( - attribute='oauth_consumer_key', target='request') - if not requested_project_id: - raise exception.ValidationError( - attribute='requested_project_id', target='request') - - # NOTE(stevemar): Ensure consumer and requested project exist - self.resource_api.get_project(requested_project_id) - self.oauth_api.get_consumer(consumer_id) - - url = self.base_url(context, context['path']) - - req_headers = {'Requested-Project-Id': requested_project_id} - req_headers.update(headers) - request_verifier = oauth1.RequestTokenEndpoint( - request_validator=validator.OAuthValidator(), - token_generator=oauth1.token_generator) - h, b, s = request_verifier.create_request_token_response( - url, - http_method='POST', - body=context['query_string'], - headers=req_headers) - - if (not b) or int(s) > 399: - msg = _('Invalid signature') - raise exception.Unauthorized(message=msg) - - request_token_duration = CONF.oauth1.request_token_duration - initiator = notifications._get_request_audit_info(context) - token_ref = self.oauth_api.create_request_token(consumer_id, - requested_project_id, - request_token_duration, - initiator) - - result = ('oauth_token=%(key)s&oauth_token_secret=%(secret)s' - % {'key': token_ref['id'], - 'secret': token_ref['request_secret']}) - - if CONF.oauth1.request_token_duration: - expiry_bit = '&oauth_expires_at=%s' % token_ref['expires_at'] - result += expiry_bit - - headers = [('Content-Type', 'application/x-www-urlformencoded')] - response = wsgi.render_response(result, - status=(201, 'Created'), - headers=headers) - - return response - - def create_access_token(self, context): - headers = context['headers'] - oauth_headers = oauth1.get_oauth_headers(headers) - consumer_id = oauth_headers.get('oauth_consumer_key') - request_token_id = oauth_headers.get('oauth_token') - oauth_verifier = 
oauth_headers.get('oauth_verifier') - - if not consumer_id: - raise exception.ValidationError( - attribute='oauth_consumer_key', target='request') - if not request_token_id: - raise exception.ValidationError( - attribute='oauth_token', target='request') - if not oauth_verifier: - raise exception.ValidationError( - attribute='oauth_verifier', target='request') - - req_token = self.oauth_api.get_request_token( - request_token_id) - - expires_at = req_token['expires_at'] - if expires_at: - now = timeutils.utcnow() - expires = timeutils.normalize_time( - timeutils.parse_isotime(expires_at)) - if now > expires: - raise exception.Unauthorized(_('Request token is expired')) - - url = self.base_url(context, context['path']) - - access_verifier = oauth1.AccessTokenEndpoint( - request_validator=validator.OAuthValidator(), - token_generator=oauth1.token_generator) - h, b, s = access_verifier.create_access_token_response( - url, - http_method='POST', - body=context['query_string'], - headers=headers) - params = oauth1.extract_non_oauth_params(b) - if params: - msg = _('There should not be any non-oauth parameters') - raise exception.Unauthorized(message=msg) - - if req_token['consumer_id'] != consumer_id: - msg = _('provided consumer key does not match stored consumer key') - raise exception.Unauthorized(message=msg) - - if req_token['verifier'] != oauth_verifier: - msg = _('provided verifier does not match stored verifier') - raise exception.Unauthorized(message=msg) - - if req_token['id'] != request_token_id: - msg = _('provided request key does not match stored request key') - raise exception.Unauthorized(message=msg) - - if not req_token.get('authorizing_user_id'): - msg = _('Request Token does not have an authorizing user id') - raise exception.Unauthorized(message=msg) - - access_token_duration = CONF.oauth1.access_token_duration - initiator = notifications._get_request_audit_info(context) - token_ref = self.oauth_api.create_access_token(request_token_id, - 
access_token_duration, - initiator) - - result = ('oauth_token=%(key)s&oauth_token_secret=%(secret)s' - % {'key': token_ref['id'], - 'secret': token_ref['access_secret']}) - - if CONF.oauth1.access_token_duration: - expiry_bit = '&oauth_expires_at=%s' % (token_ref['expires_at']) - result += expiry_bit - - headers = [('Content-Type', 'application/x-www-urlformencoded')] - response = wsgi.render_response(result, - status=(201, 'Created'), - headers=headers) - - return response - - @controller.protected() - def authorize_request_token(self, context, request_token_id, roles): - """An authenticated user is going to authorize a request token. - - As a security precaution, the requested roles must match those in - the request token. Because this is in a CLI-only world at the moment, - there is not another easy way to make sure the user knows which roles - are being requested before authorizing. - """ - auth_context = context.get('environment', - {}).get('KEYSTONE_AUTH_CONTEXT', {}) - if auth_context.get('is_delegated_auth'): - raise exception.Forbidden( - _('Cannot authorize a request token' - ' with a token issued via delegation.')) - - req_token = self.oauth_api.get_request_token(request_token_id) - - expires_at = req_token['expires_at'] - if expires_at: - now = timeutils.utcnow() - expires = timeutils.normalize_time( - timeutils.parse_isotime(expires_at)) - if now > expires: - raise exception.Unauthorized(_('Request token is expired')) - - # put the roles in a set for easy comparison - authed_roles = set() - for role in roles: - authed_roles.add(role['id']) - - # verify the authorizing user has the roles - user_token = utils.get_token_ref(context) - user_id = user_token.user_id - project_id = req_token['requested_project_id'] - user_roles = self.assignment_api.get_roles_for_user_and_project( - user_id, project_id) - cred_set = set(user_roles) - - if not cred_set.issuperset(authed_roles): - msg = _('authorizing user does not have role required') - raise 
exception.Unauthorized(message=msg) - - # create list of just the id's for the backend - role_ids = list(authed_roles) - - # verify the user has the project too - req_project_id = req_token['requested_project_id'] - user_projects = self.assignment_api.list_projects_for_user(user_id) - for user_project in user_projects: - if user_project['id'] == req_project_id: - break - else: - msg = _("User is not a member of the requested project") - raise exception.Unauthorized(message=msg) - - # finally authorize the token - authed_token = self.oauth_api.authorize_request_token( - request_token_id, user_id, role_ids) - - to_return = {'token': {'oauth_verifier': authed_token['verifier']}} - return to_return diff --git a/keystone-moon/keystone/oauth1/core.py b/keystone-moon/keystone/oauth1/core.py deleted file mode 100644 index 2e52aefe..00000000 --- a/keystone-moon/keystone/oauth1/core.py +++ /dev/null @@ -1,367 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Main entry point into the OAuth1 service.""" - -from __future__ import absolute_import - -import abc -import string -import uuid - -import oauthlib.common -from oauthlib import oauth1 -from oslo_config import cfg -from oslo_log import log -import six - -from keystone.common import dependency -from keystone.common import extension -from keystone.common import manager -from keystone import exception -from keystone.i18n import _LE -from keystone import notifications - - -RequestValidator = oauth1.RequestValidator -Client = oauth1.Client -AccessTokenEndpoint = oauth1.AccessTokenEndpoint -ResourceEndpoint = oauth1.ResourceEndpoint -AuthorizationEndpoint = oauth1.AuthorizationEndpoint -SIG_HMAC = oauth1.SIGNATURE_HMAC -RequestTokenEndpoint = oauth1.RequestTokenEndpoint -oRequest = oauthlib.common.Request -# The characters used to generate verifiers are limited to alphanumerical -# values for ease of manual entry. Commonly confused characters are omitted. -VERIFIER_CHARS = string.ascii_letters + string.digits -CONFUSED_CHARS = 'jiIl1oO0' -VERIFIER_CHARS = ''.join(c for c in VERIFIER_CHARS if c not in CONFUSED_CHARS) - - -class Token(object): - def __init__(self, key, secret): - self.key = key - self.secret = secret - self.verifier = None - - def set_verifier(self, verifier): - self.verifier = verifier - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -def token_generator(*args, **kwargs): - return uuid.uuid4().hex - - -EXTENSION_DATA = { - 'name': 'OpenStack OAUTH1 API', - 'namespace': 'http://docs.openstack.org/identity/api/ext/' - 'OS-OAUTH1/v1.0', - 'alias': 'OS-OAUTH1', - 'updated': '2013-07-07T12:00:0-00:00', - 'description': 'OpenStack OAuth 1.0a Delegated Auth Mechanism.', - 'links': [ - { - 'rel': 'describedby', - 'type': 'text/html', - 'href': 'http://specs.openstack.org/openstack/keystone-specs/api/' - 'v3/identity-api-v3-os-oauth1-ext.html', - } - ]} -extension.register_admin_extension(EXTENSION_DATA['alias'], EXTENSION_DATA) 
-extension.register_public_extension(EXTENSION_DATA['alias'], EXTENSION_DATA) - - -def filter_consumer(consumer_ref): - """Filter out private items in a consumer dict. - - 'secret' is never returned. - - :returns: consumer_ref - - """ - if consumer_ref: - consumer_ref = consumer_ref.copy() - consumer_ref.pop('secret', None) - return consumer_ref - - -def filter_token(access_token_ref): - """Filter out private items in an access token dict. - - 'access_secret' is never returned. - - :returns: access_token_ref - - """ - if access_token_ref: - access_token_ref = access_token_ref.copy() - access_token_ref.pop('access_secret', None) - return access_token_ref - - -def get_oauth_headers(headers): - parameters = {} - - # The incoming headers variable is your usual heading from context - # In an OAuth signed req, where the oauth variables are in the header, - # they with the key 'Authorization'. - - if headers and 'Authorization' in headers: - # A typical value for Authorization is seen below - # 'OAuth realm="", oauth_body_hash="2jm%3D", oauth_nonce="14475435" - # along with other oauth variables, the 'OAuth ' part is trimmed - # to split the rest of the headers. - - auth_header = headers['Authorization'] - params = oauth1.rfc5849.utils.parse_authorization_header(auth_header) - parameters.update(dict(params)) - return parameters - else: - msg = _LE('Cannot retrieve Authorization headers') - LOG.error(msg) - raise exception.OAuthHeadersMissingError() - - -def extract_non_oauth_params(query_string): - params = oauthlib.common.extract_params(query_string) - return {k: v for k, v in params if not k.startswith('oauth_')} - - -@dependency.provider('oauth_api') -class Manager(manager.Manager): - """Default pivot point for the OAuth1 backend. - - See :mod:`keystone.common.manager.Manager` for more details on how this - dynamically calls the backend. 
- - """ - - driver_namespace = 'keystone.oauth1' - - _ACCESS_TOKEN = "OS-OAUTH1:access_token" - _REQUEST_TOKEN = "OS-OAUTH1:request_token" - _CONSUMER = "OS-OAUTH1:consumer" - - def __init__(self): - super(Manager, self).__init__(CONF.oauth1.driver) - - def create_consumer(self, consumer_ref, initiator=None): - consumer_ref = consumer_ref.copy() - consumer_ref['secret'] = uuid.uuid4().hex - ret = self.driver.create_consumer(consumer_ref) - notifications.Audit.created(self._CONSUMER, ret['id'], initiator) - return ret - - def update_consumer(self, consumer_id, consumer_ref, initiator=None): - ret = self.driver.update_consumer(consumer_id, consumer_ref) - notifications.Audit.updated(self._CONSUMER, consumer_id, initiator) - return ret - - def delete_consumer(self, consumer_id, initiator=None): - ret = self.driver.delete_consumer(consumer_id) - notifications.Audit.deleted(self._CONSUMER, consumer_id, initiator) - return ret - - def create_access_token(self, request_id, access_token_duration, - initiator=None): - ret = self.driver.create_access_token(request_id, - access_token_duration) - notifications.Audit.created(self._ACCESS_TOKEN, ret['id'], initiator) - return ret - - def delete_access_token(self, user_id, access_token_id, initiator=None): - ret = self.driver.delete_access_token(user_id, access_token_id) - notifications.Audit.deleted(self._ACCESS_TOKEN, access_token_id, - initiator) - return ret - - def create_request_token(self, consumer_id, requested_project, - request_token_duration, initiator=None): - ret = self.driver.create_request_token( - consumer_id, requested_project, request_token_duration) - notifications.Audit.created(self._REQUEST_TOKEN, ret['id'], - initiator) - return ret - - -@six.add_metaclass(abc.ABCMeta) -class Oauth1DriverV8(object): - """Interface description for an OAuth1 driver.""" - - @abc.abstractmethod - def create_consumer(self, consumer_ref): - """Create consumer. 
- - :param consumer_ref: consumer ref with consumer name - :type consumer_ref: dict - :returns: consumer_ref - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def update_consumer(self, consumer_id, consumer_ref): - """Update consumer. - - :param consumer_id: id of consumer to update - :type consumer_id: string - :param consumer_ref: new consumer ref with consumer name - :type consumer_ref: dict - :returns: consumer_ref - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_consumers(self): - """List consumers. - - :returns: list of consumers - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_consumer(self, consumer_id): - """Get consumer, returns the consumer id (key) and description. - - :param consumer_id: id of consumer to get - :type consumer_id: string - :returns: consumer_ref - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_consumer_with_secret(self, consumer_id): - """Like get_consumer(), but also returns consumer secret. - - Returned dictionary consumer_ref includes consumer secret. - Secrets should only be shared upon consumer creation; the - consumer secret is required to verify incoming OAuth requests. - - :param consumer_id: id of consumer to get - :type consumer_id: string - :returns: consumer_ref containing consumer secret - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_consumer(self, consumer_id): - """Delete consumer. - - :param consumer_id: id of consumer to get - :type consumer_id: string - :returns: None. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_access_tokens(self, user_id): - """List access tokens. 
- - :param user_id: search for access tokens authorized by given user id - :type user_id: string - :returns: list of access tokens the user has authorized - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_access_token(self, user_id, access_token_id): - """Delete access token. - - :param user_id: authorizing user id - :type user_id: string - :param access_token_id: access token to delete - :type access_token_id: string - :returns: None - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def create_request_token(self, consumer_id, requested_project, - request_token_duration): - """Create request token. - - :param consumer_id: the id of the consumer - :type consumer_id: string - :param requested_project_id: requested project id - :type requested_project_id: string - :param request_token_duration: duration of request token - :type request_token_duration: string - :returns: request_token_ref - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_request_token(self, request_token_id): - """Get request token. - - :param request_token_id: the id of the request token - :type request_token_id: string - :returns: request_token_ref - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_access_token(self, access_token_id): - """Get access token. - - :param access_token_id: the id of the access token - :type access_token_id: string - :returns: access_token_ref - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def authorize_request_token(self, request_token_id, user_id, role_ids): - """Authorize request token. 
- - :param request_token_id: the id of the request token, to be authorized - :type request_token_id: string - :param user_id: the id of the authorizing user - :type user_id: string - :param role_ids: list of role ids to authorize - :type role_ids: list - :returns: verifier - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def create_access_token(self, request_id, access_token_duration): - """Create access token. - - :param request_id: the id of the request token, to be deleted - :type request_id: string - :param access_token_duration: duration of an access token - :type access_token_duration: string - :returns: access_token_ref - - """ - raise exception.NotImplemented() # pragma: no cover - - -Driver = manager.create_legacy_driver(Oauth1DriverV8) diff --git a/keystone-moon/keystone/oauth1/routers.py b/keystone-moon/keystone/oauth1/routers.py deleted file mode 100644 index 0575b107..00000000 --- a/keystone-moon/keystone/oauth1/routers.py +++ /dev/null @@ -1,154 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import functools - -from keystone.common import json_home -from keystone.common import wsgi -from keystone.oauth1 import controllers - - -build_resource_relation = functools.partial( - json_home.build_v3_extension_resource_relation, - extension_name='OS-OAUTH1', extension_version='1.0') - -build_parameter_relation = functools.partial( - json_home.build_v3_extension_parameter_relation, - extension_name='OS-OAUTH1', extension_version='1.0') - -ACCESS_TOKEN_ID_PARAMETER_RELATION = build_parameter_relation( - parameter_name='access_token_id') - - -class Routers(wsgi.RoutersBase): - """API Endpoints for the OAuth1 extension. - - The goal of this extension is to allow third-party service providers - to acquire tokens with a limited subset of a user's roles for acting - on behalf of that user. This is done using an oauth-similar flow and - api. - - The API looks like:: - - # Basic admin-only consumer crud - POST /OS-OAUTH1/consumers - GET /OS-OAUTH1/consumers - PATCH /OS-OAUTH1/consumers/{consumer_id} - GET /OS-OAUTH1/consumers/{consumer_id} - DELETE /OS-OAUTH1/consumers/{consumer_id} - - # User access token crud - GET /users/{user_id}/OS-OAUTH1/access_tokens - GET /users/{user_id}/OS-OAUTH1/access_tokens/{access_token_id} - GET /users/{user_id}/OS-OAUTH1/access_tokens/{access_token_id}/roles - GET /users/{user_id}/OS-OAUTH1/access_tokens - /{access_token_id}/roles/{role_id} - DELETE /users/{user_id}/OS-OAUTH1/access_tokens/{access_token_id} - - # OAuth interfaces - POST /OS-OAUTH1/request_token # create a request token - PUT /OS-OAUTH1/authorize # authorize a request token - POST /OS-OAUTH1/access_token # create an access token - - """ - - def append_v3_routers(self, mapper, routers): - consumer_controller = controllers.ConsumerCrudV3() - access_token_controller = controllers.AccessTokenCrudV3() - access_token_roles_controller = controllers.AccessTokenRolesV3() - oauth_controller = controllers.OAuthControllerV3() - - # basic admin-only consumer crud - 
self._add_resource( - mapper, consumer_controller, - path='/OS-OAUTH1/consumers', - get_action='list_consumers', - post_action='create_consumer', - rel=build_resource_relation(resource_name='consumers')) - self._add_resource( - mapper, consumer_controller, - path='/OS-OAUTH1/consumers/{consumer_id}', - get_action='get_consumer', - patch_action='update_consumer', - delete_action='delete_consumer', - rel=build_resource_relation(resource_name='consumer'), - path_vars={ - 'consumer_id': - build_parameter_relation(parameter_name='consumer_id'), - }) - - # user access token crud - self._add_resource( - mapper, access_token_controller, - path='/users/{user_id}/OS-OAUTH1/access_tokens', - get_action='list_access_tokens', - rel=build_resource_relation(resource_name='user_access_tokens'), - path_vars={ - 'user_id': json_home.Parameters.USER_ID, - }) - self._add_resource( - mapper, access_token_controller, - path='/users/{user_id}/OS-OAUTH1/access_tokens/{access_token_id}', - get_action='get_access_token', - delete_action='delete_access_token', - rel=build_resource_relation(resource_name='user_access_token'), - path_vars={ - 'access_token_id': ACCESS_TOKEN_ID_PARAMETER_RELATION, - 'user_id': json_home.Parameters.USER_ID, - }) - self._add_resource( - mapper, access_token_roles_controller, - path='/users/{user_id}/OS-OAUTH1/access_tokens/{access_token_id}/' - 'roles', - get_action='list_access_token_roles', - rel=build_resource_relation( - resource_name='user_access_token_roles'), - path_vars={ - 'access_token_id': ACCESS_TOKEN_ID_PARAMETER_RELATION, - 'user_id': json_home.Parameters.USER_ID, - }) - self._add_resource( - mapper, access_token_roles_controller, - path='/users/{user_id}/OS-OAUTH1/access_tokens/{access_token_id}/' - 'roles/{role_id}', - get_action='get_access_token_role', - rel=build_resource_relation( - resource_name='user_access_token_role'), - path_vars={ - 'access_token_id': ACCESS_TOKEN_ID_PARAMETER_RELATION, - 'role_id': json_home.Parameters.ROLE_ID, - 
'user_id': json_home.Parameters.USER_ID, - }) - - # oauth flow calls - self._add_resource( - mapper, oauth_controller, - path='/OS-OAUTH1/request_token', - post_action='create_request_token', - rel=build_resource_relation(resource_name='request_tokens')) - self._add_resource( - mapper, oauth_controller, - path='/OS-OAUTH1/access_token', - post_action='create_access_token', - rel=build_resource_relation(resource_name='access_tokens')) - self._add_resource( - mapper, oauth_controller, - path='/OS-OAUTH1/authorize/{request_token_id}', - path_vars={ - 'request_token_id': - build_parameter_relation(parameter_name='request_token_id') - }, - put_action='authorize_request_token', - rel=build_resource_relation( - resource_name='authorize_request_token')) diff --git a/keystone-moon/keystone/oauth1/schema.py b/keystone-moon/keystone/oauth1/schema.py deleted file mode 100644 index 51c11afe..00000000 --- a/keystone-moon/keystone/oauth1/schema.py +++ /dev/null @@ -1,34 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone.common import validation -from keystone.common.validation import parameter_types - -_consumer_properties = { - 'description': validation.nullable(parameter_types.description) -} - -consumer_create = { - 'type': 'object', - 'properties': _consumer_properties, - 'additionalProperties': True -} - -consumer_update = { - 'type': 'object', - 'properties': _consumer_properties, - 'not': { - 'required': ['secret'] - }, - 'minProperties': 1, - 'additionalProperties': True -} diff --git a/keystone-moon/keystone/oauth1/validator.py b/keystone-moon/keystone/oauth1/validator.py deleted file mode 100644 index f21a02d7..00000000 --- a/keystone-moon/keystone/oauth1/validator.py +++ /dev/null @@ -1,177 +0,0 @@ -# Copyright 2014 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""oAuthlib request validator.""" - -import six - -from keystone.common import dependency -from keystone import exception -from keystone.oauth1 import core as oauth1 - - -METHOD_NAME = 'oauth_validator' - - -@dependency.requires('oauth_api') -class OAuthValidator(oauth1.RequestValidator): - - # TODO(mhu) set as option probably? 
- @property - def enforce_ssl(self): - return False - - @property - def safe_characters(self): - # oauth tokens are generated from a uuid hex value - return set("abcdef0123456789") - - def _check_token(self, token): - # generic token verification when they're obtained from a uuid hex - return (set(token) <= self.safe_characters and - len(token) == 32) - - def check_client_key(self, client_key): - return self._check_token(client_key) - - def check_request_token(self, request_token): - return self._check_token(request_token) - - def check_access_token(self, access_token): - return self._check_token(access_token) - - def check_nonce(self, nonce): - # Assuming length is not a concern - return set(nonce) <= self.safe_characters - - def check_verifier(self, verifier): - return (all(i in oauth1.VERIFIER_CHARS for i in verifier) and - len(verifier) == 8) - - def get_client_secret(self, client_key, request): - client = self.oauth_api.get_consumer_with_secret(client_key) - return client['secret'] - - def get_request_token_secret(self, client_key, token, request): - token_ref = self.oauth_api.get_request_token(token) - return token_ref['request_secret'] - - def get_access_token_secret(self, client_key, token, request): - access_token = self.oauth_api.get_access_token(token) - return access_token['access_secret'] - - def get_default_realms(self, client_key, request): - # realms weren't implemented with the previous library - return [] - - def get_realms(self, token, request): - return [] - - def get_redirect_uri(self, token, request): - # OOB (out of band) is supposed to be the default value to use - return 'oob' - - def get_rsa_key(self, client_key, request): - # HMAC signing is used, so return a dummy value - return '' - - def invalidate_request_token(self, client_key, request_token, request): - # this method is invoked when an access token is generated out of a - # request token, to make sure that request token cannot be consumed - # anymore. 
This is done in the backend, so we do nothing here. - pass - - def validate_client_key(self, client_key, request): - try: - return self.oauth_api.get_consumer(client_key) is not None - except exception.NotFound: - return False - - def validate_request_token(self, client_key, token, request): - try: - return self.oauth_api.get_request_token(token) is not None - except exception.NotFound: - return False - - def validate_access_token(self, client_key, token, request): - try: - return self.oauth_api.get_access_token(token) is not None - except exception.NotFound: - return False - - def validate_timestamp_and_nonce(self, - client_key, - timestamp, - nonce, - request, - request_token=None, - access_token=None): - return True - - def validate_redirect_uri(self, client_key, redirect_uri, request): - # we expect OOB, we don't really care - return True - - def validate_requested_realms(self, client_key, realms, request): - # realms are not used - return True - - def validate_realms(self, - client_key, - token, - request, - uri=None, - realms=None): - return True - - def validate_verifier(self, client_key, token, verifier, request): - try: - req_token = self.oauth_api.get_request_token(token) - return req_token['verifier'] == verifier - except exception.NotFound: - return False - - def verify_request_token(self, token, request): - # there aren't strong expectations on the request token format - return isinstance(token, six.string_types) - - def verify_realms(self, token, realms, request): - return True - - # The following save_XXX methods are called to create tokens. I chose to - # keep the original logic, but the comments below show how that could be - # implemented. The real implementation logic is in the backend. 
- def save_access_token(self, token, request): - pass -# token_duration = CONF.oauth1.request_token_duration -# request_token_id = request.client_key -# self.oauth_api.create_access_token(request_token_id, -# token_duration, -# token["oauth_token"], -# token["oauth_token_secret"]) - - def save_request_token(self, token, request): - pass -# project_id = request.headers.get('Requested-Project-Id') -# token_duration = CONF.oauth1.request_token_duration -# self.oauth_api.create_request_token(request.client_key, -# project_id, -# token_duration, -# token["oauth_token"], -# token["oauth_token_secret"]) - - def save_verifier(self, token, verifier, request): - # keep the old logic for this, as it is done in two steps and requires - # information that the request validator has no access to - pass diff --git a/keystone-moon/keystone/openstack/__init__.py b/keystone-moon/keystone/openstack/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/openstack/common/README b/keystone-moon/keystone/openstack/common/README deleted file mode 100644 index 0700c72b..00000000 --- a/keystone-moon/keystone/openstack/common/README +++ /dev/null @@ -1,13 +0,0 @@ -openstack-common ----------------- - -A number of modules from openstack-common are imported into this project. - -These modules are "incubating" in openstack-common and are kept in sync -with the help of openstack-common's update.py script. See: - - https://wiki.openstack.org/wiki/Oslo#Syncing_Code_from_Incubator - -The copy of the code should never be directly modified here. Please -always update openstack-common first and then run the script to copy -the changes across. 
diff --git a/keystone-moon/keystone/openstack/common/__init__.py b/keystone-moon/keystone/openstack/common/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/openstack/common/_i18n.py b/keystone-moon/keystone/openstack/common/_i18n.py deleted file mode 100644 index 76a74c05..00000000 --- a/keystone-moon/keystone/openstack/common/_i18n.py +++ /dev/null @@ -1,45 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""oslo.i18n integration module. - -See http://docs.openstack.org/developer/oslo.i18n/usage.html - -""" - -try: - import oslo_i18n - - # NOTE(dhellmann): This reference to o-s-l-o will be replaced by the - # application name when this module is synced into the separate - # repository. It is OK to have more than one translation function - # using the same domain, since there will still only be one message - # catalog. - _translators = oslo_i18n.TranslatorFactory(domain='keystone') - - # The primary translation function using the well-known name "_" - _ = _translators.primary - - # Translators for log levels. - # - # The abbreviated names are meant to reflect the usual use of a short - # name like '_'. The "L" is for "log" and the other letter comes from - # the level. 
- _LI = _translators.log_info - _LW = _translators.log_warning - _LE = _translators.log_error - _LC = _translators.log_critical -except ImportError: - # NOTE(dims): Support for cases where a project wants to use - # code from oslo-incubator, but is not ready to be internationalized - # (like tempest) - _ = _LI = _LW = _LE = _LC = lambda x: x diff --git a/keystone-moon/keystone/openstack/common/eventlet_backdoor.py b/keystone-moon/keystone/openstack/common/eventlet_backdoor.py deleted file mode 100644 index c656d81b..00000000 --- a/keystone-moon/keystone/openstack/common/eventlet_backdoor.py +++ /dev/null @@ -1,151 +0,0 @@ -# Copyright (c) 2012 OpenStack Foundation. -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from __future__ import print_function - -import copy -import errno -import gc -import logging -import os -import pprint -import socket -import sys -import traceback - -import eventlet.backdoor -import greenlet -from oslo_config import cfg - -from keystone.openstack.common._i18n import _LI - -help_for_backdoor_port = ( - "Acceptable values are 0, , and :, where 0 results " - "in listening on a random tcp port number; results in listening " - "on the specified port number (and not enabling backdoor if that port " - "is in use); and : results in listening on the smallest " - "unused port number within the specified range of port numbers. 
The " - "chosen port is displayed in the service's log file.") -eventlet_backdoor_opts = [ - cfg.StrOpt('backdoor_port', - help="Enable eventlet backdoor. %s" % help_for_backdoor_port) -] - -CONF = cfg.CONF -CONF.register_opts(eventlet_backdoor_opts) -LOG = logging.getLogger(__name__) - - -def list_opts(): - """Entry point for oslo-config-generator. - """ - return [(None, copy.deepcopy(eventlet_backdoor_opts))] - - -class EventletBackdoorConfigValueError(Exception): - def __init__(self, port_range, help_msg, ex): - msg = ('Invalid backdoor_port configuration %(range)s: %(ex)s. ' - '%(help)s' % - {'range': port_range, 'ex': ex, 'help': help_msg}) - super(EventletBackdoorConfigValueError, self).__init__(msg) - self.port_range = port_range - - -def _dont_use_this(): - print("Don't use this, just disconnect instead") - - -def _find_objects(t): - return [o for o in gc.get_objects() if isinstance(o, t)] - - -def _print_greenthreads(): - for i, gt in enumerate(_find_objects(greenlet.greenlet)): - print(i, gt) - traceback.print_stack(gt.gr_frame) - print() - - -def _print_nativethreads(): - for threadId, stack in sys._current_frames().items(): - print(threadId) - traceback.print_stack(stack) - print() - - -def _parse_port_range(port_range): - if ':' not in port_range: - start, end = port_range, port_range - else: - start, end = port_range.split(':', 1) - try: - start, end = int(start), int(end) - if end < start: - raise ValueError - return start, end - except ValueError as ex: - raise EventletBackdoorConfigValueError(port_range, ex, - help_for_backdoor_port) - - -def _listen(host, start_port, end_port, listen_func): - try_port = start_port - while True: - try: - return listen_func((host, try_port)) - except socket.error as exc: - if (exc.errno != errno.EADDRINUSE or - try_port >= end_port): - raise - try_port += 1 - - -def initialize_if_enabled(): - backdoor_locals = { - 'exit': _dont_use_this, # So we don't exit the entire process - 'quit': _dont_use_this, # So we don't 
exit the entire process - 'fo': _find_objects, - 'pgt': _print_greenthreads, - 'pnt': _print_nativethreads, - } - - if CONF.backdoor_port is None: - return None - - start_port, end_port = _parse_port_range(str(CONF.backdoor_port)) - - # NOTE(johannes): The standard sys.displayhook will print the value of - # the last expression and set it to __builtin__._, which overwrites - # the __builtin__._ that gettext sets. Let's switch to using pprint - # since it won't interact poorly with gettext, and it's easier to - # read the output too. - def displayhook(val): - if val is not None: - pprint.pprint(val) - sys.displayhook = displayhook - - sock = _listen('localhost', start_port, end_port, eventlet.listen) - - # In the case of backdoor port being zero, a port number is assigned by - # listen(). In any case, pull the port number out here. - port = sock.getsockname()[1] - LOG.info( - _LI('Eventlet backdoor listening on %(port)s for process %(pid)d') % - {'port': port, 'pid': os.getpid()} - ) - eventlet.spawn_n(eventlet.backdoor.backdoor_server, sock, - locals=backdoor_locals) - return port diff --git a/keystone-moon/keystone/openstack/common/fileutils.py b/keystone-moon/keystone/openstack/common/fileutils.py deleted file mode 100644 index 9097c35d..00000000 --- a/keystone-moon/keystone/openstack/common/fileutils.py +++ /dev/null @@ -1,149 +0,0 @@ -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import contextlib -import errno -import logging -import os -import stat -import tempfile - -from oslo_utils import excutils - -LOG = logging.getLogger(__name__) - -_FILE_CACHE = {} -DEFAULT_MODE = stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO - - -def ensure_tree(path, mode=DEFAULT_MODE): - """Create a directory (and any ancestor directories required) - - :param path: Directory to create - :param mode: Directory creation permissions - """ - try: - os.makedirs(path, mode) - except OSError as exc: - if exc.errno == errno.EEXIST: - if not os.path.isdir(path): - raise - else: - raise - - -def read_cached_file(filename, force_reload=False): - """Read from a file if it has been modified. - - :param force_reload: Whether to reload the file. - :returns: A tuple with a boolean specifying if the data is fresh - or not. - """ - global _FILE_CACHE - - if force_reload: - delete_cached_file(filename) - - reloaded = False - mtime = os.path.getmtime(filename) - cache_info = _FILE_CACHE.setdefault(filename, {}) - - if not cache_info or mtime > cache_info.get('mtime', 0): - LOG.debug("Reloading cached file %s" % filename) - with open(filename) as fap: - cache_info['data'] = fap.read() - cache_info['mtime'] = mtime - reloaded = True - return (reloaded, cache_info['data']) - - -def delete_cached_file(filename): - """Delete cached file if present. - - :param filename: filename to delete - """ - global _FILE_CACHE - - if filename in _FILE_CACHE: - del _FILE_CACHE[filename] - - -def delete_if_exists(path, remove=os.unlink): - """Delete a file, but ignore file not found error. - - :param path: File to delete - :param remove: Optional function to remove passed path - """ - - try: - remove(path) - except OSError as e: - if e.errno != errno.ENOENT: - raise - - -@contextlib.contextmanager -def remove_path_on_error(path, remove=delete_if_exists): - """Protect code that wants to operate on PATH atomically. - Any exception will cause PATH to be removed. 
- - :param path: File to work with - :param remove: Optional function to remove passed path - """ - - try: - yield - except Exception: - with excutils.save_and_reraise_exception(): - remove(path) - - -def file_open(*args, **kwargs): - """Open file - - see built-in open() documentation for more details - - Note: The reason this is kept in a separate module is to easily - be able to provide a stub module that doesn't alter system - state at all (for unit tests) - """ - return open(*args, **kwargs) - - -def write_to_tempfile(content, path=None, suffix='', prefix='tmp'): - """Create temporary file or use existing file. - - This util is needed for creating temporary file with - specified content, suffix and prefix. If path is not None, - it will be used for writing content. If the path doesn't - exist it'll be created. - - :param content: content for temporary file. - :param path: same as parameter 'dir' for mkstemp - :param suffix: same as parameter 'suffix' for mkstemp - :param prefix: same as parameter 'prefix' for mkstemp - - For example: it can be used in database tests for creating - configuration files. - """ - if path: - ensure_tree(path) - - (fd, path) = tempfile.mkstemp(suffix=suffix, dir=path, prefix=prefix) - try: - os.write(fd, content) - finally: - os.close(fd) - return path diff --git a/keystone-moon/keystone/openstack/common/loopingcall.py b/keystone-moon/keystone/openstack/common/loopingcall.py deleted file mode 100644 index 39eed47d..00000000 --- a/keystone-moon/keystone/openstack/common/loopingcall.py +++ /dev/null @@ -1,147 +0,0 @@ -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2011 Justin Santa Barbara -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import logging -import sys -import time - -from eventlet import event -from eventlet import greenthread - -from keystone.openstack.common._i18n import _LE, _LW - -LOG = logging.getLogger(__name__) - -# NOTE(zyluo): This lambda function was declared to avoid mocking collisions -# with time.time() called in the standard logging module -# during unittests. -_ts = lambda: time.time() - - -class LoopingCallDone(Exception): - """Exception to break out and stop a LoopingCallBase. - - The poll-function passed to LoopingCallBase can raise this exception to - break out of the loop normally. This is somewhat analogous to - StopIteration. 
- - An optional return-value can be included as the argument to the exception; - this return-value will be returned by LoopingCallBase.wait() - - """ - - def __init__(self, retvalue=True): - """:param retvalue: Value that LoopingCallBase.wait() should return.""" - self.retvalue = retvalue - - -class LoopingCallBase(object): - def __init__(self, f=None, *args, **kw): - self.args = args - self.kw = kw - self.f = f - self._running = False - self.done = None - - def stop(self): - self._running = False - - def wait(self): - return self.done.wait() - - -class FixedIntervalLoopingCall(LoopingCallBase): - """A fixed interval looping call.""" - - def start(self, interval, initial_delay=None): - self._running = True - done = event.Event() - - def _inner(): - if initial_delay: - greenthread.sleep(initial_delay) - - try: - while self._running: - start = _ts() - self.f(*self.args, **self.kw) - end = _ts() - if not self._running: - break - delay = end - start - interval - if delay > 0: - LOG.warn(_LW('task %(func_name)r run outlasted ' - 'interval by %(delay).2f sec'), - {'func_name': self.f, 'delay': delay}) - greenthread.sleep(-delay if delay < 0 else 0) - except LoopingCallDone as e: - self.stop() - done.send(e.retvalue) - except Exception: - LOG.exception(_LE('in fixed duration looping call')) - done.send_exception(*sys.exc_info()) - return - else: - done.send(True) - - self.done = done - - greenthread.spawn_n(_inner) - return self.done - - -class DynamicLoopingCall(LoopingCallBase): - """A looping call which sleeps until the next known event. - - The function called should return how long to sleep for before being - called again. 
- """ - - def start(self, initial_delay=None, periodic_interval_max=None): - self._running = True - done = event.Event() - - def _inner(): - if initial_delay: - greenthread.sleep(initial_delay) - - try: - while self._running: - idle = self.f(*self.args, **self.kw) - if not self._running: - break - - if periodic_interval_max is not None: - idle = min(idle, periodic_interval_max) - LOG.debug('Dynamic looping call %(func_name)r sleeping ' - 'for %(idle).02f seconds', - {'func_name': self.f, 'idle': idle}) - greenthread.sleep(idle) - except LoopingCallDone as e: - self.stop() - done.send(e.retvalue) - except Exception: - LOG.exception(_LE('in dynamic looping call')) - done.send_exception(*sys.exc_info()) - return - else: - done.send(True) - - self.done = done - - greenthread.spawn(_inner) - return self.done diff --git a/keystone-moon/keystone/openstack/common/service.py b/keystone-moon/keystone/openstack/common/service.py deleted file mode 100644 index cfae56b7..00000000 --- a/keystone-moon/keystone/openstack/common/service.py +++ /dev/null @@ -1,495 +0,0 @@ -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# Copyright 2011 Justin Santa Barbara -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Generic Node base class for all workers that run on hosts.""" - -import errno -import logging -import os -import random -import signal -import sys -import time - -try: - # Importing just the symbol here because the io module does not - # exist in Python 2.6. - from io import UnsupportedOperation # noqa -except ImportError: - # Python 2.6 - UnsupportedOperation = None - -import eventlet -from eventlet import event -from oslo_config import cfg - -from keystone.openstack.common import eventlet_backdoor -from keystone.openstack.common._i18n import _LE, _LI, _LW -from keystone.openstack.common import systemd -from keystone.openstack.common import threadgroup - - -CONF = cfg.CONF -LOG = logging.getLogger(__name__) - - -def _sighup_supported(): - return hasattr(signal, 'SIGHUP') - - -def _is_daemon(): - # The process group for a foreground process will match the - # process group of the controlling terminal. If those values do - # not match, or ioctl() fails on the stdout file handle, we assume - # the process is running in the background as a daemon. - # http://www.gnu.org/software/bash/manual/bashref.html#Job-Control-Basics - try: - is_daemon = os.getpgrp() != os.tcgetpgrp(sys.stdout.fileno()) - except OSError as err: - if err.errno == errno.ENOTTY: - # Assume we are a daemon because there is no terminal. - is_daemon = True - else: - raise - except UnsupportedOperation: - # Could not get the fileno for stdout, so we must be a daemon. - is_daemon = True - return is_daemon - - -def _is_sighup_and_daemon(signo): - if not (_sighup_supported() and signo == signal.SIGHUP): - # Avoid checking if we are a daemon, because the signal isn't - # SIGHUP. 
- return False - return _is_daemon() - - -def _signo_to_signame(signo): - signals = {signal.SIGTERM: 'SIGTERM', - signal.SIGINT: 'SIGINT'} - if _sighup_supported(): - signals[signal.SIGHUP] = 'SIGHUP' - return signals[signo] - - -def _set_signals_handler(handler): - signal.signal(signal.SIGTERM, handler) - signal.signal(signal.SIGINT, handler) - if _sighup_supported(): - signal.signal(signal.SIGHUP, handler) - - -class Launcher(object): - """Launch one or more services and wait for them to complete.""" - - def __init__(self): - """Initialize the service launcher. - - :returns: None - - """ - self.services = Services() - self.backdoor_port = eventlet_backdoor.initialize_if_enabled() - - def launch_service(self, service): - """Load and start the given service. - - :param service: The service you would like to start. - :returns: None - - """ - service.backdoor_port = self.backdoor_port - self.services.add(service) - - def stop(self): - """Stop all services which are currently running. - - :returns: None - - """ - self.services.stop() - - def wait(self): - """Waits until all services have been stopped, and then returns. - - :returns: None - - """ - self.services.wait() - - def restart(self): - """Reload config files and restart service. 
- - :returns: None - - """ - cfg.CONF.reload_config_files() - self.services.restart() - - -class SignalExit(SystemExit): - def __init__(self, signo, exccode=1): - super(SignalExit, self).__init__(exccode) - self.signo = signo - - -class ServiceLauncher(Launcher): - def _handle_signal(self, signo, frame): - # Allow the process to be killed again and die from natural causes - _set_signals_handler(signal.SIG_DFL) - raise SignalExit(signo) - - def handle_signal(self): - _set_signals_handler(self._handle_signal) - - def _wait_for_exit_or_signal(self, ready_callback=None): - status = None - signo = 0 - - LOG.debug('Full set of CONF:') - CONF.log_opt_values(LOG, logging.DEBUG) - - try: - if ready_callback: - ready_callback() - super(ServiceLauncher, self).wait() - except SignalExit as exc: - signame = _signo_to_signame(exc.signo) - LOG.info(_LI('Caught %s, exiting'), signame) - status = exc.code - signo = exc.signo - except SystemExit as exc: - status = exc.code - finally: - self.stop() - - return status, signo - - def wait(self, ready_callback=None): - systemd.notify_once() - while True: - self.handle_signal() - status, signo = self._wait_for_exit_or_signal(ready_callback) - if not _is_sighup_and_daemon(signo): - return status - self.restart() - - -class ServiceWrapper(object): - def __init__(self, service, workers): - self.service = service - self.workers = workers - self.children = set() - self.forktimes = [] - - -class ProcessLauncher(object): - def __init__(self): - """Constructor.""" - - self.children = {} - self.sigcaught = None - self.running = True - rfd, self.writepipe = os.pipe() - self.readpipe = eventlet.greenio.GreenPipe(rfd, 'r') - self.handle_signal() - - def handle_signal(self): - _set_signals_handler(self._handle_signal) - - def _handle_signal(self, signo, frame): - self.sigcaught = signo - self.running = False - - # Allow the process to be killed again and die from natural causes - _set_signals_handler(signal.SIG_DFL) - - def _pipe_watcher(self): - # 
This will block until the write end is closed when the parent - # dies unexpectedly - self.readpipe.read() - - LOG.info(_LI('Parent process has died unexpectedly, exiting')) - - sys.exit(1) - - def _child_process_handle_signal(self): - # Setup child signal handlers differently - def _sigterm(*args): - signal.signal(signal.SIGTERM, signal.SIG_DFL) - raise SignalExit(signal.SIGTERM) - - def _sighup(*args): - signal.signal(signal.SIGHUP, signal.SIG_DFL) - raise SignalExit(signal.SIGHUP) - - signal.signal(signal.SIGTERM, _sigterm) - if _sighup_supported(): - signal.signal(signal.SIGHUP, _sighup) - # Block SIGINT and let the parent send us a SIGTERM - signal.signal(signal.SIGINT, signal.SIG_IGN) - - def _child_wait_for_exit_or_signal(self, launcher): - status = 0 - signo = 0 - - # NOTE(johannes): All exceptions are caught to ensure this - # doesn't fallback into the loop spawning children. It would - # be bad for a child to spawn more children. - try: - launcher.wait() - except SignalExit as exc: - signame = _signo_to_signame(exc.signo) - LOG.info(_LI('Child caught %s, exiting'), signame) - status = exc.code - signo = exc.signo - except SystemExit as exc: - status = exc.code - except BaseException: - LOG.exception(_LE('Unhandled exception')) - status = 2 - finally: - launcher.stop() - - return status, signo - - def _child_process(self, service): - self._child_process_handle_signal() - - # Reopen the eventlet hub to make sure we don't share an epoll - # fd with parent and/or siblings, which would be bad - eventlet.hubs.use_hub() - - # Close write to ensure only parent has it open - os.close(self.writepipe) - # Create greenthread to watch for parent to close pipe - eventlet.spawn_n(self._pipe_watcher) - - # Reseed random number generator - random.seed() - - launcher = Launcher() - launcher.launch_service(service) - return launcher - - def _start_child(self, wrap): - if len(wrap.forktimes) > wrap.workers: - # Limit ourselves to one process a second (over the period of - # 
number of workers * 1 second). This will allow workers to - # start up quickly but ensure we don't fork off children that - # die instantly too quickly. - if time.time() - wrap.forktimes[0] < wrap.workers: - LOG.info(_LI('Forking too fast, sleeping')) - time.sleep(1) - - wrap.forktimes.pop(0) - - wrap.forktimes.append(time.time()) - - pid = os.fork() - if pid == 0: - launcher = self._child_process(wrap.service) - while True: - self._child_process_handle_signal() - status, signo = self._child_wait_for_exit_or_signal(launcher) - if not _is_sighup_and_daemon(signo): - break - launcher.restart() - - os._exit(status) - - LOG.info(_LI('Started child %d'), pid) - - wrap.children.add(pid) - self.children[pid] = wrap - - return pid - - def launch_service(self, service, workers=1): - wrap = ServiceWrapper(service, workers) - - LOG.info(_LI('Starting %d workers'), wrap.workers) - while self.running and len(wrap.children) < wrap.workers: - self._start_child(wrap) - - def _wait_child(self): - try: - # Block while any of child processes have exited - pid, status = os.waitpid(0, 0) - if not pid: - return None - except OSError as exc: - if exc.errno not in (errno.EINTR, errno.ECHILD): - raise - return None - - if os.WIFSIGNALED(status): - sig = os.WTERMSIG(status) - LOG.info(_LI('Child %(pid)d killed by signal %(sig)d'), - dict(pid=pid, sig=sig)) - else: - code = os.WEXITSTATUS(status) - LOG.info(_LI('Child %(pid)s exited with status %(code)d'), - dict(pid=pid, code=code)) - - if pid not in self.children: - LOG.warning(_LW('pid %d not in child list'), pid) - return None - - wrap = self.children.pop(pid) - wrap.children.remove(pid) - return wrap - - def _respawn_children(self): - while self.running: - wrap = self._wait_child() - if not wrap: - continue - while self.running and len(wrap.children) < wrap.workers: - self._start_child(wrap) - - def wait(self): - """Loop waiting on children to die and respawning as necessary.""" - - systemd.notify_once() - LOG.debug('Full set of CONF:') 
- CONF.log_opt_values(LOG, logging.DEBUG) - - try: - while True: - self.handle_signal() - self._respawn_children() - # No signal means that stop was called. Don't clean up here. - if not self.sigcaught: - return - - signame = _signo_to_signame(self.sigcaught) - LOG.info(_LI('Caught %s, stopping children'), signame) - if not _is_sighup_and_daemon(self.sigcaught): - break - - for pid in self.children: - os.kill(pid, signal.SIGHUP) - self.running = True - self.sigcaught = None - except eventlet.greenlet.GreenletExit: - LOG.info(_LI("Wait called after thread killed. Cleaning up.")) - - self.stop() - - def stop(self): - """Terminate child processes and wait on each.""" - self.running = False - for pid in self.children: - try: - os.kill(pid, signal.SIGTERM) - except OSError as exc: - if exc.errno != errno.ESRCH: - raise - - # Wait for children to die - if self.children: - LOG.info(_LI('Waiting on %d children to exit'), len(self.children)) - while self.children: - self._wait_child() - - -class Service(object): - """Service object for binaries running on hosts.""" - - def __init__(self, threads=1000): - self.tg = threadgroup.ThreadGroup(threads) - - # signal that the service is done shutting itself down: - self._done = event.Event() - - def reset(self): - # NOTE(Fengqian): docs for Event.reset() recommend against using it - self._done = event.Event() - - def start(self): - pass - - def stop(self, graceful=False): - self.tg.stop(graceful) - self.tg.wait() - # Signal that service cleanup is done: - if not self._done.ready(): - self._done.send() - - def wait(self): - self._done.wait() - - -class Services(object): - - def __init__(self): - self.services = [] - self.tg = threadgroup.ThreadGroup() - self.done = event.Event() - - def add(self, service): - self.services.append(service) - self.tg.add_thread(self.run_service, service, self.done) - - def stop(self): - # wait for graceful shutdown of services: - for service in self.services: - service.stop() - service.wait() - - # 
Each service has performed cleanup, now signal that the run_service - # wrapper threads can now die: - if not self.done.ready(): - self.done.send() - - # reap threads: - self.tg.stop() - - def wait(self): - self.tg.wait() - - def restart(self): - self.stop() - self.done = event.Event() - for restart_service in self.services: - restart_service.reset() - self.tg.add_thread(self.run_service, restart_service, self.done) - - @staticmethod - def run_service(service, done): - """Service start wrapper. - - :param service: service to run - :param done: event to wait on until a shutdown is triggered - :returns: None - - """ - service.start() - done.wait() - - -def launch(service, workers=1): - if workers is None or workers == 1: - launcher = ServiceLauncher() - launcher.launch_service(service) - else: - launcher = ProcessLauncher() - launcher.launch_service(service, workers=workers) - - return launcher diff --git a/keystone-moon/keystone/openstack/common/systemd.py b/keystone-moon/keystone/openstack/common/systemd.py deleted file mode 100644 index 36243b34..00000000 --- a/keystone-moon/keystone/openstack/common/systemd.py +++ /dev/null @@ -1,105 +0,0 @@ -# Copyright 2012-2014 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Helper module for systemd service readiness notification. 
-""" - -import logging -import os -import socket -import sys - - -LOG = logging.getLogger(__name__) - - -def _abstractify(socket_name): - if socket_name.startswith('@'): - # abstract namespace socket - socket_name = '\0%s' % socket_name[1:] - return socket_name - - -def _sd_notify(unset_env, msg): - notify_socket = os.getenv('NOTIFY_SOCKET') - if notify_socket: - sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) - try: - sock.connect(_abstractify(notify_socket)) - sock.sendall(msg) - if unset_env: - del os.environ['NOTIFY_SOCKET'] - except EnvironmentError: - LOG.debug("Systemd notification failed", exc_info=True) - finally: - sock.close() - - -def notify(): - """Send notification to Systemd that service is ready. - - For details see - http://www.freedesktop.org/software/systemd/man/sd_notify.html - """ - _sd_notify(False, 'READY=1') - - -def notify_once(): - """Send notification once to Systemd that service is ready. - - Systemd sets NOTIFY_SOCKET environment variable with the name of the - socket listening for notifications from services. - This method removes the NOTIFY_SOCKET environment variable to ensure - notification is sent only once. - """ - _sd_notify(True, 'READY=1') - - -def onready(notify_socket, timeout): - """Wait for systemd style notification on the socket. 
- - :param notify_socket: local socket address - :type notify_socket: string - :param timeout: socket timeout - :type timeout: float - :returns: 0 service ready - 1 service not ready - 2 timeout occurred - """ - sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) - sock.settimeout(timeout) - sock.bind(_abstractify(notify_socket)) - try: - msg = sock.recv(512) - except socket.timeout: - return 2 - finally: - sock.close() - if 'READY=1' in msg: - return 0 - else: - return 1 - - -if __name__ == '__main__': - # simple CLI for testing - if len(sys.argv) == 1: - notify() - elif len(sys.argv) >= 2: - timeout = float(sys.argv[1]) - notify_socket = os.getenv('NOTIFY_SOCKET') - if notify_socket: - retval = onready(notify_socket, timeout) - sys.exit(retval) diff --git a/keystone-moon/keystone/openstack/common/threadgroup.py b/keystone-moon/keystone/openstack/common/threadgroup.py deleted file mode 100644 index fc0bcb53..00000000 --- a/keystone-moon/keystone/openstack/common/threadgroup.py +++ /dev/null @@ -1,149 +0,0 @@ -# Copyright 2012 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -import logging -import threading - -import eventlet -from eventlet import greenpool - -from keystone.openstack.common import loopingcall - - -LOG = logging.getLogger(__name__) - - -def _thread_done(gt, *args, **kwargs): - """Callback function to be passed to GreenThread.link() when we spawn() - Calls the :class:`ThreadGroup` to notify if. 
- - """ - kwargs['group'].thread_done(kwargs['thread']) - - -class Thread(object): - """Wrapper around a greenthread, that holds a reference to the - :class:`ThreadGroup`. The Thread will notify the :class:`ThreadGroup` when - it has done so it can be removed from the threads list. - """ - def __init__(self, thread, group): - self.thread = thread - self.thread.link(_thread_done, group=group, thread=self) - - def stop(self): - self.thread.kill() - - def wait(self): - return self.thread.wait() - - def link(self, func, *args, **kwargs): - self.thread.link(func, *args, **kwargs) - - -class ThreadGroup(object): - """The point of the ThreadGroup class is to: - - * keep track of timers and greenthreads (making it easier to stop them - when need be). - * provide an easy API to add timers. - """ - def __init__(self, thread_pool_size=10): - self.pool = greenpool.GreenPool(thread_pool_size) - self.threads = [] - self.timers = [] - - def add_dynamic_timer(self, callback, initial_delay=None, - periodic_interval_max=None, *args, **kwargs): - timer = loopingcall.DynamicLoopingCall(callback, *args, **kwargs) - timer.start(initial_delay=initial_delay, - periodic_interval_max=periodic_interval_max) - self.timers.append(timer) - - def add_timer(self, interval, callback, initial_delay=None, - *args, **kwargs): - pulse = loopingcall.FixedIntervalLoopingCall(callback, *args, **kwargs) - pulse.start(interval=interval, - initial_delay=initial_delay) - self.timers.append(pulse) - - def add_thread(self, callback, *args, **kwargs): - gt = self.pool.spawn(callback, *args, **kwargs) - th = Thread(gt, self) - self.threads.append(th) - return th - - def thread_done(self, thread): - self.threads.remove(thread) - - def _stop_threads(self): - current = threading.current_thread() - - # Iterate over a copy of self.threads so thread_done doesn't - # modify the list while we're iterating - for x in self.threads[:]: - if x is current: - # don't kill the current thread. 
- continue - try: - x.stop() - except eventlet.greenlet.GreenletExit: - pass - except Exception as ex: - LOG.exception(ex) - - def stop_timers(self): - for x in self.timers: - try: - x.stop() - except Exception as ex: - LOG.exception(ex) - self.timers = [] - - def stop(self, graceful=False): - """stop function has the option of graceful=True/False. - - * In case of graceful=True, wait for all threads to be finished. - Never kill threads. - * In case of graceful=False, kill threads immediately. - """ - self.stop_timers() - if graceful: - # In case of graceful=True, wait for all threads to be - # finished, never kill threads - self.wait() - else: - # In case of graceful=False(Default), kill threads - # immediately - self._stop_threads() - - def wait(self): - for x in self.timers: - try: - x.wait() - except eventlet.greenlet.GreenletExit: - pass - except Exception as ex: - LOG.exception(ex) - current = threading.current_thread() - - # Iterate over a copy of self.threads so thread_done doesn't - # modify the list while we're iterating - for x in self.threads[:]: - if x is current: - continue - try: - x.wait() - except eventlet.greenlet.GreenletExit: - pass - except Exception as ex: - LOG.exception(ex) diff --git a/keystone-moon/keystone/openstack/common/versionutils.py b/keystone-moon/keystone/openstack/common/versionutils.py deleted file mode 100644 index 111bfd6f..00000000 --- a/keystone-moon/keystone/openstack/common/versionutils.py +++ /dev/null @@ -1,262 +0,0 @@ -# Copyright (c) 2013 OpenStack Foundation -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -""" -Helpers for comparing version strings. -""" - -import copy -import functools -import inspect -import logging - -from oslo_config import cfg -import pkg_resources -import six - -from keystone.openstack.common._i18n import _ - - -LOG = logging.getLogger(__name__) -CONF = cfg.CONF - - -deprecated_opts = [ - cfg.BoolOpt('fatal_deprecations', - default=False, - help='Enables or disables fatal status of deprecations.'), -] - - -def list_opts(): - """Entry point for oslo.config-generator. - """ - return [(None, copy.deepcopy(deprecated_opts))] - - -class deprecated(object): - """A decorator to mark callables as deprecated. - - This decorator logs a deprecation message when the callable it decorates is - used. The message will include the release where the callable was - deprecated, the release where it may be removed and possibly an optional - replacement. - - Examples: - - 1. Specifying the required deprecated release - - >>> @deprecated(as_of=deprecated.ICEHOUSE) - ... def a(): pass - - 2. Specifying a replacement: - - >>> @deprecated(as_of=deprecated.ICEHOUSE, in_favor_of='f()') - ... def b(): pass - - 3. Specifying the release where the functionality may be removed: - - >>> @deprecated(as_of=deprecated.ICEHOUSE, remove_in=+1) - ... def c(): pass - - 4. Specifying the deprecated functionality will not be removed: - >>> @deprecated(as_of=deprecated.ICEHOUSE, remove_in=0) - ... def d(): pass - - 5. Specifying a replacement, deprecated functionality will not be removed: - >>> @deprecated(as_of=deprecated.ICEHOUSE, in_favor_of='f()', remove_in=0) - ... 
def e(): pass - - """ - - # NOTE(morganfainberg): Bexar is used for unit test purposes, it is - # expected we maintain a gap between Bexar and Folsom in this list. - BEXAR = 'B' - FOLSOM = 'F' - GRIZZLY = 'G' - HAVANA = 'H' - ICEHOUSE = 'I' - JUNO = 'J' - KILO = 'K' - LIBERTY = 'L' - - _RELEASES = { - # NOTE(morganfainberg): Bexar is used for unit test purposes, it is - # expected we maintain a gap between Bexar and Folsom in this list. - 'B': 'Bexar', - 'F': 'Folsom', - 'G': 'Grizzly', - 'H': 'Havana', - 'I': 'Icehouse', - 'J': 'Juno', - 'K': 'Kilo', - 'L': 'Liberty', - } - - _deprecated_msg_with_alternative = _( - '%(what)s is deprecated as of %(as_of)s in favor of ' - '%(in_favor_of)s and may be removed in %(remove_in)s.') - - _deprecated_msg_no_alternative = _( - '%(what)s is deprecated as of %(as_of)s and may be ' - 'removed in %(remove_in)s. It will not be superseded.') - - _deprecated_msg_with_alternative_no_removal = _( - '%(what)s is deprecated as of %(as_of)s in favor of %(in_favor_of)s.') - - _deprecated_msg_with_no_alternative_no_removal = _( - '%(what)s is deprecated as of %(as_of)s. It will not be superseded.') - - def __init__(self, as_of, in_favor_of=None, remove_in=2, what=None): - """Initialize decorator - - :param as_of: the release deprecating the callable. Constants - are define in this class for convenience. 
- :param in_favor_of: the replacement for the callable (optional) - :param remove_in: an integer specifying how many releases to wait - before removing (default: 2) - :param what: name of the thing being deprecated (default: the - callable's name) - - """ - self.as_of = as_of - self.in_favor_of = in_favor_of - self.remove_in = remove_in - self.what = what - - def __call__(self, func_or_cls): - if not self.what: - self.what = func_or_cls.__name__ + '()' - msg, details = self._build_message() - - if inspect.isfunction(func_or_cls): - - @six.wraps(func_or_cls) - def wrapped(*args, **kwargs): - report_deprecated_feature(LOG, msg, details) - return func_or_cls(*args, **kwargs) - return wrapped - elif inspect.isclass(func_or_cls): - orig_init = func_or_cls.__init__ - - # TODO(tsufiev): change `functools` module to `six` as - # soon as six 1.7.4 (with fix for passing `assigned` - # argument to underlying `functools.wraps`) is released - # and added to the oslo-incubator requrements - @functools.wraps(orig_init, assigned=('__name__', '__doc__')) - def new_init(self, *args, **kwargs): - report_deprecated_feature(LOG, msg, details) - orig_init(self, *args, **kwargs) - func_or_cls.__init__ = new_init - return func_or_cls - else: - raise TypeError('deprecated can be used only with functions or ' - 'classes') - - def _get_safe_to_remove_release(self, release): - # TODO(dstanek): this method will have to be reimplemented once - # when we get to the X release because once we get to the Y - # release, what is Y+2? 
- new_release = chr(ord(release) + self.remove_in) - if new_release in self._RELEASES: - return self._RELEASES[new_release] - else: - return new_release - - def _build_message(self): - details = dict(what=self.what, - as_of=self._RELEASES[self.as_of], - remove_in=self._get_safe_to_remove_release(self.as_of)) - - if self.in_favor_of: - details['in_favor_of'] = self.in_favor_of - if self.remove_in > 0: - msg = self._deprecated_msg_with_alternative - else: - # There are no plans to remove this function, but it is - # now deprecated. - msg = self._deprecated_msg_with_alternative_no_removal - else: - if self.remove_in > 0: - msg = self._deprecated_msg_no_alternative - else: - # There are no plans to remove this function, but it is - # now deprecated. - msg = self._deprecated_msg_with_no_alternative_no_removal - return msg, details - - -def is_compatible(requested_version, current_version, same_major=True): - """Determine whether `requested_version` is satisfied by - `current_version`; in other words, `current_version` is >= - `requested_version`. - - :param requested_version: version to check for compatibility - :param current_version: version to check against - :param same_major: if True, the major version must be identical between - `requested_version` and `current_version`. This is used when a - major-version difference indicates incompatibility between the two - versions. Since this is the common-case in practice, the default is - True. - :returns: True if compatible, False if not - """ - requested_parts = pkg_resources.parse_version(requested_version) - current_parts = pkg_resources.parse_version(current_version) - - if same_major and (requested_parts[0] != current_parts[0]): - return False - - return current_parts >= requested_parts - - -# Track the messages we have sent already. See -# report_deprecated_feature(). 
-_deprecated_messages_sent = {} - - -def report_deprecated_feature(logger, msg, *args, **kwargs): - """Call this function when a deprecated feature is used. - - If the system is configured for fatal deprecations then the message - is logged at the 'critical' level and :class:`DeprecatedConfig` will - be raised. - - Otherwise, the message will be logged (once) at the 'warn' level. - - :raises: :class:`DeprecatedConfig` if the system is configured for - fatal deprecations. - """ - stdmsg = _("Deprecated: %s") % msg - CONF.register_opts(deprecated_opts) - if CONF.fatal_deprecations: - logger.critical(stdmsg, *args, **kwargs) - raise DeprecatedConfig(msg=stdmsg) - - # Using a list because a tuple with dict can't be stored in a set. - sent_args = _deprecated_messages_sent.setdefault(msg, list()) - - if args in sent_args: - # Already logged this message, so don't log it again. - return - - sent_args.append(args) - logger.warn(stdmsg, *args, **kwargs) - - -class DeprecatedConfig(Exception): - message = _("Fatal call to deprecated config: %(msg)s") - - def __init__(self, msg): - super(Exception, self).__init__(self.message % dict(msg=msg)) diff --git a/keystone-moon/keystone/policy/__init__.py b/keystone-moon/keystone/policy/__init__.py deleted file mode 100644 index a95aac1f..00000000 --- a/keystone-moon/keystone/policy/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone.policy import controllers # noqa -from keystone.policy.core import * # noqa diff --git a/keystone-moon/keystone/policy/backends/__init__.py b/keystone-moon/keystone/policy/backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/policy/backends/rules.py b/keystone-moon/keystone/policy/backends/rules.py deleted file mode 100644 index 5a13287d..00000000 --- a/keystone-moon/keystone/policy/backends/rules.py +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright (c) 2011 OpenStack, LLC. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Policy engine for keystone""" - -from oslo_config import cfg -from oslo_log import log -from oslo_policy import policy as common_policy - -from keystone import exception -from keystone import policy - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -_ENFORCER = None - - -def reset(): - global _ENFORCER - _ENFORCER = None - - -def init(): - global _ENFORCER - if not _ENFORCER: - _ENFORCER = common_policy.Enforcer(CONF) - - -def enforce(credentials, action, target, do_raise=True): - """Verifies that the action is valid on the target in this context. - - :param credentials: user credentials - :param action: string representing the action to be checked, which should - be colon separated for clarity. 
- :param target: dictionary representing the object of the action for object - creation this should be a dictionary representing the - location of the object e.g. {'project_id': - object.project_id} - :raises keystone.exception.Forbidden: If verification fails. - - Actions should be colon separated for clarity. For example: - - * identity:list_users - - """ - init() - - # Add the exception arguments if asked to do a raise - extra = {} - if do_raise: - extra.update(exc=exception.ForbiddenAction, action=action, - do_raise=do_raise) - - return _ENFORCER.enforce(action, target, credentials, **extra) - - -class Policy(policy.PolicyDriverV8): - def enforce(self, credentials, action, target): - LOG.debug('enforce %(action)s: %(credentials)s', { - 'action': action, - 'credentials': credentials}) - enforce(credentials, action, target) - - def create_policy(self, policy_id, policy): - raise exception.NotImplemented() - - def list_policies(self): - raise exception.NotImplemented() - - def get_policy(self, policy_id): - raise exception.NotImplemented() - - def update_policy(self, policy_id, policy): - raise exception.NotImplemented() - - def delete_policy(self, policy_id): - raise exception.NotImplemented() diff --git a/keystone-moon/keystone/policy/backends/sql.py b/keystone-moon/keystone/policy/backends/sql.py deleted file mode 100644 index 94763f0d..00000000 --- a/keystone-moon/keystone/policy/backends/sql.py +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright 2012 OpenStack LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.common import sql -from keystone import exception -from keystone.policy.backends import rules - - -class PolicyModel(sql.ModelBase, sql.DictBase): - __tablename__ = 'policy' - attributes = ['id', 'blob', 'type'] - id = sql.Column(sql.String(64), primary_key=True) - blob = sql.Column(sql.JsonBlob(), nullable=False) - type = sql.Column(sql.String(255), nullable=False) - extra = sql.Column(sql.JsonBlob()) - - -class Policy(rules.Policy): - - @sql.handle_conflicts(conflict_type='policy') - def create_policy(self, policy_id, policy): - with sql.session_for_write() as session: - ref = PolicyModel.from_dict(policy) - session.add(ref) - - return ref.to_dict() - - def list_policies(self): - with sql.session_for_read() as session: - refs = session.query(PolicyModel).all() - return [ref.to_dict() for ref in refs] - - def _get_policy(self, session, policy_id): - """Private method to get a policy model object (NOT a dictionary).""" - ref = session.query(PolicyModel).get(policy_id) - if not ref: - raise exception.PolicyNotFound(policy_id=policy_id) - return ref - - def get_policy(self, policy_id): - with sql.session_for_read() as session: - return self._get_policy(session, policy_id).to_dict() - - @sql.handle_conflicts(conflict_type='policy') - def update_policy(self, policy_id, policy): - with sql.session_for_write() as session: - ref = self._get_policy(session, policy_id) - old_dict = ref.to_dict() - old_dict.update(policy) - new_policy = PolicyModel.from_dict(old_dict) - ref.blob = new_policy.blob - ref.type = new_policy.type - ref.extra = new_policy.extra - - return ref.to_dict() - - def delete_policy(self, policy_id): - with sql.session_for_write() as session: - ref = self._get_policy(session, policy_id) - session.delete(ref) diff --git a/keystone-moon/keystone/policy/controllers.py b/keystone-moon/keystone/policy/controllers.py deleted file mode 100644 
index e6eb9bca..00000000 --- a/keystone-moon/keystone/policy/controllers.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.common import controller -from keystone.common import dependency -from keystone.common import validation -from keystone import notifications -from keystone.policy import schema - - -@dependency.requires('policy_api') -class PolicyV3(controller.V3Controller): - collection_name = 'policies' - member_name = 'policy' - - @controller.protected() - @validation.validated(schema.policy_create, 'policy') - def create_policy(self, context, policy): - ref = self._assign_unique_id(self._normalize_dict(policy)) - initiator = notifications._get_request_audit_info(context) - ref = self.policy_api.create_policy(ref['id'], ref, initiator) - return PolicyV3.wrap_member(context, ref) - - @controller.filterprotected('type') - def list_policies(self, context, filters): - hints = PolicyV3.build_driver_hints(context, filters) - refs = self.policy_api.list_policies(hints=hints) - return PolicyV3.wrap_collection(context, refs, hints=hints) - - @controller.protected() - def get_policy(self, context, policy_id): - ref = self.policy_api.get_policy(policy_id) - return PolicyV3.wrap_member(context, ref) - - @controller.protected() - @validation.validated(schema.policy_update, 'policy') - def update_policy(self, context, policy_id, policy): - initiator = 
notifications._get_request_audit_info(context) - ref = self.policy_api.update_policy(policy_id, policy, initiator) - return PolicyV3.wrap_member(context, ref) - - @controller.protected() - def delete_policy(self, context, policy_id): - initiator = notifications._get_request_audit_info(context) - return self.policy_api.delete_policy(policy_id, initiator) diff --git a/keystone-moon/keystone/policy/core.py b/keystone-moon/keystone/policy/core.py deleted file mode 100644 index f52795a5..00000000 --- a/keystone-moon/keystone/policy/core.py +++ /dev/null @@ -1,141 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Main entry point into the Policy service.""" - -import abc - -from oslo_config import cfg -import six - -from keystone.common import dependency -from keystone.common import manager -from keystone import exception -from keystone import notifications - - -CONF = cfg.CONF - - -@dependency.provider('policy_api') -class Manager(manager.Manager): - """Default pivot point for the Policy backend. - - See :mod:`keystone.common.manager.Manager` for more details on how this - dynamically calls the backend. 
- - """ - - driver_namespace = 'keystone.policy' - - _POLICY = 'policy' - - def __init__(self): - super(Manager, self).__init__(CONF.policy.driver) - - def create_policy(self, policy_id, policy, initiator=None): - ref = self.driver.create_policy(policy_id, policy) - notifications.Audit.created(self._POLICY, policy_id, initiator) - return ref - - def get_policy(self, policy_id): - try: - return self.driver.get_policy(policy_id) - except exception.NotFound: - raise exception.PolicyNotFound(policy_id=policy_id) - - def update_policy(self, policy_id, policy, initiator=None): - if 'id' in policy and policy_id != policy['id']: - raise exception.ValidationError('Cannot change policy ID') - try: - ref = self.driver.update_policy(policy_id, policy) - except exception.NotFound: - raise exception.PolicyNotFound(policy_id=policy_id) - notifications.Audit.updated(self._POLICY, policy_id, initiator) - return ref - - @manager.response_truncated - def list_policies(self, hints=None): - # NOTE(henry-nash): Since the advantage of filtering or list limiting - # of policies at the driver level is minimal, we leave this to the - # caller. - return self.driver.list_policies() - - def delete_policy(self, policy_id, initiator=None): - try: - ret = self.driver.delete_policy(policy_id) - except exception.NotFound: - raise exception.PolicyNotFound(policy_id=policy_id) - notifications.Audit.deleted(self._POLICY, policy_id, initiator) - return ret - - -@six.add_metaclass(abc.ABCMeta) -class PolicyDriverV8(object): - - def _get_list_limit(self): - return CONF.policy.list_limit or CONF.list_limit - - @abc.abstractmethod - def enforce(self, context, credentials, action, target): - """Verify that a user is authorized to perform action. 
- - For more information on a full implementation of this see: - `keystone.policy.backends.rules.Policy.enforce` - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def create_policy(self, policy_id, policy): - """Store a policy blob. - - :raises keystone.exception.Conflict: If a duplicate policy exists. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_policies(self): - """List all policies.""" - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_policy(self, policy_id): - """Retrieve a specific policy blob. - - :raises keystone.exception.PolicyNotFound: If the policy doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def update_policy(self, policy_id, policy): - """Update a policy blob. - - :raises keystone.exception.PolicyNotFound: If the policy doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_policy(self, policy_id): - """Remove a policy blob. - - :raises keystone.exception.PolicyNotFound: If the policy doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - -Driver = manager.create_legacy_driver(PolicyDriverV8) diff --git a/keystone-moon/keystone/policy/routers.py b/keystone-moon/keystone/policy/routers.py deleted file mode 100644 index 5daadc81..00000000 --- a/keystone-moon/keystone/policy/routers.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from keystone.common import router -from keystone.common import wsgi -from keystone.policy import controllers - - -class Routers(wsgi.RoutersBase): - - def append_v3_routers(self, mapper, routers): - policy_controller = controllers.PolicyV3() - routers.append(router.Router(policy_controller, 'policies', 'policy', - resource_descriptions=self.v3_resources)) diff --git a/keystone-moon/keystone/policy/schema.py b/keystone-moon/keystone/policy/schema.py deleted file mode 100644 index 512c4ce7..00000000 --- a/keystone-moon/keystone/policy/schema.py +++ /dev/null @@ -1,36 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- - -_policy_properties = { - 'blob': { - 'type': 'string' - }, - 'type': { - 'type': 'string', - 'maxLength': 255 - } -} - -policy_create = { - 'type': 'object', - 'properties': _policy_properties, - 'required': ['blob', 'type'], - 'additionalProperties': True -} - -policy_update = { - 'type': 'object', - 'properties': _policy_properties, - 'minProperties': 1, - 'additionalProperties': True -} diff --git a/keystone-moon/keystone/resource/V8_backends/__init__.py b/keystone-moon/keystone/resource/V8_backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/resource/V8_backends/sql.py b/keystone-moon/keystone/resource/V8_backends/sql.py deleted file mode 100644 index 6c9b7912..00000000 --- a/keystone-moon/keystone/resource/V8_backends/sql.py +++ /dev/null @@ -1,260 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_log import log - -from keystone.common import clean -from keystone.common import driver_hints -from keystone.common import sql -from keystone import exception -from keystone.i18n import _LE -from keystone import resource as keystone_resource - - -LOG = log.getLogger(__name__) - - -class Resource(keystone_resource.ResourceDriverV8): - - def default_assignment_driver(self): - return 'sql' - - def _get_project(self, session, project_id): - project_ref = session.query(Project).get(project_id) - if project_ref is None: - raise exception.ProjectNotFound(project_id=project_id) - return project_ref - - def get_project(self, tenant_id): - with sql.session_for_read() as session: - return self._get_project(session, tenant_id).to_dict() - - def get_project_by_name(self, tenant_name, domain_id): - with sql.session_for_read() as session: - query = session.query(Project) - query = query.filter_by(name=tenant_name) - query = query.filter_by(domain_id=domain_id) - try: - project_ref = query.one() - except sql.NotFound: - raise exception.ProjectNotFound(project_id=tenant_name) - return project_ref.to_dict() - - @driver_hints.truncated - def list_projects(self, hints): - with sql.session_for_read() as session: - query = session.query(Project) - project_refs = sql.filter_limit_query(Project, query, hints) - return [project_ref.to_dict() for project_ref in project_refs] - - def list_projects_from_ids(self, ids): - if not ids: - return [] - else: - with sql.session_for_read() as session: - query = session.query(Project) - query = query.filter(Project.id.in_(ids)) - return [project_ref.to_dict() for project_ref in query.all()] - - def list_project_ids_from_domain_ids(self, domain_ids): - if not domain_ids: - return [] - else: - with sql.session_for_read() as session: - query = session.query(Project.id) - query = ( - query.filter(Project.domain_id.in_(domain_ids))) - return [x.id for x in query.all()] - - def list_projects_in_domain(self, domain_id): - with 
sql.session_for_read() as session: - self._get_domain(session, domain_id) - query = session.query(Project) - project_refs = query.filter_by(domain_id=domain_id) - return [project_ref.to_dict() for project_ref in project_refs] - - def _get_children(self, session, project_ids): - query = session.query(Project) - query = query.filter(Project.parent_id.in_(project_ids)) - project_refs = query.all() - return [project_ref.to_dict() for project_ref in project_refs] - - def list_projects_in_subtree(self, project_id): - with sql.session_for_read() as session: - children = self._get_children(session, [project_id]) - subtree = [] - examined = set([project_id]) - while children: - children_ids = set() - for ref in children: - if ref['id'] in examined: - msg = _LE('Circular reference or a repeated ' - 'entry found in projects hierarchy - ' - '%(project_id)s.') - LOG.error(msg, {'project_id': ref['id']}) - return - children_ids.add(ref['id']) - - examined.update(children_ids) - subtree += children - children = self._get_children(session, children_ids) - return subtree - - def list_project_parents(self, project_id): - with sql.session_for_read() as session: - project = self._get_project(session, project_id).to_dict() - parents = [] - examined = set() - while project.get('parent_id') is not None: - if project['id'] in examined: - msg = _LE('Circular reference or a repeated ' - 'entry found in projects hierarchy - ' - '%(project_id)s.') - LOG.error(msg, {'project_id': project['id']}) - return - - examined.add(project['id']) - parent_project = self._get_project( - session, project['parent_id']).to_dict() - parents.append(parent_project) - project = parent_project - return parents - - def is_leaf_project(self, project_id): - with sql.session_for_read() as session: - project_refs = self._get_children(session, [project_id]) - return not project_refs - - # CRUD - @sql.handle_conflicts(conflict_type='project') - def create_project(self, tenant_id, tenant): - tenant['name'] = 
clean.project_name(tenant['name']) - with sql.session_for_write() as session: - tenant_ref = Project.from_dict(tenant) - session.add(tenant_ref) - return tenant_ref.to_dict() - - @sql.handle_conflicts(conflict_type='project') - def update_project(self, tenant_id, tenant): - if 'name' in tenant: - tenant['name'] = clean.project_name(tenant['name']) - - with sql.session_for_write() as session: - tenant_ref = self._get_project(session, tenant_id) - old_project_dict = tenant_ref.to_dict() - for k in tenant: - old_project_dict[k] = tenant[k] - new_project = Project.from_dict(old_project_dict) - for attr in Project.attributes: - if attr != 'id': - setattr(tenant_ref, attr, getattr(new_project, attr)) - tenant_ref.extra = new_project.extra - return tenant_ref.to_dict(include_extra_dict=True) - - @sql.handle_conflicts(conflict_type='project') - def delete_project(self, tenant_id): - with sql.session_for_write() as session: - tenant_ref = self._get_project(session, tenant_id) - session.delete(tenant_ref) - - # domain crud - - @sql.handle_conflicts(conflict_type='domain') - def create_domain(self, domain_id, domain): - with sql.session_for_write() as session: - ref = Domain.from_dict(domain) - session.add(ref) - return ref.to_dict() - - @driver_hints.truncated - def list_domains(self, hints): - with sql.session_for_read() as session: - query = session.query(Domain) - refs = sql.filter_limit_query(Domain, query, hints) - return [ref.to_dict() for ref in refs] - - def list_domains_from_ids(self, ids): - if not ids: - return [] - else: - with sql.session_for_read() as session: - query = session.query(Domain) - query = query.filter(Domain.id.in_(ids)) - domain_refs = query.all() - return [domain_ref.to_dict() for domain_ref in domain_refs] - - def _get_domain(self, session, domain_id): - ref = session.query(Domain).get(domain_id) - if ref is None: - raise exception.DomainNotFound(domain_id=domain_id) - return ref - - def get_domain(self, domain_id): - with sql.session_for_read() 
as session: - return self._get_domain(session, domain_id).to_dict() - - def get_domain_by_name(self, domain_name): - with sql.session_for_read() as session: - try: - ref = (session.query(Domain). - filter_by(name=domain_name).one()) - except sql.NotFound: - raise exception.DomainNotFound(domain_id=domain_name) - return ref.to_dict() - - @sql.handle_conflicts(conflict_type='domain') - def update_domain(self, domain_id, domain): - with sql.session_for_write() as session: - ref = self._get_domain(session, domain_id) - old_dict = ref.to_dict() - for k in domain: - old_dict[k] = domain[k] - new_domain = Domain.from_dict(old_dict) - for attr in Domain.attributes: - if attr != 'id': - setattr(ref, attr, getattr(new_domain, attr)) - ref.extra = new_domain.extra - return ref.to_dict() - - def delete_domain(self, domain_id): - with sql.session_for_write() as session: - ref = self._get_domain(session, domain_id) - session.delete(ref) - - -class Domain(sql.ModelBase, sql.DictBase): - __tablename__ = 'domain' - attributes = ['id', 'name', 'enabled'] - id = sql.Column(sql.String(64), primary_key=True) - name = sql.Column(sql.String(64), nullable=False) - enabled = sql.Column(sql.Boolean, default=True, nullable=False) - extra = sql.Column(sql.JsonBlob()) - __table_args__ = (sql.UniqueConstraint('name'),) - - -class Project(sql.ModelBase, sql.DictBase): - __tablename__ = 'project' - attributes = ['id', 'name', 'domain_id', 'description', 'enabled', - 'parent_id', 'is_domain'] - id = sql.Column(sql.String(64), primary_key=True) - name = sql.Column(sql.String(64), nullable=False) - domain_id = sql.Column(sql.String(64), sql.ForeignKey('domain.id'), - nullable=False) - description = sql.Column(sql.Text()) - enabled = sql.Column(sql.Boolean) - extra = sql.Column(sql.JsonBlob()) - parent_id = sql.Column(sql.String(64), sql.ForeignKey('project.id')) - is_domain = sql.Column(sql.Boolean, default=False, nullable=False, - server_default='0') - # Unique constraint across two columns to 
create the separation - # rather than just only 'name' being unique - __table_args__ = (sql.UniqueConstraint('domain_id', 'name'),) diff --git a/keystone-moon/keystone/resource/__init__.py b/keystone-moon/keystone/resource/__init__.py deleted file mode 100644 index 7f879f4b..00000000 --- a/keystone-moon/keystone/resource/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.resource import controllers # noqa -from keystone.resource.core import * # noqa diff --git a/keystone-moon/keystone/resource/backends/__init__.py b/keystone-moon/keystone/resource/backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/resource/backends/ldap.py b/keystone-moon/keystone/resource/backends/ldap.py deleted file mode 100644 index 566adc5d..00000000 --- a/keystone-moon/keystone/resource/backends/ldap.py +++ /dev/null @@ -1,217 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from __future__ import absolute_import - -import uuid - -from oslo_config import cfg -from oslo_log import log -from oslo_log import versionutils - -from keystone.common import clean -from keystone.common import driver_hints -from keystone.common import ldap as common_ldap -from keystone.common import models -from keystone import exception -from keystone.i18n import _ -from keystone.identity.backends import ldap as ldap_identity -from keystone import resource - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -class Resource(resource.ResourceDriverV8): - @versionutils.deprecated( - versionutils.deprecated.LIBERTY, - remove_in=+1, - what='ldap resource') - def __init__(self): - super(Resource, self).__init__() - self.LDAP_URL = CONF.ldap.url - self.LDAP_USER = CONF.ldap.user - self.LDAP_PASSWORD = CONF.ldap.password - self.suffix = CONF.ldap.suffix - - # This is the only deep dependency from resource back to identity. - # This is safe to do since if you are using LDAP for resource, it is - # required that you are using it for identity as well. - self.user = ldap_identity.UserApi(CONF) - - self.project = ProjectApi(CONF) - - def default_assignment_driver(self): - return 'ldap' - - def _set_default_parent_project(self, ref): - """If the parent project ID has not been set, set it to None.""" - if isinstance(ref, dict): - if 'parent_id' not in ref: - ref = dict(ref, parent_id=None) - return ref - elif isinstance(ref, list): - return [self._set_default_parent_project(x) for x in ref] - else: - raise ValueError(_('Expected dict or list: %s') % type(ref)) - - def _set_default_is_domain_project(self, ref): - if isinstance(ref, dict): - return dict(ref, is_domain=False) - elif isinstance(ref, list): - return [self._set_default_is_domain_project(x) for x in ref] - else: - raise ValueError(_('Expected dict or list: %s') % type(ref)) - - def _validate_parent_project_is_none(self, ref): - """If a parent_id different from None was given, - raises InvalidProjectException. 
- - """ - parent_id = ref.get('parent_id') - if parent_id is not None: - raise exception.InvalidParentProject(parent_id) - - def _validate_is_domain_field_is_false(self, ref): - is_domain = ref.pop('is_domain', None) - if is_domain: - raise exception.ValidationError(_('LDAP does not support projects ' - 'with is_domain flag enabled')) - - def _set_default_attributes(self, project_ref): - project_ref = self._set_default_domain(project_ref) - project_ref = self._set_default_is_domain_project(project_ref) - return self._set_default_parent_project(project_ref) - - def get_project(self, tenant_id): - return self._set_default_attributes( - self.project.get(tenant_id)) - - def list_projects(self, hints): - return self._set_default_attributes( - self.project.get_all_filtered(hints)) - - def list_projects_in_domain(self, domain_id): - # We don't support multiple domains within this driver, so ignore - # any domain specified - return self.list_projects(driver_hints.Hints()) - - def list_projects_in_subtree(self, project_id): - # We don't support projects hierarchy within this driver, so a - # project will never have children - return [] - - def list_project_parents(self, project_id): - # We don't support projects hierarchy within this driver, so a - # project will never have parents - return [] - - def is_leaf_project(self, project_id): - # We don't support projects hierarchy within this driver, so a - # project will always be a root and a leaf at the same time - return True - - def list_projects_from_ids(self, ids): - return [self.get_project(id) for id in ids] - - def list_project_ids_from_domain_ids(self, domain_ids): - # We don't support multiple domains within this driver, so ignore - # any domain specified - return [x.id for x in self.list_projects(driver_hints.Hints())] - - def get_project_by_name(self, tenant_name, domain_id): - self._validate_default_domain_id(domain_id) - return self._set_default_attributes( - self.project.get_by_name(tenant_name)) - - def 
create_project(self, tenant_id, tenant): - self.project.check_allow_create() - self._validate_parent_project_is_none(tenant) - self._validate_is_domain_field_is_false(tenant) - tenant['name'] = clean.project_name(tenant['name']) - data = tenant.copy() - if 'id' not in data or data['id'] is None: - data['id'] = str(uuid.uuid4().hex) - if 'description' in data and data['description'] in ['', None]: - data.pop('description') - return self._set_default_attributes( - self.project.create(data)) - - def update_project(self, tenant_id, tenant): - self.project.check_allow_update() - tenant = self._validate_default_domain(tenant) - self._validate_is_domain_field_is_false(tenant) - if 'name' in tenant: - tenant['name'] = clean.project_name(tenant['name']) - return self._set_default_attributes( - self.project.update(tenant_id, tenant)) - - def delete_project(self, tenant_id): - self.project.check_allow_delete() - if self.project.subtree_delete_enabled: - self.project.deleteTree(tenant_id) - else: - # The manager layer will call assignments to delete the - # role assignments, so we just have to delete the project itself. 
- self.project.delete(tenant_id) - - def create_domain(self, domain_id, domain): - if domain_id == CONF.identity.default_domain_id: - msg = _('Duplicate ID, %s.') % domain_id - raise exception.Conflict(type='domain', details=msg) - raise exception.Forbidden(_('Domains are read-only against LDAP')) - - def get_domain(self, domain_id): - self._validate_default_domain_id(domain_id) - return resource.calc_default_domain() - - def update_domain(self, domain_id, domain): - self._validate_default_domain_id(domain_id) - raise exception.Forbidden(_('Domains are read-only against LDAP')) - - def delete_domain(self, domain_id): - self._validate_default_domain_id(domain_id) - raise exception.Forbidden(_('Domains are read-only against LDAP')) - - def list_domains(self, hints): - return [resource.calc_default_domain()] - - def list_domains_from_ids(self, ids): - return [resource.calc_default_domain()] - - def get_domain_by_name(self, domain_name): - default_domain = resource.calc_default_domain() - if domain_name != default_domain['name']: - raise exception.DomainNotFound(domain_id=domain_name) - return default_domain - - -# TODO(termie): turn this into a data object and move logic to driver -class ProjectApi(common_ldap.ProjectLdapStructureMixin, - common_ldap.EnabledEmuMixIn, common_ldap.BaseLdap): - - model = models.Project - - def create(self, values): - data = values.copy() - if data.get('id') is None: - data['id'] = uuid.uuid4().hex - return super(ProjectApi, self).create(data) - - def update(self, project_id, values): - old_obj = self.get(project_id) - return super(ProjectApi, self).update(project_id, values, old_obj) - - def get_all_filtered(self, hints): - query = self.filter_query(hints) - return super(ProjectApi, self).get_all(query) diff --git a/keystone-moon/keystone/resource/backends/sql.py b/keystone-moon/keystone/resource/backends/sql.py deleted file mode 100644 index 39bb4f3b..00000000 --- a/keystone-moon/keystone/resource/backends/sql.py +++ /dev/null @@ -1,267 
+0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_log import log - -from keystone.common import clean -from keystone.common import driver_hints -from keystone.common import sql -from keystone import exception -from keystone.i18n import _LE, _LW -from keystone import resource as keystone_resource - - -LOG = log.getLogger(__name__) - - -class Resource(keystone_resource.ResourceDriverV9): - - def default_assignment_driver(self): - return 'sql' - - def _encode_domain_id(self, ref): - if 'domain_id' in ref and ref['domain_id'] is None: - new_ref = ref.copy() - new_ref['domain_id'] = keystone_resource.NULL_DOMAIN_ID - return new_ref - else: - return ref - - def _is_hidden_ref(self, ref): - return ref.id == keystone_resource.NULL_DOMAIN_ID - - def _get_project(self, session, project_id): - project_ref = session.query(Project).get(project_id) - if project_ref is None or self._is_hidden_ref(project_ref): - raise exception.ProjectNotFound(project_id=project_id) - return project_ref - - def get_project(self, project_id): - with sql.session_for_read() as session: - return self._get_project(session, project_id).to_dict() - - def get_project_by_name(self, project_name, domain_id): - with sql.session_for_read() as session: - query = session.query(Project) - query = query.filter_by(name=project_name) - if domain_id is None: - query = query.filter_by( - domain_id=keystone_resource.NULL_DOMAIN_ID) - else: - query = query.filter_by(domain_id=domain_id) - try: - 
project_ref = query.one() - except sql.NotFound: - raise exception.ProjectNotFound(project_id=project_name) - - if self._is_hidden_ref(project_ref): - raise exception.ProjectNotFound(project_id=project_name) - return project_ref.to_dict() - - @driver_hints.truncated - def list_projects(self, hints): - # If there is a filter on domain_id and the value is None, then to - # ensure that the sql filtering works correctly, we need to patch - # the value to be NULL_DOMAIN_ID. This is safe to do here since we - # know we are able to satisfy any filter of this type in the call to - # filter_limit_query() below, which will remove the filter from the - # hints (hence ensuring our substitution is not exposed to the caller). - for f in hints.filters: - if (f['name'] == 'domain_id' and f['value'] is None): - f['value'] = keystone_resource.NULL_DOMAIN_ID - with sql.session_for_read() as session: - query = session.query(Project) - project_refs = sql.filter_limit_query(Project, query, hints) - return [project_ref.to_dict() for project_ref in project_refs - if not self._is_hidden_ref(project_ref)] - - def list_projects_from_ids(self, ids): - if not ids: - return [] - else: - with sql.session_for_read() as session: - query = session.query(Project) - query = query.filter(Project.id.in_(ids)) - return [project_ref.to_dict() for project_ref in query.all() - if not self._is_hidden_ref(project_ref)] - - def list_project_ids_from_domain_ids(self, domain_ids): - if not domain_ids: - return [] - else: - with sql.session_for_read() as session: - query = session.query(Project.id) - query = ( - query.filter(Project.domain_id.in_(domain_ids))) - return [x.id for x in query.all() - if not self._is_hidden_ref(x)] - - def list_projects_in_domain(self, domain_id): - with sql.session_for_read() as session: - try: - self._get_project(session, domain_id) - except exception.ProjectNotFound: - raise exception.DomainNotFound(domain_id=domain_id) - query = session.query(Project) - project_refs = 
query.filter(Project.domain_id == domain_id) - return [project_ref.to_dict() for project_ref in project_refs] - - def list_projects_acting_as_domain(self, hints): - hints.add_filter('is_domain', True) - return self.list_projects(hints) - - def _get_children(self, session, project_ids, domain_id=None): - query = session.query(Project) - query = query.filter(Project.parent_id.in_(project_ids)) - project_refs = query.all() - return [project_ref.to_dict() for project_ref in project_refs] - - def list_projects_in_subtree(self, project_id): - with sql.session_for_read() as session: - children = self._get_children(session, [project_id]) - subtree = [] - examined = set([project_id]) - while children: - children_ids = set() - for ref in children: - if ref['id'] in examined: - msg = _LE('Circular reference or a repeated ' - 'entry found in projects hierarchy - ' - '%(project_id)s.') - LOG.error(msg, {'project_id': ref['id']}) - return - children_ids.add(ref['id']) - - examined.update(children_ids) - subtree += children - children = self._get_children(session, children_ids) - return subtree - - def list_project_parents(self, project_id): - with sql.session_for_read() as session: - project = self._get_project(session, project_id).to_dict() - parents = [] - examined = set() - while project.get('parent_id') is not None: - if project['id'] in examined: - msg = _LE('Circular reference or a repeated ' - 'entry found in projects hierarchy - ' - '%(project_id)s.') - LOG.error(msg, {'project_id': project['id']}) - return - - examined.add(project['id']) - parent_project = self._get_project( - session, project['parent_id']).to_dict() - parents.append(parent_project) - project = parent_project - return parents - - def is_leaf_project(self, project_id): - with sql.session_for_read() as session: - project_refs = self._get_children(session, [project_id]) - return not project_refs - - # CRUD - @sql.handle_conflicts(conflict_type='project') - def create_project(self, project_id, project): - 
project['name'] = clean.project_name(project['name']) - new_project = self._encode_domain_id(project) - with sql.session_for_write() as session: - project_ref = Project.from_dict(new_project) - session.add(project_ref) - return project_ref.to_dict() - - @sql.handle_conflicts(conflict_type='project') - def update_project(self, project_id, project): - if 'name' in project: - project['name'] = clean.project_name(project['name']) - - update_project = self._encode_domain_id(project) - with sql.session_for_write() as session: - project_ref = self._get_project(session, project_id) - old_project_dict = project_ref.to_dict() - for k in update_project: - old_project_dict[k] = update_project[k] - # When we read the old_project_dict, any "null" domain_id will have - # been decoded, so we need to re-encode it - old_project_dict = self._encode_domain_id(old_project_dict) - new_project = Project.from_dict(old_project_dict) - for attr in Project.attributes: - if attr != 'id': - setattr(project_ref, attr, getattr(new_project, attr)) - project_ref.extra = new_project.extra - return project_ref.to_dict(include_extra_dict=True) - - @sql.handle_conflicts(conflict_type='project') - def delete_project(self, project_id): - with sql.session_for_write() as session: - project_ref = self._get_project(session, project_id) - session.delete(project_ref) - - @sql.handle_conflicts(conflict_type='project') - def delete_projects_from_ids(self, project_ids): - if not project_ids: - return - with sql.session_for_write() as session: - query = session.query(Project).filter(Project.id.in_( - project_ids)) - project_ids_from_bd = [p['id'] for p in query.all()] - for project_id in project_ids: - if (project_id not in project_ids_from_bd or - project_id == keystone_resource.NULL_DOMAIN_ID): - LOG.warning(_LW('Project %s does not exist and was not ' - 'deleted.') % project_id) - query.delete(synchronize_session=False) - - -class Domain(sql.ModelBase, sql.DictBase): - __tablename__ = 'domain' - attributes = 
['id', 'name', 'enabled'] - id = sql.Column(sql.String(64), primary_key=True) - name = sql.Column(sql.String(64), nullable=False) - enabled = sql.Column(sql.Boolean, default=True, nullable=False) - extra = sql.Column(sql.JsonBlob()) - __table_args__ = (sql.UniqueConstraint('name'),) - - -class Project(sql.ModelBase, sql.DictBase): - # NOTE(henry-nash): From the manager and above perspective, the domain_id - # is nullable. However, to ensure uniqueness in multi-process - # configurations, it is better to still use the sql uniqueness constraint. - # Since the support for a nullable component of a uniqueness constraint - # across different sql databases is mixed, we instead store a special value - # to represent null, as defined in NULL_DOMAIN_ID above. - - def to_dict(self, include_extra_dict=False): - d = super(Project, self).to_dict( - include_extra_dict=include_extra_dict) - if d['domain_id'] == keystone_resource.NULL_DOMAIN_ID: - d['domain_id'] = None - return d - - __tablename__ = 'project' - attributes = ['id', 'name', 'domain_id', 'description', 'enabled', - 'parent_id', 'is_domain'] - id = sql.Column(sql.String(64), primary_key=True) - name = sql.Column(sql.String(64), nullable=False) - domain_id = sql.Column(sql.String(64), sql.ForeignKey('project.id'), - nullable=False) - description = sql.Column(sql.Text()) - enabled = sql.Column(sql.Boolean) - extra = sql.Column(sql.JsonBlob()) - parent_id = sql.Column(sql.String(64), sql.ForeignKey('project.id')) - is_domain = sql.Column(sql.Boolean, default=False, nullable=False, - server_default='0') - # Unique constraint across two columns to create the separation - # rather than just only 'name' being unique - __table_args__ = (sql.UniqueConstraint('domain_id', 'name'),) diff --git a/keystone-moon/keystone/resource/config_backends/__init__.py b/keystone-moon/keystone/resource/config_backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git 
a/keystone-moon/keystone/resource/config_backends/sql.py b/keystone-moon/keystone/resource/config_backends/sql.py deleted file mode 100644 index 6413becc..00000000 --- a/keystone-moon/keystone/resource/config_backends/sql.py +++ /dev/null @@ -1,152 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.common import sql -from keystone import exception -from keystone.i18n import _ -from keystone import resource - - -class WhiteListedConfig(sql.ModelBase, sql.ModelDictMixin): - __tablename__ = 'whitelisted_config' - domain_id = sql.Column(sql.String(64), primary_key=True) - group = sql.Column(sql.String(255), primary_key=True) - option = sql.Column(sql.String(255), primary_key=True) - value = sql.Column(sql.JsonBlob(), nullable=False) - - def to_dict(self): - d = super(WhiteListedConfig, self).to_dict() - d.pop('domain_id') - return d - - -class SensitiveConfig(sql.ModelBase, sql.ModelDictMixin): - __tablename__ = 'sensitive_config' - domain_id = sql.Column(sql.String(64), primary_key=True) - group = sql.Column(sql.String(255), primary_key=True) - option = sql.Column(sql.String(255), primary_key=True) - value = sql.Column(sql.JsonBlob(), nullable=False) - - def to_dict(self): - d = super(SensitiveConfig, self).to_dict() - d.pop('domain_id') - return d - - -class ConfigRegister(sql.ModelBase, sql.ModelDictMixin): - __tablename__ = 'config_register' - type = sql.Column(sql.String(64), primary_key=True) - domain_id = sql.Column(sql.String(64), nullable=False) 
- - -class DomainConfig(resource.DomainConfigDriverV8): - - def choose_table(self, sensitive): - if sensitive: - return SensitiveConfig - else: - return WhiteListedConfig - - @sql.handle_conflicts(conflict_type='domain_config') - def create_config_option(self, domain_id, group, option, value, - sensitive=False): - with sql.session_for_write() as session: - config_table = self.choose_table(sensitive) - ref = config_table(domain_id=domain_id, group=group, - option=option, value=value) - session.add(ref) - return ref.to_dict() - - def _get_config_option(self, session, domain_id, group, option, sensitive): - try: - config_table = self.choose_table(sensitive) - ref = (session.query(config_table). - filter_by(domain_id=domain_id, group=group, - option=option).one()) - except sql.NotFound: - msg = _('option %(option)s in group %(group)s') % { - 'group': group, 'option': option} - raise exception.DomainConfigNotFound( - domain_id=domain_id, group_or_option=msg) - return ref - - def get_config_option(self, domain_id, group, option, sensitive=False): - with sql.session_for_read() as session: - ref = self._get_config_option(session, domain_id, group, option, - sensitive) - return ref.to_dict() - - def list_config_options(self, domain_id, group=None, option=None, - sensitive=False): - with sql.session_for_read() as session: - config_table = self.choose_table(sensitive) - query = session.query(config_table) - query = query.filter_by(domain_id=domain_id) - if group: - query = query.filter_by(group=group) - if option: - query = query.filter_by(option=option) - return [ref.to_dict() for ref in query.all()] - - def update_config_option(self, domain_id, group, option, value, - sensitive=False): - with sql.session_for_write() as session: - ref = self._get_config_option(session, domain_id, group, option, - sensitive) - ref.value = value - return ref.to_dict() - - def delete_config_options(self, domain_id, group=None, option=None, - sensitive=False): - """Deletes config options that 
match the filter parameters. - - Since the public API is broken down into calls for delete in both the - whitelisted and sensitive methods, we are silent at the driver level - if there was nothing to delete. - - """ - with sql.session_for_write() as session: - config_table = self.choose_table(sensitive) - query = session.query(config_table) - query = query.filter_by(domain_id=domain_id) - if group: - query = query.filter_by(group=group) - if option: - query = query.filter_by(option=option) - query.delete(False) - - def obtain_registration(self, domain_id, type): - try: - with sql.session_for_write() as session: - ref = ConfigRegister(type=type, domain_id=domain_id) - session.add(ref) - return True - except sql.DBDuplicateEntry: # nosec - # Continue on and return False to indicate failure. - pass - return False - - def read_registration(self, type): - with sql.session_for_read() as session: - ref = session.query(ConfigRegister).get(type) - if not ref: - raise exception.ConfigRegistrationNotFound() - return ref.domain_id - - def release_registration(self, domain_id, type=None): - """Silently delete anything registered for the domain specified.""" - with sql.session_for_write() as session: - query = session.query(ConfigRegister) - if type: - query = query.filter_by(type=type) - query = query.filter_by(domain_id=domain_id) - query.delete(False) diff --git a/keystone-moon/keystone/resource/controllers.py b/keystone-moon/keystone/resource/controllers.py deleted file mode 100644 index 5cabe064..00000000 --- a/keystone-moon/keystone/resource/controllers.py +++ /dev/null @@ -1,334 +0,0 @@ -# Copyright 2013 Metacloud, Inc. -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Workflow Logic the Resource service.""" - -import uuid - -from oslo_config import cfg - -from keystone.common import controller -from keystone.common import dependency -from keystone.common import validation -from keystone.common import wsgi -from keystone import exception -from keystone.i18n import _ -from keystone import notifications -from keystone.resource import schema - - -CONF = cfg.CONF - - -@dependency.requires('resource_api') -class Tenant(controller.V2Controller): - - @controller.v2_deprecated - def get_all_projects(self, context, **kw): - """Gets a list of all tenants for an admin user.""" - self.assert_admin(context) - - if 'name' in context['query_string']: - return self._get_project_by_name(context['query_string']['name']) - - try: - tenant_refs = self.resource_api.list_projects_in_domain( - CONF.identity.default_domain_id) - except exception.DomainNotFound: - # If the default domain doesn't exist then there are no V2 - # projects. 
- tenant_refs = [] - tenant_refs = [self.v3_to_v2_project(tenant_ref) - for tenant_ref in tenant_refs - if not tenant_ref.get('is_domain')] - params = { - 'limit': context['query_string'].get('limit'), - 'marker': context['query_string'].get('marker'), - } - return self.format_project_list(tenant_refs, **params) - - def _assert_not_is_domain_project(self, project_id, project_ref=None): - # Projects acting as a domain should not be visible via v2 - if not project_ref: - project_ref = self.resource_api.get_project(project_id) - if project_ref.get('is_domain'): - raise exception.ProjectNotFound(project_id) - - @controller.v2_deprecated - def get_project(self, context, tenant_id): - # TODO(termie): this stuff should probably be moved to middleware - self.assert_admin(context) - ref = self.resource_api.get_project(tenant_id) - self._assert_not_is_domain_project(tenant_id, ref) - return {'tenant': self.v3_to_v2_project(ref)} - - def _get_project_by_name(self, tenant_name): - # Projects acting as a domain should not be visible via v2 - ref = self.resource_api.get_project_by_name( - tenant_name, CONF.identity.default_domain_id) - self._assert_not_is_domain_project(ref['id'], ref) - return {'tenant': self.v3_to_v2_project(ref)} - - # CRUD Extension - @controller.v2_deprecated - def create_project(self, context, tenant): - tenant_ref = self._normalize_dict(tenant) - - if 'name' not in tenant_ref or not tenant_ref['name']: - msg = _('Name field is required and cannot be empty') - raise exception.ValidationError(message=msg) - - if 'is_domain' in tenant_ref: - msg = _('The creation of projects acting as domains is not ' - 'allowed in v2.') - raise exception.ValidationError(message=msg) - - self.assert_admin(context) - - self.resource_api.ensure_default_domain_exists() - - tenant_ref['id'] = tenant_ref.get('id', uuid.uuid4().hex) - initiator = notifications._get_request_audit_info(context) - tenant = self.resource_api.create_project( - tenant_ref['id'], - 
self._normalize_domain_id(context, tenant_ref), - initiator) - return {'tenant': self.v3_to_v2_project(tenant)} - - @controller.v2_deprecated - def update_project(self, context, tenant_id, tenant): - self.assert_admin(context) - self._assert_not_is_domain_project(tenant_id) - # Remove domain_id and is_domain if specified - a v2 api caller - # should not be specifying that - clean_tenant = tenant.copy() - clean_tenant.pop('domain_id', None) - clean_tenant.pop('is_domain', None) - initiator = notifications._get_request_audit_info(context) - tenant_ref = self.resource_api.update_project( - tenant_id, clean_tenant, initiator) - return {'tenant': self.v3_to_v2_project(tenant_ref)} - - @controller.v2_deprecated - def delete_project(self, context, tenant_id): - self.assert_admin(context) - self._assert_not_is_domain_project(tenant_id) - initiator = notifications._get_request_audit_info(context) - self.resource_api.delete_project(tenant_id, initiator) - - -@dependency.requires('resource_api') -class DomainV3(controller.V3Controller): - collection_name = 'domains' - member_name = 'domain' - - def __init__(self): - super(DomainV3, self).__init__() - self.get_member_from_driver = self.resource_api.get_domain - - @controller.protected() - @validation.validated(schema.domain_create, 'domain') - def create_domain(self, context, domain): - ref = self._assign_unique_id(self._normalize_dict(domain)) - initiator = notifications._get_request_audit_info(context) - ref = self.resource_api.create_domain(ref['id'], ref, initiator) - return DomainV3.wrap_member(context, ref) - - @controller.filterprotected('enabled', 'name') - def list_domains(self, context, filters): - hints = DomainV3.build_driver_hints(context, filters) - refs = self.resource_api.list_domains(hints=hints) - return DomainV3.wrap_collection(context, refs, hints=hints) - - @controller.protected() - def get_domain(self, context, domain_id): - ref = self.resource_api.get_domain(domain_id) - return 
DomainV3.wrap_member(context, ref) - - @controller.protected() - @validation.validated(schema.domain_update, 'domain') - def update_domain(self, context, domain_id, domain): - self._require_matching_id(domain_id, domain) - initiator = notifications._get_request_audit_info(context) - ref = self.resource_api.update_domain(domain_id, domain, initiator) - return DomainV3.wrap_member(context, ref) - - @controller.protected() - def delete_domain(self, context, domain_id): - initiator = notifications._get_request_audit_info(context) - return self.resource_api.delete_domain(domain_id, initiator) - - -@dependency.requires('domain_config_api') -@dependency.requires('resource_api') -class DomainConfigV3(controller.V3Controller): - member_name = 'config' - - @controller.protected() - def create_domain_config(self, context, domain_id, config): - self.resource_api.get_domain(domain_id) - original_config = ( - self.domain_config_api.get_config_with_sensitive_info(domain_id)) - ref = self.domain_config_api.create_config(domain_id, config) - if original_config: - # Return status code 200, since config already existed - return wsgi.render_response(body={self.member_name: ref}) - else: - return wsgi.render_response(body={self.member_name: ref}, - status=('201', 'Created')) - - @controller.protected() - def get_domain_config(self, context, domain_id, group=None, option=None): - self.resource_api.get_domain(domain_id) - ref = self.domain_config_api.get_config(domain_id, group, option) - return {self.member_name: ref} - - @controller.protected() - def update_domain_config( - self, context, domain_id, config, group, option): - self.resource_api.get_domain(domain_id) - ref = self.domain_config_api.update_config( - domain_id, config, group, option) - return wsgi.render_response(body={self.member_name: ref}) - - def update_domain_config_group(self, context, domain_id, group, config): - self.resource_api.get_domain(domain_id) - return self.update_domain_config( - context, domain_id, config, 
group, option=None) - - def update_domain_config_only(self, context, domain_id, config): - self.resource_api.get_domain(domain_id) - return self.update_domain_config( - context, domain_id, config, group=None, option=None) - - @controller.protected() - def delete_domain_config( - self, context, domain_id, group=None, option=None): - self.resource_api.get_domain(domain_id) - self.domain_config_api.delete_config(domain_id, group, option) - - @controller.protected() - def get_domain_config_default(self, context, group=None, option=None): - ref = self.domain_config_api.get_config_default(group, option) - return {self.member_name: ref} - - -@dependency.requires('resource_api') -class ProjectV3(controller.V3Controller): - collection_name = 'projects' - member_name = 'project' - - def __init__(self): - super(ProjectV3, self).__init__() - self.get_member_from_driver = self.resource_api.get_project - - @controller.protected() - @validation.validated(schema.project_create, 'project') - def create_project(self, context, project): - ref = self._assign_unique_id(self._normalize_dict(project)) - - if not ref.get('is_domain'): - ref = self._normalize_domain_id(context, ref) - # Our API requires that you specify the location in the hierarchy - # unambiguously. This could be by parent_id or, if it is a top level - # project, just by providing a domain_id. 
- if not ref.get('parent_id'): - ref['parent_id'] = ref.get('domain_id') - - initiator = notifications._get_request_audit_info(context) - try: - ref = self.resource_api.create_project(ref['id'], ref, - initiator=initiator) - except (exception.DomainNotFound, exception.ProjectNotFound) as e: - raise exception.ValidationError(e) - return ProjectV3.wrap_member(context, ref) - - @controller.filterprotected('domain_id', 'enabled', 'name', - 'parent_id', 'is_domain') - def list_projects(self, context, filters): - hints = ProjectV3.build_driver_hints(context, filters) - # If 'is_domain' has not been included as a query, we default it to - # False (which in query terms means '0' - if 'is_domain' not in context['query_string']: - hints.add_filter('is_domain', '0') - refs = self.resource_api.list_projects(hints=hints) - return ProjectV3.wrap_collection(context, refs, hints=hints) - - def _expand_project_ref(self, context, ref): - params = context['query_string'] - - parents_as_list = 'parents_as_list' in params and ( - self.query_filter_is_true(params['parents_as_list'])) - parents_as_ids = 'parents_as_ids' in params and ( - self.query_filter_is_true(params['parents_as_ids'])) - - subtree_as_list = 'subtree_as_list' in params and ( - self.query_filter_is_true(params['subtree_as_list'])) - subtree_as_ids = 'subtree_as_ids' in params and ( - self.query_filter_is_true(params['subtree_as_ids'])) - - # parents_as_list and parents_as_ids are mutually exclusive - if parents_as_list and parents_as_ids: - msg = _('Cannot use parents_as_list and parents_as_ids query ' - 'params at the same time.') - raise exception.ValidationError(msg) - - # subtree_as_list and subtree_as_ids are mutually exclusive - if subtree_as_list and subtree_as_ids: - msg = _('Cannot use subtree_as_list and subtree_as_ids query ' - 'params at the same time.') - raise exception.ValidationError(msg) - - user_id = self.get_auth_context(context).get('user_id') - - if parents_as_list: - parents = 
self.resource_api.list_project_parents( - ref['id'], user_id) - ref['parents'] = [ProjectV3.wrap_member(context, p) - for p in parents] - elif parents_as_ids: - ref['parents'] = self.resource_api.get_project_parents_as_ids(ref) - - if subtree_as_list: - subtree = self.resource_api.list_projects_in_subtree( - ref['id'], user_id) - ref['subtree'] = [ProjectV3.wrap_member(context, p) - for p in subtree] - elif subtree_as_ids: - ref['subtree'] = self.resource_api.get_projects_in_subtree_as_ids( - ref['id']) - - @controller.protected() - def get_project(self, context, project_id): - ref = self.resource_api.get_project(project_id) - self._expand_project_ref(context, ref) - return ProjectV3.wrap_member(context, ref) - - @controller.protected() - @validation.validated(schema.project_update, 'project') - def update_project(self, context, project_id, project): - self._require_matching_id(project_id, project) - self._require_matching_domain_id( - project_id, project, self.resource_api.get_project) - initiator = notifications._get_request_audit_info(context) - ref = self.resource_api.update_project(project_id, project, - initiator=initiator) - return ProjectV3.wrap_member(context, ref) - - @controller.protected() - def delete_project(self, context, project_id): - initiator = notifications._get_request_audit_info(context) - return self.resource_api.delete_project(project_id, - initiator=initiator) diff --git a/keystone-moon/keystone/resource/core.py b/keystone-moon/keystone/resource/core.py deleted file mode 100644 index f8d72e91..00000000 --- a/keystone-moon/keystone/resource/core.py +++ /dev/null @@ -1,2161 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Main entry point into the Resource service.""" - -import abc -import copy - -from oslo_config import cfg -from oslo_log import log -from oslo_log import versionutils -import six - -from keystone import assignment -from keystone.common import cache -from keystone.common import clean -from keystone.common import dependency -from keystone.common import driver_hints -from keystone.common import manager -from keystone.common import utils -from keystone import exception -from keystone.i18n import _, _LE, _LW -from keystone import notifications - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) -MEMOIZE = cache.get_memoization_decorator(group='resource') - - -def calc_default_domain(): - return {'description': - (u'The default domain'), - 'enabled': True, - 'id': CONF.identity.default_domain_id, - 'name': u'Default'} - - -def _get_project_from_domain(domain_ref): - """Creates a project ref from the provided domain ref.""" - project_ref = domain_ref.copy() - project_ref['is_domain'] = True - project_ref['domain_id'] = None - project_ref['parent_id'] = None - - return project_ref - - -@dependency.provider('resource_api') -@dependency.requires('assignment_api', 'credential_api', 'domain_config_api', - 'identity_api', 'revoke_api') -class Manager(manager.Manager): - """Default pivot point for the Resource backend. - - See :mod:`keystone.common.manager.Manager` for more details on how this - dynamically calls the backend. 
- - """ - - driver_namespace = 'keystone.resource' - - _DOMAIN = 'domain' - _PROJECT = 'project' - - def __init__(self): - # If there is a specific driver specified for resource, then use it. - # Otherwise retrieve the driver type from the assignment driver. - resource_driver = CONF.resource.driver - - if resource_driver is None: - assignment_manager = dependency.get_provider('assignment_api') - resource_driver = assignment_manager.default_resource_driver() - - super(Manager, self).__init__(resource_driver) - - # Make sure it is a driver version we support, and if it is a legacy - # driver, then wrap it. - if isinstance(self.driver, ResourceDriverV8): - self.driver = V9ResourceWrapperForV8Driver(self.driver) - elif not isinstance(self.driver, ResourceDriverV9): - raise exception.UnsupportedDriverVersion(driver=resource_driver) - - def _get_hierarchy_depth(self, parents_list): - return len(parents_list) + 1 - - def _assert_max_hierarchy_depth(self, project_id, parents_list=None): - if parents_list is None: - parents_list = self.list_project_parents(project_id) - # NOTE(henry-nash): In upgrading to a scenario where domains are - # represented as projects acting as domains, we will effectively - # increase the depth of any existing project hierarchy by one. To avoid - # pushing any existing hierarchies over the limit, we add one to the - # maximum depth allowed, as specified in the configuration file. 
- max_depth = CONF.max_project_tree_depth + 1 - if self._get_hierarchy_depth(parents_list) > max_depth: - raise exception.ForbiddenNotSecurity( - _('Max hierarchy depth reached for %s branch.') % project_id) - - def _assert_is_domain_project_constraints(self, project_ref): - """Enforces specific constraints of projects that act as domains - - Called when is_domain is true, this method ensures that: - - * multiple domains are enabled - * the project name is not the reserved name for a federated domain - * the project is a root project - - :raises keystone.exception.ValidationError: If one of the constraints - was not satisfied. - """ - if (not self.identity_api.multiple_domains_supported and - project_ref['id'] != CONF.identity.default_domain_id): - raise exception.ValidationError( - message=_('Multiple domains are not supported')) - - self.assert_domain_not_federated(project_ref['id'], project_ref) - - if project_ref['parent_id']: - raise exception.ValidationError( - message=_('only root projects are allowed to act as ' - 'domains.')) - - def _assert_regular_project_constraints(self, project_ref): - """Enforces regular project hierarchy constraints - - Called when is_domain is false. The project must contain a valid - domain_id and parent_id. The goal of this method is to check - that the domain_id specified is consistent with the domain of its - parent. - - :raises keystone.exception.ValidationError: If one of the constraints - was not satisfied. - :raises keystone.exception.DomainNotFound: In case the domain is not - found. - """ - # Ensure domain_id is valid, and by inference will not be None. 
- domain = self.get_domain(project_ref['domain_id']) - parent_ref = self.get_project(project_ref['parent_id']) - - if parent_ref['is_domain']: - if parent_ref['id'] != domain['id']: - raise exception.ValidationError( - message=_('Cannot create project, since its parent ' - '(%(domain_id)s) is acting as a domain, ' - 'but project\'s specified parent_id ' - '(%(parent_id)s) does not match ' - 'this domain_id.') - % {'domain_id': domain['id'], - 'parent_id': parent_ref['id']}) - else: - parent_domain_id = parent_ref.get('domain_id') - if parent_domain_id != domain['id']: - raise exception.ValidationError( - message=_('Cannot create project, since it specifies ' - 'its owner as domain %(domain_id)s, but ' - 'specifies a parent in a different domain ' - '(%(parent_domain_id)s).') - % {'domain_id': domain['id'], - 'parent_domain_id': parent_domain_id}) - - def _enforce_project_constraints(self, project_ref): - if project_ref.get('is_domain'): - self._assert_is_domain_project_constraints(project_ref) - else: - self._assert_regular_project_constraints(project_ref) - # The whole hierarchy (upwards) must be enabled - parent_id = project_ref['parent_id'] - parents_list = self.list_project_parents(parent_id) - parent_ref = self.get_project(parent_id) - parents_list.append(parent_ref) - for ref in parents_list: - if not ref.get('enabled', True): - raise exception.ValidationError( - message=_('cannot create a project in a ' - 'branch containing a disabled ' - 'project: %s') % ref['id']) - - self._assert_max_hierarchy_depth(project_ref.get('parent_id'), - parents_list) - - def _raise_reserved_character_exception(self, entity_type, name): - msg = _('%(entity)s name cannot contain the following reserved ' - 'characters: %(chars)s') - raise exception.ValidationError( - message=msg % { - 'entity': entity_type, - 'chars': utils.list_url_unsafe_chars(name) - }) - - def _generate_project_name_conflict_msg(self, project): - if project['is_domain']: - return _('it is not permitted to have 
two projects ' - 'acting as domains with the same name: %s' - ) % project['name'] - else: - return _('it is not permitted to have two projects ' - 'within a domain with the same name : %s' - ) % project['name'] - - def create_project(self, project_id, project, initiator=None): - project = project.copy() - - if (CONF.resource.project_name_url_safe != 'off' and - utils.is_not_url_safe(project['name'])): - self._raise_reserved_character_exception('Project', - project['name']) - - project.setdefault('enabled', True) - project['enabled'] = clean.project_enabled(project['enabled']) - project.setdefault('description', '') - - # For regular projects, the controller will ensure we have a valid - # domain_id. For projects acting as a domain, the project_id - # is, effectively, the domain_id - and for such projects we don't - # bother to store a copy of it in the domain_id attribute. - project.setdefault('domain_id', None) - project.setdefault('parent_id', None) - if not project['parent_id']: - project['parent_id'] = project['domain_id'] - project.setdefault('is_domain', False) - - self._enforce_project_constraints(project) - - # We leave enforcing name uniqueness to the underlying driver (instead - # of doing it in code in the project_constraints above), so as to allow - # this check to be done at the storage level, avoiding race conditions - # in multi-process keystone configurations. 
- try: - ret = self.driver.create_project(project_id, project) - except exception.Conflict: - raise exception.Conflict( - type='project', - details=self._generate_project_name_conflict_msg(project)) - - if project.get('is_domain'): - notifications.Audit.created(self._DOMAIN, project_id, initiator) - else: - notifications.Audit.created(self._PROJECT, project_id, initiator) - if MEMOIZE.should_cache(ret): - self.get_project.set(ret, self, project_id) - self.get_project_by_name.set(ret, self, ret['name'], - ret['domain_id']) - return ret - - def assert_domain_enabled(self, domain_id, domain=None): - """Assert the Domain is enabled. - - :raise AssertionError: if domain is disabled. - """ - if domain is None: - domain = self.get_domain(domain_id) - if not domain.get('enabled', True): - raise AssertionError(_('Domain is disabled: %s') % domain_id) - - def assert_domain_not_federated(self, domain_id, domain): - """Assert the Domain's name and id do not match the reserved keyword. - - Note that the reserved keyword is defined in the configuration file, - by default, it is 'Federated', it is also case insensitive. - If config's option is empty the default hardcoded value 'Federated' - will be used. - - :raise AssertionError: if domain named match the value in the config. - - """ - # NOTE(marek-denis): We cannot create this attribute in the __init__ as - # config values are always initialized to default value. - federated_domain = CONF.federation.federated_domain_name.lower() - if (domain.get('name') and domain['name'].lower() == federated_domain): - raise AssertionError(_('Domain cannot be named %s') - % domain['name']) - if (domain_id.lower() == federated_domain): - raise AssertionError(_('Domain cannot have ID %s') - % domain_id) - - def assert_project_enabled(self, project_id, project=None): - """Assert the project is enabled and its associated domain is enabled. - - :raise AssertionError: if the project or domain is disabled. 
- """ - if project is None: - project = self.get_project(project_id) - # If it's a regular project (i.e. it has a domain_id), we need to make - # sure the domain itself is not disabled - if project['domain_id']: - self.assert_domain_enabled(domain_id=project['domain_id']) - if not project.get('enabled', True): - raise AssertionError(_('Project is disabled: %s') % project_id) - - def _assert_all_parents_are_enabled(self, project_id): - parents_list = self.list_project_parents(project_id) - for project in parents_list: - if not project.get('enabled', True): - raise exception.ForbiddenNotSecurity( - _('Cannot enable project %s since it has disabled ' - 'parents') % project_id) - - def _check_whole_subtree_is_disabled(self, project_id, subtree_list=None): - if not subtree_list: - subtree_list = self.list_projects_in_subtree(project_id) - subtree_enabled = [ref.get('enabled', True) for ref in subtree_list] - return (not any(subtree_enabled)) - - def _update_project(self, project_id, project, initiator=None, - cascade=False): - # Use the driver directly to prevent using old cached value. 
- original_project = self.driver.get_project(project_id) - project = project.copy() - - if original_project['is_domain']: - domain = self._get_domain_from_project(original_project) - self.assert_domain_not_federated(project_id, domain) - if 'enabled' in domain: - domain['enabled'] = clean.domain_enabled(domain['enabled']) - url_safe_option = CONF.resource.domain_name_url_safe - exception_entity = 'Domain' - else: - url_safe_option = CONF.resource.project_name_url_safe - exception_entity = 'Project' - - if (url_safe_option != 'off' and - 'name' in project and - project['name'] != original_project['name'] and - utils.is_not_url_safe(project['name'])): - self._raise_reserved_character_exception(exception_entity, - project['name']) - - parent_id = original_project.get('parent_id') - if 'parent_id' in project and project.get('parent_id') != parent_id: - raise exception.ForbiddenNotSecurity( - _('Update of `parent_id` is not allowed.')) - - if ('is_domain' in project and - project['is_domain'] != original_project['is_domain']): - raise exception.ValidationError( - message=_('Update of `is_domain` is not allowed.')) - - update_domain = ('domain_id' in project and - project['domain_id'] != original_project['domain_id']) - - # NOTE(htruta): Even if we are allowing domain_ids to be - # modified (i.e. 'domain_id_immutable' is set False), - # a project.domain_id can only be updated for root projects - # that have no children. The update of domain_id of a project in - # the middle of the hierarchy creates an inconsistent project - # hierarchy. 
- if update_domain: - if original_project['is_domain']: - raise exception.ValidationError( - message=_('Update of domain_id of projects acting as ' - 'domains is not allowed.')) - parent_project = ( - self.driver.get_project(original_project['parent_id'])) - is_root_project = parent_project['is_domain'] - if not is_root_project: - raise exception.ValidationError( - message=_('Update of domain_id is only allowed for ' - 'root projects.')) - subtree_list = self.list_projects_in_subtree(project_id) - if subtree_list: - raise exception.ValidationError( - message=_('Cannot update domain_id of a project that ' - 'has children.')) - versionutils.report_deprecated_feature( - LOG, - _('update of domain_id is deprecated as of Mitaka ' - 'and will be removed in O.') - ) - - if 'enabled' in project: - project['enabled'] = clean.project_enabled(project['enabled']) - - original_project_enabled = original_project.get('enabled', True) - project_enabled = project.get('enabled', True) - if not original_project_enabled and project_enabled: - self._assert_all_parents_are_enabled(project_id) - if original_project_enabled and not project_enabled: - # NOTE(htruta): In order to disable a regular project, all its - # children must already be disabled. However, to keep - # compatibility with the existing domain behaviour, we allow a - # project acting as a domain to be disabled irrespective of the - # state of its children. Disabling a project acting as domain - # effectively disables its children. 
- if (not original_project.get('is_domain') and not cascade and not - self._check_whole_subtree_is_disabled(project_id)): - raise exception.ForbiddenNotSecurity( - _('Cannot disable project %(project_id)s since its ' - 'subtree contains enabled projects.') - % {'project_id': project_id}) - - notifications.Audit.disabled(self._PROJECT, project_id, - public=False) - if cascade: - self._only_allow_enabled_to_update_cascade(project, - original_project) - self._update_project_enabled_cascade(project_id, project_enabled) - - try: - project['is_domain'] = (project.get('is_domain') or - original_project['is_domain']) - ret = self.driver.update_project(project_id, project) - except exception.Conflict: - raise exception.Conflict( - type='project', - details=self._generate_project_name_conflict_msg(project)) - - notifications.Audit.updated(self._PROJECT, project_id, initiator) - if original_project['is_domain']: - notifications.Audit.updated(self._DOMAIN, project_id, initiator) - # If the domain is being disabled, issue the disable notification - # as well - if original_project_enabled and not project_enabled: - notifications.Audit.disabled(self._DOMAIN, project_id, - public=False) - - self.get_project.invalidate(self, project_id) - self.get_project_by_name.invalidate(self, original_project['name'], - original_project['domain_id']) - - if ('domain_id' in project and - project['domain_id'] != original_project['domain_id']): - # If the project's domain_id has been updated, invalidate user - # role assignments cache region, as it may be caching inherited - # assignments from the old domain to the specified project - assignment.COMPUTED_ASSIGNMENTS_REGION.invalidate() - - return ret - - def _only_allow_enabled_to_update_cascade(self, project, original_project): - for attr in project: - if attr != 'enabled': - if project.get(attr) != original_project.get(attr): - raise exception.ValidationError( - message=_('Cascade update is only allowed for ' - 'enabled attribute.')) - - def 
_update_project_enabled_cascade(self, project_id, enabled): - subtree = self.list_projects_in_subtree(project_id) - # Update enabled only if different from original value - subtree_to_update = [child for child in subtree - if child['enabled'] != enabled] - for child in subtree_to_update: - child['enabled'] = enabled - - if not enabled: - # Does not in fact disable the project, only emits a - # notification that it was disabled. The actual disablement - # is done in the next line. - notifications.Audit.disabled(self._PROJECT, child['id'], - public=False) - - self.driver.update_project(child['id'], child) - - def update_project(self, project_id, project, initiator=None, - cascade=False): - ret = self._update_project(project_id, project, initiator, cascade) - if ret['is_domain']: - self.get_domain.invalidate(self, project_id) - self.get_domain_by_name.invalidate(self, ret['name']) - - return ret - - def _pre_delete_cleanup_project(self, project_id, project, initiator=None): - project_user_ids = ( - self.assignment_api.list_user_ids_for_project(project_id)) - for user_id in project_user_ids: - payload = {'user_id': user_id, 'project_id': project_id} - notifications.Audit.internal( - notifications.INVALIDATE_USER_PROJECT_TOKEN_PERSISTENCE, - payload - ) - - def _post_delete_cleanup_project(self, project_id, project, - initiator=None): - self.assignment_api.delete_project_assignments(project_id) - self.get_project.invalidate(self, project_id) - self.get_project_by_name.invalidate(self, project['name'], - project['domain_id']) - self.credential_api.delete_credentials_for_project(project_id) - notifications.Audit.deleted(self._PROJECT, project_id, initiator) - # Invalidate user role assignments cache region, as it may - # be caching role assignments where the target is - # the specified project - assignment.COMPUTED_ASSIGNMENTS_REGION.invalidate() - - def delete_project(self, project_id, initiator=None, cascade=False): - project = self.driver.get_project(project_id) - if 
project.get('is_domain'): - self.delete_domain(project_id, initiator) - else: - self._delete_project(project_id, initiator, cascade) - - def _delete_project(self, project_id, initiator=None, cascade=False): - # Use the driver directly to prevent using old cached value. - project = self.driver.get_project(project_id) - if project['is_domain'] and project['enabled']: - raise exception.ValidationError( - message=_('cannot delete an enabled project acting as a ' - 'domain. Please disable the project %s first.') - % project.get('id')) - - if not self.is_leaf_project(project_id) and not cascade: - raise exception.ForbiddenNotSecurity( - _('Cannot delete the project %s since it is not a leaf in the ' - 'hierarchy. Use the cascade option if you want to delete a ' - 'whole subtree.') - % project_id) - - if cascade: - # Getting reversed project's subtrees list, i.e. from the leaves - # to the root, so we do not break parent_id FK. - subtree_list = self.list_projects_in_subtree(project_id) - subtree_list.reverse() - if not self._check_whole_subtree_is_disabled( - project_id, subtree_list=subtree_list): - raise exception.ForbiddenNotSecurity( - _('Cannot delete project %(project_id)s since its subtree ' - 'contains enabled projects.') - % {'project_id': project_id}) - - project_list = subtree_list + [project] - projects_ids = [x['id'] for x in project_list] - - for prj in project_list: - self._pre_delete_cleanup_project(prj['id'], prj, initiator) - ret = self.driver.delete_projects_from_ids(projects_ids) - for prj in project_list: - self._post_delete_cleanup_project(prj['id'], prj, initiator) - else: - self._pre_delete_cleanup_project(project_id, project, initiator) - ret = self.driver.delete_project(project_id) - self._post_delete_cleanup_project(project_id, project, initiator) - - return ret - - def _filter_projects_list(self, projects_list, user_id): - user_projects = self.assignment_api.list_projects_for_user(user_id) - user_projects_ids = set([proj['id'] for proj in 
user_projects]) - # Keep only the projects present in user_projects - return [proj for proj in projects_list - if proj['id'] in user_projects_ids] - - def _assert_valid_project_id(self, project_id): - if project_id is None: - msg = _('Project field is required and cannot be empty.') - raise exception.ValidationError(message=msg) - # Check if project_id exists - self.get_project(project_id) - - def list_project_parents(self, project_id, user_id=None): - self._assert_valid_project_id(project_id) - parents = self.driver.list_project_parents(project_id) - # If a user_id was provided, the returned list should be filtered - # against the projects this user has access to. - if user_id: - parents = self._filter_projects_list(parents, user_id) - return parents - - def _build_parents_as_ids_dict(self, project, parents_by_id): - # NOTE(rodrigods): we don't rely in the order of the projects returned - # by the list_project_parents() method. Thus, we create a project cache - # (parents_by_id) in order to access each parent in constant time and - # traverse up the hierarchy. - def traverse_parents_hierarchy(project): - parent_id = project.get('parent_id') - if not parent_id: - return None - - parent = parents_by_id[parent_id] - return {parent_id: traverse_parents_hierarchy(parent)} - - return traverse_parents_hierarchy(project) - - def get_project_parents_as_ids(self, project): - """Gets the IDs from the parents from a given project. - - The project IDs are returned as a structured dictionary traversing up - the hierarchy to the top level project. 
For example, considering the - following project hierarchy:: - - A - | - +-B-+ - | | - C D - - If we query for project C parents, the expected return is the following - dictionary:: - - 'parents': { - B['id']: { - A['id']: None - } - } - - """ - parents_list = self.list_project_parents(project['id']) - parents_as_ids = self._build_parents_as_ids_dict( - project, {proj['id']: proj for proj in parents_list}) - return parents_as_ids - - def list_projects_in_subtree(self, project_id, user_id=None): - self._assert_valid_project_id(project_id) - subtree = self.driver.list_projects_in_subtree(project_id) - # If a user_id was provided, the returned list should be filtered - # against the projects this user has access to. - if user_id: - subtree = self._filter_projects_list(subtree, user_id) - return subtree - - def _build_subtree_as_ids_dict(self, project_id, subtree_by_parent): - # NOTE(rodrigods): we perform a depth first search to construct the - # dictionaries representing each level of the subtree hierarchy. In - # order to improve this traversal performance, we create a cache of - # projects (subtree_py_parent) that accesses in constant time the - # direct children of a given project. - def traverse_subtree_hierarchy(project_id): - children = subtree_by_parent.get(project_id) - if not children: - return None - - children_ids = {} - for child in children: - children_ids[child['id']] = traverse_subtree_hierarchy( - child['id']) - return children_ids - - return traverse_subtree_hierarchy(project_id) - - def get_projects_in_subtree_as_ids(self, project_id): - """Gets the IDs from the projects in the subtree from a given project. - - The project IDs are returned as a structured dictionary representing - their hierarchy. 
For example, considering the following project - hierarchy:: - - A - | - +-B-+ - | | - C D - - If we query for project A subtree, the expected return is the following - dictionary:: - - 'subtree': { - B['id']: { - C['id']: None, - D['id']: None - } - } - - """ - def _projects_indexed_by_parent(projects_list): - projects_by_parent = {} - for proj in projects_list: - parent_id = proj.get('parent_id') - if parent_id: - if parent_id in projects_by_parent: - projects_by_parent[parent_id].append(proj) - else: - projects_by_parent[parent_id] = [proj] - return projects_by_parent - - subtree_list = self.list_projects_in_subtree(project_id) - subtree_as_ids = self._build_subtree_as_ids_dict( - project_id, _projects_indexed_by_parent(subtree_list)) - return subtree_as_ids - - def list_domains_from_ids(self, domain_ids): - """List domains for the provided list of ids. - - :param domain_ids: list of ids - - :returns: a list of domain_refs. - - This method is used internally by the assignment manager to bulk read - a set of domains given their ids. 
- - """ - # Retrieve the projects acting as domains get their correspondent - # domains - projects = self.list_projects_from_ids(domain_ids) - domains = [self._get_domain_from_project(project) - for project in projects] - - return domains - - @MEMOIZE - def get_domain(self, domain_id): - try: - # Retrieve the corresponding project that acts as a domain - project = self.driver.get_project(domain_id) - except exception.ProjectNotFound: - raise exception.DomainNotFound(domain_id=domain_id) - - # Return its correspondent domain - return self._get_domain_from_project(project) - - @MEMOIZE - def get_domain_by_name(self, domain_name): - try: - # Retrieve the corresponding project that acts as a domain - project = self.driver.get_project_by_name(domain_name, - domain_id=None) - except exception.ProjectNotFound: - raise exception.DomainNotFound(domain_id=domain_name) - - # Return its correspondent domain - return self._get_domain_from_project(project) - - def _get_domain_from_project(self, project_ref): - """Creates a domain ref from a project ref. - - Based on the provided project ref, create a domain ref, so that the - result can be returned in response to a domain API call. - """ - if not project_ref['is_domain']: - LOG.error(_LE('Asked to convert a non-domain project into a ' - 'domain - Domain: %(domain_id)s, Project ID: ' - '%(id)s, Project Name: %(project_name)s'), - {'domain_id': project_ref['domain_id'], - 'id': project_ref['id'], - 'project_name': project_ref['name']}) - raise exception.DomainNotFound(domain_id=project_ref['id']) - - domain_ref = project_ref.copy() - # As well as the project specific attributes that we need to remove, - # there is an old compatibility issue in that update project (as well - # as extracting an extra attributes), also includes a copy of the - # actual extra dict as well - something that update domain does not do. 
- for k in ['parent_id', 'domain_id', 'is_domain', 'extra']: - domain_ref.pop(k, None) - - return domain_ref - - def create_domain(self, domain_id, domain, initiator=None): - if (CONF.resource.domain_name_url_safe != 'off' and - utils.is_not_url_safe(domain['name'])): - self._raise_reserved_character_exception('Domain', domain['name']) - project_from_domain = _get_project_from_domain(domain) - is_domain_project = self.create_project( - domain_id, project_from_domain, initiator) - - return self._get_domain_from_project(is_domain_project) - - @manager.response_truncated - def list_domains(self, hints=None): - projects = self.list_projects_acting_as_domain(hints) - domains = [self._get_domain_from_project(project) - for project in projects] - return domains - - def update_domain(self, domain_id, domain, initiator=None): - # TODO(henry-nash): We shouldn't have to check for the federated domain - # here as well as _update_project, but currently our tests assume the - # checks are done in a specific order. The tests should be refactored. - self.assert_domain_not_federated(domain_id, domain) - project = _get_project_from_domain(domain) - try: - original_domain = self.driver.get_project(domain_id) - project = self._update_project(domain_id, project, initiator) - except exception.ProjectNotFound: - raise exception.DomainNotFound(domain_id=domain_id) - - domain_from_project = self._get_domain_from_project(project) - self.get_domain.invalidate(self, domain_id) - self.get_domain_by_name.invalidate(self, original_domain['name']) - - return domain_from_project - - def delete_domain(self, domain_id, initiator=None): - # Use the driver directly to get the project that acts as a domain and - # prevent using old cached value. - try: - domain = self.driver.get_project(domain_id) - except exception.ProjectNotFound: - raise exception.DomainNotFound(domain_id=domain_id) - - # To help avoid inadvertent deletes, we insist that the domain - # has been previously disabled. 
This also prevents a user deleting - # their own domain since, once it is disabled, they won't be able - # to get a valid token to issue this delete. - if domain['enabled']: - raise exception.ForbiddenNotSecurity( - _('Cannot delete a domain that is enabled, please disable it ' - 'first.')) - - self._delete_domain_contents(domain_id) - self._delete_project(domain_id, initiator) - # Delete any database stored domain config - self.domain_config_api.delete_config_options(domain_id) - self.domain_config_api.delete_config_options(domain_id, sensitive=True) - self.domain_config_api.release_registration(domain_id) - # TODO(henry-nash): Although the controller will ensure deletion of - # all users & groups within the domain (which will cause all - # assignments for those users/groups to also be deleted), there - # could still be assignments on this domain for users/groups in - # other domains - so we should delete these here by making a call - # to the backend to delete all assignments for this domain. - # (see Bug #1277847) - notifications.Audit.deleted(self._DOMAIN, domain_id, initiator) - self.get_domain.invalidate(self, domain_id) - self.get_domain_by_name.invalidate(self, domain['name']) - - # Invalidate user role assignments cache region, as it may be caching - # role assignments where the target is the specified domain - assignment.COMPUTED_ASSIGNMENTS_REGION.invalidate() - - def _delete_domain_contents(self, domain_id): - """Delete the contents of a domain. - - Before we delete a domain, we need to remove all the entities - that are owned by it, i.e. Projects. To do this we - call the delete function for these entities, which are - themselves responsible for deleting any credentials and role grants - associated with them as well as revoking any relevant tokens. 
- - """ - def _delete_projects(project, projects, examined): - if project['id'] in examined: - msg = _LE('Circular reference or a repeated entry found ' - 'projects hierarchy - %(project_id)s.') - LOG.error(msg, {'project_id': project['id']}) - return - - examined.add(project['id']) - children = [proj for proj in projects - if proj.get('parent_id') == project['id']] - for proj in children: - _delete_projects(proj, projects, examined) - - try: - self.delete_project(project['id'], initiator=None) - except exception.ProjectNotFound: - LOG.debug(('Project %(projectid)s not found when ' - 'deleting domain contents for %(domainid)s, ' - 'continuing with cleanup.'), - {'projectid': project['id'], - 'domainid': domain_id}) - - proj_refs = self.list_projects_in_domain(domain_id) - - # Deleting projects recursively - roots = [x for x in proj_refs if x.get('parent_id') == domain_id] - examined = set() - for project in roots: - _delete_projects(project, proj_refs, examined) - - @manager.response_truncated - def list_projects(self, hints=None): - return self.driver.list_projects(hints or driver_hints.Hints()) - - # NOTE(henry-nash): list_projects_in_domain is actually an internal method - # and not exposed via the API. Therefore there is no need to support - # driver hints for it. - def list_projects_in_domain(self, domain_id): - return self.driver.list_projects_in_domain(domain_id) - - def list_projects_acting_as_domain(self, hints=None): - return self.driver.list_projects_acting_as_domain( - hints or driver_hints.Hints()) - - @MEMOIZE - def get_project(self, project_id): - return self.driver.get_project(project_id) - - @MEMOIZE - def get_project_by_name(self, project_name, domain_id): - return self.driver.get_project_by_name(project_name, domain_id) - - def ensure_default_domain_exists(self): - """Creates the default domain if it doesn't exist. - - This is only used for the v2 API and can go away when V2 does. 
- - """ - try: - default_domain_attrs = { - 'name': 'Default', - 'id': CONF.identity.default_domain_id, - 'description': 'Domain created automatically to support V2.0 ' - 'operations.', - } - self.create_domain(CONF.identity.default_domain_id, - default_domain_attrs) - LOG.warning(_LW( - 'The default domain was created automatically to contain V2 ' - 'resources. This is deprecated in the M release and will not ' - 'be supported in the O release. Create the default domain ' - 'manually or use the keystone-manage bootstrap command.')) - except exception.Conflict: - LOG.debug('The default domain already exists.') - except Exception: - LOG.error(_LE('Failed to create the default domain.')) - raise - - -# The ResourceDriverBase class is the set of driver methods from earlier -# drivers that we still support, that have not been removed or modified. This -# class is then used to created the augmented V8 and V9 version abstract driver -# classes, without having to duplicate a lot of abstract method signatures. -# If you remove a method from V9, then move the abstract methods from this Base -# class to the V8 class. Do not modify any of the method signatures in the Base -# class - changes should only be made in the V8 and subsequent classes. - -# Starting with V9, some drivers use a special value to represent a domain_id -# of None. See comment in Project class of resource/backends/sql.py for more -# details. -NULL_DOMAIN_ID = '<>' - - -@six.add_metaclass(abc.ABCMeta) -class ResourceDriverBase(object): - - def _get_list_limit(self): - return CONF.resource.list_limit or CONF.list_limit - - # project crud - @abc.abstractmethod - def list_projects(self, hints): - """List projects in the system. - - :param hints: filter hints which the driver should - implement if at all possible. - - :returns: a list of project_refs or an empty list. 
- - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_projects_from_ids(self, project_ids): - """List projects for the provided list of ids. - - :param project_ids: list of ids - - :returns: a list of project_refs. - - This method is used internally by the assignment manager to bulk read - a set of projects given their ids. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_project_ids_from_domain_ids(self, domain_ids): - """List project ids for the provided list of domain ids. - - :param domain_ids: list of domain ids - - :returns: a list of project ids owned by the specified domain ids. - - This method is used internally by the assignment manager to bulk read - a set of project ids given a list of domain ids. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_projects_in_domain(self, domain_id): - """List projects in the domain. - - :param domain_id: the driver MUST only return projects - within this domain. - - :returns: a list of project_refs or an empty list. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_project(self, project_id): - """Get a project by ID. - - :returns: project_ref - :raises keystone.exception.ProjectNotFound: if project_id does not - exist - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def update_project(self, project_id, project): - """Updates an existing project. - - :raises keystone.exception.ProjectNotFound: if project_id does not - exist - :raises keystone.exception.Conflict: if project name already exists - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_project(self, project_id): - """Deletes an existing project. 
- - :raises keystone.exception.ProjectNotFound: if project_id does not - exist - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_project_parents(self, project_id): - """List all parents from a project by its ID. - - :param project_id: the driver will list the parents of this - project. - - :returns: a list of project_refs or an empty list. - :raises keystone.exception.ProjectNotFound: if project_id does not - exist - - """ - raise exception.NotImplemented() - - @abc.abstractmethod - def list_projects_in_subtree(self, project_id): - """List all projects in the subtree of a given project. - - :param project_id: the driver will get the subtree under - this project. - - :returns: a list of project_refs or an empty list - :raises keystone.exception.ProjectNotFound: if project_id does not - exist - - """ - raise exception.NotImplemented() - - @abc.abstractmethod - def is_leaf_project(self, project_id): - """Checks if a project is a leaf in the hierarchy. - - :param project_id: the driver will check if this project - is a leaf in the hierarchy. - - :raises keystone.exception.ProjectNotFound: if project_id does not - exist - - """ - raise exception.NotImplemented() - - def _validate_default_domain(self, ref): - """Validate that either the default domain or nothing is specified. - - Also removes the domain from the ref so that LDAP doesn't have to - persist the attribute. - - """ - ref = ref.copy() - domain_id = ref.pop('domain_id', CONF.identity.default_domain_id) - self._validate_default_domain_id(domain_id) - return ref - - def _validate_default_domain_id(self, domain_id): - """Validate that the domain ID belongs to the default domain.""" - if domain_id != CONF.identity.default_domain_id: - raise exception.DomainNotFound(domain_id=domain_id) - - -class ResourceDriverV8(ResourceDriverBase): - """Removed or redefined methods from V8. 
- - Move the abstract methods of any methods removed or modified in later - versions of the driver from ResourceDriverBase to here. We maintain this - so that legacy drivers, which will be a subclass of ResourceDriverV8, can - still reference them. - - """ - - @abc.abstractmethod - def create_project(self, tenant_id, tenant): - """Creates a new project. - - :param tenant_id: This parameter can be ignored. - :param dict tenant: The new project - - Project schema:: - - type: object - properties: - id: - type: string - name: - type: string - domain_id: - type: string - description: - type: string - enabled: - type: boolean - parent_id: - type: string - is_domain: - type: boolean - required: [id, name, domain_id] - additionalProperties: true - - If project doesn't match the schema the behavior is undefined. - - The driver can impose requirements such as the maximum length of a - field. If these requirements are not met the behavior is undefined. - - :raises keystone.exception.Conflict: if the project id already exists - or the name already exists for the domain_id. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_project_by_name(self, tenant_name, domain_id): - """Get a tenant by name. - - :returns: tenant_ref - :raises keystone.exception.ProjectNotFound: if a project with the - tenant_name does not exist within the domain - - """ - raise exception.NotImplemented() # pragma: no cover - - # Domain management functions for backends that only allow a single - # domain. Although we no longer use this, a custom legacy driver might - # have made use of it, so keep it here in case. 
- def _set_default_domain(self, ref): - """If the domain ID has not been set, set it to the default.""" - if isinstance(ref, dict): - if 'domain_id' not in ref: - ref = ref.copy() - ref['domain_id'] = CONF.identity.default_domain_id - return ref - elif isinstance(ref, list): - return [self._set_default_domain(x) for x in ref] - else: - raise ValueError(_('Expected dict or list: %s') % type(ref)) - - # domain crud - @abc.abstractmethod - def create_domain(self, domain_id, domain): - """Creates a new domain. - - :raises keystone.exception.Conflict: if the domain_id or domain name - already exists - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_domains(self, hints): - """List domains in the system. - - :param hints: filter hints which the driver should - implement if at all possible. - - :returns: a list of domain_refs or an empty list. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_domains_from_ids(self, domain_ids): - """List domains for the provided list of ids. - - :param domain_ids: list of ids - - :returns: a list of domain_refs. - - This method is used internally by the assignment manager to bulk read - a set of domains given their ids. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_domain(self, domain_id): - """Get a domain by ID. - - :returns: domain_ref - :raises keystone.exception.DomainNotFound: if domain_id does not exist - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_domain_by_name(self, domain_name): - """Get a domain by name. - - :returns: domain_ref - :raises keystone.exception.DomainNotFound: if domain_name does not - exist - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def update_domain(self, domain_id, domain): - """Updates an existing domain. 
- - :raises keystone.exception.DomainNotFound: if domain_id does not exist - :raises keystone.exception.Conflict: if domain name already exists - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_domain(self, domain_id): - """Deletes an existing domain. - - :raises keystone.exception.DomainNotFound: if domain_id does not exist - - """ - raise exception.NotImplemented() # pragma: no cover - - -class ResourceDriverV9(ResourceDriverBase): - """New or redefined methods from V8. - - Add any new V9 abstract methods (or those with modified signatures) to - this class. - - """ - - @abc.abstractmethod - def create_project(self, project_id, project): - """Creates a new project. - - :param project_id: This parameter can be ignored. - :param dict project: The new project - - Project schema:: - - type: object - properties: - id: - type: string - name: - type: string - domain_id: - type: [string, null] - description: - type: string - enabled: - type: boolean - parent_id: - type: string - is_domain: - type: boolean - required: [id, name, domain_id] - additionalProperties: true - - If the project doesn't match the schema the behavior is undefined. - - The driver can impose requirements such as the maximum length of a - field. If these requirements are not met the behavior is undefined. - - :raises keystone.exception.Conflict: if the project id already exists - or the name already exists for the domain_id. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_project_by_name(self, project_name, domain_id): - """Get a project by name. - - :returns: project_ref - :raises keystone.exception.ProjectNotFound: if a project with the - project_name does not exist within the domain - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_projects_from_ids(self, project_ids): - """Deletes a given list of projects. - - Deletes a list of projects. 
Ensures no project on the list exists - after it is successfully called. If an empty list is provided, - the it is silently ignored. In addition, if a project ID in the list - of project_ids is not found in the backend, no exception is raised, - but a message is logged. - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_projects_acting_as_domain(self, hints): - """List all projects acting as domains. - - :param hints: filter hints which the driver should - implement if at all possible. - - :returns: a list of project_refs or an empty list. - - """ - raise exception.NotImplemented() # pragma: no cover - - -class V9ResourceWrapperForV8Driver(ResourceDriverV9): - """Wrapper class to supported a V8 legacy driver. - - In order to support legacy drivers without having to make the manager code - driver-version aware, we wrap legacy drivers so that they look like the - latest version. For the various changes made in a new driver, here are the - actions needed in this wrapper: - - Method removed from new driver - remove the call-through method from this - class, since the manager will no longer be - calling it. - Method signature (or meaning) changed - wrap the old method in a new - signature here, and munge the input - and output parameters accordingly. - New method added to new driver - add a method to implement the new - functionality here if possible. If that is - not possible, then return NotImplemented, - since we do not guarantee to support new - functionality with legacy drivers. - - This wrapper contains the following support for newer manager code: - - - The current manager code expects domains to be represented as projects - acting as domains, something that may not be possible in a legacy driver. - Hence the wrapper will map any calls for projects acting as a domain back - onto the driver domain methods. 
The caveat for this, is that this assumes - that there can not be a clash between a project_id and a domain_id, in - which case it may not be able to locate the correct entry. - - """ - - @versionutils.deprecated( - as_of=versionutils.deprecated.MITAKA, - what='keystone.resource.ResourceDriverV8', - in_favor_of='keystone.resource.ResourceDriverV9', - remove_in=+2) - def __init__(self, wrapped_driver): - self.driver = wrapped_driver - - def _get_domain_from_project(self, project_ref): - """Creates a domain ref from a project ref. - - Based on the provided project ref (or partial ref), creates a - domain ref, so that the result can be passed to the driver - domain methods. - """ - domain_ref = project_ref.copy() - for k in ['parent_id', 'domain_id', 'is_domain']: - domain_ref.pop(k, None) - return domain_ref - - def get_project_by_name(self, project_name, domain_id): - if domain_id is None: - try: - domain_ref = self.driver.get_domain_by_name(project_name) - return _get_project_from_domain(domain_ref) - except exception.DomainNotFound: - raise exception.ProjectNotFound(project_id=project_name) - else: - return self.driver.get_project_by_name(project_name, domain_id) - - def create_project(self, project_id, project): - if project['is_domain']: - new_domain = self._get_domain_from_project(project) - domain_ref = self.driver.create_domain(project_id, new_domain) - return _get_project_from_domain(domain_ref) - else: - return self.driver.create_project(project_id, project) - - def list_projects(self, hints): - """List projects and/or domains. - - We use the hints filter to determine whether we are listing projects, - domains or both. - - If the filter includes domain_id==None, then we should only list - domains (convert to a project acting as a domain) since regular - projcets always have a non-None value for domain_id. - - Likewise, if the filter includes domain_id==, then we - should only list projects. 
- - If there is no domain_id filter, then we need to do a combained listing - of domains and projects, converting domains to projects acting as a - domain. - - """ - domain_listing_filter = None - for f in hints.filters: - if (f['name'] == 'domain_id'): - domain_listing_filter = f - - if domain_listing_filter is not None: - if domain_listing_filter['value'] is not None: - proj_list = self.driver.list_projects(hints) - else: - domains = self.driver.list_domains(hints) - proj_list = [_get_project_from_domain(p) for p in domains] - hints.filters.remove(domain_listing_filter) - return proj_list - else: - # No domain_id filter, so combine domains and projects. Although - # we hand any remaining filters into each driver, since each filter - # might need to be carried out more than once, we use copies of the - # filters, allowing the original filters to be passed back up to - # controller level where a final filter will occur. - local_hints = copy.deepcopy(hints) - proj_list = self.driver.list_projects(local_hints) - local_hints = copy.deepcopy(hints) - domains = self.driver.list_domains(local_hints) - for domain in domains: - proj_list.append(_get_project_from_domain(domain)) - return proj_list - - def list_projects_from_ids(self, project_ids): - return [self.get_project(id) for id in project_ids] - - def list_project_ids_from_domain_ids(self, domain_ids): - return self.driver.list_project_ids_from_domain_ids(domain_ids) - - def list_projects_in_domain(self, domain_id): - return self.driver.list_projects_in_domain(domain_id) - - def get_project(self, project_id): - try: - domain_ref = self.driver.get_domain(project_id) - return _get_project_from_domain(domain_ref) - except exception.DomainNotFound: - return self.driver.get_project(project_id) - - def _is_domain(self, project_id): - ref = self.get_project(project_id) - return ref.get('is_domain', False) - - def update_project(self, project_id, project): - if self._is_domain(project_id): - update_domain = 
self._get_domain_from_project(project) - domain_ref = self.driver.update_domain(project_id, update_domain) - return _get_project_from_domain(domain_ref) - else: - return self.driver.update_project(project_id, project) - - def delete_project(self, project_id): - if self._is_domain(project_id): - try: - self.driver.delete_domain(project_id) - except exception.DomainNotFound: - raise exception.ProjectNotFound(project_id=project_id) - else: - self.driver.delete_project(project_id) - - def delete_projects_from_ids(self, project_ids): - raise exception.NotImplemented() # pragma: no cover - - def list_project_parents(self, project_id): - """List a project's ancestors. - - The current manager expects the ancestor tree to end with the project - acting as the domain (since that's now the top of the tree), but a - legacy driver will not have that top project in their projects table, - since it's still in the domain table. Hence we lift the algorithm for - traversing up the tree from the driver to here, so that our version of - get_project() is called, which will fetch the "project" from the right - table. 
- - """ - project = self.get_project(project_id) - parents = [] - examined = set() - while project.get('parent_id') is not None: - if project['id'] in examined: - msg = _LE('Circular reference or a repeated ' - 'entry found in projects hierarchy - ' - '%(project_id)s.') - LOG.error(msg, {'project_id': project['id']}) - return - - examined.add(project['id']) - parent_project = self.get_project(project['parent_id']) - parents.append(parent_project) - project = parent_project - return parents - - def list_projects_in_subtree(self, project_id): - return self.driver.list_projects_in_subtree(project_id) - - def is_leaf_project(self, project_id): - return self.driver.is_leaf_project(project_id) - - def list_projects_acting_as_domain(self, hints): - refs = self.driver.list_domains(hints) - return [_get_project_from_domain(p) for p in refs] - - -Driver = manager.create_legacy_driver(ResourceDriverV8) - - -MEMOIZE_CONFIG = cache.get_memoization_decorator(group='domain_config') - - -@dependency.provider('domain_config_api') -class DomainConfigManager(manager.Manager): - """Default pivot point for the Domain Config backend.""" - - # NOTE(henry-nash): In order for a config option to be stored in the - # standard table, it must be explicitly whitelisted. Options marked as - # sensitive are stored in a separate table. Attempting to store options - # that are not listed as either whitelisted or sensitive will raise an - # exception. - # - # Only those options that affect the domain-specific driver support in - # the identity manager are supported. 
- - driver_namespace = 'keystone.resource.domain_config' - - whitelisted_options = { - 'identity': ['driver', 'list_limit'], - 'ldap': [ - 'url', 'user', 'suffix', 'use_dumb_member', 'dumb_member', - 'allow_subtree_delete', 'query_scope', 'page_size', - 'alias_dereferencing', 'debug_level', 'chase_referrals', - 'user_tree_dn', 'user_filter', 'user_objectclass', - 'user_id_attribute', 'user_name_attribute', 'user_mail_attribute', - 'user_description_attribute', 'user_pass_attribute', - 'user_enabled_attribute', 'user_enabled_invert', - 'user_enabled_mask', 'user_enabled_default', - 'user_attribute_ignore', 'user_default_project_id_attribute', - 'user_allow_create', 'user_allow_update', 'user_allow_delete', - 'user_enabled_emulation', 'user_enabled_emulation_dn', - 'user_enabled_emulation_use_group_config', - 'user_additional_attribute_mapping', 'group_tree_dn', - 'group_filter', 'group_objectclass', 'group_id_attribute', - 'group_name_attribute', 'group_member_attribute', - 'group_desc_attribute', 'group_attribute_ignore', - 'group_allow_create', 'group_allow_update', 'group_allow_delete', - 'group_additional_attribute_mapping', 'tls_cacertfile', - 'tls_cacertdir', 'use_tls', 'tls_req_cert', 'use_pool', - 'pool_size', 'pool_retry_max', 'pool_retry_delay', - 'pool_connection_timeout', 'pool_connection_lifetime', - 'use_auth_pool', 'auth_pool_size', 'auth_pool_connection_lifetime' - ] - } - sensitive_options = { - 'identity': [], - 'ldap': ['password'] - } - - def __init__(self): - super(DomainConfigManager, self).__init__(CONF.domain_config.driver) - - def _assert_valid_config(self, config): - """Ensure the options in the config are valid. - - This method is called to validate the request config in create and - update manager calls. 
- - :param config: config structure being created or updated - - """ - # Something must be defined in the request - if not config: - raise exception.InvalidDomainConfig( - reason=_('No options specified')) - - # Make sure the groups/options defined in config itself are valid - for group in config: - if (not config[group] or not - isinstance(config[group], dict)): - msg = _('The value of group %(group)s specified in the ' - 'config should be a dictionary of options') % { - 'group': group} - raise exception.InvalidDomainConfig(reason=msg) - for option in config[group]: - self._assert_valid_group_and_option(group, option) - - def _assert_valid_group_and_option(self, group, option): - """Ensure the combination of group and option is valid. - - :param group: optional group name, if specified it must be one - we support - :param option: optional option name, if specified it must be one - we support and a group must also be specified - - """ - if not group and not option: - # For all calls, it's OK for neither to be defined, it means you - # are operating on all config options for that domain. - return - - if not group and option: - # Our API structure should prevent this from ever happening, so if - # it does, then this is coding error. 
- msg = _('Option %(option)s found with no group specified while ' - 'checking domain configuration request') % { - 'option': option} - raise exception.UnexpectedError(exception=msg) - - if (group and group not in self.whitelisted_options and - group not in self.sensitive_options): - msg = _('Group %(group)s is not supported ' - 'for domain specific configurations') % {'group': group} - raise exception.InvalidDomainConfig(reason=msg) - - if option: - if (option not in self.whitelisted_options[group] and option not in - self.sensitive_options[group]): - msg = _('Option %(option)s in group %(group)s is not ' - 'supported for domain specific configurations') % { - 'group': group, 'option': option} - raise exception.InvalidDomainConfig(reason=msg) - - def _is_sensitive(self, group, option): - return option in self.sensitive_options[group] - - def _config_to_list(self, config): - """Build whitelisted and sensitive lists for use by backend drivers.""" - whitelisted = [] - sensitive = [] - for group in config: - for option in config[group]: - the_list = (sensitive if self._is_sensitive(group, option) - else whitelisted) - the_list.append({ - 'group': group, 'option': option, - 'value': config[group][option]}) - - return whitelisted, sensitive - - def _list_to_config(self, whitelisted, sensitive=None, req_option=None): - """Build config dict from a list of option dicts. - - :param whitelisted: list of dicts containing options and their groups, - this has already been filtered to only contain - those options to include in the output. - :param sensitive: list of dicts containing sensitive options and their - groups, this has already been filtered to only - contain those options to include in the output. 
- :param req_option: the individual option requested - - :returns: a config dict, including sensitive if specified - - """ - the_list = whitelisted + (sensitive or []) - if not the_list: - return {} - - if req_option: - # The request was specific to an individual option, so - # no need to include the group in the output. We first check that - # there is only one option in the answer (and that it's the right - # one) - if not, something has gone wrong and we raise an error - if len(the_list) > 1 or the_list[0]['option'] != req_option: - LOG.error(_LE('Unexpected results in response for domain ' - 'config - %(count)s responses, first option is ' - '%(option)s, expected option %(expected)s'), - {'count': len(the_list), 'option': list[0]['option'], - 'expected': req_option}) - raise exception.UnexpectedError( - _('An unexpected error occurred when retrieving domain ' - 'configs')) - return {the_list[0]['option']: the_list[0]['value']} - - config = {} - for option in the_list: - config.setdefault(option['group'], {}) - config[option['group']][option['option']] = option['value'] - - return config - - def create_config(self, domain_id, config): - """Create config for a domain - - :param domain_id: the domain in question - :param config: the dict of config groups/options to assign to the - domain - - Creates a new config, overwriting any previous config (no Conflict - error will be generated). 
- - :returns: a dict of group dicts containing the options, with any that - are sensitive removed - :raises keystone.exception.InvalidDomainConfig: when the config - contains options we do not support - - """ - self._assert_valid_config(config) - whitelisted, sensitive = self._config_to_list(config) - # Delete any existing config - self.delete_config_options(domain_id) - self.delete_config_options(domain_id, sensitive=True) - # ...and create the new one - for option in whitelisted: - self.create_config_option( - domain_id, option['group'], option['option'], option['value']) - for option in sensitive: - self.create_config_option( - domain_id, option['group'], option['option'], option['value'], - sensitive=True) - # Since we are caching on the full substituted config, we just - # invalidate here, rather than try and create the right result to - # cache. - self.get_config_with_sensitive_info.invalidate(self, domain_id) - return self._list_to_config(whitelisted) - - def get_config(self, domain_id, group=None, option=None): - """Get config, or partial config, for a domain - - :param domain_id: the domain in question - :param group: an optional specific group of options - :param option: an optional specific option within the group - - :returns: a dict of group dicts containing the whitelisted options, - filtered by group and option specified - :raises keystone.exception.DomainConfigNotFound: when no config found - that matches domain_id, group and option specified - :raises keystone.exception.InvalidDomainConfig: when the config - and group/option parameters specify an option we do not - support - - An example response:: - - { - 'ldap': { - 'url': 'myurl' - 'user_tree_dn': 'OU=myou'}, - 'identity': { - 'driver': 'ldap'} - - } - - """ - self._assert_valid_group_and_option(group, option) - whitelisted = self.list_config_options(domain_id, group, option) - if whitelisted: - return self._list_to_config(whitelisted, req_option=option) - - if option: - msg = _('option 
%(option)s in group %(group)s') % { - 'group': group, 'option': option} - elif group: - msg = _('group %(group)s') % {'group': group} - else: - msg = _('any options') - raise exception.DomainConfigNotFound( - domain_id=domain_id, group_or_option=msg) - - def update_config(self, domain_id, config, group=None, option=None): - """Update config, or partial config, for a domain - - :param domain_id: the domain in question - :param config: the config dict containing and groups/options being - updated - :param group: an optional specific group of options, which if specified - must appear in config, with no other groups - :param option: an optional specific option within the group, which if - specified must appear in config, with no other options - - The contents of the supplied config will be merged with the existing - config for this domain, updating or creating new options if these did - not previously exist. If group or option is specified, then the update - will be limited to those specified items and the inclusion of other - options in the supplied config will raise an exception, as will the - situation when those options do not already exist in the current - config. - - :returns: a dict of groups containing all whitelisted options - :raises keystone.exception.InvalidDomainConfig: when the config - and group/option parameters specify an option we do not - support or one that does not exist in the original config - - """ - def _assert_valid_update(domain_id, config, group=None, option=None): - """Ensure the combination of config, group and option is valid.""" - self._assert_valid_config(config) - self._assert_valid_group_and_option(group, option) - - # If a group has been specified, then the request is to - # explicitly only update the options in that group - so the config - # must not contain anything else. Further, that group must exist in - # the original config. 
Likewise, if an option has been specified, - # then the group in the config must only contain that option and it - # also must exist in the original config. - if group: - if len(config) != 1 or (option and len(config[group]) != 1): - if option: - msg = _('Trying to update option %(option)s in group ' - '%(group)s, so that, and only that, option ' - 'must be specified in the config') % { - 'group': group, 'option': option} - else: - msg = _('Trying to update group %(group)s, so that, ' - 'and only that, group must be specified in ' - 'the config') % {'group': group} - raise exception.InvalidDomainConfig(reason=msg) - - # So we now know we have the right number of entries in the - # config that align with a group/option being specified, but we - # must also make sure they match. - if group not in config: - msg = _('request to update group %(group)s, but config ' - 'provided contains group %(group_other)s ' - 'instead') % { - 'group': group, - 'group_other': list(config.keys())[0]} - raise exception.InvalidDomainConfig(reason=msg) - if option and option not in config[group]: - msg = _('Trying to update option %(option)s in group ' - '%(group)s, but config provided contains option ' - '%(option_other)s instead') % { - 'group': group, 'option': option, - 'option_other': list(config[group].keys())[0]} - raise exception.InvalidDomainConfig(reason=msg) - - # Finally, we need to check if the group/option specified - # already exists in the original config - since if not, to keep - # with the semantics of an update, we need to fail with - # a DomainConfigNotFound - if not self._get_config_with_sensitive_info(domain_id, - group, option): - if option: - msg = _('option %(option)s in group %(group)s') % { - 'group': group, 'option': option} - raise exception.DomainConfigNotFound( - domain_id=domain_id, group_or_option=msg) - else: - msg = _('group %(group)s') % {'group': group} - raise exception.DomainConfigNotFound( - domain_id=domain_id, group_or_option=msg) - - def 
_update_or_create(domain_id, option, sensitive): - """Update the option, if it doesn't exist then create it.""" - try: - self.create_config_option( - domain_id, option['group'], option['option'], - option['value'], sensitive=sensitive) - except exception.Conflict: - self.update_config_option( - domain_id, option['group'], option['option'], - option['value'], sensitive=sensitive) - - update_config = config - if group and option: - # The config will just be a dict containing the option and - # its value, so make it look like a single option under the - # group in question - update_config = {group: config} - - _assert_valid_update(domain_id, update_config, group, option) - - whitelisted, sensitive = self._config_to_list(update_config) - - for new_option in whitelisted: - _update_or_create(domain_id, new_option, sensitive=False) - for new_option in sensitive: - _update_or_create(domain_id, new_option, sensitive=True) - - self.get_config_with_sensitive_info.invalidate(self, domain_id) - return self.get_config(domain_id) - - def delete_config(self, domain_id, group=None, option=None): - """Delete config, or partial config, for the domain. - - :param domain_id: the domain in question - :param group: an optional specific group of options - :param option: an optional specific option within the group - - If group and option are None, then the entire config for the domain - is deleted. If group is not None, then just that group of options will - be deleted. If group and option are both specified, then just that - option is deleted. - - :raises keystone.exception.InvalidDomainConfig: when group/option - parameters specify an option we do not support or one that - does not exist in the original config. 
- - """ - self._assert_valid_group_and_option(group, option) - if group: - # As this is a partial delete, then make sure the items requested - # are valid and exist in the current config - current_config = self._get_config_with_sensitive_info(domain_id) - # Raise an exception if the group/options specified don't exist in - # the current config so that the delete method provides the - # correct error semantics. - current_group = current_config.get(group) - if not current_group: - msg = _('group %(group)s') % {'group': group} - raise exception.DomainConfigNotFound( - domain_id=domain_id, group_or_option=msg) - if option and not current_group.get(option): - msg = _('option %(option)s in group %(group)s') % { - 'group': group, 'option': option} - raise exception.DomainConfigNotFound( - domain_id=domain_id, group_or_option=msg) - - self.delete_config_options(domain_id, group, option) - self.delete_config_options(domain_id, group, option, sensitive=True) - self.get_config_with_sensitive_info.invalidate(self, domain_id) - - def _get_config_with_sensitive_info(self, domain_id, group=None, - option=None): - """Get config for a domain/group/option with sensitive info included. - - This is only used by the methods within this class, which may need to - check individual groups or options. - - """ - whitelisted = self.list_config_options(domain_id, group, option) - sensitive = self.list_config_options(domain_id, group, option, - sensitive=True) - - # Check if there are any sensitive substitutions needed. We first try - # and simply ensure any sensitive options that have valid substitution - # references in the whitelisted options are substituted. We then check - # the resulting whitelisted option and raise a warning if there - # appears to be an unmatched or incorrectly constructed substitution - # reference. To avoid the risk of logging any sensitive options that - # have already been substituted, we first take a copy of the - # whitelisted option. 
- - # Build a dict of the sensitive options ready to try substitution - sensitive_dict = {s['option']: s['value'] for s in sensitive} - - for each_whitelisted in whitelisted: - if not isinstance(each_whitelisted['value'], six.string_types): - # We only support substitutions into string types, if its an - # integer, list etc. then just continue onto the next one - continue - - # Store away the original value in case we need to raise a warning - # after substitution. - original_value = each_whitelisted['value'] - warning_msg = '' - try: - each_whitelisted['value'] = ( - each_whitelisted['value'] % sensitive_dict) - except KeyError: - warning_msg = _LW( - 'Found what looks like an unmatched config option ' - 'substitution reference - domain: %(domain)s, group: ' - '%(group)s, option: %(option)s, value: %(value)s. Perhaps ' - 'the config option to which it refers has yet to be ' - 'added?') - except (ValueError, TypeError): - warning_msg = _LW( - 'Found what looks like an incorrectly constructed ' - 'config option substitution reference - domain: ' - '%(domain)s, group: %(group)s, option: %(option)s, ' - 'value: %(value)s.') - - if warning_msg: - LOG.warning(warning_msg % { - 'domain': domain_id, - 'group': each_whitelisted['group'], - 'option': each_whitelisted['option'], - 'value': original_value}) - - return self._list_to_config(whitelisted, sensitive) - - @MEMOIZE_CONFIG - def get_config_with_sensitive_info(self, domain_id): - """Get config for a domain with sensitive info included. - - This method is not exposed via the public API, but is used by the - identity manager to initialize a domain with the fully formed config - options. 
- - """ - return self._get_config_with_sensitive_info(domain_id) - - def get_config_default(self, group=None, option=None): - """Get default config, or partial default config - - :param group: an optional specific group of options - :param option: an optional specific option within the group - - :returns: a dict of group dicts containing the default options, - filtered by group and option if specified - :raises keystone.exception.InvalidDomainConfig: when the config - and group/option parameters specify an option we do not - support (or one that is not whitelisted). - - An example response:: - - { - 'ldap': { - 'url': 'myurl', - 'user_tree_dn': 'OU=myou', - ....}, - 'identity': { - 'driver': 'ldap'} - - } - - """ - def _option_dict(group, option): - group_attr = getattr(CONF, group) - if group_attr is None: - msg = _('Group %s not found in config') % group - raise exception.UnexpectedError(msg) - return {'group': group, 'option': option, - 'value': getattr(group_attr, option)} - - self._assert_valid_group_and_option(group, option) - config_list = [] - if group: - if option: - if option not in self.whitelisted_options[group]: - msg = _('Reading the default for option %(option)s in ' - 'group %(group)s is not supported') % { - 'option': option, 'group': group} - raise exception.InvalidDomainConfig(reason=msg) - config_list.append(_option_dict(group, option)) - else: - for each_option in self.whitelisted_options[group]: - config_list.append(_option_dict(group, each_option)) - else: - for each_group in self.whitelisted_options: - for each_option in self.whitelisted_options[each_group]: - config_list.append(_option_dict(each_group, each_option)) - - return self._list_to_config(config_list, req_option=option) - - -@six.add_metaclass(abc.ABCMeta) -class DomainConfigDriverV8(object): - """Interface description for a Domain Config driver.""" - - @abc.abstractmethod - def create_config_option(self, domain_id, group, option, value, - sensitive=False): - """Creates a config 
option for a domain. - - :param domain_id: the domain for this option - :param group: the group name - :param option: the option name - :param value: the value to assign to this option - :param sensitive: whether the option is sensitive - - :returns: dict containing group, option and value - :raises keystone.exception.Conflict: when the option already exists - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_config_option(self, domain_id, group, option, sensitive=False): - """Gets the config option for a domain. - - :param domain_id: the domain for this option - :param group: the group name - :param option: the option name - :param sensitive: whether the option is sensitive - - :returns: dict containing group, option and value - :raises keystone.exception.DomainConfigNotFound: the option doesn't - exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_config_options(self, domain_id, group=None, option=False, - sensitive=False): - """Gets a config options for a domain. - - :param domain_id: the domain for this option - :param group: optional group option name - :param option: optional option name. If group is None, then this - parameter is ignored - :param sensitive: whether the option is sensitive - - :returns: list of dicts containing group, option and value - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def update_config_option(self, domain_id, group, option, value, - sensitive=False): - """Updates a config option for a domain. - - :param domain_id: the domain for this option - :param group: the group option name - :param option: the option name - :param value: the value to assign to this option - :param sensitive: whether the option is sensitive - - :returns: dict containing updated group, option and value - :raises keystone.exception.DomainConfigNotFound: the option doesn't - exist. 
- - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_config_options(self, domain_id, group=None, option=None, - sensitive=False): - """Deletes config options for a domain. - - Allows deletion of all options for a domain, all options in a group - or a specific option. The driver is silent if there are no options - to delete. - - :param domain_id: the domain for this option - :param group: optional group option name - :param option: optional option name. If group is None, then this - parameter is ignored - :param sensitive: whether the option is sensitive - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def obtain_registration(self, domain_id, type): - """Try and register this domain to use the type specified. - - :param domain_id: the domain required - :param type: type of registration - :returns: True if the domain was registered, False otherwise. Failing - to register means that someone already has it (which could - even be the domain being requested). - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def read_registration(self, type): - """Get the domain ID of who is registered to use this type. - - :param type: type of registration - :returns: domain_id of who is registered. - :raises keystone.exception.ConfigRegistrationNotFound: If nobody is - registered. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def release_registration(self, domain_id, type=None): - """Release registration if it is held by the domain specified. - - If the specified domain is registered for this domain then free it, - if it is not then do nothing - no exception is raised. 
- - :param domain_id: the domain in question - :param type: type of registration, if None then all registrations - for this domain will be freed - - """ - raise exception.NotImplemented() # pragma: no cover - - -DomainConfigDriver = manager.create_legacy_driver(DomainConfigDriverV8) diff --git a/keystone-moon/keystone/resource/routers.py b/keystone-moon/keystone/resource/routers.py deleted file mode 100644 index d58474e2..00000000 --- a/keystone-moon/keystone/resource/routers.py +++ /dev/null @@ -1,125 +0,0 @@ -# Copyright 2013 Metacloud, Inc. -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""WSGI Routers for the Resource service.""" - -from keystone.common import json_home -from keystone.common import router -from keystone.common import wsgi -from keystone.resource import controllers - - -class Admin(wsgi.ComposableRouter): - def add_routes(self, mapper): - # Tenant Operations - tenant_controller = controllers.Tenant() - mapper.connect('/tenants', - controller=tenant_controller, - action='get_all_projects', - conditions=dict(method=['GET'])) - mapper.connect('/tenants/{tenant_id}', - controller=tenant_controller, - action='get_project', - conditions=dict(method=['GET'])) - - -class Routers(wsgi.RoutersBase): - - def append_v3_routers(self, mapper, routers): - routers.append( - router.Router(controllers.DomainV3(), - 'domains', 'domain', - resource_descriptions=self.v3_resources)) - - config_controller = controllers.DomainConfigV3() - - self._add_resource( - mapper, config_controller, - path='/domains/{domain_id}/config', - get_head_action='get_domain_config', - put_action='create_domain_config', - patch_action='update_domain_config_only', - delete_action='delete_domain_config', - rel=json_home.build_v3_resource_relation('domain_config'), - status=json_home.Status.EXPERIMENTAL, - path_vars={ - 'domain_id': json_home.Parameters.DOMAIN_ID - }) - - config_group_param = ( - json_home.build_v3_parameter_relation('config_group')) - self._add_resource( - mapper, config_controller, - path='/domains/{domain_id}/config/{group}', - get_head_action='get_domain_config', - patch_action='update_domain_config_group', - delete_action='delete_domain_config', - rel=json_home.build_v3_resource_relation('domain_config_group'), - status=json_home.Status.EXPERIMENTAL, - path_vars={ - 'domain_id': json_home.Parameters.DOMAIN_ID, - 'group': config_group_param - }) - - self._add_resource( - mapper, config_controller, - path='/domains/{domain_id}/config/{group}/{option}', - get_head_action='get_domain_config', - patch_action='update_domain_config', - 
delete_action='delete_domain_config', - rel=json_home.build_v3_resource_relation('domain_config_option'), - status=json_home.Status.EXPERIMENTAL, - path_vars={ - 'domain_id': json_home.Parameters.DOMAIN_ID, - 'group': config_group_param, - 'option': json_home.build_v3_parameter_relation( - 'config_option') - }) - - self._add_resource( - mapper, config_controller, - path='/domains/config/default', - get_action='get_domain_config_default', - rel=json_home.build_v3_resource_relation('domain_config_default'), - status=json_home.Status.EXPERIMENTAL) - - self._add_resource( - mapper, config_controller, - path='/domains/config/{group}/default', - get_action='get_domain_config_default', - rel=json_home.build_v3_resource_relation( - 'domain_config_default_group'), - status=json_home.Status.EXPERIMENTAL, - path_vars={ - 'group': config_group_param - }) - - self._add_resource( - mapper, config_controller, - path='/domains/config/{group}/{option}/default', - get_action='get_domain_config_default', - rel=json_home.build_v3_resource_relation( - 'domain_config_default_option'), - status=json_home.Status.EXPERIMENTAL, - path_vars={ - 'group': config_group_param, - 'option': json_home.build_v3_parameter_relation( - 'config_option') - }) - - routers.append( - router.Router(controllers.ProjectV3(), - 'projects', 'project', - resource_descriptions=self.v3_resources)) diff --git a/keystone-moon/keystone/resource/schema.py b/keystone-moon/keystone/resource/schema.py deleted file mode 100644 index 7e2cd667..00000000 --- a/keystone-moon/keystone/resource/schema.py +++ /dev/null @@ -1,74 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.common import validation -from keystone.common.validation import parameter_types - - -_project_properties = { - 'description': validation.nullable(parameter_types.description), - # NOTE(htruta): domain_id is nullable for projects acting as a domain. - 'domain_id': validation.nullable(parameter_types.id_string), - 'enabled': parameter_types.boolean, - 'is_domain': parameter_types.boolean, - 'parent_id': validation.nullable(parameter_types.id_string), - 'name': { - 'type': 'string', - 'minLength': 1, - 'maxLength': 64 - } -} - -project_create = { - 'type': 'object', - 'properties': _project_properties, - # NOTE(lbragstad): A project name is the only parameter required for - # project creation according to the Identity V3 API. We should think - # about using the maxProperties validator here, and in update. - 'required': ['name'], - 'additionalProperties': True -} - -project_update = { - 'type': 'object', - 'properties': _project_properties, - # NOTE(lbragstad): Make sure at least one property is being updated - 'minProperties': 1, - 'additionalProperties': True -} - -_domain_properties = { - 'description': validation.nullable(parameter_types.description), - 'enabled': parameter_types.boolean, - 'name': { - 'type': 'string', - 'minLength': 1, - 'maxLength': 64 - } -} - -domain_create = { - 'type': 'object', - 'properties': _domain_properties, - # TODO(lbragstad): According to the V3 API spec, name isn't required but - # the current implementation in assignment.controller:DomainV3 requires a - # name for the domain. 
- 'required': ['name'], - 'additionalProperties': True -} - -domain_update = { - 'type': 'object', - 'properties': _domain_properties, - 'minProperties': 1, - 'additionalProperties': True -} diff --git a/keystone-moon/keystone/revoke/__init__.py b/keystone-moon/keystone/revoke/__init__.py deleted file mode 100644 index 6d4ee0bc..00000000 --- a/keystone-moon/keystone/revoke/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.revoke.core import * # noqa diff --git a/keystone-moon/keystone/revoke/backends/__init__.py b/keystone-moon/keystone/revoke/backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/revoke/backends/sql.py b/keystone-moon/keystone/revoke/backends/sql.py deleted file mode 100644 index 9f8a82db..00000000 --- a/keystone-moon/keystone/revoke/backends/sql.py +++ /dev/null @@ -1,100 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone.common import sql -from keystone.models import revoke_model -from keystone import revoke - - -class RevocationEvent(sql.ModelBase, sql.ModelDictMixin): - __tablename__ = 'revocation_event' - attributes = revoke_model.REVOKE_KEYS - - # The id field is not going to be exposed to the outside world. - # It is, however, necessary for SQLAlchemy. - id = sql.Column(sql.Integer, primary_key=True, nullable=False) - domain_id = sql.Column(sql.String(64)) - project_id = sql.Column(sql.String(64)) - user_id = sql.Column(sql.String(64)) - role_id = sql.Column(sql.String(64)) - trust_id = sql.Column(sql.String(64)) - consumer_id = sql.Column(sql.String(64)) - access_token_id = sql.Column(sql.String(64)) - issued_before = sql.Column(sql.DateTime(), nullable=False) - expires_at = sql.Column(sql.DateTime()) - revoked_at = sql.Column(sql.DateTime(), nullable=False, index=True) - audit_id = sql.Column(sql.String(32)) - audit_chain_id = sql.Column(sql.String(32)) - - -class Revoke(revoke.RevokeDriverV8): - def _flush_batch_size(self, dialect): - batch_size = 0 - if dialect == 'ibm_db_sa': - # This functionality is limited to DB2, because - # it is necessary to prevent the transaction log - # from filling up, whereas at least some of the - # other supported databases do not support update - # queries with LIMIT subqueries nor do they appear - # to require the use of such queries when deleting - # large numbers of records at once. - batch_size = 100 - # Limit of 100 is known to not fill a transaction log - # of default maximum size while not significantly - # impacting the performance of large token purges on - # systems where the maximum transaction log size has - # been increased beyond the default. 
- return batch_size - - def _prune_expired_events(self): - oldest = revoke.revoked_before_cutoff_time() - - with sql.session_for_write() as session: - dialect = session.bind.dialect.name - batch_size = self._flush_batch_size(dialect) - if batch_size > 0: - query = session.query(RevocationEvent.id) - query = query.filter(RevocationEvent.revoked_at < oldest) - query = query.limit(batch_size).subquery() - delete_query = (session.query(RevocationEvent). - filter(RevocationEvent.id.in_(query))) - while True: - rowcount = delete_query.delete(synchronize_session=False) - if rowcount == 0: - break - else: - query = session.query(RevocationEvent) - query = query.filter(RevocationEvent.revoked_at < oldest) - query.delete(synchronize_session=False) - - session.flush() - - def list_events(self, last_fetch=None): - with sql.session_for_read() as session: - query = session.query(RevocationEvent).order_by( - RevocationEvent.revoked_at) - - if last_fetch: - query = query.filter(RevocationEvent.revoked_at > last_fetch) - - events = [revoke_model.RevokeEvent(**e.to_dict()) for e in query] - - return events - - def revoke(self, event): - kwargs = dict() - for attr in revoke_model.REVOKE_KEYS: - kwargs[attr] = getattr(event, attr) - record = RevocationEvent(**kwargs) - with sql.session_for_write() as session: - session.add(record) - self._prune_expired_events() diff --git a/keystone-moon/keystone/revoke/controllers.py b/keystone-moon/keystone/revoke/controllers.py deleted file mode 100644 index 40151bae..00000000 --- a/keystone-moon/keystone/revoke/controllers.py +++ /dev/null @@ -1,44 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_utils import timeutils - -from keystone.common import controller -from keystone.common import dependency -from keystone import exception -from keystone.i18n import _ - - -@dependency.requires('revoke_api') -class RevokeController(controller.V3Controller): - @controller.protected() - def list_revoke_events(self, context): - since = context['query_string'].get('since') - last_fetch = None - if since: - try: - last_fetch = timeutils.normalize_time( - timeutils.parse_isotime(since)) - except ValueError: - raise exception.ValidationError( - message=_('invalid date format %s') % since) - events = self.revoke_api.list_events(last_fetch=last_fetch) - # Build the links by hand as the standard controller calls require ids - response = {'events': [event.to_dict() for event in events], - 'links': { - 'next': None, - 'self': RevokeController.base_url( - context, - path=context['path']), - 'previous': None} - } - return response diff --git a/keystone-moon/keystone/revoke/core.py b/keystone-moon/keystone/revoke/core.py deleted file mode 100644 index 64d2e998..00000000 --- a/keystone-moon/keystone/revoke/core.py +++ /dev/null @@ -1,261 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Main entry point into the Revoke service.""" - -import abc -import datetime - -from oslo_config import cfg -from oslo_log import versionutils -from oslo_utils import timeutils -import six - -from keystone.common import cache -from keystone.common import dependency -from keystone.common import extension -from keystone.common import manager -from keystone import exception -from keystone.i18n import _ -from keystone.models import revoke_model -from keystone import notifications - - -CONF = cfg.CONF - - -EXTENSION_DATA = { - 'name': 'OpenStack Revoke API', - 'namespace': 'http://docs.openstack.org/identity/api/ext/' - 'OS-REVOKE/v1.0', - 'alias': 'OS-REVOKE', - 'updated': '2014-02-24T20:51:0-00:00', - 'description': 'OpenStack revoked token reporting mechanism.', - 'links': [ - { - 'rel': 'describedby', - 'type': 'text/html', - 'href': 'http://specs.openstack.org/openstack/keystone-specs/api/' - 'v3/identity-api-v3-os-revoke-ext.html', - } - ]} -extension.register_admin_extension(EXTENSION_DATA['alias'], EXTENSION_DATA) -extension.register_public_extension(EXTENSION_DATA['alias'], EXTENSION_DATA) - -MEMOIZE = cache.get_memoization_decorator(group='revoke') - - -def revoked_before_cutoff_time(): - expire_delta = datetime.timedelta( - seconds=CONF.token.expiration + CONF.revoke.expiration_buffer) - oldest = timeutils.utcnow() - expire_delta - return oldest - - -@dependency.provider('revoke_api') -class Manager(manager.Manager): - """Default pivot point for the Revoke backend. - - Performs common logic for recording revocations. 
- - See :mod:`keystone.common.manager.Manager` for more details on - how this dynamically calls the backend. - - """ - - driver_namespace = 'keystone.revoke' - - def __init__(self): - super(Manager, self).__init__(CONF.revoke.driver) - self._register_listeners() - self.model = revoke_model - - def _user_callback(self, service, resource_type, operation, - payload): - self.revoke_by_user(payload['resource_info']) - - def _role_callback(self, service, resource_type, operation, - payload): - self.revoke( - revoke_model.RevokeEvent(role_id=payload['resource_info'])) - - def _project_callback(self, service, resource_type, operation, - payload): - self.revoke( - revoke_model.RevokeEvent(project_id=payload['resource_info'])) - - def _domain_callback(self, service, resource_type, operation, - payload): - self.revoke( - revoke_model.RevokeEvent(domain_id=payload['resource_info'])) - - def _trust_callback(self, service, resource_type, operation, - payload): - self.revoke( - revoke_model.RevokeEvent(trust_id=payload['resource_info'])) - - def _consumer_callback(self, service, resource_type, operation, - payload): - self.revoke( - revoke_model.RevokeEvent(consumer_id=payload['resource_info'])) - - def _access_token_callback(self, service, resource_type, operation, - payload): - self.revoke( - revoke_model.RevokeEvent(access_token_id=payload['resource_info'])) - - def _role_assignment_callback(self, service, resource_type, operation, - payload): - info = payload['resource_info'] - self.revoke_by_grant(role_id=info['role_id'], user_id=info['user_id'], - domain_id=info.get('domain_id'), - project_id=info.get('project_id')) - - def _register_listeners(self): - callbacks = { - notifications.ACTIONS.deleted: [ - ['OS-TRUST:trust', self._trust_callback], - ['OS-OAUTH1:consumer', self._consumer_callback], - ['OS-OAUTH1:access_token', self._access_token_callback], - ['role', self._role_callback], - ['user', self._user_callback], - ['project', self._project_callback], - 
['role_assignment', self._role_assignment_callback] - ], - notifications.ACTIONS.disabled: [ - ['user', self._user_callback], - ['project', self._project_callback], - ['domain', self._domain_callback], - ], - notifications.ACTIONS.internal: [ - [notifications.INVALIDATE_USER_TOKEN_PERSISTENCE, - self._user_callback], - ] - } - - for event, cb_info in callbacks.items(): - for resource_type, callback_fns in cb_info: - notifications.register_event_callback(event, resource_type, - callback_fns) - - def revoke_by_user(self, user_id): - return self.revoke(revoke_model.RevokeEvent(user_id=user_id)) - - def _assert_not_domain_and_project_scoped(self, domain_id=None, - project_id=None): - if domain_id is not None and project_id is not None: - msg = _('The revoke call must not have both domain_id and ' - 'project_id. This is a bug in the Keystone server. The ' - 'current request is aborted.') - raise exception.UnexpectedError(exception=msg) - - @versionutils.deprecated(as_of=versionutils.deprecated.JUNO, - remove_in=0) - def revoke_by_expiration(self, user_id, expires_at, - domain_id=None, project_id=None): - - self._assert_not_domain_and_project_scoped(domain_id=domain_id, - project_id=project_id) - - self.revoke( - revoke_model.RevokeEvent(user_id=user_id, - expires_at=expires_at, - domain_id=domain_id, - project_id=project_id)) - - def revoke_by_audit_id(self, audit_id): - self.revoke(revoke_model.RevokeEvent(audit_id=audit_id)) - - def revoke_by_audit_chain_id(self, audit_chain_id, project_id=None, - domain_id=None): - - self._assert_not_domain_and_project_scoped(domain_id=domain_id, - project_id=project_id) - - self.revoke(revoke_model.RevokeEvent(audit_chain_id=audit_chain_id, - domain_id=domain_id, - project_id=project_id)) - - def revoke_by_grant(self, role_id, user_id=None, - domain_id=None, project_id=None): - self.revoke( - revoke_model.RevokeEvent(user_id=user_id, - role_id=role_id, - domain_id=domain_id, - project_id=project_id)) - - def 
revoke_by_user_and_project(self, user_id, project_id): - self.revoke( - revoke_model.RevokeEvent(project_id=project_id, user_id=user_id)) - - def revoke_by_project_role_assignment(self, project_id, role_id): - self.revoke(revoke_model.RevokeEvent(project_id=project_id, - role_id=role_id)) - - def revoke_by_domain_role_assignment(self, domain_id, role_id): - self.revoke(revoke_model.RevokeEvent(domain_id=domain_id, - role_id=role_id)) - - @MEMOIZE - def _get_revoke_tree(self): - events = self.driver.list_events() - revoke_tree = revoke_model.RevokeTree(revoke_events=events) - - return revoke_tree - - def check_token(self, token_values): - """Checks the values from a token against the revocation list - - :param token_values: dictionary of values from a token, normalized for - differences between v2 and v3. The checked values - are a subset of the attributes of model.TokenEvent - - :raises keystone.exception.TokenNotFound: If the token is invalid. - - """ - if self._get_revoke_tree().is_revoked(token_values): - raise exception.TokenNotFound(_('Failed to validate token')) - - def revoke(self, event): - self.driver.revoke(event) - self._get_revoke_tree.invalidate(self) - - -@six.add_metaclass(abc.ABCMeta) -class RevokeDriverV8(object): - """Interface for recording and reporting revocation events.""" - - @abc.abstractmethod - def list_events(self, last_fetch=None): - """return the revocation events, as a list of objects - - :param last_fetch: Time of last fetch. Return all events newer. - :returns: A list of keystone.revoke.model.RevokeEvent - newer than `last_fetch.` - If no last_fetch is specified, returns all events - for tokens issued after the expiration cutoff. 
- - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def revoke(self, event): - """register a revocation event - - :param event: An instance of - keystone.revoke.model.RevocationEvent - - """ - raise exception.NotImplemented() # pragma: no cover - - -Driver = manager.create_legacy_driver(RevokeDriverV8) diff --git a/keystone-moon/keystone/revoke/model.py b/keystone-moon/keystone/revoke/model.py deleted file mode 100644 index 28a8d07f..00000000 --- a/keystone-moon/keystone/revoke/model.py +++ /dev/null @@ -1,13 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.models.revoke_model import * # noqa diff --git a/keystone-moon/keystone/revoke/routers.py b/keystone-moon/keystone/revoke/routers.py deleted file mode 100644 index aab78493..00000000 --- a/keystone-moon/keystone/revoke/routers.py +++ /dev/null @@ -1,29 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone.common import json_home -from keystone.common import wsgi -from keystone.revoke import controllers - - -class Routers(wsgi.RoutersBase): - - PATH_PREFIX = '/OS-REVOKE' - - def append_v3_routers(self, mapper, routers): - revoke_controller = controllers.RevokeController() - self._add_resource( - mapper, revoke_controller, - path=self.PATH_PREFIX + '/events', - get_action='list_revoke_events', - rel=json_home.build_v3_extension_resource_relation( - 'OS-REVOKE', '1.0', 'events')) diff --git a/keystone-moon/keystone/routers.py b/keystone-moon/keystone/routers.py deleted file mode 100644 index a0f9ed22..00000000 --- a/keystone-moon/keystone/routers.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -""" -The only types of routers in this file should be ``ComposingRouters``. - -The routers for the backends should be in the backend-specific router modules. 
-For example, the ``ComposableRouter`` for ``identity`` belongs in:: - - keystone.identity.routers - -""" - - -from keystone.common import wsgi -from keystone import controllers - - -class Extension(wsgi.ComposableRouter): - def __init__(self, is_admin=True): - if is_admin: - self.controller = controllers.AdminExtensions() - else: - self.controller = controllers.PublicExtensions() - - def add_routes(self, mapper): - extensions_controller = self.controller - mapper.connect('/extensions', - controller=extensions_controller, - action='get_extensions_info', - conditions=dict(method=['GET'])) - mapper.connect('/extensions/{extension_alias}', - controller=extensions_controller, - action='get_extension_info', - conditions=dict(method=['GET'])) - - -class VersionV2(wsgi.ComposableRouter): - def __init__(self, description): - self.description = description - - def add_routes(self, mapper): - version_controller = controllers.Version(self.description) - mapper.connect('/', - controller=version_controller, - action='get_version_v2') - - -class VersionV3(wsgi.ComposableRouter): - def __init__(self, description, routers): - self.description = description - self._routers = routers - - def add_routes(self, mapper): - version_controller = controllers.Version(self.description, - routers=self._routers) - mapper.connect('/', - controller=version_controller, - action='get_version_v3') - - -class Versions(wsgi.ComposableRouter): - def __init__(self, description): - self.description = description - - def add_routes(self, mapper): - version_controller = controllers.Version(self.description) - mapper.connect('/', - controller=version_controller, - action='get_versions') diff --git a/keystone-moon/keystone/server/__init__.py b/keystone-moon/keystone/server/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/server/backends.py b/keystone-moon/keystone/server/backends.py deleted file mode 100644 index a518e777..00000000 --- 
a/keystone-moon/keystone/server/backends.py +++ /dev/null @@ -1,74 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone import assignment -from keystone import auth -from keystone import catalog -from keystone.common import cache -from keystone import credential -from keystone import endpoint_policy -from keystone import federation -from keystone import identity -from keystone import oauth1 -from keystone import policy -from keystone import resource -from keystone import revoke -from keystone import token -from keystone import trust - - -def load_backends(): - - # Configure and build the cache - cache.configure_cache() - cache.configure_cache(region=catalog.COMPUTED_CATALOG_REGION) - cache.apply_invalidation_patch( - region=catalog.COMPUTED_CATALOG_REGION, - region_name=catalog.COMPUTED_CATALOG_REGION.name) - cache.configure_cache(region=assignment.COMPUTED_ASSIGNMENTS_REGION) - cache.apply_invalidation_patch( - region=assignment.COMPUTED_ASSIGNMENTS_REGION, - region_name=assignment.COMPUTED_ASSIGNMENTS_REGION.name) - - # Ensure that the identity driver is created before the assignment manager - # and that the assignment driver is created before the resource manager. - # The default resource driver depends on assignment, which in turn - # depends on identity - hence we need to ensure the chain is available. 
- # TODO(morganfainberg): In "O" release move _IDENTITY_API to be directly - # instantiated in the DRIVERS dict once assignment driver being selected - # based upon [identity]/driver is removed. - _IDENTITY_API = identity.Manager() - _ASSIGNMENT_API = assignment.Manager() - - DRIVERS = dict( - assignment_api=_ASSIGNMENT_API, - catalog_api=catalog.Manager(), - credential_api=credential.Manager(), - domain_config_api=resource.DomainConfigManager(), - endpoint_policy_api=endpoint_policy.Manager(), - federation_api=federation.Manager(), - id_generator_api=identity.generator.Manager(), - id_mapping_api=identity.MappingManager(), - identity_api=_IDENTITY_API, - shadow_users_api=identity.ShadowUsersManager(), - oauth_api=oauth1.Manager(), - policy_api=policy.Manager(), - resource_api=resource.Manager(), - revoke_api=revoke.Manager(), - role_api=assignment.RoleManager(), - token_api=token.persistence.Manager(), - trust_api=trust.Manager(), - token_provider_api=token.provider.Manager()) - - auth.controllers.load_auth_methods() - - return DRIVERS diff --git a/keystone-moon/keystone/server/common.py b/keystone-moon/keystone/server/common.py deleted file mode 100644 index 4b1ee469..00000000 --- a/keystone-moon/keystone/server/common.py +++ /dev/null @@ -1,53 +0,0 @@ - -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- - -from oslo_config import cfg -from oslo_log import log - -from keystone.common import config -from keystone.common import dependency -from keystone.common import sql -from keystone.i18n import _LW -from keystone.server import backends - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -def configure(version=None, config_files=None, - pre_setup_logging_fn=lambda: None): - config.configure() - sql.initialize() - config.set_config_defaults() - - CONF(project='keystone', version=version, - default_config_files=config_files) - - pre_setup_logging_fn() - config.setup_logging() - - if CONF.insecure_debug: - LOG.warning(_LW( - 'insecure_debug is enabled so responses may include sensitive ' - 'information.')) - - -def setup_backends(load_extra_backends_fn=lambda: {}, - startup_application_fn=lambda: None): - drivers = backends.load_backends() - drivers.update(load_extra_backends_fn()) - res = startup_application_fn() - drivers.update(dependency.resolve_future_dependencies()) - return drivers, res diff --git a/keystone-moon/keystone/server/eventlet.py b/keystone-moon/keystone/server/eventlet.py deleted file mode 100644 index e688baed..00000000 --- a/keystone-moon/keystone/server/eventlet.py +++ /dev/null @@ -1,156 +0,0 @@ - -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import logging -import os -import socket - -from oslo_concurrency import processutils -from oslo_config import cfg -import oslo_i18n -from oslo_service import service -from oslo_service import systemd -import pbr.version - - -# NOTE(dstanek): i18n.enable_lazy() must be called before -# keystone.i18n._() is called to ensure it has the desired lazy lookup -# behavior. This includes cases, like keystone.exceptions, where -# keystone.i18n._() is called at import time. -oslo_i18n.enable_lazy() - - -from keystone.common import config -from keystone.common import environment -from keystone.common import utils -from keystone.i18n import _ -from keystone.server import common -from keystone.version import service as keystone_service - - -CONF = cfg.CONF - - -class ServerWrapper(object): - """Wraps a Server with some launching info & capabilities.""" - - def __init__(self, server, workers): - self.server = server - self.workers = workers - - def launch_with(self, launcher): - self.server.listen() - if self.workers > 1: - # Use multi-process launcher - launcher.launch_service(self.server, self.workers) - else: - # Use single process launcher - launcher.launch_service(self.server) - - -def create_server(conf, name, host, port, workers): - app = keystone_service.loadapp('config:%s' % conf, name) - server = environment.Server(app, host=host, port=port, - keepalive=CONF.eventlet_server.tcp_keepalive, - keepidle=CONF.eventlet_server.tcp_keepidle) - if CONF.eventlet_server_ssl.enable: - server.set_ssl(CONF.eventlet_server_ssl.certfile, - CONF.eventlet_server_ssl.keyfile, - CONF.eventlet_server_ssl.ca_certs, - CONF.eventlet_server_ssl.cert_required) - return name, ServerWrapper(server, workers) - - -def serve(*servers): - logging.warning(_('Running keystone via eventlet is deprecated as of Kilo ' - 'in favor of running in a WSGI server (e.g. mod_wsgi). 
' - 'Support for keystone under eventlet will be removed in ' - 'the "M"-Release.')) - if max([server[1].workers for server in servers]) > 1: - launcher = service.ProcessLauncher(CONF) - else: - launcher = service.ServiceLauncher(CONF) - - for name, server in servers: - try: - server.launch_with(launcher) - except socket.error: - logging.exception(_('Failed to start the %(name)s server') % { - 'name': name}) - raise - - # notify calling process we are ready to serve - systemd.notify_once() - - for name, server in servers: - launcher.wait() - - -def _get_workers(worker_type_config_opt): - # Get the value from config, if the config value is None (not set), return - # the number of cpus with a minimum of 2. - worker_count = CONF.eventlet_server.get(worker_type_config_opt) - if not worker_count: - worker_count = max(2, processutils.get_worker_count()) - return worker_count - - -def configure_threading(): - monkeypatch_thread = not CONF.standard_threads - pydev_debug_url = utils.setup_remote_pydev_debug() - if pydev_debug_url: - # in order to work around errors caused by monkey patching we have to - # set the thread to False. 
An explanation is here: - # http://lists.openstack.org/pipermail/openstack-dev/2012-August/ - # 000794.html - monkeypatch_thread = False - environment.use_eventlet(monkeypatch_thread) - - -def run(possible_topdir): - dev_conf = os.path.join(possible_topdir, - 'etc', - 'keystone.conf') - config_files = None - if os.path.exists(dev_conf): - config_files = [dev_conf] - - common.configure( - version=pbr.version.VersionInfo('keystone').version_string(), - config_files=config_files, - pre_setup_logging_fn=configure_threading) - - paste_config = config.find_paste_config() - - def create_servers(): - admin_worker_count = _get_workers('admin_workers') - public_worker_count = _get_workers('public_workers') - - servers = [] - servers.append(create_server(paste_config, - 'admin', - CONF.eventlet_server.admin_bind_host, - CONF.eventlet_server.admin_port, - admin_worker_count)) - servers.append(create_server(paste_config, - 'main', - CONF.eventlet_server.public_bind_host, - CONF.eventlet_server.public_port, - public_worker_count)) - return servers - - _unused, servers = common.setup_backends( - startup_application_fn=create_servers) - serve(*servers) diff --git a/keystone-moon/keystone/server/wsgi.py b/keystone-moon/keystone/server/wsgi.py deleted file mode 100644 index a62a8460..00000000 --- a/keystone-moon/keystone/server/wsgi.py +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import logging - -from oslo_config import cfg -import oslo_i18n - - -# NOTE(dstanek): i18n.enable_lazy() must be called before -# keystone.i18n._() is called to ensure it has the desired lazy lookup -# behavior. This includes cases, like keystone.exceptions, where -# keystone.i18n._() is called at import time. -oslo_i18n.enable_lazy() - - -from keystone.common import config -from keystone.common import environment -from keystone.server import common -from keystone.version import service as keystone_service - - -CONF = cfg.CONF - - -def initialize_application(name, post_log_configured_function=lambda: None): - common.configure() - - # Log the options used when starting if we're in debug mode... - if CONF.debug: - CONF.log_opt_values(logging.getLogger(CONF.prog), logging.DEBUG) - - environment.use_stdlib() - - post_log_configured_function() - - def loadapp(): - return keystone_service.loadapp( - 'config:%s' % config.find_paste_config(), name) - - _unused, application = common.setup_backends( - startup_application_fn=loadapp) - return application - - -def initialize_admin_application(): - return initialize_application('admin') - - -def initialize_public_application(): - return initialize_application('main') diff --git a/keystone-moon/keystone/service.py b/keystone-moon/keystone/service.py deleted file mode 100644 index 20869731..00000000 --- a/keystone-moon/keystone/service.py +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_log import versionutils -import six - -from keystone.version import service - - -def deprecated_to_version(f): - """Specialized deprecation wrapper for service module. - - This wraps the standard deprecation wrapper and fills in the method - names automatically. - - """ - @six.wraps(f) - def wrapper(*args, **kwargs): - x = versionutils.deprecated( - what='keystone.service.' + f.__name__ + '()', - as_of=versionutils.deprecated.MITAKA, - remove_in=+2, - in_favor_of='keystone.version.service.' + f.__name__ + '()') - return x(f) - return wrapper() - - -@deprecated_to_version -def public_app_factory(global_conf, **local_conf): - return service.public_app_factory(global_conf, **local_conf) - - -@deprecated_to_version -def admin_app_factory(global_conf, **local_conf): - return service.admin_app_factory(global_conf, **local_conf) - - -@deprecated_to_version -def public_version_app_factory(global_conf, **local_conf): - return service.public_version_app_factory(global_conf, **local_conf) - - -@deprecated_to_version -def admin_version_app_factory(global_conf, **local_conf): - return service.admin_app_factory(global_conf, **local_conf) - - -@deprecated_to_version -def v3_app_factory(global_conf, **local_conf): - return service.v3_app_factory(global_conf, **local_conf) diff --git a/keystone-moon/keystone/tests/__init__.py b/keystone-moon/keystone/tests/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/common/__init__.py b/keystone-moon/keystone/tests/common/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/common/auth.py b/keystone-moon/keystone/tests/common/auth.py deleted file mode 100644 index 547418cf..00000000 --- a/keystone-moon/keystone/tests/common/auth.py +++ /dev/null @@ -1,109 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the 
"License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -class AuthTestMixin(object): - """To hold auth building helper functions.""" - - def _build_auth_scope(self, project_id=None, project_name=None, - project_domain_id=None, project_domain_name=None, - domain_id=None, domain_name=None, trust_id=None, - unscoped=None): - scope_data = {} - if unscoped: - scope_data['unscoped'] = {} - if project_id or project_name: - scope_data['project'] = {} - if project_id: - scope_data['project']['id'] = project_id - else: - scope_data['project']['name'] = project_name - if project_domain_id or project_domain_name: - project_domain_json = {} - if project_domain_id: - project_domain_json['id'] = project_domain_id - else: - project_domain_json['name'] = project_domain_name - scope_data['project']['domain'] = project_domain_json - if domain_id or domain_name: - scope_data['domain'] = {} - if domain_id: - scope_data['domain']['id'] = domain_id - else: - scope_data['domain']['name'] = domain_name - if trust_id: - scope_data['OS-TRUST:trust'] = {} - scope_data['OS-TRUST:trust']['id'] = trust_id - return scope_data - - def _build_auth(self, user_id=None, username=None, user_domain_id=None, - user_domain_name=None, **kwargs): - - # NOTE(dstanek): just to ensure sanity in the tests - self.assertEqual(1, len(kwargs), - message='_build_auth requires 1 (and only 1) ' - 'secret type and value') - - secret_type, secret_value = list(kwargs.items())[0] - - # NOTE(dstanek): just to ensure sanity in the tests - self.assertIn(secret_type, ('passcode', 
'password'), - message="_build_auth only supports 'passcode' " - "and 'password' secret types") - - data = {'user': {}} - if user_id: - data['user']['id'] = user_id - else: - data['user']['name'] = username - if user_domain_id or user_domain_name: - data['user']['domain'] = {} - if user_domain_id: - data['user']['domain']['id'] = user_domain_id - else: - data['user']['domain']['name'] = user_domain_name - data['user'][secret_type] = secret_value - return data - - def _build_token_auth(self, token): - return {'id': token} - - def build_authentication_request(self, token=None, user_id=None, - username=None, user_domain_id=None, - user_domain_name=None, password=None, - kerberos=False, passcode=None, **kwargs): - """Build auth dictionary. - - It will create an auth dictionary based on all the arguments - that it receives. - """ - auth_data = {} - auth_data['identity'] = {'methods': []} - if kerberos: - auth_data['identity']['methods'].append('kerberos') - auth_data['identity']['kerberos'] = {} - if token: - auth_data['identity']['methods'].append('token') - auth_data['identity']['token'] = self._build_token_auth(token) - if password and (user_id or username): - auth_data['identity']['methods'].append('password') - auth_data['identity']['password'] = self._build_auth( - user_id, username, user_domain_id, user_domain_name, - password=password) - if passcode and (user_id or username): - auth_data['identity']['methods'].append('totp') - auth_data['identity']['totp'] = self._build_auth( - user_id, username, user_domain_id, user_domain_name, - passcode=passcode) - if kwargs: - auth_data['scope'] = self._build_auth_scope(**kwargs) - return {'auth': auth_data} diff --git a/keystone-moon/keystone/tests/functional/__init__.py b/keystone-moon/keystone/tests/functional/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/functional/core.py b/keystone-moon/keystone/tests/functional/core.py deleted file mode 100644 index 
2759412b..00000000 --- a/keystone-moon/keystone/tests/functional/core.py +++ /dev/null @@ -1,85 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import os - -import requests -import testtools - -from keystone.tests.common import auth as common_auth - - -class BaseTestCase(testtools.TestCase, common_auth.AuthTestMixin): - - request_headers = {'content-type': 'application/json'} - - def setUp(self): - self.ADMIN_URL = os.environ.get('KSTEST_ADMIN_URL', - 'http://localhost:35357') - self.PUBLIC_URL = os.environ.get('KSTEST_PUBLIC_URL', - 'http://localhost:5000') - self.admin = { - 'name': os.environ.get('KSTEST_ADMIN_USERNAME', 'admin'), - 'password': os.environ.get('KSTEST_ADMIN_PASSWORD', ''), - 'domain_id': os.environ.get('KSTEST_ADMIN_DOMAIN_ID', 'default') - } - - self.user = { - 'name': os.environ.get('KSTEST_USER_USERNAME', 'demo'), - 'password': os.environ.get('KSTEST_USER_PASSWORD', ''), - 'domain_id': os.environ.get('KSTEST_USER_DOMAIN_ID', 'default') - } - - self.project_id = os.environ.get('KSTEST_PROJECT_ID') - - super(BaseTestCase, self).setUp() - - def _http_headers(self, token=None): - headers = {'content-type': 'application/json'} - if token: - headers['X-Auth-Token'] = token - return headers - - def get_scoped_token_response(self, user): - """Convenience method so that we can test authenticated requests - - :param user: A dictionary with user information like 'username', - 'password', 'domain_id' - :returns: urllib3.Response object - - """ - body 
= self.build_authentication_request( - username=user['name'], user_domain_name=user['domain_id'], - password=user['password'], project_id=self.project_id) - return requests.post(self.PUBLIC_URL + '/v3/auth/tokens', - headers=self.request_headers, - json=body) - - def get_scoped_token(self, user): - """Convenience method for getting scoped token - - This method doesn't do any token validaton. - - :param user: A dictionary with user information like 'username', - 'password', 'domain_id' - :returns: An OpenStack token for further use - :rtype: str - - """ - r = self.get_scoped_token_response(user) - return r.headers.get('X-Subject-Token') - - def get_scoped_admin_token(self): - return self.get_scoped_token(self.admin) - - def get_scoped_user_token(self): - return self.get_scoped_token(self.user) diff --git a/keystone-moon/keystone/tests/functional/shared/__init__.py b/keystone-moon/keystone/tests/functional/shared/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/functional/shared/test_running.py b/keystone-moon/keystone/tests/functional/shared/test_running.py deleted file mode 100644 index 1b46b32d..00000000 --- a/keystone-moon/keystone/tests/functional/shared/test_running.py +++ /dev/null @@ -1,58 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import requests -import testtools.matchers - -from keystone.tests.functional import core as functests - - -is_multiple_choices = testtools.matchers.Equals( - requests.status_codes.codes.multiple_choices) -is_ok = testtools.matchers.Equals(requests.status_codes.codes.ok) - -versions = ('v2.0', 'v3') - - -class TestServerRunning(functests.BaseTestCase): - - def test_admin_responds_with_multiple_choices(self): - resp = requests.get(self.ADMIN_URL) - self.assertThat(resp.status_code, is_multiple_choices) - - def test_admin_versions(self): - for version in versions: - resp = requests.get(self.ADMIN_URL + '/' + version) - self.assertThat( - resp.status_code, - testtools.matchers.Annotate( - 'failed for version %s' % version, is_ok)) - - def test_public_responds_with_multiple_choices(self): - resp = requests.get(self.PUBLIC_URL) - self.assertThat(resp.status_code, is_multiple_choices) - - def test_public_versions(self): - for version in versions: - resp = requests.get(self.PUBLIC_URL + '/' + version) - self.assertThat( - resp.status_code, - testtools.matchers.Annotate( - 'failed for version %s' % version, is_ok)) - - def test_get_user_token(self): - token = self.get_scoped_user_token() - self.assertIsNotNone(token) - - def test_get_admin_token(self): - token = self.get_scoped_admin_token() - self.assertIsNotNone(token) diff --git a/keystone-moon/keystone/tests/hacking/__init__.py b/keystone-moon/keystone/tests/hacking/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/hacking/checks.py b/keystone-moon/keystone/tests/hacking/checks.py deleted file mode 100644 index 581dbcf9..00000000 --- a/keystone-moon/keystone/tests/hacking/checks.py +++ /dev/null @@ -1,445 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Keystone's pep8 extensions. - -In order to make the review process faster and easier for core devs we are -adding some Keystone specific pep8 checks. This will catch common errors -so that core devs don't have to. - -There are two types of pep8 extensions. One is a function that takes either -a physical or logical line. The physical or logical line is the first param -in the function definition and can be followed by other parameters supported -by pep8. The second type is a class that parses AST trees. For more info -please see pep8.py. -""" - -import ast -import re - -import six - - -class BaseASTChecker(ast.NodeVisitor): - """Provides a simple framework for writing AST-based checks. - - Subclasses should implement visit_* methods like any other AST visitor - implementation. When they detect an error for a particular node the - method should call ``self.add_error(offending_node)``. Details about - where in the code the error occurred will be pulled from the node - object. - - Subclasses should also provide a class variable named CHECK_DESC to - be used for the human readable error message. - - """ - - def __init__(self, tree, filename): - """This object is created automatically by pep8. 
- - :param tree: an AST tree - :param filename: name of the file being analyzed - (ignored by our checks) - """ - self._tree = tree - self._errors = [] - - def run(self): - """Called automatically by pep8.""" - self.visit(self._tree) - return self._errors - - def add_error(self, node, message=None): - """Add an error caused by a node to the list of errors for pep8.""" - message = message or self.CHECK_DESC - error = (node.lineno, node.col_offset, message, self.__class__) - self._errors.append(error) - - -class CheckForMutableDefaultArgs(BaseASTChecker): - """Checks for the use of mutable objects as function/method defaults. - - We are only checking for list and dict literals at this time. This means - that a developer could specify an instance of their own and cause a bug. - The fix for this is probably more work than it's worth because it will - get caught during code review. - - """ - - CHECK_DESC = 'K001 Using mutable as a function/method default' - MUTABLES = ( - ast.List, ast.ListComp, - ast.Dict, ast.DictComp, - ast.Set, ast.SetComp, - ast.Call) - - def visit_FunctionDef(self, node): - for arg in node.args.defaults: - if isinstance(arg, self.MUTABLES): - self.add_error(arg) - - super(CheckForMutableDefaultArgs, self).generic_visit(node) - - -def block_comments_begin_with_a_space(physical_line, line_number): - """There should be a space after the # of block comments. - - There is already a check in pep8 that enforces this rule for - inline comments. 
- - Okay: # this is a comment - Okay: #!/usr/bin/python - Okay: # this is a comment - K002: #this is a comment - - """ - MESSAGE = "K002 block comments should start with '# '" - - # shebangs are OK - if line_number == 1 and physical_line.startswith('#!'): - return - - text = physical_line.strip() - if text.startswith('#'): # look for block comments - if len(text) > 1 and not text[1].isspace(): - return physical_line.index('#'), MESSAGE - - -class CheckForAssertingNoneEquality(BaseASTChecker): - """Ensures that code does not use a None with assert(Not*)Equal.""" - - CHECK_DESC_IS = ('K003 Use self.assertIsNone(...) when comparing ' - 'against None') - CHECK_DESC_ISNOT = ('K004 Use assertIsNotNone(...) when comparing ' - ' against None') - - def visit_Call(self, node): - # NOTE(dstanek): I wrote this in a verbose way to make it easier to - # read for those that have little experience with Python's AST. - - def _is_None(node): - if six.PY3: - return (isinstance(node, ast.NameConstant) - and node.value is None) - else: - return isinstance(node, ast.Name) and node.id == 'None' - - if isinstance(node.func, ast.Attribute): - if node.func.attr == 'assertEqual': - for arg in node.args: - if _is_None(arg): - self.add_error(node, message=self.CHECK_DESC_IS) - elif node.func.attr == 'assertNotEqual': - for arg in node.args: - if _is_None(arg): - self.add_error(node, message=self.CHECK_DESC_ISNOT) - - super(CheckForAssertingNoneEquality, self).generic_visit(node) - - -class CheckForLoggingIssues(BaseASTChecker): - - DEBUG_CHECK_DESC = 'K005 Using translated string in debug logging' - NONDEBUG_CHECK_DESC = 'K006 Not using translating helper for logging' - EXCESS_HELPER_CHECK_DESC = 'K007 Using hints when _ is necessary' - USING_DEPRECATED_WARN = 'K009 Using the deprecated Logger.warn' - LOG_MODULES = ('logging', 'oslo_log.log') - I18N_MODULES = ( - 'keystone.i18n._', - 'keystone.i18n._LI', - 'keystone.i18n._LW', - 'keystone.i18n._LE', - 'keystone.i18n._LC', - ) - 
TRANS_HELPER_MAP = { - 'debug': None, - 'info': '_LI', - 'warning': '_LW', - 'error': '_LE', - 'exception': '_LE', - 'critical': '_LC', - } - - def __init__(self, tree, filename): - super(CheckForLoggingIssues, self).__init__(tree, filename) - - self.logger_names = [] - self.logger_module_names = [] - self.i18n_names = {} - - # NOTE(dstanek): this kinda accounts for scopes when talking - # about only leaf node in the graph - self.assignments = {} - - def generic_visit(self, node): - """Called if no explicit visitor function exists for a node.""" - for field, value in ast.iter_fields(node): - if isinstance(value, list): - for item in value: - if isinstance(item, ast.AST): - item._parent = node - self.visit(item) - elif isinstance(value, ast.AST): - value._parent = node - self.visit(value) - - def _filter_imports(self, module_name, alias): - """Keeps lists of logging and i18n imports.""" - if module_name in self.LOG_MODULES: - self.logger_module_names.append(alias.asname or alias.name) - elif module_name in self.I18N_MODULES: - self.i18n_names[alias.asname or alias.name] = alias.name - - def visit_Import(self, node): - for alias in node.names: - self._filter_imports(alias.name, alias) - return super(CheckForLoggingIssues, self).generic_visit(node) - - def visit_ImportFrom(self, node): - for alias in node.names: - full_name = '%s.%s' % (node.module, alias.name) - self._filter_imports(full_name, alias) - return super(CheckForLoggingIssues, self).generic_visit(node) - - def _find_name(self, node): - """Return the fully qualified name or a Name or Attribute.""" - if isinstance(node, ast.Name): - return node.id - elif (isinstance(node, ast.Attribute) - and isinstance(node.value, (ast.Name, ast.Attribute))): - method_name = node.attr - obj_name = self._find_name(node.value) - if obj_name is None: - return None - return obj_name + '.' 
+ method_name - elif isinstance(node, six.string_types): - return node - else: # could be Subscript, Call or many more - return None - - def visit_Assign(self, node): - """Look for 'LOG = logging.getLogger' - - This handles the simple case: - name = [logging_module].getLogger(...) - - - or - - - name = [i18n_name](...) - - And some much more comple ones: - name = [i18n_name](...) % X - - - or - - - self.name = [i18n_name](...) % X - - """ - attr_node_types = (ast.Name, ast.Attribute) - - if (len(node.targets) != 1 - or not isinstance(node.targets[0], attr_node_types)): - # say no to: "x, y = ..." - return super(CheckForLoggingIssues, self).generic_visit(node) - - target_name = self._find_name(node.targets[0]) - - if (isinstance(node.value, ast.BinOp) and - isinstance(node.value.op, ast.Mod)): - if (isinstance(node.value.left, ast.Call) and - isinstance(node.value.left.func, ast.Name) and - node.value.left.func.id in self.i18n_names): - # NOTE(dstanek): this is done to match cases like: - # `msg = _('something %s') % x` - node = ast.Assign(value=node.value.left) - - if not isinstance(node.value, ast.Call): - # node.value must be a call to getLogger - self.assignments.pop(target_name, None) - return super(CheckForLoggingIssues, self).generic_visit(node) - - # is this a call to an i18n function? 
- if (isinstance(node.value.func, ast.Name) - and node.value.func.id in self.i18n_names): - self.assignments[target_name] = node.value.func.id - return super(CheckForLoggingIssues, self).generic_visit(node) - - if (not isinstance(node.value.func, ast.Attribute) - or not isinstance(node.value.func.value, attr_node_types)): - # function must be an attribute on an object like - # logging.getLogger - return super(CheckForLoggingIssues, self).generic_visit(node) - - object_name = self._find_name(node.value.func.value) - func_name = node.value.func.attr - - if (object_name in self.logger_module_names - and func_name == 'getLogger'): - self.logger_names.append(target_name) - - return super(CheckForLoggingIssues, self).generic_visit(node) - - def visit_Call(self, node): - """Look for the 'LOG.*' calls.""" - # obj.method - if isinstance(node.func, ast.Attribute): - obj_name = self._find_name(node.func.value) - if isinstance(node.func.value, ast.Name): - method_name = node.func.attr - elif isinstance(node.func.value, ast.Attribute): - obj_name = self._find_name(node.func.value) - method_name = node.func.attr - else: # could be Subscript, Call or many more - return super(CheckForLoggingIssues, self).generic_visit(node) - - # if dealing with a logger the method can't be "warn" - if obj_name in self.logger_names and method_name == 'warn': - msg = node.args[0] # first arg to a logging method is the msg - self.add_error(msg, message=self.USING_DEPRECATED_WARN) - - # must be a logger instance and one of the support logging methods - if (obj_name not in self.logger_names - or method_name not in self.TRANS_HELPER_MAP): - return super(CheckForLoggingIssues, self).generic_visit(node) - - # the call must have arguments - if not node.args: - return super(CheckForLoggingIssues, self).generic_visit(node) - - if method_name == 'debug': - self._process_debug(node) - elif method_name in self.TRANS_HELPER_MAP: - self._process_non_debug(node, method_name) - - return 
super(CheckForLoggingIssues, self).generic_visit(node) - - def _process_debug(self, node): - msg = node.args[0] # first arg to a logging method is the msg - - # if first arg is a call to a i18n name - if (isinstance(msg, ast.Call) - and isinstance(msg.func, ast.Name) - and msg.func.id in self.i18n_names): - self.add_error(msg, message=self.DEBUG_CHECK_DESC) - - # if the first arg is a reference to a i18n call - elif (isinstance(msg, ast.Name) - and msg.id in self.assignments - and not self._is_raised_later(node, msg.id)): - self.add_error(msg, message=self.DEBUG_CHECK_DESC) - - def _process_non_debug(self, node, method_name): - msg = node.args[0] # first arg to a logging method is the msg - - # if first arg is a call to a i18n name - if isinstance(msg, ast.Call): - try: - func_name = msg.func.id - except AttributeError: - # in the case of logging only an exception, the msg function - # will not have an id associated with it, for instance: - # LOG.warning(six.text_type(e)) - return - - # the function name is the correct translation helper - # for the logging method - if func_name == self.TRANS_HELPER_MAP[method_name]: - return - - # the function name is an alias for the correct translation - # helper for the loggine method - if (self.i18n_names[func_name] == - self.TRANS_HELPER_MAP[method_name]): - return - - self.add_error(msg, message=self.NONDEBUG_CHECK_DESC) - - # if the first arg is not a reference to the correct i18n hint - elif isinstance(msg, ast.Name): - - # FIXME(dstanek): to make sure more robust we should be checking - # all names passed into a logging method. we can't right now - # because: - # 1. We have code like this that we'll fix when dealing with the %: - # msg = _('....') % {} - # LOG.warning(msg) - # 2. We also do LOG.exception(e) in several places. I'm not sure - # exactly what we should be doing about that. 
- if msg.id not in self.assignments: - return - - helper_method_name = self.TRANS_HELPER_MAP[method_name] - if (self.assignments[msg.id] != helper_method_name - and not self._is_raised_later(node, msg.id)): - self.add_error(msg, message=self.NONDEBUG_CHECK_DESC) - elif (self.assignments[msg.id] == helper_method_name - and self._is_raised_later(node, msg.id)): - self.add_error(msg, message=self.EXCESS_HELPER_CHECK_DESC) - - def _is_raised_later(self, node, name): - - def find_peers(node): - node_for_line = node._parent - for _field, value in ast.iter_fields(node._parent._parent): - if isinstance(value, list) and node_for_line in value: - return value[value.index(node_for_line) + 1:] - continue - return [] - - peers = find_peers(node) - for peer in peers: - if isinstance(peer, ast.Raise): - if six.PY3: - exc = peer.exc - else: - exc = peer.type - if (isinstance(exc, ast.Call) and - len(exc.args) > 0 and - isinstance(exc.args[0], ast.Name) and - name in (a.id for a in exc.args)): - return True - else: - return False - elif isinstance(peer, ast.Assign): - if name in (t.id for t in peer.targets if hasattr(t, 'id')): - return False - - -def dict_constructor_with_sequence_copy(logical_line): - """Should use a dict comprehension instead of a dict constructor. - - PEP-0274 introduced dict comprehension with performance enhancement - and it also makes code more readable. 
- - Okay: lower_res = {k.lower(): v for k, v in six.iteritems(res[1])} - Okay: fool = dict(a='a', b='b') - K008: lower_res = dict((k.lower(), v) for k, v in six.iteritems(res[1])) - K008: attrs = dict([(k, _from_json(v)) - K008: dict([[i,i] for i in range(3)]) - - """ - MESSAGE = ("K008 Must use a dict comprehension instead of a dict" - " constructor with a sequence of key-value pairs.") - - dict_constructor_with_sequence_re = ( - re.compile(r".*\bdict\((\[)?(\(|\[)(?!\{)")) - - if dict_constructor_with_sequence_re.match(logical_line): - yield (0, MESSAGE) - - -def factory(register): - register(CheckForMutableDefaultArgs) - register(block_comments_begin_with_a_space) - register(CheckForAssertingNoneEquality) - register(CheckForLoggingIssues) - register(dict_constructor_with_sequence_copy) diff --git a/keystone-moon/keystone/tests/moon/__init__.py b/keystone-moon/keystone/tests/moon/__init__.py deleted file mode 100644 index 1b678d53..00000000 --- a/keystone-moon/keystone/tests/moon/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors -# This software is distributed under the terms and conditions of the 'Apache-2.0' -# license which can be found in the file 'LICENSE' in this package distribution -# or at 'http://www.apache.org/licenses/LICENSE-2.0'. diff --git a/keystone-moon/keystone/tests/moon/backends/__init__.py b/keystone-moon/keystone/tests/moon/backends/__init__.py deleted file mode 100644 index 5b02576c..00000000 --- a/keystone-moon/keystone/tests/moon/backends/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__author__ = 'vdsq3226' diff --git a/keystone-moon/keystone/tests/moon/backends/test_sql_backend.py b/keystone-moon/keystone/tests/moon/backends/test_sql_backend.py deleted file mode 100644 index 27b8d3a0..00000000 --- a/keystone-moon/keystone/tests/moon/backends/test_sql_backend.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright 2015 Open Platform for NFV Project, Inc. 
and its contributors -# This software is distributed under the terms and conditions of the 'Apache-2.0' -# license which can be found in the file 'LICENSE' in this package distribution -# or at 'http://www.apache.org/licenses/LICENSE-2.0'. - -"""Unit tests for core configuration.""" - -import uuid -from oslo_config import cfg -from keystone.tests import unit as tests -from keystone.contrib.moon.backends import sql -from keystone.tests.unit.ksfixtures import database -from keystone.contrib.moon.exception import * -from keystone.tests.unit import default_fixtures -from keystone.contrib.moon.core import LogManager - -CONF = cfg.CONF - - -class TestSQL(tests.TestCase): - - def setUp(self): - self.useFixture(database.Database()) - super(TestSQL, self).setUp() - self.load_backends() - self.load_fixtures(default_fixtures) - self.driver = sql.IntraExtensionConnector() - - def load_extra_backends(self): - return { - "moonlog_api": LogManager() - } - - def config_overrides(self): - super(TestSQL, self).config_overrides() - self.config_fixture.config( - group='moon', - tenant_driver='keystone.contrib.moon.backends.sql.ConfigurationConnector') - - def test_intra_extensions(self): - result = self.driver.get_intra_extensions_dict() - print(type(result)) - self.assertIn("toto", result) diff --git a/keystone-moon/keystone/tests/moon/func/__init__.py b/keystone-moon/keystone/tests/moon/func/__init__.py deleted file mode 100644 index 1b678d53..00000000 --- a/keystone-moon/keystone/tests/moon/func/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors -# This software is distributed under the terms and conditions of the 'Apache-2.0' -# license which can be found in the file 'LICENSE' in this package distribution -# or at 'http://www.apache.org/licenses/LICENSE-2.0'. 
diff --git a/keystone-moon/keystone/tests/moon/func/test_func_api_authz.py b/keystone-moon/keystone/tests/moon/func/test_func_api_authz.py deleted file mode 100644 index 77438e95..00000000 --- a/keystone-moon/keystone/tests/moon/func/test_func_api_authz.py +++ /dev/null @@ -1,129 +0,0 @@ -# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors -# This software is distributed under the terms and conditions of the 'Apache-2.0' -# license which can be found in the file 'LICENSE' in this package distribution -# or at 'http://www.apache.org/licenses/LICENSE-2.0'. - -import unittest -import json -import httplib - - -CREDENTIALS = { - "host": "127.0.0.1", - "port": "35357", - "login": "admin", - "password": "nomoresecrete", - "tenant_name": "demo", - "sessionid": "kxb50d9uusiywfcs2fiidmu1j5nsyckr", - "csrftoken": "", - "x-subject-token": "" -} - - -def get_url(url, post_data=None, delete_data=None, crsftoken=None, method="GET", authtoken=None): - # MOON_SERVER_IP["URL"] = url - # _url = "http://{HOST}:{PORT}".format(**MOON_SERVER_IP) - if post_data: - method = "POST" - if delete_data: - method = "DELETE" - print("\033[32m{} {}\033[m".format(method, url)) - conn = httplib.HTTPConnection(CREDENTIALS["host"], CREDENTIALS["port"]) - headers = { - "Content-type": "application/x-www-form-urlencoded", - # "Accept": "text/plain", - "Accept": "text/plain,text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Cookie': 'sessionid={}'.format(CREDENTIALS["sessionid"]), - } - if crsftoken: - headers["Cookie"] = "csrftoken={}; sessionid={}; NG_TRANSLATE_LANG_KEY:\"en\"".format(crsftoken, CREDENTIALS["sessionid"]) - CREDENTIALS["crsftoken"] = crsftoken - if authtoken: - headers["X-Auth-Token"] = CREDENTIALS["x-subject-token"] - if post_data: - method = "POST" - headers["Content-type"] = "application/json" - if crsftoken: - post_data = "&".join(map(lambda x: "=".join(x), post_data)) - elif "crsftoken" in CREDENTIALS and "sessionid" in CREDENTIALS: - 
post_data = json.dumps(post_data) - headers["Cookie"] = "csrftoken={}; sessionid={}; NG_TRANSLATE_LANG_KEY:\"en\"".format( - CREDENTIALS["crsftoken"], - CREDENTIALS["sessionid"]) - else: - post_data = json.dumps(post_data) - # conn.request(method, url, json.dumps(post_data), headers=headers) - conn.request(method, url, post_data, headers=headers) - elif delete_data: - method = "DELETE" - conn.request(method, url, json.dumps(delete_data), headers=headers) - else: - conn.request(method, url, headers=headers) - resp = conn.getresponse() - headers = resp.getheaders() - try: - CREDENTIALS["x-subject-token"] = dict(headers)["x-subject-token"] - except KeyError: - pass - if crsftoken: - sessionid_start = dict(headers)["set-cookie"].index("sessionid=")+len("sessionid=") - sessionid_end = dict(headers)["set-cookie"].index(";", sessionid_start) - sessionid = dict(headers)["set-cookie"][sessionid_start:sessionid_end] - CREDENTIALS["sessionid"] = sessionid - content = resp.read() - conn.close() - try: - return json.loads(content) - except ValueError: - return {"content": content} - - -class AuthTest(unittest.TestCase): - - def setUp(self): - post = { - "auth": { - "identity": { - "methods": [ - "password" - ], - "password": { - "user": { - "domain": { - "id": "Default" - }, - "name": "admin", - "password": "nomoresecrete" - } - } - }, - "scope": { - "project": { - "domain": { - "id": "Default" - }, - "name": "demo" - } - } - } - } - data = get_url("/v3/auth/tokens", post_data=post) - self.assertIn("token", data) - - def tearDown(self): - pass - - def test_authz(self): - data = get_url("/v3/OS-MOON/authz/1234567890/1111111/2222222/3333333", authtoken=True) - for key in ("authz", "subject_id", "tenant_id", "object_id", "action_id"): - self.assertIn(key, data) - print(data) - data = get_url("/v3/OS-MOON/authz/961420e0aeed4fd88e09cf4ae2ae700e/" - "4cff0936eeed42439d746e8071245235/df60c814-bafd-44a8-ad34-6c649e75295f/unpause", authtoken=True) - for key in ("authz", "subject_id", 
"tenant_id", "object_id", "action_id"): - self.assertIn(key, data) - print(data) - - -if __name__ == "__main__": - unittest.main() diff --git a/keystone-moon/keystone/tests/moon/func/test_func_api_intra_extension_admin.py b/keystone-moon/keystone/tests/moon/func/test_func_api_intra_extension_admin.py deleted file mode 100644 index 607691ea..00000000 --- a/keystone-moon/keystone/tests/moon/func/test_func_api_intra_extension_admin.py +++ /dev/null @@ -1,1011 +0,0 @@ -# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors -# This software is distributed under the terms and conditions of the 'Apache-2.0' -# license which can be found in the file 'LICENSE' in this package distribution -# or at 'http://www.apache.org/licenses/LICENSE-2.0'. - -import unittest -import json -import httplib -from uuid import uuid4 -import copy - -CREDENTIALS = { - "host": "127.0.0.1", - "port": "35357", - "login": "admin", - "password": "nomoresecrete", - "tenant_name": "demo", - "sessionid": "kxb50d9uusiywfcs2fiidmu1j5nsyckr", - "csrftoken": "", - "x-subject-token": "" -} - - -def get_url(url, post_data=None, delete_data=None, crsftoken=None, method="GET", authtoken=None): - # MOON_SERVER_IP["URL"] = url - # _url = "http://{HOST}:{PORT}".format(**MOON_SERVER_IP) - if post_data: - method = "POST" - if delete_data: - method = "DELETE" - # print("\033[32m{} {}\033[m".format(method, url)) - conn = httplib.HTTPConnection(CREDENTIALS["host"], CREDENTIALS["port"]) - headers = { - "Content-type": "application/x-www-form-urlencoded", - # "Accept": "text/plain", - "Accept": "text/plain,text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Cookie': 'sessionid={}'.format(CREDENTIALS["sessionid"]), - } - if crsftoken: - headers["Cookie"] = "csrftoken={}; sessionid={}; NG_TRANSLATE_LANG_KEY:\"en\"".format(crsftoken, CREDENTIALS["sessionid"]) - CREDENTIALS["crsftoken"] = crsftoken - if authtoken: - headers["X-Auth-Token"] = CREDENTIALS["x-subject-token"] - if post_data: - 
method = "POST" - headers["Content-type"] = "application/json" - if crsftoken: - post_data = "&".join(map(lambda x: "=".join(x), post_data)) - elif "crsftoken" in CREDENTIALS and "sessionid" in CREDENTIALS: - post_data = json.dumps(post_data) - headers["Cookie"] = "csrftoken={}; sessionid={}; NG_TRANSLATE_LANG_KEY:\"en\"".format( - CREDENTIALS["crsftoken"], - CREDENTIALS["sessionid"]) - else: - post_data = json.dumps(post_data) - # conn.request(method, url, json.dumps(post_data), headers=headers) - conn.request(method, url, post_data, headers=headers) - elif delete_data: - method = "DELETE" - conn.request(method, url, json.dumps(delete_data), headers=headers) - else: - conn.request(method, url, headers=headers) - resp = conn.getresponse() - headers = resp.getheaders() - try: - CREDENTIALS["x-subject-token"] = dict(headers)["x-subject-token"] - except KeyError: - pass - if crsftoken: - sessionid_start = dict(headers)["set-cookie"].index("sessionid=")+len("sessionid=") - sessionid_end = dict(headers)["set-cookie"].index(";", sessionid_start) - sessionid = dict(headers)["set-cookie"][sessionid_start:sessionid_end] - CREDENTIALS["sessionid"] = sessionid - content = resp.read() - conn.close() - try: - return json.loads(content) - except ValueError: - return {"content": content} - -def get_keystone_user(name="demo", intra_extension_uuid=None): - users = get_url("/v3/users", authtoken=True)["users"] - demo_user_uuid = None - for user in users: - if user["name"] == name: - demo_user_uuid = user["id"] - break - # if user "name" is not present, fallback to admin - if user["name"] == "admin": - demo_user_uuid = user["id"] - if intra_extension_uuid: - post_data = {"subject_id": demo_user_uuid} - get_url("/v3/OS-MOON/intra_extensions/{}/subjects".format( - intra_extension_uuid), post_data=post_data, authtoken=True) - return demo_user_uuid - -class IntraExtensionsTest(unittest.TestCase): - - def setUp(self): - post = { - "auth": { - "identity": { - "methods": [ - "password" - ], 
- "password": { - "user": { - "domain": { - "id": "Default" - }, - "name": "admin", - "password": "nomoresecrete" - } - } - }, - "scope": { - "project": { - "domain": { - "id": "Default" - }, - "name": "demo" - } - } - } - } - data = get_url("/v3/auth/tokens", post_data=post) - self.assertIn("token", data) - - def tearDown(self): - pass - - def test_create_intra_extensions(self): - data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True) - self.assertIn("intra_extensions", data) - data = get_url("/v3/OS-MOON/authz_policies", authtoken=True) - self.assertIn("authz_policies", data) - for model in data["authz_policies"]: - # Create a new intra_extension - new_ie = { - "name": "new_intra_extension", - "description": "new_intra_extension", - "policymodel": model - } - data = get_url("/v3/OS-MOON/intra_extensions/", post_data=new_ie, authtoken=True) - for key in [u'model', u'id', u'name', u'description']: - self.assertIn(key, data) - ie_id = data["id"] - data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True) - self.assertIn(ie_id, data["intra_extensions"]) - - # Get all subjects - data = get_url("/v3/OS-MOON/intra_extensions/{}/subjects".format(ie_id), authtoken=True) - self.assertIn("subjects", data) - self.assertIs(type(data["subjects"]), dict) - - # Get all objects - data = get_url("/v3/OS-MOON/intra_extensions/{}/objects".format(ie_id), authtoken=True) - self.assertIn("objects", data) - self.assertIsInstance(data["objects"], dict) - - # Get all actions - data = get_url("/v3/OS-MOON/intra_extensions/{}/actions".format(ie_id), authtoken=True) - self.assertIn("actions", data) - self.assertIsInstance(data["actions"], dict) - - # # get current tenant - # data = get_url("/v3/OS-MOON/intra_extensions/{}/tenant".format(ie_id), authtoken=True) - # self.assertIn("tenant", data) - # self.assertIn(type(data["tenant"]), (str, unicode)) - # - # # set current tenant - # tenants = get_url("/v3/projects", authtoken=True)["projects"] - # post_data = {"tenant_id": ""} - # 
for tenant in tenants: - # if tenant["name"] == "admin": - # post_data = {"tenant_id": tenant["id"]} - # break - # data = get_url("/v3/OS-MOON/intra_extensions/{}/tenant".format(ie_id), - # post_data=post_data, - # authtoken=True) - # self.assertIn("tenant", data) - # self.assertIn(type(data["tenant"]), (str, unicode)) - # self.assertEqual(data["tenant"], post_data["tenant_id"]) - # - # # check current tenant - # data = get_url("/v3/OS-MOON/intra_extensions/{}/tenant".format(ie_id), authtoken=True) - # self.assertIn("tenant", data) - # self.assertIn(type(data["tenant"]), (str, unicode)) - # self.assertEqual(data["tenant"], post_data["tenant_id"]) - - # Delete the intra_extension - data = get_url("/v3/OS-MOON/intra_extensions/{}".format(ie_id), method="DELETE", authtoken=True) - data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True) - self.assertNotIn(ie_id, data["intra_extensions"]) - - def test_perimeter_data(self): - data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True) - self.assertIn("intra_extensions", data) - data = get_url("/v3/OS-MOON/authz_policies", authtoken=True) - self.assertIn("authz_policies", data) - for model in data["authz_policies"]: - # Create a new intra_extension - new_ie = { - "name": "new_intra_extension", - "description": "new_intra_extension", - "policymodel": model - } - data = get_url("/v3/OS-MOON/intra_extensions/", post_data=new_ie, authtoken=True) - for key in [u'model', u'id', u'name', u'description']: - self.assertIn(key, data) - ie_id = data["id"] - data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True) - self.assertIn(ie_id, data["intra_extensions"]) - - # Get all subjects - data = get_url("/v3/OS-MOON/intra_extensions/{}/subjects".format(ie_id), authtoken=True) - self.assertIn("subjects", data) - self.assertIs(type(data["subjects"]), dict) - self.assertTrue(len(data["subjects"]) > 0) - - # Add a new subject - users = get_url("/v3/users", authtoken=True)["users"] - demo_user_uuid = None - for user in 
users: - if user["name"] == "demo": - demo_user_uuid = user["id"] - break - # if user demo is not present - if user["name"] == "admin": - demo_user_uuid = user["id"] - post_data = {"subject_id": demo_user_uuid} - data = get_url("/v3/OS-MOON/intra_extensions/{}/subjects".format(ie_id), post_data=post_data, authtoken=True) - self.assertIn("subject", data) - self.assertIs(type(data["subject"]), dict) - self.assertEqual(post_data["subject_id"], data["subject"]["uuid"]) - data = get_url("/v3/OS-MOON/intra_extensions/{}/subjects".format(ie_id), authtoken=True) - self.assertIn("subjects", data) - self.assertIsInstance(data["subjects"], dict) - self.assertIn(post_data["subject_id"], data["subjects"]) - # delete the previous subject - data = get_url("/v3/OS-MOON/intra_extensions/{}/subjects/{}".format(ie_id, post_data["subject_id"]), - method="DELETE", authtoken=True) - data = get_url("/v3/OS-MOON/intra_extensions/{}/subjects".format(ie_id), authtoken=True) - self.assertIn("subjects", data) - self.assertIsInstance(data["subjects"], dict) - self.assertNotIn(post_data["subject_id"], data["subjects"]) - - # Get all objects - data = get_url("/v3/OS-MOON/intra_extensions/{}/objects".format(ie_id), authtoken=True) - self.assertIn("objects", data) - self.assertIs(type(data["objects"]), dict) - self.assertTrue(len(data["objects"]) > 0) - - # Add a new object - post_data = {"object_id": "my_new_object"} - data = get_url("/v3/OS-MOON/intra_extensions/{}/objects".format(ie_id), post_data=post_data, authtoken=True) - self.assertIn("object", data) - self.assertIsInstance(data["object"], dict) - self.assertEqual(post_data["object_id"], data["object"]["name"]) - object_id = data["object"]["uuid"] - data = get_url("/v3/OS-MOON/intra_extensions/{}/objects".format(ie_id), authtoken=True) - self.assertIn("objects", data) - self.assertIsInstance(data["objects"], dict) - self.assertIn(post_data["object_id"], data["objects"].values()) - - # delete the previous object - data = 
get_url("/v3/OS-MOON/intra_extensions/{}/objects/{}".format(ie_id, object_id), - method="DELETE", authtoken=True) - data = get_url("/v3/OS-MOON/intra_extensions/{}/objects".format(ie_id), authtoken=True) - self.assertIn("objects", data) - self.assertIsInstance(data["objects"], dict) - self.assertNotIn(post_data["object_id"], data["objects"].values()) - - # Get all actions - data = get_url("/v3/OS-MOON/intra_extensions/{}/actions".format(ie_id), authtoken=True) - self.assertIn("actions", data) - self.assertIs(type(data["actions"]), dict) - self.assertTrue(len(data["actions"]) > 0) - - # Add a new action - post_data = {"action_id": "create2"} - data = get_url("/v3/OS-MOON/intra_extensions/{}/actions".format(ie_id), post_data=post_data, authtoken=True) - action_id = data["action"]["uuid"] - self.assertIn("action", data) - self.assertIsInstance(data["action"], dict) - self.assertEqual(post_data["action_id"], data["action"]["name"]) - data = get_url("/v3/OS-MOON/intra_extensions/{}/actions".format(ie_id), authtoken=True) - self.assertIn("actions", data) - self.assertIsInstance(data["actions"], dict) - self.assertIn(post_data["action_id"], data["actions"].values()) - - # delete the previous action - data = get_url("/v3/OS-MOON/intra_extensions/{}/actions/{}".format(ie_id, action_id), - method="DELETE", authtoken=True) - data = get_url("/v3/OS-MOON/intra_extensions/{}/actions".format(ie_id), authtoken=True) - self.assertIn("actions", data) - self.assertIsInstance(data["actions"], dict) - self.assertNotIn(post_data["action_id"], data["actions"]) - - # Delete the intra_extension - data = get_url("/v3/OS-MOON/intra_extensions/{}".format(ie_id), method="DELETE", authtoken=True) - data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True) - self.assertNotIn(ie_id, data["intra_extensions"]) - - def test_assignments_data(self): - data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True) - self.assertIn("intra_extensions", data) - data = 
get_url("/v3/OS-MOON/authz_policies", authtoken=True) - self.assertIn("authz_policies", data) - for model in data["authz_policies"]: - # Create a new intra_extension - new_ie = { - "name": "new_intra_extension", - "description": "new_intra_extension", - "policymodel": model - } - data = get_url("/v3/OS-MOON/intra_extensions/", post_data=new_ie, authtoken=True) - for key in [u'model', u'id', u'name', u'description']: - self.assertIn(key, data) - ie_id = data["id"] - data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True) - self.assertIn(ie_id, data["intra_extensions"]) - - # Get all subject_assignments - data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_assignments/{}".format( - ie_id, get_keystone_user(intra_extension_uuid=ie_id)), authtoken=True) - self.assertIn("subject_category_assignments", data) - self.assertIs(type(data["subject_category_assignments"]), dict) - - # Add subject_assignments - # get one subject - data = get_url("/v3/OS-MOON/intra_extensions/{}/subjects".format(ie_id), authtoken=True) - self.assertIn("subjects", data) - self.assertIs(type(data["subjects"]), dict) - # subject_id = data["subjects"].keys()[0] - subject_id = get_keystone_user() - # get one subject category - data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_categories".format(ie_id), authtoken=True) - self.assertIn("subject_categories", data) - self.assertIs(type(data["subject_categories"]), dict) - subject_category_id = data["subject_categories"].keys()[0] - # get all subject category scope - data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_category_scope/{}".format( - ie_id, subject_category_id), authtoken=True) - self.assertIn("subject_category_scope", data) - self.assertIs(type(data["subject_category_scope"]), dict) - subject_category_scope_id = data["subject_category_scope"][subject_category_id].keys()[0] - post_data = { - "subject_id": subject_id, - "subject_category": subject_category_id, - "subject_category_scope": subject_category_scope_id - } - 
data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_assignments".format(ie_id), post_data=post_data, authtoken=True) - self.assertIn("subject_category_assignments", data) - self.assertIs(type(data["subject_category_assignments"]), dict) - self.assertIn(post_data["subject_category"], data["subject_category_assignments"][subject_id]) - self.assertIn(post_data["subject_category"], data["subject_category_assignments"][subject_id]) - self.assertIn(post_data["subject_category_scope"], - data["subject_category_assignments"][subject_id][post_data["subject_category"]]) - # data = get_url("/v3/OS-MOON/intra_extensions/{}/subjects".format(ie_id), authtoken=True) - # self.assertIn("subjects", data) - # self.assertIsInstance(data["subjects"], dict) - # self.assertIn(post_data["subject_id"], data["subjects"]) - - # delete the previous subject assignment - get_url("/v3/OS-MOON/intra_extensions/{}/subject_assignments/{}/{}/{}".format( - ie_id, - post_data["subject_id"], - post_data["subject_category"], - post_data["subject_category_scope"], - ), - method="DELETE", authtoken=True) - data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_assignments/{}".format( - ie_id, get_keystone_user()), authtoken=True) - self.assertIn("subject_category_assignments", data) - self.assertIs(type(data["subject_category_assignments"]), dict) - if post_data["subject_category"] in data["subject_category_assignments"][subject_id]: - if post_data["subject_category"] in data["subject_category_assignments"][subject_id]: - self.assertNotIn(post_data["subject_category_scope"], - data["subject_category_assignments"][subject_id][post_data["subject_category"]]) - - # Get all object_assignments - - # get one object - post_data = {"object_id": "my_new_object"} - new_object = get_url("/v3/OS-MOON/intra_extensions/{}/objects".format(ie_id), post_data=post_data, authtoken=True) - object_id = new_object["object"]["uuid"] - - data = get_url("/v3/OS-MOON/intra_extensions/{}/object_assignments/{}".format( - ie_id, 
object_id), authtoken=True) - self.assertIn("object_category_assignments", data) - self.assertIsInstance(data["object_category_assignments"], dict) - - # Add object_assignments - # get one object category - post_data = {"object_category_id": uuid4().hex} - object_category = get_url("/v3/OS-MOON/intra_extensions/{}/object_categories".format(ie_id), - post_data=post_data, - authtoken=True) - object_category_id = object_category["object_category"]["uuid"] - # get all object category scope - post_data = { - "object_category_id": object_category_id, - "object_category_scope_id": uuid4().hex - } - data = get_url("/v3/OS-MOON/intra_extensions/{}/object_category_scope".format(ie_id), - post_data=post_data, - authtoken=True) - object_category_scope_id = data["object_category_scope"]["uuid"] - data = get_url("/v3/OS-MOON/intra_extensions/{}/object_category_scope/{}".format( - ie_id, object_category_id), authtoken=True) - self.assertIn("object_category_scope", data) - self.assertIs(type(data["object_category_scope"]), dict) - post_data = { - "object_id": object_id, - "object_category": object_category_id, - "object_category_scope": object_category_scope_id - } - data = get_url("/v3/OS-MOON/intra_extensions/{}/object_assignments".format(ie_id), post_data=post_data, authtoken=True) - self.assertIn("object_category_assignments", data) - self.assertIs(type(data["object_category_assignments"]), dict) - self.assertIn(post_data["object_id"], data["object_category_assignments"]) - self.assertIn(post_data["object_category"], data["object_category_assignments"][post_data["object_id"]]) - self.assertIn(post_data["object_category_scope"], - data["object_category_assignments"][post_data["object_id"]][post_data["object_category"]]) - data = get_url("/v3/OS-MOON/intra_extensions/{}/objects".format(ie_id), authtoken=True) - self.assertIn("objects", data) - self.assertIsInstance(data["objects"], dict) - self.assertIn(post_data["object_id"], data["objects"]) - # delete the previous object - 
data = get_url("/v3/OS-MOON/intra_extensions/{}/objects/{}".format(ie_id, post_data["object_id"]), - method="DELETE", authtoken=True) - data = get_url("/v3/OS-MOON/intra_extensions/{}/objects".format(ie_id), authtoken=True) - self.assertIn("objects", data) - self.assertIsInstance(data["objects"], dict) - self.assertNotIn(post_data["object_id"], data["objects"]) - - # Get all actions_assignments - - # get one action - post_data = {"action_id": "my_new_action"} - new_object = get_url("/v3/OS-MOON/intra_extensions/{}/actions".format(ie_id), post_data=post_data, authtoken=True) - action_id = new_object["action"]["uuid"] - - post_data = {"action_category_id": uuid4().hex} - action_category = get_url("/v3/OS-MOON/intra_extensions/{}/action_categories".format(ie_id), - post_data=post_data, - authtoken=True) - action_category_id = action_category["action_category"]["uuid"] - - data = get_url("/v3/OS-MOON/intra_extensions/{}/action_assignments/{}".format( - ie_id, action_id), authtoken=True) - self.assertIn("action_category_assignments", data) - self.assertIsInstance(data["action_category_assignments"], dict) - - # Add action_assignments - # get one action category - # data = get_url("/v3/OS-MOON/intra_extensions/{}/action_categories".format(ie_id), authtoken=True) - # self.assertIn("action_categories", data) - # self.assertIs(type(data["action_categories"]), dict) - # action_category_id = data["action_categories"][0] - # get all action category scope - post_data = { - "action_category_id": action_category_id, - "action_category_scope_id": uuid4().hex - } - data = get_url("/v3/OS-MOON/intra_extensions/{}/action_category_scope".format(ie_id), - post_data=post_data, - authtoken=True) - action_category_scope_id = data["action_category_scope"]["uuid"] - data = get_url("/v3/OS-MOON/intra_extensions/{}/action_category_scope/{}".format( - ie_id, action_category_id), authtoken=True) - self.assertIn("action_category_scope", data) - self.assertIs(type(data["action_category_scope"]), 
dict) - # action_category_scope_id = data["action_category_scope"][action_category_id].keys()[0] - post_data = { - "action_id": action_id, - "action_category": action_category_id, - "action_category_scope": action_category_scope_id - } - data = get_url("/v3/OS-MOON/intra_extensions/{}/action_assignments".format(ie_id), post_data=post_data, authtoken=True) - self.assertIn("action_category_assignments", data) - self.assertIs(type(data["action_category_assignments"]), dict) - self.assertIn(post_data["action_id"], data["action_category_assignments"]) - self.assertIn(post_data["action_category"], data["action_category_assignments"][post_data["action_id"]]) - self.assertIn(post_data["action_category_scope"], - data["action_category_assignments"][post_data["action_id"]][post_data["action_category"]]) - data = get_url("/v3/OS-MOON/intra_extensions/{}/actions".format(ie_id), authtoken=True) - self.assertIn("actions", data) - self.assertIsInstance(data["actions"], dict) - self.assertIn(post_data["action_id"], data["actions"]) - # delete the previous action - data = get_url("/v3/OS-MOON/intra_extensions/{}/actions/{}".format(ie_id, post_data["action_id"]), - method="DELETE", authtoken=True) - data = get_url("/v3/OS-MOON/intra_extensions/{}/actions".format(ie_id), authtoken=True) - self.assertIn("actions", data) - self.assertIsInstance(data["actions"], dict) - self.assertNotIn(post_data["action_id"], data["actions"]) - - # Delete the intra_extension - get_url("/v3/OS-MOON/intra_extensions/{}".format(ie_id), method="DELETE", authtoken=True) - data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True) - self.assertNotIn(ie_id, data["intra_extensions"]) - - def test_metadata_data(self): - data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True) - self.assertIn("intra_extensions", data) - data = get_url("/v3/OS-MOON/authz_policies", authtoken=True) - self.assertIn("authz_policies", data) - for model in data["authz_policies"]: - # Create a new intra_extension - new_ie = { 
- "name": "new_intra_extension", - "description": "new_intra_extension", - "policymodel": model - } - data = get_url("/v3/OS-MOON/intra_extensions/", post_data=new_ie, authtoken=True) - for key in [u'model', u'id', u'name', u'description']: - self.assertIn(key, data) - ie_id = data["id"] - data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True) - self.assertIn(ie_id, data["intra_extensions"]) - - # Get all subject_categories - data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_categories".format(ie_id), authtoken=True) - self.assertIn("subject_categories", data) - self.assertIs(type(data["subject_categories"]), dict) - - # Add a new subject_category - post_data = {"subject_category_id": uuid4().hex} - data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_categories".format(ie_id), - post_data=post_data, - authtoken=True) - self.assertIn("subject_category", data) - self.assertIsInstance(data["subject_category"], dict) - self.assertEqual(post_data["subject_category_id"], data["subject_category"]["name"]) - subject_category_id = data["subject_category"]["uuid"] - data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_categories".format(ie_id), authtoken=True) - self.assertIn("subject_categories", data) - self.assertIsInstance(data["subject_categories"], dict) - self.assertIn(post_data["subject_category_id"], data["subject_categories"].values()) - # delete the previous subject_category - get_url("/v3/OS-MOON/intra_extensions/{}/subject_categories/{}".format(ie_id, - subject_category_id), - method="DELETE", - authtoken=True) - data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_categories".format(ie_id), authtoken=True) - self.assertIn("subject_categories", data) - self.assertIsInstance(data["subject_categories"], dict) - self.assertNotIn(post_data["subject_category_id"], data["subject_categories"].values()) - - # Get all object_categories - data = get_url("/v3/OS-MOON/intra_extensions/{}/object_categories".format(ie_id), authtoken=True) - 
self.assertIn("object_categories", data) - self.assertIsInstance(data["object_categories"], dict) - - # Add a new object_category - post_data = {"object_category_id": uuid4().hex} - data = get_url("/v3/OS-MOON/intra_extensions/{}/object_categories".format(ie_id), - post_data=post_data, - authtoken=True) - self.assertIn("object_category", data) - self.assertIsInstance(data["object_category"], dict) - self.assertIn(post_data["object_category_id"], data["object_category"]["name"]) - object_category_id = data["object_category"]["uuid"] - data = get_url("/v3/OS-MOON/intra_extensions/{}/object_categories".format(ie_id), authtoken=True) - self.assertIn("object_categories", data) - self.assertIsInstance(data["object_categories"], dict) - self.assertIn(post_data["object_category_id"], data["object_categories"].values()) - # delete the previous subject_category - get_url("/v3/OS-MOON/intra_extensions/{}/object_categories/{}".format(ie_id, - object_category_id), - method="DELETE", - authtoken=True) - data = get_url("/v3/OS-MOON/intra_extensions/{}/object_categories".format(ie_id), authtoken=True) - self.assertIn("object_categories", data) - self.assertIsInstance(data["object_categories"], dict) - self.assertNotIn(post_data["object_category_id"], data["object_categories"].values()) - - # Get all action_categories - data = get_url("/v3/OS-MOON/intra_extensions/{}/action_categories".format(ie_id), authtoken=True) - self.assertIn("action_categories", data) - self.assertIsInstance(data["action_categories"], dict) - - # Add a new action_category - post_data = {"action_category_id": uuid4().hex} - data = get_url("/v3/OS-MOON/intra_extensions/{}/action_categories".format(ie_id), - post_data=post_data, - authtoken=True) - self.assertIn("action_category", data) - self.assertIsInstance(data["action_category"], dict) - self.assertIn(post_data["action_category_id"], data["action_category"]["name"]) - action_category_id = data["action_category"]["uuid"] - data = 
get_url("/v3/OS-MOON/intra_extensions/{}/action_categories".format(ie_id), authtoken=True) - self.assertIn("action_categories", data) - self.assertIsInstance(data["action_categories"], dict) - self.assertIn(post_data["action_category_id"], data["action_categories"].values()) - # delete the previous subject_category - get_url("/v3/OS-MOON/intra_extensions/{}/action_categories/{}".format(ie_id, - action_category_id), - method="DELETE", - authtoken=True) - data = get_url("/v3/OS-MOON/intra_extensions/{}/action_categories".format(ie_id), authtoken=True) - self.assertIn("action_categories", data) - self.assertIsInstance(data["action_categories"], dict) - self.assertNotIn(post_data["action_category_id"], data["action_categories"].values()) - - # Delete the intra_extension - get_url("/v3/OS-MOON/intra_extensions/{}".format(ie_id), method="DELETE", authtoken=True) - data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True) - self.assertNotIn(ie_id, data["intra_extensions"]) - - def test_scope_data(self): - data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True) - self.assertIn("intra_extensions", data) - data = get_url("/v3/OS-MOON/authz_policies", authtoken=True) - self.assertIn("authz_policies", data) - for model in data["authz_policies"]: - # Create a new intra_extension - new_ie = { - "name": "new_intra_extension", - "description": "new_intra_extension", - "policymodel": model - } - data = get_url("/v3/OS-MOON/intra_extensions/", post_data=new_ie, authtoken=True) - for key in [u'model', u'id', u'name', u'description']: - self.assertIn(key, data) - ie_id = data["id"] - data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True) - self.assertIn(ie_id, data["intra_extensions"]) - - # Get all subject_category_scope - categories = get_url("/v3/OS-MOON/intra_extensions/{}/subject_categories".format(ie_id), authtoken=True) - for category in categories["subject_categories"]: - data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_category_scope/{}".format( - 
ie_id, category), authtoken=True) - self.assertIn("subject_category_scope", data) - self.assertIs(type(data["subject_category_scope"]), dict) - - # Add a new subject_category_scope - post_data = { - "subject_category_id": category, - "subject_category_scope_id": uuid4().hex - } - data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_category_scope".format(ie_id), - post_data=post_data, - authtoken=True) - self.assertIn("subject_category_scope", data) - self.assertIsInstance(data["subject_category_scope"], dict) - self.assertEqual(post_data["subject_category_scope_id"], data["subject_category_scope"]["name"]) - data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_category_scope/{}".format( - ie_id, category), authtoken=True) - self.assertIn("subject_category_scope", data) - self.assertIsInstance(data["subject_category_scope"], dict) - self.assertIn(post_data["subject_category_id"], data["subject_category_scope"]) - self.assertIn(post_data["subject_category_scope_id"], - data["subject_category_scope"][category].values()) - # delete the previous subject_category_scope - get_url("/v3/OS-MOON/intra_extensions/{}/subject_category_scope/{}/{}".format( - ie_id, - post_data["subject_category_id"], - post_data["subject_category_scope_id"]), - method="DELETE", - authtoken=True) - data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_category_scope/{}".format( - ie_id, category), authtoken=True) - self.assertIn("subject_category_scope", data) - self.assertIsInstance(data["subject_category_scope"], dict) - self.assertIn(post_data["subject_category_id"], data["subject_category_scope"]) - self.assertNotIn(post_data["subject_category_scope_id"], - data["subject_category_scope"][post_data["subject_category_id"]]) - - # Get all object_category_scope - # get object_categories - categories = get_url("/v3/OS-MOON/intra_extensions/{}/object_categories".format(ie_id), authtoken=True) - for category in categories["object_categories"]: - post_data = { - "object_category_id": 
category, - "object_category_scope_id": uuid4().hex - } - data = get_url("/v3/OS-MOON/intra_extensions/{}/object_category_scope".format(ie_id), - post_data=post_data, - authtoken=True) - self.assertIn("object_category_scope", data) - self.assertIsInstance(data["object_category_scope"], dict) - self.assertEqual(post_data["object_category_scope_id"], data["object_category_scope"]["name"]) - data = get_url("/v3/OS-MOON/intra_extensions/{}/object_category_scope/{}".format( - ie_id, category), authtoken=True) - self.assertIn("object_category_scope", data) - self.assertIsInstance(data["object_category_scope"], dict) - self.assertIn(post_data["object_category_id"], data["object_category_scope"]) - self.assertIn(post_data["object_category_scope_id"], - data["object_category_scope"][category].values()) - # delete the previous object_category_scope - get_url("/v3/OS-MOON/intra_extensions/{}/object_category_scope/{}/{}".format( - ie_id, - post_data["object_category_id"], - post_data["object_category_scope_id"]), - method="DELETE", - authtoken=True) - data = get_url("/v3/OS-MOON/intra_extensions/{}/object_category_scope/{}".format( - ie_id, category), authtoken=True) - self.assertIn("object_category_scope", data) - self.assertIsInstance(data["object_category_scope"], dict) - self.assertIn(post_data["object_category_id"], data["object_category_scope"]) - self.assertNotIn(post_data["object_category_scope_id"], - data["object_category_scope"][post_data["object_category_id"]]) - - # Get all action_category_scope - categories = get_url("/v3/OS-MOON/intra_extensions/{}/action_categories".format(ie_id), authtoken=True) - print(categories) - for category in categories["action_categories"]: - print(category) - data = get_url("/v3/OS-MOON/intra_extensions/{}/action_category_scope/{}".format( - ie_id, category), authtoken=True) - self.assertIn("action_category_scope", data) - self.assertIsInstance(data["action_category_scope"], dict) - - # Add a new action_category_scope - post_data = { 
- "action_category_id": category, - "action_category_scope_id": uuid4().hex - } - data = get_url("/v3/OS-MOON/intra_extensions/{}/action_category_scope".format(ie_id), - post_data=post_data, - authtoken=True) - self.assertIn("action_category_scope", data) - self.assertIsInstance(data["action_category_scope"], dict) - self.assertEqual(post_data["action_category_scope_id"], data["action_category_scope"]["name"]) - data = get_url("/v3/OS-MOON/intra_extensions/{}/action_category_scope/{}".format( - ie_id, category), authtoken=True) - self.assertIn("action_category_scope", data) - self.assertIsInstance(data["action_category_scope"], dict) - self.assertIn(post_data["action_category_id"], data["action_category_scope"]) - self.assertIn(post_data["action_category_scope_id"], - data["action_category_scope"][category].values()) - # delete the previous action_category_scope - get_url("/v3/OS-MOON/intra_extensions/{}/action_category_scope/{}/{}".format( - ie_id, - post_data["action_category_id"], - post_data["action_category_scope_id"]), - method="DELETE", - authtoken=True) - data = get_url("/v3/OS-MOON/intra_extensions/{}/action_category_scope/{}".format( - ie_id, category), authtoken=True) - self.assertIn("action_category_scope", data) - self.assertIsInstance(data["action_category_scope"], dict) - self.assertIn(post_data["action_category_id"], data["action_category_scope"]) - self.assertNotIn(post_data["action_category_scope_id"], - data["action_category_scope"][post_data["action_category_id"]]) - - # Delete the intra_extension - get_url("/v3/OS-MOON/intra_extensions/{}".format(ie_id), method="DELETE", authtoken=True) - data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True) - self.assertNotIn(ie_id, data["intra_extensions"]) - - def test_metarule_data(self): - data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True) - self.assertIn("intra_extensions", data) - data = get_url("/v3/OS-MOON/authz_policies", authtoken=True) - self.assertIn("authz_policies", data) - 
for model in data["authz_policies"]: - # Create a new intra_extension - new_ie = { - "name": "new_intra_extension", - "description": "new_intra_extension", - "policymodel": model - } - data = get_url("/v3/OS-MOON/intra_extensions/", post_data=new_ie, authtoken=True) - for key in [u'model', u'id', u'name', u'description']: - self.assertIn(key, data) - ie_id = data["id"] - data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True) - self.assertIn(ie_id, data["intra_extensions"]) - - # Get all aggregation_algorithms - data = get_url("/v3/OS-MOON/intra_extensions/{}/aggregation_algorithms".format(ie_id), authtoken=True) - self.assertIn("aggregation_algorithms", data) - self.assertIs(type(data["aggregation_algorithms"]), list) - aggregation_algorithms = data["aggregation_algorithms"] - - # Get all sub_meta_rule_relations - data = get_url("/v3/OS-MOON/intra_extensions/{}/sub_meta_rule_relations".format(ie_id), authtoken=True) - self.assertIn("sub_meta_rule_relations", data) - self.assertIs(type(data["sub_meta_rule_relations"]), list) - sub_meta_rule_relations = data["sub_meta_rule_relations"] - - # Get current aggregation_algorithm - data = get_url("/v3/OS-MOON/intra_extensions/{}/aggregation_algorithm".format(ie_id), authtoken=True) - self.assertIn("aggregation", data) - self.assertIn(type(data["aggregation"]), (str, unicode)) - aggregation_algorithm = data["aggregation"] - - # Set current aggregation_algorithm - post_data = {"aggregation_algorithm": ""} - for _algo in aggregation_algorithms: - if _algo != aggregation_algorithm: - post_data = {"aggregation_algorithm": _algo} - data = get_url("/v3/OS-MOON/intra_extensions/{}/aggregation_algorithm".format(ie_id), - post_data=post_data, - authtoken=True) - self.assertIn("aggregation", data) - self.assertIn(type(data["aggregation"]), (str, unicode)) - self.assertEqual(post_data["aggregation_algorithm"], data["aggregation"]) - new_aggregation_algorithm = data["aggregation"] - data = 
get_url("/v3/OS-MOON/intra_extensions/{}/aggregation_algorithm".format(ie_id), authtoken=True) - self.assertIn("aggregation", data) - self.assertIn(type(data["aggregation"]), (str, unicode)) - self.assertEqual(post_data["aggregation_algorithm"], new_aggregation_algorithm) - # Get back to the old value - post_data = {"aggregation_algorithm": aggregation_algorithm} - data = get_url("/v3/OS-MOON/intra_extensions/{}/aggregation_algorithm".format(ie_id), - post_data=post_data, - authtoken=True) - self.assertIn("aggregation", data) - self.assertIn(type(data["aggregation"]), (str, unicode)) - self.assertEqual(post_data["aggregation_algorithm"], aggregation_algorithm) - - # Get current sub_meta_rule - data = get_url("/v3/OS-MOON/intra_extensions/{}/sub_meta_rule".format(ie_id), authtoken=True) - self.assertIn("sub_meta_rules", data) - self.assertIs(type(data["sub_meta_rules"]), dict) - self.assertGreater(len(data["sub_meta_rules"].keys()), 0) - relation = data["sub_meta_rules"].keys()[0] - new_relation = "" - self.assertIn(relation, sub_meta_rule_relations) - sub_meta_rule = data["sub_meta_rules"] - post_data = dict() - for _relation in sub_meta_rule_relations: - if _relation != data["sub_meta_rules"].keys()[0]: - post_data[_relation] = copy.deepcopy(sub_meta_rule[relation]) - post_data[_relation]["relation"] = _relation - new_relation = _relation - break - # Add a new subject category - subject_category = uuid4().hex - data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_categories".format(ie_id), - post_data={"subject_category_id": subject_category}, - authtoken=True) - self.assertIn("subject_category", data) - self.assertIsInstance(data["subject_category"], dict) - self.assertIn(subject_category, data["subject_category"].values()) - subject_category_id = data["subject_category"]['uuid'] - # Add a new object category - object_category = uuid4().hex - data = get_url("/v3/OS-MOON/intra_extensions/{}/object_categories".format(ie_id), - post_data={"object_category_id": 
object_category}, - authtoken=True) - self.assertIn("object_category", data) - self.assertIsInstance(data["object_category"], dict) - self.assertIn(object_category, data["object_category"].values()) - object_category_id = data["object_category"]['uuid'] - # Add a new action category - action_category = uuid4().hex - data = get_url("/v3/OS-MOON/intra_extensions/{}/action_categories".format(ie_id), - post_data={"action_category_id": action_category}, - authtoken=True) - self.assertIn("action_category", data) - self.assertIsInstance(data["action_category"], dict) - self.assertIn(action_category, data["action_category"].values()) - action_category_id = data["action_category"]['uuid'] - # Modify the post_data to add new categories - post_data[new_relation]["subject_categories"].append(subject_category_id) - post_data[new_relation]["object_categories"].append(object_category_id) - post_data[new_relation]["action_categories"].append(action_category_id) - data = get_url("/v3/OS-MOON/intra_extensions/{}/sub_meta_rule".format(ie_id), - post_data=post_data, - authtoken=True) - self.assertIn("sub_meta_rules", data) - self.assertIs(type(data["sub_meta_rules"]), dict) - self.assertGreater(len(data["sub_meta_rules"].keys()), 0) - self.assertEqual(new_relation, data["sub_meta_rules"].keys()[0]) - self.assertIn(subject_category_id, data["sub_meta_rules"][new_relation]["subject_categories"]) - self.assertIn(object_category_id, data["sub_meta_rules"][new_relation]["object_categories"]) - self.assertIn(action_category_id, data["sub_meta_rules"][new_relation]["action_categories"]) - self.assertEqual(new_relation, data["sub_meta_rules"][new_relation]["relation"]) - - # Delete the intra_extension - data = get_url("/v3/OS-MOON/intra_extensions/{}".format(ie_id), method="DELETE", authtoken=True) - data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True) - self.assertNotIn(ie_id, data["intra_extensions"]) - - def test_rules_data(self): - data = get_url("/v3/OS-MOON/intra_extensions", 
authtoken=True) - self.assertIn("intra_extensions", data) - data = get_url("/v3/OS-MOON/authz_policies", authtoken=True) - self.assertIn("authz_policies", data) - for model in data["authz_policies"]: - # Create a new intra_extension - print("=====> {}".format(model)) - new_ie = { - "name": "new_intra_extension", - "description": "new_intra_extension", - "policymodel": model - } - data = get_url("/v3/OS-MOON/intra_extensions/", post_data=new_ie, authtoken=True) - for key in [u'model', u'id', u'name', u'description']: - self.assertIn(key, data) - ie_id = data["id"] - data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True) - self.assertIn(ie_id, data["intra_extensions"]) - - # Get all sub_meta_rule_relations - data = get_url("/v3/OS-MOON/intra_extensions/{}/sub_meta_rule_relations".format(ie_id), authtoken=True) - self.assertIn("sub_meta_rule_relations", data) - self.assertIs(type(data["sub_meta_rule_relations"]), list) - sub_meta_rule_relations = data["sub_meta_rule_relations"] - - # Get current sub_meta_rule - data = get_url("/v3/OS-MOON/intra_extensions/{}/sub_meta_rule".format(ie_id), authtoken=True) - self.assertIn("sub_meta_rules", data) - self.assertIs(type(data["sub_meta_rules"]), dict) - self.assertGreater(len(data["sub_meta_rules"].keys()), 0) - relation = data["sub_meta_rules"].keys()[0] - self.assertIn(relation, sub_meta_rule_relations) - sub_meta_rule = data["sub_meta_rules"] - sub_meta_rule_length = dict() - sub_meta_rule_length[relation] = len(data["sub_meta_rules"][relation]["subject_categories"]) + \ - len(data["sub_meta_rules"][relation]["object_categories"]) + \ - len(data["sub_meta_rules"][relation]["action_categories"]) +1 - - # Get all rules - data = get_url("/v3/OS-MOON/intra_extensions/{}/sub_rules".format(ie_id), authtoken=True) - self.assertIn("rules", data) - self.assertIs(type(data["rules"]), dict) - length = dict() - for key in data["rules"]: - self.assertIn(key, sub_meta_rule_relations) - self.assertGreater(len(data["rules"][key]), 
0) - self.assertIs(type(data["rules"][key]), list) - for sub_rule in data["rules"][key]: - self.assertEqual(len(sub_rule), sub_meta_rule_length[key]) - length[key] = len(data["rules"][key]) - - # Get one value of subject category scope - # FIXME: a better test would be to add a new value in scope and then add it to a new sub-rule - categories = get_url("/v3/OS-MOON/intra_extensions/{}/subject_categories".format(ie_id), - authtoken=True)["subject_categories"].keys() - data = get_url("/v3/OS-MOON/intra_extensions/{}/subject_category_scope/{}".format( - ie_id, categories[0]), authtoken=True) - self.assertIn("subject_category_scope", data) - self.assertIs(type(data["subject_category_scope"]), dict) - subject_category = categories.pop() - subject_value = data["subject_category_scope"][subject_category].keys()[0] - # Get one value of object category scope - # FIXME: a better test would be to add a new value in scope and then add it to a new sub-rule - categories = get_url("/v3/OS-MOON/intra_extensions/{}/object_categories".format(ie_id), - authtoken=True)["object_categories"].keys() - data = get_url("/v3/OS-MOON/intra_extensions/{}/object_category_scope/{}".format( - ie_id, categories[0]), authtoken=True) - self.assertIn("object_category_scope", data) - self.assertIs(type(data["object_category_scope"]), dict) - object_category = categories.pop() - object_value = data["object_category_scope"][object_category].keys()[0] - # Get one or more values in action category scope - _sub_meta_action_value = list() - for _sub_meta_cat in sub_meta_rule[relation]["action_categories"]: - data = get_url("/v3/OS-MOON/intra_extensions/{}/action_category_scope/{}".format( - ie_id, _sub_meta_cat), authtoken=True) - action_value = data["action_category_scope"][_sub_meta_cat].keys()[0] - _sub_meta_action_value.append(action_value) - _sub_meta_rules = list() - _sub_meta_rules.append(subject_value) - _sub_meta_rules.extend(_sub_meta_action_value) - _sub_meta_rules.append(object_value) - # Must 
append True because the sub_rule need a boolean to know if it is a positive or a negative value - _sub_meta_rules.append(True) - post_data = { - "rule": _sub_meta_rules, - "relation": "relation_super" - } - # Add a new sub-rule - data = get_url("/v3/OS-MOON/intra_extensions/{}/sub_rules".format(ie_id), - post_data=post_data, authtoken=True) - self.assertIn("rules", data) - self.assertIs(type(data["rules"]), dict) - for key in data["rules"]: - self.assertIn(key, sub_meta_rule_relations) - self.assertGreater(len(data["rules"][key]), 0) - for sub_rule in data["rules"][key]: - self.assertEqual(len(sub_rule), sub_meta_rule_length[key]) - if key == "relation_super": - self.assertEqual(len(data["rules"][key]), length[key]+1) - else: - self.assertEqual(len(data["rules"][key]), length[key]) - - # Delete the new sub-rule - data = get_url("/v3/OS-MOON/intra_extensions/{}/sub_rules/{rel}/{rule}".format( - ie_id, - rel=post_data["relation"], - rule="+".join(map(lambda x: str(x), post_data["rule"]))), - method="DELETE", authtoken=True) - self.assertIn("rules", data) - self.assertIs(type(data["rules"]), dict) - for key in data["rules"]: - self.assertIn(key, sub_meta_rule_relations) - self.assertGreater(len(data["rules"][key]), 0) - for sub_rule in data["rules"][key]: - if key == "relation_super": - self.assertEqual(len(data["rules"][key]), length[key]) - else: - self.assertEqual(len(data["rules"][key]), length[key]) - - # Delete the intra_extension - data = get_url("/v3/OS-MOON/intra_extensions/{}".format(ie_id), method="DELETE", authtoken=True) - data = get_url("/v3/OS-MOON/intra_extensions", authtoken=True) - self.assertNotIn(ie_id, data["intra_extensions"]) - - -if __name__ == "__main__": - unittest.main() diff --git a/keystone-moon/keystone/tests/moon/func/test_func_api_log.py b/keystone-moon/keystone/tests/moon/func/test_func_api_log.py deleted file mode 100644 index 58448b18..00000000 --- a/keystone-moon/keystone/tests/moon/func/test_func_api_log.py +++ /dev/null @@ -1,148 
+0,0 @@ -# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors -# This software is distributed under the terms and conditions of the 'Apache-2.0' -# license which can be found in the file 'LICENSE' in this package distribution -# or at 'http://www.apache.org/licenses/LICENSE-2.0'. - -import unittest -import json -import httplib -import time -from uuid import uuid4 -import copy - -CREDENTIALS = { - "host": "127.0.0.1", - "port": "35357", - "login": "admin", - "password": "nomoresecrete", - "tenant_name": "demo", - "sessionid": "kxb50d9uusiywfcs2fiidmu1j5nsyckr", - "csrftoken": "", - "x-subject-token": "" -} - - -def get_url(url, post_data=None, delete_data=None, crsftoken=None, method="GET", authtoken=None): - # MOON_SERVER_IP["URL"] = url - # _url = "http://{HOST}:{PORT}".format(**MOON_SERVER_IP) - if post_data: - method = "POST" - if delete_data: - method = "DELETE" - # print("\033[32m{} {}\033[m".format(method, url)) - conn = httplib.HTTPConnection(CREDENTIALS["host"], CREDENTIALS["port"]) - headers = { - "Content-type": "application/x-www-form-urlencoded", - # "Accept": "text/plain", - "Accept": "text/plain,text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Cookie': 'sessionid={}'.format(CREDENTIALS["sessionid"]), - } - if crsftoken: - headers["Cookie"] = "csrftoken={}; sessionid={}; NG_TRANSLATE_LANG_KEY:\"en\"".format(crsftoken, CREDENTIALS["sessionid"]) - CREDENTIALS["crsftoken"] = crsftoken - if authtoken: - headers["X-Auth-Token"] = CREDENTIALS["x-subject-token"] - if post_data: - method = "POST" - headers["Content-type"] = "application/json" - if crsftoken: - post_data = "&".join(map(lambda x: "=".join(x), post_data)) - elif "crsftoken" in CREDENTIALS and "sessionid" in CREDENTIALS: - post_data = json.dumps(post_data) - headers["Cookie"] = "csrftoken={}; sessionid={}; NG_TRANSLATE_LANG_KEY:\"en\"".format( - CREDENTIALS["crsftoken"], - CREDENTIALS["sessionid"]) - else: - post_data = json.dumps(post_data) - # 
conn.request(method, url, json.dumps(post_data), headers=headers) - conn.request(method, url, post_data, headers=headers) - elif delete_data: - method = "DELETE" - conn.request(method, url, json.dumps(delete_data), headers=headers) - else: - conn.request(method, url, headers=headers) - resp = conn.getresponse() - headers = resp.getheaders() - try: - CREDENTIALS["x-subject-token"] = dict(headers)["x-subject-token"] - except KeyError: - pass - if crsftoken: - sessionid_start = dict(headers)["set-cookie"].index("sessionid=")+len("sessionid=") - sessionid_end = dict(headers)["set-cookie"].index(";", sessionid_start) - sessionid = dict(headers)["set-cookie"][sessionid_start:sessionid_end] - CREDENTIALS["sessionid"] = sessionid - content = resp.read() - conn.close() - try: - return json.loads(content) - except ValueError: - return {"content": content} - - -class IntraExtensionsTest(unittest.TestCase): - - TIME_FORMAT = '%Y-%m-%d-%H:%M:%S' - - def setUp(self): - post = { - "auth": { - "identity": { - "methods": [ - "password" - ], - "password": { - "user": { - "domain": { - "id": "Default" - }, - "name": "admin", - "password": "nomoresecrete" - } - } - }, - "scope": { - "project": { - "domain": { - "id": "Default" - }, - "name": "demo" - } - } - } - } - data = get_url("/v3/auth/tokens", post_data=post) - self.assertIn("token", data) - - def tearDown(self): - pass - - def test_get_logs(self): - all_data = get_url("/v3/OS-MOON/logs", authtoken=True) - len_all_data = len(all_data["logs"]) - data_1 = all_data["logs"][len_all_data/2] - time_data_1 = data_1.split(" ")[0] - data_2 = all_data["logs"][len_all_data/2+10] - time_data_2 = data_2.split(" ")[0] - self.assertIn("logs", all_data) - data = get_url("/v3/OS-MOON/logs/filter=authz", authtoken=True) - self.assertIn("logs", data) - self.assertGreater(len_all_data, len(data["logs"])) - data = get_url("/v3/OS-MOON/logs/from={}".format(time_data_1), authtoken=True) - self.assertIn("logs", data) - self.assertGreater(len_all_data, 
len(data["logs"])) - # for _data in data["logs"]: - # self.assertGreater(time.strptime(_data.split(" "), self.TIME_FORMAT), - # time.strptime(time_data_1, self.TIME_FORMAT)) - data = get_url("/v3/OS-MOON/logs/from={},to={}".format(time_data_1, time_data_2), authtoken=True) - self.assertIn("logs", data) - self.assertGreater(len_all_data, len(data["logs"])) - # self.assertEqual(10, len(data["logs"])) - data = get_url("/v3/OS-MOON/logs/event_number=20", authtoken=True) - self.assertIn("logs", data) - self.assertGreater(len_all_data, len(data["logs"])) - self.assertEqual(20, len(data["logs"])) - - -if __name__ == "__main__": - unittest.main() diff --git a/keystone-moon/keystone/tests/moon/func/test_func_api_tenant.py b/keystone-moon/keystone/tests/moon/func/test_func_api_tenant.py deleted file mode 100644 index 77751bb5..00000000 --- a/keystone-moon/keystone/tests/moon/func/test_func_api_tenant.py +++ /dev/null @@ -1,154 +0,0 @@ -# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors -# This software is distributed under the terms and conditions of the 'Apache-2.0' -# license which can be found in the file 'LICENSE' in this package distribution -# or at 'http://www.apache.org/licenses/LICENSE-2.0'. 
- -import unittest -import json -import httplib -import time -from uuid import uuid4 -import copy - -CREDENTIALS = { - "host": "127.0.0.1", - "port": "35357", - "login": "admin", - "password": "nomoresecrete", - "tenant_name": "demo", - "sessionid": "kxb50d9uusiywfcs2fiidmu1j5nsyckr", - "csrftoken": "", - "x-subject-token": "" -} - - -def get_url(url, post_data=None, delete_data=None, crsftoken=None, method="GET", authtoken=None): - # MOON_SERVER_IP["URL"] = url - # _url = "http://{HOST}:{PORT}".format(**MOON_SERVER_IP) - if post_data: - method = "POST" - if delete_data: - method = "DELETE" - # print("\033[32m{} {}\033[m".format(method, url)) - conn = httplib.HTTPConnection(CREDENTIALS["host"], CREDENTIALS["port"]) - headers = { - "Content-type": "application/x-www-form-urlencoded", - # "Accept": "text/plain", - "Accept": "text/plain,text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - 'Cookie': 'sessionid={}'.format(CREDENTIALS["sessionid"]), - } - if crsftoken: - headers["Cookie"] = "csrftoken={}; sessionid={}; NG_TRANSLATE_LANG_KEY:\"en\"".format(crsftoken, CREDENTIALS["sessionid"]) - CREDENTIALS["crsftoken"] = crsftoken - if authtoken: - headers["X-Auth-Token"] = CREDENTIALS["x-subject-token"] - if post_data: - method = "POST" - headers["Content-type"] = "application/json" - if crsftoken: - post_data = "&".join(map(lambda x: "=".join(x), post_data)) - elif "crsftoken" in CREDENTIALS and "sessionid" in CREDENTIALS: - post_data = json.dumps(post_data) - headers["Cookie"] = "csrftoken={}; sessionid={}; NG_TRANSLATE_LANG_KEY:\"en\"".format( - CREDENTIALS["crsftoken"], - CREDENTIALS["sessionid"]) - else: - post_data = json.dumps(post_data) - # conn.request(method, url, json.dumps(post_data), headers=headers) - conn.request(method, url, post_data, headers=headers) - elif delete_data: - method = "DELETE" - conn.request(method, url, json.dumps(delete_data), headers=headers) - else: - conn.request(method, url, headers=headers) - resp = conn.getresponse() 
- headers = resp.getheaders() - try: - CREDENTIALS["x-subject-token"] = dict(headers)["x-subject-token"] - except KeyError: - pass - if crsftoken: - sessionid_start = dict(headers)["set-cookie"].index("sessionid=")+len("sessionid=") - sessionid_end = dict(headers)["set-cookie"].index(";", sessionid_start) - sessionid = dict(headers)["set-cookie"][sessionid_start:sessionid_end] - CREDENTIALS["sessionid"] = sessionid - content = resp.read() - conn.close() - try: - return json.loads(content) - except ValueError: - return {"content": content} - - -class MappingsTest(unittest.TestCase): - - def setUp(self): - post = { - "auth": { - "identity": { - "methods": [ - "password" - ], - "password": { - "user": { - "domain": { - "id": "Default" - }, - "name": "admin", - "password": "nomoresecrete" - } - } - }, - "scope": { - "project": { - "domain": { - "id": "Default" - }, - "name": "demo" - } - } - } - } - data = get_url("/v3/auth/tokens", post_data=post) - self.assertIn("token", data) - - def tearDown(self): - pass - - def test_get_tenants(self): - data = get_url("/v3/OS-MOON/tenants", authtoken=True) - self.assertIn("tenants", data) - self.assertIsInstance(data["tenants"], dict) - print(data) - - def test_add_delete_mapping(self): - data = get_url("/v3/projects", authtoken=True) - project_id = None - for project in data["projects"]: - if project["name"] == "demo": - project_id = project["id"] - data = get_url("/v3/OS-MOON/tenant", - post_data={ - "id": project_id, - "name": "tenant1", - "authz": "intra_extension_uuid1", - "admin": "intra_extension_uuid2" - }, - authtoken=True) - self.assertIn("tenant", data) - self.assertIsInstance(data["tenant"], dict) - uuid = data["tenant"]["id"] - data = get_url("/v3/OS-MOON/tenants", authtoken=True) - self.assertIn("tenants", data) - self.assertIsInstance(data["tenants"], dict) - print(data) - data = get_url("/v3/OS-MOON/tenant/{}".format(uuid), - method="DELETE", - authtoken=True) - data = get_url("/v3/OS-MOON/tenants", 
authtoken=True) - self.assertIn("tenants", data) - self.assertIsInstance(data["tenants"], dict) - print(data) - -if __name__ == "__main__": - unittest.main() diff --git a/keystone-moon/keystone/tests/moon/func/test_func_moon_auth.py b/keystone-moon/keystone/tests/moon/func/test_func_moon_auth.py deleted file mode 100644 index 56132609..00000000 --- a/keystone-moon/keystone/tests/moon/func/test_func_moon_auth.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors -# This software is distributed under the terms and conditions of the 'Apache-2.0' -# license which can be found in the file 'LICENSE' in this package distribution -# or at 'http://www.apache.org/licenses/LICENSE-2.0'. - -import unittest -import json -import requests - - -class AuthTest(unittest.TestCase): - - def setUp(self): - self.data_auth = { - "username": "", - "password": "" - } - - def tearDown(self): - pass - - def test_authz(self): - self.data_auth['username'] = 'admin' - self.data_auth['password'] = '' - req = requests.post("http://localhost:5000/moon/auth/tokens", - json=self.data_auth, - headers={"Content-Type": "application/json"} - ) - self.assertIn(req.status_code, (200, 201)) - result = req.json() - self.assertIn("token", result.keys()) - self.assertEqual(result["token"], None) - - self.data_auth['username'] = 'admin' - self.data_auth['password'] = 'nomoresecrete' - req = requests.post("http://localhost:5000/moon/auth/tokens", - json=self.data_auth, - headers={"Content-Type": "application/json"} - ) - self.assertIn(req.status_code, (200, 201)) - result = req.json() - self.assertIn("token", result.keys()) - self.assertNotEqual(result["token"], None) - -if __name__ == "__main__": - unittest.main() - - diff --git a/keystone-moon/keystone/tests/moon/scenario/test_nova_a.sh b/keystone-moon/keystone/tests/moon/scenario/test_nova_a.sh deleted file mode 100644 index 36afd5a1..00000000 --- 
a/keystone-moon/keystone/tests/moon/scenario/test_nova_a.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env bash - -# as user admin - -# create authz intraextension -moon intraextension add policy_mls_authz test_authz - -# create admin intraextension -moon intraextension add policy_rbac_admin test_admin - -# create tenant -moon tenant add --authz xxx --admin xxx `demo` - -# check that now moon authorizes the manipulation list_servers -nova list - -# select the authz intraextension -moon intraextension select `test_authz_uuid` - -# del object assignment for servers -moon object assignment del `servers_uuid` `object_security_level_uuid` `low_uuid` - -# add object assignment for servers -moon object assignment add `servers_uuid` `object_security_level_uuid` `high_uuid` - -# check now moon block the manipulation list_servers -nova list - -# del object assignment for servers -moon object assignment del `servers_uuid` `object_security_level_uuid` `high_uuid` - -# add object assignment for servers -moon object assignment add `servers_uuid` `object_security_level_uuid` `low_uuid` \ No newline at end of file diff --git a/keystone-moon/keystone/tests/moon/scenario/test_nova_b.sh b/keystone-moon/keystone/tests/moon/scenario/test_nova_b.sh deleted file mode 100644 index f2c0e4fc..00000000 --- a/keystone-moon/keystone/tests/moon/scenario/test_nova_b.sh +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env bash - -# as user admin - -# create authz intraextension -moon intraextension add policy_mls_authz test_authz - -# create admin intraextension -moon intraextension add policy_rbac_admin test_admin - -# create tenant -moon tenant add --authz xxx --admin xxx demo - -# select the authz tenant -moon intraextension select `test_authz_uuid` - -# create a VM (vm1) in OpenStack -nova create vm1..... - -# add corresponding object in moon -moon object add vm1 - -# check that moon blocks the vm1 manipulatin -nova vm1 suspend .... 
- -# add object assignment for vm1 -moon object assignment `vm1_uuid` `object_security_level_uuid` `high_uuid` - -# check now moon block the manipulation of vm1 -nova vm1 suspend .... - -# del object assignment for servers -moon object assignment del `vm1_uuid` `object_security_level_uuid` `high_uuid` - -# add object assignment for servers -moon object assignment add `vm1_uuid` `object_security_level_uuid` `low_uuid` - -# check now moon unblock the manipulation of vm1 -nova vm1 suspend .... \ No newline at end of file diff --git a/keystone-moon/keystone/tests/moon/scenario/test_nova_c.sh b/keystone-moon/keystone/tests/moon/scenario/test_nova_c.sh deleted file mode 100644 index bf4bd3c8..00000000 --- a/keystone-moon/keystone/tests/moon/scenario/test_nova_c.sh +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env bash - -# as user demo -. openrc demo - -# create authz intraextension -moon intraextension add policy_mls_authz test_authz - -# create admin intraextension -moon intraextension add policy_rbac_admin test_admin - -# create tenant -moon tenant add --authz xxx --admin xxx demo - -# select the authz tenant -moon intraextension select `test_authz_uuid` - -# check that moon blocks modification of object assignments -moon object assignment add `vm1_uuid` `object_security_level_uuid` `high_uuid` - -# as user admin -. openrc admin - -# select the admin intraextension -moon intraextension select `test_admin_uuid` - -# add write permission to the dev_role user for assignment table -moon rule add `rbac_rule_uuid` [`dev_role_uuid`, `write_uuid`, `authz.assignment`] - -# as user demo -. 
openrc demo - -# select the authz intraextension -moon intraextension select `test_authz_uuid` - -# check that moon authorizes modification of rule table by demo -moon object assignment add `vm1_uuid` `object_security_level_uuid` `high_uuid` diff --git a/keystone-moon/keystone/tests/moon/unit/__init__.py b/keystone-moon/keystone/tests/moon/unit/__init__.py deleted file mode 100644 index d801d015..00000000 --- a/keystone-moon/keystone/tests/moon/unit/__init__.py +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors -# This software is distributed under the terms and conditions of the 'Apache-2.0' -# license which can be found in the file 'LICENSE' in this package distribution -# or at 'http://www.apache.org/licenses/LICENSE-2.0'. -import uuid - -USER = { - 'name': 'admin', - 'domain_id': "default", - 'password': 'admin' -} -IE = { - "name": "test IE", - "policymodel": "policy_authz", - "description": "a simple description." -} - - -def create_intra_extension(self, policy_model="policy_authz"): - - IE["model"] = policy_model - IE["name"] = uuid.uuid4().hex - genre = "admin" - if "authz" in policy_model: - genre = "authz" - IE["genre"] = genre - ref = self.admin_api.load_intra_extension_dict(self.root_api.root_admin_id, - intra_extension_dict=IE) - self.admin_api.populate_default_data(ref) - self.assertIsInstance(ref, dict) - return ref - - -def create_user(self, username="TestAdminIntraExtensionManagerUser"): - user = { - "id": uuid.uuid4().hex, - "name": username, - "enabled": True, - "description": "", - "domain_id": "default" - } - _user = self.identity_api.create_user(user) - return _user - - -def create_mapping(self, tenant_name=None, authz_id=None, admin_id=None): - - if not tenant_name: - tenant_name = uuid.uuid4().hex - - tenant = { - "id": uuid.uuid4().hex, - "name": tenant_name, - "description": uuid.uuid4().hex, - "intra_authz_extension_id": authz_id, - "intra_admin_extension_id": admin_id, - "enabled": 
True, - "domain_id": "default" - } - keystone_tenant = self.resource_api.create_project(tenant["id"], tenant) - mapping = self.tenant_api.add_tenant_dict(self.root_api.root_admin_id, tenant["id"], tenant) - self.assertIsInstance(mapping, dict) - self.assertIn("intra_authz_extension_id", mapping[tenant["id"]]) - self.assertIn("intra_admin_extension_id", mapping[tenant["id"]]) - self.assertEqual(mapping[tenant["id"]]["intra_authz_extension_id"], authz_id) - self.assertEqual(mapping[tenant["id"]]["intra_admin_extension_id"], admin_id) - return tenant, mapping diff --git a/keystone-moon/keystone/tests/moon/unit/test_unit_core_configuration.py b/keystone-moon/keystone/tests/moon/unit/test_unit_core_configuration.py deleted file mode 100644 index 59eb3d25..00000000 --- a/keystone-moon/keystone/tests/moon/unit/test_unit_core_configuration.py +++ /dev/null @@ -1,72 +0,0 @@ -# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors -# This software is distributed under the terms and conditions of the 'Apache-2.0' -# license which can be found in the file 'LICENSE' in this package distribution -# or at 'http://www.apache.org/licenses/LICENSE-2.0'. 
- -"""Unit tests for core configuration.""" - -from oslo_config import cfg -from keystone.tests import unit as tests -from keystone.tests.unit.ksfixtures import database -from keystone.tests.unit import default_fixtures -from keystone.contrib.moon.core import LogManager -from keystone.contrib.moon.core import IntraExtensionAdminManager -from keystone.contrib.moon.core import IntraExtensionRootManager -from keystone.contrib.moon.core import ConfigurationManager -from keystone.contrib.moon.core import IntraExtensionAuthzManager -from keystone.tests.moon.unit import * - -CONF = cfg.CONF - - -class TestConfigurationManager(tests.TestCase): - - def setUp(self): - self.useFixture(database.Database()) - super(TestConfigurationManager, self).setUp() - self.load_fixtures(default_fixtures) - self.load_backends() - domain = {'id': "default", 'name': "default"} - self.resource_api.create_domain(domain['id'], domain) - self.admin = create_user(self, username="admin") - self.demo = create_user(self, username="demo") - ref = self.root_api.load_root_intra_extension_dict() - self.root_api.populate_default_data(ref) - self.root_intra_extension = self.root_api.get_root_extension_dict() - self.root_intra_extension_id = self.root_intra_extension.keys()[0] - self.ADMIN_ID = self.root_api.root_admin_id - self.authz_manager = self.authz_api - self.admin_manager = self.admin_api - self.configuration_manager = self.configuration_api - - def load_extra_backends(self): - return { - "moonlog_api": LogManager(), - "admin_api": IntraExtensionAdminManager(), - "configuration_api": ConfigurationManager(), - "root_api": IntraExtensionRootManager(), - "authz_api": IntraExtensionAuthzManager() - } - - def config_overrides(self): - super(TestConfigurationManager, self).config_overrides() - self.config_fixture.config( - group='moon', - configuration_driver='keystone.contrib.moon.backends.memory.ConfigurationConnector' - ) - self.config_fixture.config( - group='moon', - 
tenant_driver='keystone.contrib.moon.backends.sql.TenantConnector') - self.policy_directory = '/etc/keystone/policies' - self.config_fixture.config( - group='moon', - intraextension_driver='keystone.contrib.moon.backends.sql.IntraExtensionConnector') - self.config_fixture.config( - group='moon', - policy_directory=self.policy_directory) - - def test_get_policy_template_dict(self): - data = self.configuration_manager.get_policy_templates_dict(self.ADMIN_ID) - self.assertIsInstance(data, dict) - self.assertIn("policy_root", data) - diff --git a/keystone-moon/keystone/tests/moon/unit/test_unit_core_intra_extension_admin.py b/keystone-moon/keystone/tests/moon/unit/test_unit_core_intra_extension_admin.py deleted file mode 100644 index f32df5dd..00000000 --- a/keystone-moon/keystone/tests/moon/unit/test_unit_core_intra_extension_admin.py +++ /dev/null @@ -1,2107 +0,0 @@ -# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors -# This software is distributed under the terms and conditions of the 'Apache-2.0' -# license which can be found in the file 'LICENSE' in this package distribution -# or at 'http://www.apache.org/licenses/LICENSE-2.0'. - -"""Unit tests for core IntraExtensionAdminManager""" - -from oslo_config import cfg -from keystone.tests import unit as tests -from keystone.contrib.moon.core import IntraExtensionAdminManager, IntraExtensionAuthzManager -from keystone.contrib.moon.core import ConfigurationManager -from keystone.tests.unit.ksfixtures import database -from keystone.contrib.moon.exception import * -from keystone.tests.unit import default_fixtures -from keystone.contrib.moon.core import LogManager, TenantManager -from keystone.tests.moon.unit import * - -CONF = cfg.CONF - -USER = { - 'name': 'admin', - 'domain_id': "default", - 'password': 'admin' -} - -IE = { - "name": "test IE", - "policymodel": "policy_rbac_authz", - "description": "a simple description." 
-} - - -@dependency.requires('admin_api', 'authz_api', 'tenant_api', 'configuration_api', 'moonlog_api') -class TestIntraExtensionAdminManagerOK(tests.TestCase): - - # TODO: must be reviewed because some tests are on the authz interface - def setUp(self): - self.useFixture(database.Database()) - super(TestIntraExtensionAdminManagerOK, self).setUp() - self.load_fixtures(default_fixtures) - self.load_backends() - domain = {'id': "default", 'name': "default"} - self.resource_api.create_domain(domain['id'], domain) - self.admin = create_user(self, username="admin") - self.demo = create_user(self, username="demo") - ref = self.root_api.load_root_intra_extension_dict() - self.root_api.populate_default_data(ref) - self.root_intra_extension = self.root_api.get_root_extension_dict() - self.root_intra_extension_id = self.root_intra_extension.keys()[0] - self.ADMIN_ID = self.root_api.root_admin_id - self.authz_manager = self.authz_api - self.admin_manager = self.admin_api - - def __get_key_from_value(self, value, values_dict): - return filter(lambda v: v[1] == value, values_dict.iteritems())[0][0] - - def load_extra_backends(self): - return { - "moonlog_api": LogManager(), - "tenant_api": TenantManager(), - "admin_api": IntraExtensionAdminManager(), - "authz_api": IntraExtensionAuthzManager(), - "configuration_api": ConfigurationManager(), - } - - def config_overrides(self): - super(TestIntraExtensionAdminManagerOK, self).config_overrides() - self.policy_directory = '/etc/keystone/policies' - self.config_fixture.config( - group='moon', - intraextension_driver='keystone.contrib.moon.backends.sql.IntraExtensionConnector') - self.config_fixture.config( - group='moon', - policy_directory=self.policy_directory) - - def delete_admin_intra_extension(self): - self.authz_manager.del_intra_extension(self.ref["id"]) - - def test_subjects(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, 
mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - subjects = self.authz_manager.get_subjects_dict(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(subjects, dict) - for key, value in subjects.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIn("description", value) - self.assertIn("keystone_name", value) - self.assertIn("keystone_id", value) - - create_user(self, "subject_test") - new_subject = {"name": "subject_test", "description": "subject_test"} - - subjects = self.admin_manager.add_subject_dict(admin_subject_id, authz_ie_dict["id"], new_subject) - _subjects = dict(subjects) - self.assertEqual(len(_subjects.keys()), 1) - new_subject["id"] = _subjects.keys()[0] - value = subjects[new_subject["id"]] - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertEqual(value["name"], new_subject["name"]) - self.assertIn("description", value) - self.assertEqual(value["description"], new_subject["description"]) - - # Delete the new subject - self.admin_manager.del_subject(admin_subject_id, authz_ie_dict["id"], new_subject["id"]) - subjects = self.authz_manager.get_subjects_dict(admin_subject_id, authz_ie_dict["id"]) - for key, value in subjects.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIsNot(new_subject["name"], value["name"]) - self.assertIn("description", value) - - def test_objects(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], 
admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - objects = self.authz_manager.get_objects_dict(admin_subject_id, authz_ie_dict["id"]) - objects_id_list = [] - self.assertIsInstance(objects, dict) - for key, value in objects.iteritems(): - objects_id_list.append(key) - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIn("description", value) - - def test_actions(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - actions = self.authz_manager.get_actions_dict(admin_subject_id, authz_ie_dict["id"]) - actions_id_list = [] - self.assertIsInstance(actions, dict) - for key, value in actions.iteritems(): - actions_id_list.append(key) - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIn("description", value) - - def test_subject_categories(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, 
demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - subject_categories = self.authz_manager.get_subject_categories_dict(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(subject_categories, dict) - for key, value in subject_categories.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIn("description", value) - - new_subject_category = {"name": "subject_category_test", "description": "subject_category_test"} - - subject_categories = self.admin_manager.add_subject_category_dict(admin_subject_id, authz_ie_dict["id"], new_subject_category) - _subject_categories = dict(subject_categories) - self.assertEqual(len(_subject_categories.keys()), 1) - new_subject_category["id"] = _subject_categories.keys()[0] - value = subject_categories[new_subject_category["id"]] - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertEqual(value["name"], new_subject_category["name"]) - self.assertIn("description", value) - self.assertEqual(value["description"], new_subject_category["description"]) - - # Delete the new subject_category - self.admin_manager.del_subject_category(admin_subject_id, authz_ie_dict["id"], new_subject_category["id"]) - subject_categories = self.authz_manager.get_subject_categories_dict(admin_subject_id, authz_ie_dict["id"]) - for key, value in subject_categories.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIsNot(new_subject_category["name"], value["name"]) - self.assertIn("description", value) - - def test_object_categories(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - 
self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - object_categories = self.authz_manager.get_object_categories_dict(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(object_categories, dict) - for key, value in object_categories.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIn("description", value) - - new_object_category = {"name": "object_category_test", "description": "object_category_test"} - - object_categories = self.admin_manager.add_object_category_dict(admin_subject_id, authz_ie_dict["id"], new_object_category) - _object_categories = dict(object_categories) - self.assertEqual(len(_object_categories.keys()), 1) - new_object_category["id"] = _object_categories.keys()[0] - value = object_categories[new_object_category["id"]] - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertEqual(value["name"], new_object_category["name"]) - self.assertIn("description", value) - self.assertEqual(value["description"], new_object_category["description"]) - - # Delete the new object_category - - self.admin_manager.del_object_category(admin_subject_id, authz_ie_dict["id"], new_object_category["id"]) - object_categories = self.authz_manager.get_object_categories_dict(admin_subject_id, authz_ie_dict["id"]) - for key, value in object_categories.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIsNot(new_object_category["name"], value["name"]) - self.assertIn("description", value) - - def test_action_categories(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], 
admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - action_categories = self.authz_manager.get_action_categories_dict(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(action_categories, dict) - for key, value in action_categories.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIn("description", value) - - new_action_category = {"name": "action_category_test", "description": "action_category_test"} - - action_categories = self.admin_manager.add_action_category_dict(admin_subject_id, authz_ie_dict["id"], new_action_category) - _action_categories = dict(action_categories) - self.assertEqual(len(_action_categories.keys()), 1) - new_action_category["id"] = _action_categories.keys()[0] - value = action_categories[new_action_category["id"]] - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertEqual(value["name"], new_action_category["name"]) - self.assertIn("description", value) - self.assertEqual(value["description"], new_action_category["description"]) - - # Delete the new action_category - - self.admin_manager.del_action_category(admin_subject_id, authz_ie_dict["id"], new_action_category["id"]) - action_categories = self.authz_manager.get_action_categories_dict(admin_subject_id, authz_ie_dict["id"]) - for key, value in action_categories.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIsNot(new_action_category["name"], value["name"]) - self.assertIn("description", value) - - def test_subject_category_scope(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, 
mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - # demo_subject_dict = self.manager.add_subject_dict(admin_subject_id, admin_ie_dict["id"], - # {"name": "demo", "description": "demo"}) - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - subject_categories = self.admin_manager.add_subject_category_dict( - admin_subject_id, - authz_ie_dict["id"], - { - "name": "country", - "description": "country", - } - ) - - for subject_category_id in subject_categories: - - subject_category_scope = self.authz_manager.get_subject_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id) - self.assertIsInstance(subject_category_scope, dict) - self.assertEqual({}, subject_category_scope) - - new_subject_category_scope = { - "name": "france", - "description": "france", - } - - subject_category_scope = self.admin_manager.add_subject_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id, - new_subject_category_scope) - self.assertIsInstance(subject_category_scope, dict) - self.assertEqual(len(subject_category_scope.keys()), 1) - subject_category_scope_id = subject_category_scope.keys()[0] - subject_category_scope_value = subject_category_scope[subject_category_scope_id] - self.assertIn("name", subject_category_scope_value) - self.assertEqual(new_subject_category_scope["name"], subject_category_scope_value["name"]) - self.assertIn("description", subject_category_scope_value) - self.assertEqual(new_subject_category_scope["description"], subject_category_scope_value["description"]) - - # Delete the new subject_category_scope - - self.admin_manager.del_subject_scope( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id, - 
subject_category_scope_id) - subject_category_scope = self.admin_manager.get_subject_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id) - self.assertIsInstance(subject_category_scope, dict) - self.assertNotIn(subject_category_scope_id, subject_category_scope.keys()) - - def test_object_category_scope(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - # demo_subject_dict = self.manager.add_subject_dict(admin_subject_id, admin_ie_dict["id"], - # {"name": "demo", "description": "demo"}) - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - object_categories = self.admin_manager.add_object_category_dict( - admin_subject_id, - authz_ie_dict["id"], - { - "name": "country", - "description": "country", - } - ) - - for object_category_id in object_categories: - - object_category_scope = self.authz_manager.get_object_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - object_category_id) - self.assertIsInstance(object_category_scope, dict) - self.assertEqual({}, object_category_scope) - - new_object_category_scope = { - "name": "france", - "description": "france", - } - - object_category_scope = self.admin_manager.add_object_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - object_category_id, - new_object_category_scope) - self.assertIsInstance(object_category_scope, dict) - self.assertEqual(len(object_category_scope.keys()), 1) - object_category_scope_id = object_category_scope.keys()[0] - object_category_scope_value = object_category_scope[object_category_scope_id] - self.assertIn("name", 
object_category_scope_value) - self.assertEqual(new_object_category_scope["name"], object_category_scope_value["name"]) - self.assertIn("description", object_category_scope_value) - self.assertEqual(new_object_category_scope["description"], object_category_scope_value["description"]) - - # Delete the new object_category_scope - - self.admin_manager.del_object_scope( - admin_subject_id, - authz_ie_dict["id"], - object_category_id, - object_category_scope_id) - object_category_scope = self.admin_manager.get_object_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - object_category_id) - self.assertIsInstance(object_category_scope, dict) - self.assertNotIn(object_category_scope_id, object_category_scope.keys()) - - def test_action_category_scope(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - # demo_subject_dict = self.manager.add_subject_dict(admin_subject_id, admin_ie_dict["id"], - # {"name": "demo", "description": "demo"}) - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - action_categories = self.admin_manager.add_action_category_dict( - admin_subject_id, - authz_ie_dict["id"], - { - "name": "swift", - "description": "swift actions", - } - ) - - for action_category_id in action_categories: - - action_category_scope = self.authz_manager.get_action_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - action_category_id) - self.assertIsInstance(action_category_scope, dict) - self.assertEqual({}, action_category_scope) - - new_action_category_scope = { - "name": "get", - "description": "get swift files", - } - - 
action_category_scope = self.admin_manager.add_action_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - action_category_id, - new_action_category_scope) - self.assertIsInstance(action_category_scope, dict) - self.assertEqual(len(action_category_scope.keys()), 1) - action_category_scope_id = action_category_scope.keys()[0] - action_category_scope_value = action_category_scope[action_category_scope_id] - self.assertIn("name", action_category_scope_value) - self.assertEqual(new_action_category_scope["name"], action_category_scope_value["name"]) - self.assertIn("description", action_category_scope_value) - self.assertEqual(new_action_category_scope["description"], action_category_scope_value["description"]) - - # Delete the new action_category_scope - - self.admin_manager.del_action_scope( - admin_subject_id, - authz_ie_dict["id"], - action_category_id, - action_category_scope_id) - action_category_scope = self.admin_manager.get_action_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - action_category_id) - self.assertIsInstance(action_category_scope, dict) - self.assertNotIn(action_category_scope_id, action_category_scope.keys()) - - def test_subject_category_assignment(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - admin_authz_subject_id, admin_authz_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], authz_ie_dict['id'], 'admin').iteritems().next() - # demo_subject_dict = self.manager.add_subject_dict(admin_subject_id, admin_ie_dict["id"], - # {"name": "demo", "description": "demo"}) - demo_subject_id, demo_subject_dict = \ - 
self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - demo_authz_subject_id, demo_authz_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], authz_ie_dict['id'], 'demo').iteritems().next() - - subjects_dict = self.authz_manager.get_subjects_dict(admin_subject_id, authz_ie_dict["id"]) - - subject_categories = self.admin_manager.add_subject_category_dict( - admin_subject_id, - authz_ie_dict["id"], - { - "name": "country", - "description": "country", - } - ) - - for subject_category_id in subject_categories: - subject_category_scope = self.authz_manager.get_subject_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id) - self.assertIsInstance(subject_category_scope, dict) - self.assertEqual({}, subject_category_scope) - - new_subject_category_scope_1 = { - "name": "france", - "description": "france", - } - - subject_category_scope_1 = self.admin_manager.add_subject_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id, - new_subject_category_scope_1) - subject_category_scope_1_id = subject_category_scope_1.keys()[0] - - new_subject_category_scope_2 = { - "name": "china", - "description": "china", - } - - subject_category_scope_2 = self.admin_manager.add_subject_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id, - new_subject_category_scope_2) - subject_category_scope_2_id = subject_category_scope_2.keys()[0] - - subject_category_assignments = self.authz_manager.get_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - admin_authz_subject_id, - subject_category_id - ) - self.assertIsInstance(subject_category_assignments, list) - self.assertEqual([], subject_category_assignments) - - subject_category_assignments = self.authz_manager.get_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - demo_authz_subject_id, - subject_category_id - ) - 
self.assertIsInstance(subject_category_assignments, list) - self.assertEqual([], subject_category_assignments) - - subject_category_assignments = self.admin_manager.add_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - admin_authz_subject_id, subject_category_id, subject_category_scope_1_id - ) - self.assertIsInstance(subject_category_assignments, list) - - self.assertEqual(len(subject_category_assignments), 1) - - subject_category_assignments = self.admin_manager.add_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - admin_authz_subject_id, subject_category_id, subject_category_scope_2_id - ) - self.assertIsInstance(subject_category_assignments, list) - self.assertEqual(len(subject_category_assignments), 2) - - subject_category_assignments = self.admin_manager.add_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - demo_authz_subject_id, subject_category_id, subject_category_scope_2_id - ) - self.assertIsInstance(subject_category_assignments, list) - self.assertEqual(len(subject_category_assignments), 1) - - subject_category_assignments = self.admin_manager.get_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - admin_authz_subject_id, subject_category_id - ) - self.assertIsInstance(subject_category_assignments, list) - self.assertEqual(len(subject_category_assignments), 2) - - self.admin_manager.del_subject_assignment( - admin_subject_id, - authz_ie_dict["id"], - admin_authz_subject_id, subject_category_id, subject_category_scope_2_id - ) - subject_category_assignments = self.admin_manager.get_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - admin_authz_subject_id, subject_category_id - ) - self.assertIsInstance(subject_category_assignments, list) - self.assertEqual(len(subject_category_assignments), 1) - - def test_object_category_assignment(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, 
"policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - # demo_subject_dict = self.manager.add_subject_dict(admin_subject_id, admin_ie_dict["id"], - # {"name": "demo", "description": "demo"}) - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - objects_dict = self.authz_manager.get_objects_dict(admin_subject_id, authz_ie_dict["id"]) - - object_vm1 = self.admin_manager.add_object_dict(admin_subject_id, authz_ie_dict["id"], {"name": "vm1", "description": "vm1"}) - object_vm2 = self.admin_manager.add_object_dict(admin_subject_id, authz_ie_dict["id"], {"name": "vm2", "description": "vm2"}) - object_vm1_id = object_vm1.keys()[0] - object_vm2_id = object_vm2.keys()[0] - if not object_vm1_id or not object_vm2_id: - raise Exception("Cannot run tests, database is corrupted ? 
(need upload and list in objects)") - - object_categories = self.admin_manager.add_object_category_dict( - admin_subject_id, - authz_ie_dict["id"], - { - "name": "location", - "description": "location", - } - ) - - for object_category_id in object_categories: - object_category_scope = self.authz_manager.get_object_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - object_category_id) - self.assertIsInstance(object_category_scope, dict) - self.assertEqual({}, object_category_scope) - - new_object_category_scope_1 = { - "name": "france", - "description": "france", - } - - object_category_scope_1 = self.admin_manager.add_object_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - object_category_id, - new_object_category_scope_1) - object_category_scope_1_id = object_category_scope_1.keys()[0] - - new_object_category_scope_2 = { - "name": "china", - "description": "china", - } - - object_category_scope_2 = self.admin_manager.add_object_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - object_category_id, - new_object_category_scope_2) - object_category_scope_2_id = object_category_scope_2.keys()[0] - - object_category_assignments = self.authz_manager.get_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm1_id, - object_category_id - ) - self.assertIsInstance(object_category_assignments, list) - self.assertEqual([], object_category_assignments) - - object_category_assignments = self.authz_manager.get_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm2_id, - object_category_id - ) - self.assertIsInstance(object_category_assignments, list) - self.assertEqual([], object_category_assignments) - - object_category_assignments = self.admin_manager.add_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm1_id, object_category_id, object_category_scope_1_id - ) - self.assertIsInstance(object_category_assignments, list) - - self.assertEqual(len(object_category_assignments), 1) - - 
object_category_assignments = self.admin_manager.add_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm1_id, object_category_id, object_category_scope_2_id - ) - self.assertIsInstance(object_category_assignments, list) - self.assertEqual(len(object_category_assignments), 2) - - object_category_assignments = self.admin_manager.add_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm2_id, object_category_id, object_category_scope_2_id - ) - self.assertIsInstance(object_category_assignments, list) - self.assertEqual(len(object_category_assignments), 1) - - object_category_assignments = self.admin_manager.get_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm1_id, object_category_id - ) - self.assertIsInstance(object_category_assignments, list) - self.assertEqual(len(object_category_assignments), 2) - - self.admin_manager.del_object_assignment( - admin_subject_id, - authz_ie_dict["id"], - object_vm1_id, object_category_id, object_category_scope_2_id - ) - object_category_assignments = self.admin_manager.get_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm1_id, object_category_id - ) - self.assertIsInstance(object_category_assignments, list) - self.assertEqual(len(object_category_assignments), 1) - - def test_action_category_assignment(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - # demo_subject_dict = self.manager.add_subject_dict(admin_subject_id, admin_ie_dict["id"], - # {"name": "demo", "description": "demo"}) - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], 
admin_ie_dict['id'], 'demo').iteritems().next() - - actions_dict = self.authz_manager.get_actions_dict(admin_subject_id, authz_ie_dict["id"]) - - action_upload_id = None - action_list_id = None - for _action_id in actions_dict: - if actions_dict[_action_id]['name'] == 'upload': - action_upload_id = _action_id - if actions_dict[_action_id]['name'] == 'list': - action_list_id = _action_id - if not action_upload_id or not action_list_id: - raise Exception("Cannot run tests, database is corrupted ? (need upload and list in actions)") - - action_categories = self.admin_manager.add_action_category_dict( - admin_subject_id, - authz_ie_dict["id"], - { - "name": "swift", - "description": "swift actions", - } - ) - - for action_category_id in action_categories: - action_category_scope = self.authz_manager.get_action_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - action_category_id) - self.assertIsInstance(action_category_scope, dict) - self.assertEqual({}, action_category_scope) - - new_action_category_scope_1 = { - "name": "swift_admin", - "description": "action require admin rights", - } - - action_category_scope_1 = self.admin_manager.add_action_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - action_category_id, - new_action_category_scope_1) - action_category_scope_1_id = action_category_scope_1.keys()[0] - - new_action_category_scope_2 = { - "name": "swift_anonymous", - "description": "action require no right", - } - - action_category_scope_2 = self.admin_manager.add_action_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - action_category_id, - new_action_category_scope_2) - action_category_scope_2_id = action_category_scope_2.keys()[0] - - action_category_assignments = self.authz_manager.get_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_upload_id, - action_category_id - ) - self.assertIsInstance(action_category_assignments, list) - self.assertEqual([], action_category_assignments) - - action_category_assignments 
= self.authz_manager.get_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_list_id, - action_category_id - ) - self.assertIsInstance(action_category_assignments, list) - self.assertEqual([], action_category_assignments) - - action_category_assignments = self.admin_manager.add_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_upload_id, action_category_id, action_category_scope_1_id - ) - self.assertIsInstance(action_category_assignments, list) - - self.assertEqual(len(action_category_assignments), 1) - - action_category_assignments = self.admin_manager.add_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_upload_id, action_category_id, action_category_scope_2_id - ) - self.assertIsInstance(action_category_assignments, list) - self.assertEqual(len(action_category_assignments), 2) - - action_category_assignments = self.admin_manager.add_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_list_id, action_category_id, action_category_scope_2_id - ) - self.assertIsInstance(action_category_assignments, list) - self.assertEqual(len(action_category_assignments), 1) - - action_category_assignments = self.admin_manager.get_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_upload_id, action_category_id - ) - self.assertIsInstance(action_category_assignments, list) - self.assertEqual(len(action_category_assignments), 2) - - self.admin_manager.del_action_assignment( - admin_subject_id, - authz_ie_dict["id"], - action_upload_id, action_category_id, action_category_scope_2_id - ) - action_category_assignments = self.admin_manager.get_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_upload_id, action_category_id - ) - self.assertIsInstance(action_category_assignments, list) - self.assertEqual(len(action_category_assignments), 1) - - def test_sub_meta_rules(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - 
admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - # demo_subject_dict = self.admin_manager.add_subject_dict(admin_subject_id, admin_ie_dict["id"], - # {"name": "demo", "description": "demo"}) - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - aggregation_algorithm = self.admin_manager.get_aggregation_algorithm_id(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(aggregation_algorithm, dict) - - # TODO: need more tests on aggregation_algorithms (set and del) - - sub_meta_rules = self.admin_manager.get_sub_meta_rules_dict(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(sub_meta_rules, dict) - categories = { - "subject_categories": self.admin_manager.get_subject_categories_dict(admin_subject_id, authz_ie_dict["id"]), - "object_categories": self.admin_manager.get_object_categories_dict(admin_subject_id, authz_ie_dict["id"]), - "action_categories": self.admin_manager.get_action_categories_dict(admin_subject_id, authz_ie_dict["id"]) - } - for key, value in sub_meta_rules.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("action_categories", value) - self.assertIn("object_categories", value) - self.assertIn("subject_categories", value) - self.assertIn("algorithm", value) - self.assertIn("name", value) - for action_category_id in value["action_categories"]: - self.assertIn(action_category_id, categories["action_categories"]) - for object_category_id in value["object_categories"]: - self.assertIn(object_category_id, categories["object_categories"]) - for subject_category_id in value["subject_categories"]: - self.assertIn(subject_category_id, 
categories["subject_categories"]) - # TODO: need more tests (set and del) - - def test_sub_rules(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - # demo_subject_dict = self.admin_manager.add_subject_dict(admin_subject_id, admin_ie_dict["id"], - # {"name": "demo", "description": "demo"}) - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - sub_meta_rules = self.admin_manager.get_sub_meta_rules_dict(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(sub_meta_rules, dict) - - for relation_id in sub_meta_rules: - rules = self.admin_manager.get_rules_dict(admin_subject_id, authz_ie_dict["id"], relation_id) - rule_length = len(sub_meta_rules[relation_id]["subject_categories"]) + \ - len(sub_meta_rules[relation_id]["object_categories"]) + \ - len(sub_meta_rules[relation_id]["action_categories"]) + 1 - for rule_id in rules: - self.assertEqual(rule_length, len(rules[rule_id])) - rule = list(rules[rule_id]) - for cat, cat_func, func_name in ( - ("subject_categories", self.admin_manager.get_subject_scopes_dict, "subject_scope"), - ("action_categories", self.admin_manager.get_action_scopes_dict, "action_scope"), - ("object_categories", self.admin_manager.get_object_scopes_dict, "object_scope"), - ): - for cat_value in sub_meta_rules[relation_id][cat]: - scope = cat_func( - admin_subject_id, - authz_ie_dict["id"], - cat_value - ) - a_scope = rule.pop(0) - if type(a_scope) is not bool: - self.assertIn(a_scope, scope.keys()) - - # add a new subrule - - sub_rule = [] - for cat, cat_func, func_name in ( - ("subject_categories", 
self.admin_manager.get_subject_scopes_dict, "subject_scope"), - ("action_categories", self.admin_manager.get_action_scopes_dict, "action_scope"), - ("object_categories", self.admin_manager.get_object_scopes_dict, "object_scope"), - ): - for cat_value in sub_meta_rules[relation_id][cat]: - scope = cat_func( - admin_subject_id, - authz_ie_dict["id"], - cat_value - ) - sub_rule.append(scope.keys()[0]) - - sub_rule.append(False) - - sub_rules = self.admin_manager.add_rule_dict(admin_subject_id, authz_ie_dict["id"], relation_id, sub_rule) - self.assertIsInstance(sub_rules, dict) - self.assertIn(sub_rule, sub_rules.values()) - - for rule_id, rule_value in sub_rules.iteritems(): - for cat, cat_func, func_name in ( - ("subject_categories", self.admin_manager.get_subject_scopes_dict, "subject_category_scope"), - ("action_categories", self.admin_manager.get_action_scopes_dict, "action_category_scope"), - ("object_categories", self.admin_manager.get_object_scopes_dict, "object_category_scope"), - ): - for cat_value in sub_meta_rules[relation_id][cat]: - scope = cat_func( - admin_subject_id, - authz_ie_dict["id"], - cat_value - ) - a_scope = rule_value.pop(0) - self.assertIn(a_scope, scope.keys()) - - # TODO: add test for the delete function - - -@dependency.requires('admin_api', 'authz_api', 'tenant_api', 'configuration_api', 'moonlog_api') -class TestIntraExtensionAdminManagerKO(tests.TestCase): - - # TODO: must be reviewed because some tests are on the authz interface - def setUp(self): - self.useFixture(database.Database()) - super(TestIntraExtensionAdminManagerKO, self).setUp() - self.load_fixtures(default_fixtures) - self.load_backends() - domain = {'id': "default", 'name': "default"} - self.resource_api.create_domain(domain['id'], domain) - self.admin = create_user(self, username="admin") - self.demo = create_user(self, username="demo") - ref = self.root_api.load_root_intra_extension_dict() - self.root_api.populate_default_data(ref) - self.root_intra_extension = 
self.root_api.get_root_extension_dict() - self.root_intra_extension_id = self.root_intra_extension.keys()[0] - self.ADMIN_ID = self.root_api.root_admin_id - self.authz_manager = self.authz_api - self.admin_manager = self.admin_api - - def __get_key_from_value(self, value, values_dict): - return filter(lambda v: v[1] == value, values_dict.iteritems())[0][0] - - def load_extra_backends(self): - return { - "moonlog_api": LogManager(), - "tenant_api": TenantManager(), - "admin_api": IntraExtensionAdminManager(), - "authz_api": IntraExtensionAuthzManager(), - "configuration_api": ConfigurationManager(), - # "resource_api": resource.Manager(), - } - - def config_overrides(self): - super(TestIntraExtensionAdminManagerKO, self).config_overrides() - self.policy_directory = '/etc/keystone/policies' - self.config_fixture.config( - group='moon', - intraextension_driver='keystone.contrib.moon.backends.sql.IntraExtensionConnector') - self.config_fixture.config( - group='moon', - policy_directory=self.policy_directory) - - def test_subjects(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - # demo_subject_dict = self.manager.add_subject_dict(admin_subject_id, admin_ie_dict["id"], - # {"name": "demo", "description": "demo"}) - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - subjects = self.authz_manager.get_subjects_dict(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(subjects, dict) - for key, value in subjects.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIn("description", 
value) - self.assertIn("keystone_name", value) - self.assertIn("keystone_id", value) - - create_user(self, "subject_test") - new_subject = {"name": "subject_test", "description": "subject_test"} - self.assertRaises( - AuthzException, - self.authz_manager.add_subject_dict, - demo_subject_id, admin_ie_dict["id"], new_subject) - - subjects = self.admin_manager.add_subject_dict(admin_subject_id, authz_ie_dict["id"], new_subject) - _subjects = dict(subjects) - self.assertEqual(len(_subjects.keys()), 1) - new_subject["id"] = _subjects.keys()[0] - value = subjects[new_subject["id"]] - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertEqual(value["name"], new_subject["name"]) - self.assertIn("description", value) - self.assertEqual(value["description"], new_subject["description"]) - - # Delete the new subject - self.assertRaises( - AuthzException, - self.authz_manager.del_subject, - demo_subject_id, authz_ie_dict["id"], new_subject["id"]) - - self.admin_manager.del_subject(admin_subject_id, authz_ie_dict["id"], new_subject["id"]) - subjects = self.authz_manager.get_subjects_dict(admin_subject_id, authz_ie_dict["id"]) - for key, value in subjects.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIsNot(new_subject["name"], value["name"]) - self.assertIn("description", value) - - def test_objects(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - # demo_subject_dict = self.manager.add_subject_dict(admin_subject_id, admin_ie_dict["id"], - # {"name": "demo", "description": "demo"}) - demo_subject_id, demo_subject_dict = \ - 
self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - objects = self.authz_manager.get_objects_dict(admin_subject_id, authz_ie_dict["id"]) - objects_id_list = [] - self.assertIsInstance(objects, dict) - for key, value in objects.iteritems(): - objects_id_list.append(key) - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIn("description", value) - - create_user(self, "subject_test") - new_object = {"name": "object_test", "description": "object_test"} - self.assertRaises( - AuthzException, - self.authz_manager.add_object_dict, - demo_subject_id, admin_ie_dict["id"], new_object) - - self.assertRaises( - ObjectsWriteNoAuthorized, - self.admin_manager.add_object_dict, - admin_subject_id, admin_ie_dict["id"], new_object - ) - - # Delete the new object - for key in objects_id_list: - self.assertRaises( - AuthzException, - self.authz_manager.del_object, - demo_subject_id, authz_ie_dict["id"], key) - self.assertRaises( - AuthzException, - self.authz_manager.del_object, - admin_subject_id, authz_ie_dict["id"], key) - - for key in objects_id_list: - self.assertRaises( - ObjectsWriteNoAuthorized, - self.admin_manager.del_object, - demo_subject_id, admin_ie_dict["id"], key) - self.assertRaises( - ObjectsWriteNoAuthorized, - self.admin_manager.del_object, - admin_subject_id, admin_ie_dict["id"], key) - - def test_actions(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - # demo_subject_dict = self.manager.add_subject_dict(admin_subject_id, admin_ie_dict["id"], - # {"name": "demo", "description": "demo"}) - demo_subject_id, demo_subject_dict = \ - 
self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - actions = self.authz_manager.get_actions_dict(admin_subject_id, authz_ie_dict["id"]) - actions_id_list = [] - self.assertIsInstance(actions, dict) - for key, value in actions.iteritems(): - actions_id_list.append(key) - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIn("description", value) - - create_user(self, "subject_test") - new_action = {"name": "action_test", "description": "action_test"} - self.assertRaises( - AuthzException, - self.authz_manager.add_action_dict, - demo_subject_id, admin_ie_dict["id"], new_action) - - self.assertRaises( - ActionsWriteNoAuthorized, - self.admin_manager.add_action_dict, - admin_subject_id, admin_ie_dict["id"], new_action - ) - - # Delete all actions - for key in actions_id_list: - self.assertRaises( - AuthzException, - self.authz_manager.del_action, - demo_subject_id, authz_ie_dict["id"], key) - self.assertRaises( - AuthzException, - self.authz_manager.del_action, - admin_subject_id, authz_ie_dict["id"], key) - - for key in actions_id_list: - self.assertRaises( - ActionsWriteNoAuthorized, - self.admin_manager.del_action, - demo_subject_id, admin_ie_dict["id"], key) - self.assertRaises( - ActionsWriteNoAuthorized, - self.admin_manager.del_action, - admin_subject_id, admin_ie_dict["id"], key) - - def test_subject_categories(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - # demo_subject_dict = self.manager.add_subject_dict(admin_subject_id, admin_ie_dict["id"], - # {"name": "demo", "description": "demo"}) - demo_subject_id, demo_subject_dict = \ - 
self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - subject_categories = self.authz_manager.get_subject_categories_dict(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(subject_categories, dict) - for key, value in subject_categories.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIn("description", value) - - new_subject_category = {"name": "subject_category_test", "description": "subject_category_test"} - self.assertRaises( - AuthzException, - self.authz_manager.add_subject_category_dict, - demo_subject_id, admin_ie_dict["id"], new_subject_category) - - subject_categories = self.admin_manager.add_subject_category_dict(admin_subject_id, authz_ie_dict["id"], new_subject_category) - _subject_categories = dict(subject_categories) - self.assertEqual(len(_subject_categories.keys()), 1) - new_subject_category["id"] = _subject_categories.keys()[0] - value = subject_categories[new_subject_category["id"]] - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertEqual(value["name"], new_subject_category["name"]) - self.assertIn("description", value) - self.assertEqual(value["description"], new_subject_category["description"]) - - # Delete the new subject_category - self.assertRaises( - AuthzException, - self.authz_manager.del_subject_category, - demo_subject_id, authz_ie_dict["id"], new_subject_category["id"]) - - self.admin_manager.del_subject_category(admin_subject_id, authz_ie_dict["id"], new_subject_category["id"]) - subject_categories = self.authz_manager.get_subject_categories_dict(admin_subject_id, authz_ie_dict["id"]) - for key, value in subject_categories.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIsNot(new_subject_category["name"], value["name"]) - self.assertIn("description", value) - - def test_object_categories(self): - authz_ie_dict = create_intra_extension(self, 
"policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - # demo_subject_dict = self.manager.add_subject_dict(admin_subject_id, admin_ie_dict["id"], - # {"name": "demo", "description": "demo"}) - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - object_categories = self.authz_manager.get_object_categories_dict(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(object_categories, dict) - for key, value in object_categories.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIn("description", value) - - new_object_category = {"name": "object_category_test", "description": "object_category_test"} - self.assertRaises( - AuthzException, - self.authz_manager.add_object_category_dict, - demo_subject_id, admin_ie_dict["id"], new_object_category) - - object_categories = self.admin_manager.add_object_category_dict(admin_subject_id, authz_ie_dict["id"], new_object_category) - _object_categories = dict(object_categories) - self.assertEqual(len(_object_categories.keys()), 1) - new_object_category["id"] = _object_categories.keys()[0] - value = object_categories[new_object_category["id"]] - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertEqual(value["name"], new_object_category["name"]) - self.assertIn("description", value) - self.assertEqual(value["description"], new_object_category["description"]) - - # Delete the new object_category - self.assertRaises( - AuthzException, - self.authz_manager.del_object_category, - demo_subject_id, authz_ie_dict["id"], new_object_category["id"]) - - 
self.admin_manager.del_object_category(admin_subject_id, authz_ie_dict["id"], new_object_category["id"]) - object_categories = self.authz_manager.get_object_categories_dict(admin_subject_id, authz_ie_dict["id"]) - for key, value in object_categories.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIsNot(new_object_category["name"], value["name"]) - self.assertIn("description", value) - - def test_action_categories(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - # demo_subject_dict = self.manager.add_subject_dict(admin_subject_id, admin_ie_dict["id"], - # {"name": "demo", "description": "demo"}) - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - action_categories = self.authz_manager.get_action_categories_dict(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(action_categories, dict) - for key, value in action_categories.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIn("description", value) - - new_action_category = {"name": "action_category_test", "description": "action_category_test"} - self.assertRaises( - AuthzException, - self.authz_manager.add_action_category_dict, - demo_subject_id, admin_ie_dict["id"], new_action_category) - - action_categories = self.admin_manager.add_action_category_dict(admin_subject_id, authz_ie_dict["id"], new_action_category) - _action_categories = dict(action_categories) - self.assertEqual(len(_action_categories.keys()), 1) - new_action_category["id"] = _action_categories.keys()[0] - 
value = action_categories[new_action_category["id"]] - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertEqual(value["name"], new_action_category["name"]) - self.assertIn("description", value) - self.assertEqual(value["description"], new_action_category["description"]) - - # Delete the new action_category - self.assertRaises( - AuthzException, - self.authz_manager.del_action_category, - demo_subject_id, authz_ie_dict["id"], new_action_category["id"]) - - self.admin_manager.del_action_category(admin_subject_id, authz_ie_dict["id"], new_action_category["id"]) - action_categories = self.authz_manager.get_action_categories_dict(admin_subject_id, authz_ie_dict["id"]) - for key, value in action_categories.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIsNot(new_action_category["name"], value["name"]) - self.assertIn("description", value) - - def test_subject_category_scope(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - # demo_subject_dict = self.manager.add_subject_dict(admin_subject_id, admin_ie_dict["id"], - # {"name": "demo", "description": "demo"}) - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - subject_categories = self.admin_manager.add_subject_category_dict( - admin_subject_id, - authz_ie_dict["id"], - { - "name": "country", - "description": "country", - } - ) - - for subject_category_id in subject_categories: - - subject_category_scope = self.authz_manager.get_subject_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - 
subject_category_id) - self.assertIsInstance(subject_category_scope, dict) - self.assertEqual({}, subject_category_scope) - - new_subject_category_scope = { - "name": "france", - "description": "france", - } - - self.assertRaises( - AuthzException, - self.admin_manager.add_subject_scope_dict, - demo_subject_id, authz_ie_dict["id"], subject_category_id, new_subject_category_scope) - - subject_category_scope = self.admin_manager.add_subject_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id, - new_subject_category_scope) - self.assertIsInstance(subject_category_scope, dict) - self.assertEqual(len(subject_category_scope.keys()), 1) - subject_category_scope_id = subject_category_scope.keys()[0] - subject_category_scope_value = subject_category_scope[subject_category_scope_id] - self.assertIn("name", subject_category_scope_value) - self.assertEqual(new_subject_category_scope["name"], subject_category_scope_value["name"]) - self.assertIn("description", subject_category_scope_value) - self.assertEqual(new_subject_category_scope["description"], subject_category_scope_value["description"]) - - # Delete the new subject_category_scope - self.assertRaises( - AuthzException, - self.admin_manager.del_subject_scope, - demo_subject_id, authz_ie_dict["id"], subject_category_id, subject_category_scope_id) - - self.admin_manager.del_subject_scope( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id, - subject_category_scope_id) - subject_category_scope = self.admin_manager.get_subject_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id) - self.assertIsInstance(subject_category_scope, dict) - self.assertNotIn(subject_category_scope_id, subject_category_scope.keys()) - - def test_object_category_scope(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], 
admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - # demo_subject_dict = self.manager.add_subject_dict(admin_subject_id, admin_ie_dict["id"], - # {"name": "demo", "description": "demo"}) - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - object_categories = self.admin_manager.add_object_category_dict( - admin_subject_id, - authz_ie_dict["id"], - { - "name": "country", - "description": "country", - } - ) - - for object_category_id in object_categories: - - object_category_scope = self.authz_manager.get_object_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - object_category_id) - self.assertIsInstance(object_category_scope, dict) - self.assertEqual({}, object_category_scope) - - new_object_category_scope = { - "name": "france", - "description": "france", - } - - self.assertRaises( - AuthzException, - self.admin_manager.add_object_scope_dict, - demo_subject_id, authz_ie_dict["id"], object_category_id, new_object_category_scope) - - object_category_scope = self.admin_manager.add_object_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - object_category_id, - new_object_category_scope) - self.assertIsInstance(object_category_scope, dict) - self.assertEqual(len(object_category_scope.keys()), 1) - object_category_scope_id = object_category_scope.keys()[0] - object_category_scope_value = object_category_scope[object_category_scope_id] - self.assertIn("name", object_category_scope_value) - self.assertEqual(new_object_category_scope["name"], object_category_scope_value["name"]) - self.assertIn("description", object_category_scope_value) - self.assertEqual(new_object_category_scope["description"], object_category_scope_value["description"]) - - # Delete the new object_category_scope - self.assertRaises( - AuthzException, - 
self.admin_manager.del_object_scope, - demo_subject_id, authz_ie_dict["id"], object_category_id, object_category_scope_id) - - self.admin_manager.del_object_scope( - admin_subject_id, - authz_ie_dict["id"], - object_category_id, - object_category_scope_id) - object_category_scope = self.admin_manager.get_object_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - object_category_id) - self.assertIsInstance(object_category_scope, dict) - self.assertNotIn(object_category_scope_id, object_category_scope.keys()) - - def test_action_category_scope(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - # demo_subject_dict = self.manager.add_subject_dict(admin_subject_id, admin_ie_dict["id"], - # {"name": "demo", "description": "demo"}) - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - action_categories = self.admin_manager.add_action_category_dict( - admin_subject_id, - authz_ie_dict["id"], - { - "name": "swift", - "description": "swift actions", - } - ) - - for action_category_id in action_categories: - - action_category_scope = self.authz_manager.get_action_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - action_category_id) - self.assertIsInstance(action_category_scope, dict) - self.assertEqual({}, action_category_scope) - - new_action_category_scope = { - "name": "get", - "description": "get swift files", - } - - self.assertRaises( - AuthzException, - self.admin_manager.add_action_scope_dict, - demo_subject_id, authz_ie_dict["id"], action_category_id, new_action_category_scope) - - action_category_scope = 
self.admin_manager.add_action_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - action_category_id, - new_action_category_scope) - self.assertIsInstance(action_category_scope, dict) - self.assertEqual(len(action_category_scope.keys()), 1) - action_category_scope_id = action_category_scope.keys()[0] - action_category_scope_value = action_category_scope[action_category_scope_id] - self.assertIn("name", action_category_scope_value) - self.assertEqual(new_action_category_scope["name"], action_category_scope_value["name"]) - self.assertIn("description", action_category_scope_value) - self.assertEqual(new_action_category_scope["description"], action_category_scope_value["description"]) - - # Delete the new action_category_scope - self.assertRaises( - AuthzException, - self.admin_manager.del_action_scope, - demo_subject_id, authz_ie_dict["id"], action_category_id, action_category_scope_id) - - self.admin_manager.del_action_scope( - admin_subject_id, - authz_ie_dict["id"], - action_category_id, - action_category_scope_id) - action_category_scope = self.admin_manager.get_action_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - action_category_id) - self.assertIsInstance(action_category_scope, dict) - self.assertNotIn(action_category_scope_id, action_category_scope.keys()) - - def test_subject_category_assignment(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - admin_authz_subject_id, admin_authz_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], authz_ie_dict['id'], 'admin').iteritems().next() - # demo_subject_dict = self.manager.add_subject_dict(admin_subject_id, admin_ie_dict["id"], - # 
{"name": "demo", "description": "demo"}) - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - demo_authz_subject_id, demo_authz_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], authz_ie_dict['id'], 'demo').iteritems().next() - - subjects_dict = self.authz_manager.get_subjects_dict(admin_subject_id, authz_ie_dict["id"]) - - subject_categories = self.admin_manager.add_subject_category_dict( - admin_subject_id, - authz_ie_dict["id"], - { - "name": "country", - "description": "country", - } - ) - - for subject_category_id in subject_categories: - subject_category_scope = self.authz_manager.get_subject_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id) - self.assertIsInstance(subject_category_scope, dict) - self.assertEqual({}, subject_category_scope) - - new_subject_category_scope_1 = { - "name": "france", - "description": "france", - } - - subject_category_scope_1 = self.admin_manager.add_subject_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id, - new_subject_category_scope_1) - subject_category_scope_1_id = subject_category_scope_1.keys()[0] - - new_subject_category_scope_2 = { - "name": "china", - "description": "china", - } - - subject_category_scope_2 = self.admin_manager.add_subject_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id, - new_subject_category_scope_2) - subject_category_scope_2_id = subject_category_scope_2.keys()[0] - - subject_category_assignments = self.authz_manager.get_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - admin_authz_subject_id, - subject_category_id - ) - self.assertIsInstance(subject_category_assignments, list) - self.assertEqual([], subject_category_assignments) - - subject_category_assignments = self.authz_manager.get_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - 
demo_authz_subject_id, - subject_category_id - ) - self.assertIsInstance(subject_category_assignments, list) - self.assertEqual([], subject_category_assignments) - - self.assertRaises( - AuthzException, - self.authz_manager.add_subject_assignment_list, - demo_subject_id, authz_ie_dict["id"], - admin_authz_subject_id, subject_category_id, subject_category_scope_1_id - ) - - self.assertRaises( - AuthzException, - self.authz_manager.add_subject_assignment_list, - demo_subject_id, authz_ie_dict["id"], - demo_authz_subject_id, subject_category_id, subject_category_scope_2_id - ) - - subject_category_assignments = self.admin_manager.add_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - admin_authz_subject_id, subject_category_id, subject_category_scope_1_id - ) - self.assertIsInstance(subject_category_assignments, list) - - self.assertEqual(len(subject_category_assignments), 1) - - subject_category_assignments = self.admin_manager.add_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - admin_authz_subject_id, subject_category_id, subject_category_scope_2_id - ) - self.assertIsInstance(subject_category_assignments, list) - self.assertEqual(len(subject_category_assignments), 2) - - subject_category_assignments = self.admin_manager.add_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - demo_authz_subject_id, subject_category_id, subject_category_scope_2_id - ) - self.assertIsInstance(subject_category_assignments, list) - self.assertEqual(len(subject_category_assignments), 1) - - subject_category_assignments = self.admin_manager.get_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - admin_authz_subject_id, subject_category_id - ) - self.assertIsInstance(subject_category_assignments, list) - self.assertEqual(len(subject_category_assignments), 2) - - self.assertRaises( - AuthzException, - self.admin_manager.del_subject_assignment, - demo_subject_id, authz_ie_dict["id"], - demo_authz_subject_id, 
subject_category_id, subject_category_scope_2_id - ) - - self.admin_manager.del_subject_assignment( - admin_subject_id, - authz_ie_dict["id"], - admin_authz_subject_id, subject_category_id, subject_category_scope_2_id - ) - subject_category_assignments = self.admin_manager.get_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - admin_authz_subject_id, subject_category_id - ) - self.assertIsInstance(subject_category_assignments, list) - self.assertEqual(len(subject_category_assignments), 1) - - self.assertRaises( - SubjectAssignmentUnknown, - self.admin_manager.del_subject_assignment, - admin_subject_id, - authz_ie_dict["id"], - admin_authz_subject_id, subject_category_id, subject_category_scope_2_id - ) - - def test_object_category_assignment(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - # demo_subject_dict = self.manager.add_subject_dict(admin_subject_id, admin_ie_dict["id"], - # {"name": "demo", "description": "demo"}) - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - objects_dict = self.authz_manager.get_objects_dict(admin_subject_id, authz_ie_dict["id"]) - - object_vm1 = self.admin_manager.add_object_dict(admin_subject_id, authz_ie_dict["id"], {"name": "vm1", "description": "vm1"}) - object_vm2 = self.admin_manager.add_object_dict(admin_subject_id, authz_ie_dict["id"], {"name": "vm2", "description": "vm2"}) - object_vm1_id = object_vm1.keys()[0] - object_vm2_id = object_vm2.keys()[0] - if not object_vm1_id or not object_vm2_id: - raise Exception("Cannot run tests, database is corrupted ? 
(need upload and list in objects)") - - object_categories = self.admin_manager.add_object_category_dict( - admin_subject_id, - authz_ie_dict["id"], - { - "name": "location", - "description": "location", - } - ) - - for object_category_id in object_categories: - object_category_scope = self.authz_manager.get_object_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - object_category_id) - self.assertIsInstance(object_category_scope, dict) - self.assertEqual({}, object_category_scope) - - new_object_category_scope_1 = { - "name": "france", - "description": "france", - } - - object_category_scope_1 = self.admin_manager.add_object_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - object_category_id, - new_object_category_scope_1) - object_category_scope_1_id = object_category_scope_1.keys()[0] - - new_object_category_scope_2 = { - "name": "china", - "description": "china", - } - - object_category_scope_2 = self.admin_manager.add_object_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - object_category_id, - new_object_category_scope_2) - object_category_scope_2_id = object_category_scope_2.keys()[0] - - object_category_assignments = self.authz_manager.get_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm1_id, - object_category_id - ) - self.assertIsInstance(object_category_assignments, list) - self.assertEqual([], object_category_assignments) - - object_category_assignments = self.authz_manager.get_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm2_id, - object_category_id - ) - self.assertIsInstance(object_category_assignments, list) - self.assertEqual([], object_category_assignments) - - self.assertRaises( - AuthzException, - self.authz_manager.add_object_assignment_list, - demo_subject_id, authz_ie_dict["id"], - object_vm1_id, object_category_id, object_category_scope_1_id - ) - - self.assertRaises( - AuthzException, - self.authz_manager.add_object_assignment_list, - demo_subject_id, 
authz_ie_dict["id"], - object_vm2_id, object_category_id, object_category_scope_2_id - ) - - object_category_assignments = self.admin_manager.add_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm1_id, object_category_id, object_category_scope_1_id - ) - self.assertIsInstance(object_category_assignments, list) - - self.assertEqual(len(object_category_assignments), 1) - - object_category_assignments = self.admin_manager.add_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm1_id, object_category_id, object_category_scope_2_id - ) - self.assertIsInstance(object_category_assignments, list) - self.assertEqual(len(object_category_assignments), 2) - - object_category_assignments = self.admin_manager.add_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm2_id, object_category_id, object_category_scope_2_id - ) - self.assertIsInstance(object_category_assignments, list) - self.assertEqual(len(object_category_assignments), 1) - - object_category_assignments = self.admin_manager.get_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm1_id, object_category_id - ) - self.assertIsInstance(object_category_assignments, list) - self.assertEqual(len(object_category_assignments), 2) - - self.assertRaises( - AuthzException, - self.admin_manager.del_object_assignment, - demo_subject_id, authz_ie_dict["id"], - object_vm2_id, object_category_id, object_category_scope_2_id - ) - - self.admin_manager.del_object_assignment( - admin_subject_id, - authz_ie_dict["id"], - object_vm1_id, object_category_id, object_category_scope_2_id - ) - object_category_assignments = self.admin_manager.get_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm1_id, object_category_id - ) - self.assertIsInstance(object_category_assignments, list) - self.assertEqual(len(object_category_assignments), 1) - - self.assertRaises( - ObjectAssignmentUnknown, - 
self.admin_manager.del_object_assignment, - admin_subject_id, - authz_ie_dict["id"], - object_vm1_id, object_category_id, object_category_scope_2_id - ) - - def test_action_category_assignment(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - # demo_subject_dict = self.manager.add_subject_dict(admin_subject_id, admin_ie_dict["id"], - # {"name": "demo", "description": "demo"}) - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - actions_dict = self.authz_manager.get_actions_dict(admin_subject_id, authz_ie_dict["id"]) - - action_upload_id = None - action_list_id = None - for _action_id in actions_dict: - if actions_dict[_action_id]['name'] == 'upload': - action_upload_id = _action_id - if actions_dict[_action_id]['name'] == 'list': - action_list_id = _action_id - if not action_upload_id or not action_list_id: - raise Exception("Cannot run tests, database is corrupted ? 
(need upload and list in actions)") - - action_categories = self.admin_manager.add_action_category_dict( - admin_subject_id, - authz_ie_dict["id"], - { - "name": "swift", - "description": "swift actions", - } - ) - - for action_category_id in action_categories: - action_category_scope = self.authz_manager.get_action_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - action_category_id) - self.assertIsInstance(action_category_scope, dict) - self.assertEqual({}, action_category_scope) - - new_action_category_scope_1 = { - "name": "swift_admin", - "description": "action require admin rights", - } - - action_category_scope_1 = self.admin_manager.add_action_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - action_category_id, - new_action_category_scope_1) - action_category_scope_1_id = action_category_scope_1.keys()[0] - - new_action_category_scope_2 = { - "name": "swift_anonymous", - "description": "action require no right", - } - - action_category_scope_2 = self.admin_manager.add_action_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - action_category_id, - new_action_category_scope_2) - action_category_scope_2_id = action_category_scope_2.keys()[0] - - action_category_assignments = self.authz_manager.get_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_upload_id, - action_category_id - ) - self.assertIsInstance(action_category_assignments, list) - self.assertEqual([], action_category_assignments) - - action_category_assignments = self.authz_manager.get_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_list_id, - action_category_id - ) - self.assertIsInstance(action_category_assignments, list) - self.assertEqual([], action_category_assignments) - - self.assertRaises( - AuthzException, - self.authz_manager.add_action_assignment_list, - demo_subject_id, authz_ie_dict["id"], - action_upload_id, action_category_id, action_category_scope_1_id - ) - - self.assertRaises( - AuthzException, - 
self.authz_manager.add_action_assignment_list, - demo_subject_id, authz_ie_dict["id"], - action_list_id, action_category_id, action_category_scope_2_id - ) - - action_category_assignments = self.admin_manager.add_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_upload_id, action_category_id, action_category_scope_1_id - ) - self.assertIsInstance(action_category_assignments, list) - - self.assertEqual(len(action_category_assignments), 1) - - action_category_assignments = self.admin_manager.add_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_upload_id, action_category_id, action_category_scope_2_id - ) - self.assertIsInstance(action_category_assignments, list) - self.assertEqual(len(action_category_assignments), 2) - - action_category_assignments = self.admin_manager.add_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_list_id, action_category_id, action_category_scope_2_id - ) - self.assertIsInstance(action_category_assignments, list) - self.assertEqual(len(action_category_assignments), 1) - - action_category_assignments = self.admin_manager.get_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_upload_id, action_category_id - ) - self.assertIsInstance(action_category_assignments, list) - self.assertEqual(len(action_category_assignments), 2) - - self.assertRaises( - AuthzException, - self.admin_manager.del_action_assignment, - demo_subject_id, authz_ie_dict["id"], - action_list_id, action_category_id, action_category_scope_2_id - ) - - self.admin_manager.del_action_assignment( - admin_subject_id, - authz_ie_dict["id"], - action_upload_id, action_category_id, action_category_scope_2_id - ) - action_category_assignments = self.admin_manager.get_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_upload_id, action_category_id - ) - self.assertIsInstance(action_category_assignments, list) - self.assertEqual(len(action_category_assignments), 
1) - - self.assertRaises( - ActionAssignmentUnknown, - self.admin_manager.del_action_assignment, - admin_subject_id, - authz_ie_dict["id"], - action_upload_id, action_category_id, action_category_scope_2_id - ) - - def test_sub_meta_rules(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - # demo_subject_dict = self.admin_manager.add_subject_dict(admin_subject_id, admin_ie_dict["id"], - # {"name": "demo", "description": "demo"}) - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - aggregation_algorithm = self.admin_manager.get_aggregation_algorithm_id(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(aggregation_algorithm, dict) - - # TODO: need more tests on aggregation_algorithms (set and del) - - sub_meta_rules = self.admin_manager.get_sub_meta_rules_dict(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(sub_meta_rules, dict) - categories = { - "subject_categories": self.admin_manager.get_subject_categories_dict(admin_subject_id, authz_ie_dict["id"]), - "object_categories": self.admin_manager.get_object_categories_dict(admin_subject_id, authz_ie_dict["id"]), - "action_categories": self.admin_manager.get_action_categories_dict(admin_subject_id, authz_ie_dict["id"]) - } - for key, value in sub_meta_rules.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("action_categories", value) - self.assertIn("object_categories", value) - self.assertIn("subject_categories", value) - self.assertIn("algorithm", value) - self.assertIn("name", value) - for action_category_id in 
value["action_categories"]: - self.assertIn(action_category_id, categories["action_categories"]) - for object_category_id in value["object_categories"]: - self.assertIn(object_category_id, categories["object_categories"]) - for subject_category_id in value["subject_categories"]: - self.assertIn(subject_category_id, categories["subject_categories"]) - # TODO: need more tests (set and del) - - def test_sub_rules(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - # demo_subject_dict = self.admin_manager.add_subject_dict(admin_subject_id, admin_ie_dict["id"], - # {"name": "demo", "description": "demo"}) - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - sub_meta_rules = self.admin_manager.get_sub_meta_rules_dict(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(sub_meta_rules, dict) - - for relation_id in sub_meta_rules: - rules = self.admin_manager.get_rules_dict(admin_subject_id, authz_ie_dict["id"], relation_id) - rule_length = len(sub_meta_rules[relation_id]["subject_categories"]) + \ - len(sub_meta_rules[relation_id]["object_categories"]) + \ - len(sub_meta_rules[relation_id]["action_categories"]) + 1 - for rule_id in rules: - self.assertEqual(rule_length, len(rules[rule_id])) - rule = list(rules[rule_id]) - for cat, cat_func, func_name in ( - ("subject_categories", self.admin_manager.get_subject_scopes_dict, "subject_scope"), - ("action_categories", self.admin_manager.get_action_scopes_dict, "action_scope"), - ("object_categories", self.admin_manager.get_object_scopes_dict, "object_scope"), - ): - for 
cat_value in sub_meta_rules[relation_id][cat]: - scope = cat_func( - admin_subject_id, - authz_ie_dict["id"], - cat_value - ) - a_scope = rule.pop(0) - if type(a_scope) is not bool: - self.assertIn(a_scope, scope.keys()) - - # add a new subrule - - sub_rule = [] - for cat, cat_func, func_name in ( - ("subject_categories", self.admin_manager.get_subject_scopes_dict, "subject_scope"), - ("action_categories", self.admin_manager.get_action_scopes_dict, "action_scope"), - ("object_categories", self.admin_manager.get_object_scopes_dict, "object_scope"), - ): - for cat_value in sub_meta_rules[relation_id][cat]: - scope = cat_func( - admin_subject_id, - authz_ie_dict["id"], - cat_value - ) - sub_rule.append(scope.keys()[0]) - - sub_rule.append(False) - self.assertRaises( - AuthzException, - self.admin_manager.add_rule_dict, - demo_subject_id, authz_ie_dict["id"], relation_id, sub_rule - ) - - sub_rules = self.admin_manager.add_rule_dict(admin_subject_id, authz_ie_dict["id"], relation_id, sub_rule) - self.assertIsInstance(sub_rules, dict) - self.assertIn(sub_rule, sub_rules.values()) - - for rule_id, rule_value in sub_rules.iteritems(): - for cat, cat_func, func_name in ( - ("subject_categories", self.admin_manager.get_subject_scopes_dict, "subject_category_scope"), - ("action_categories", self.admin_manager.get_action_scopes_dict, "action_category_scope"), - ("object_categories", self.admin_manager.get_object_scopes_dict, "object_category_scope"), - ): - for cat_value in sub_meta_rules[relation_id][cat]: - scope = cat_func( - admin_subject_id, - authz_ie_dict["id"], - cat_value - ) - a_scope = rule_value.pop(0) - self.assertIn(a_scope, scope.keys()) - - # TODO: add test for the delete function diff --git a/keystone-moon/keystone/tests/moon/unit/test_unit_core_intra_extension_authz.py b/keystone-moon/keystone/tests/moon/unit/test_unit_core_intra_extension_authz.py deleted file mode 100644 index 13d9dcd1..00000000 --- 
a/keystone-moon/keystone/tests/moon/unit/test_unit_core_intra_extension_authz.py +++ /dev/null @@ -1,2322 +0,0 @@ -# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors -# This software is distributed under the terms and conditions of the 'Apache-2.0' -# license which can be found in the file 'LICENSE' in this package distribution -# or at 'http://www.apache.org/licenses/LICENSE-2.0'. - -"""Unit tests for core IntraExtensionAuthzManager""" - -from oslo_config import cfg -from keystone.tests import unit as tests -from keystone.contrib.moon.core import IntraExtensionAdminManager, IntraExtensionAuthzManager, IntraExtensionRootManager -from keystone.contrib.moon.core import ConfigurationManager -from keystone.tests.unit.ksfixtures import database -from keystone.contrib.moon.exception import * -from keystone.tests.unit import default_fixtures -from keystone.contrib.moon.core import LogManager, TenantManager -from keystone.tests.moon.unit import * - -CONF = cfg.CONF - -USER = { - 'name': 'admin', - 'domain_id': "default", - 'password': 'admin' -} - -IE = { - "name": "test IE", - "policymodel": "policy_authz", - "description": "a simple description." 
-} - - -class TestIntraExtensionAuthzManagerAuthzOK(tests.TestCase): - - def setUp(self): - self.useFixture(database.Database()) - super(TestIntraExtensionAuthzManagerAuthzOK, self).setUp() - self.load_fixtures(default_fixtures) - self.load_backends() - domain = {'id': "default", 'name': "default"} - self.resource_api.create_domain(domain['id'], domain) - self.admin = create_user(self, username="admin") - self.demo = create_user(self, username="demo") - ref = self.root_api.load_root_intra_extension_dict() - self.root_api.populate_default_data(ref) - self.root_intra_extension = self.root_api.get_root_extension_dict() - self.root_intra_extension_id = self.root_intra_extension.keys()[0] - self.ADMIN_ID = self.root_api.root_admin_id - self.authz_manager = self.authz_api - self.admin_manager = self.admin_api - - def __get_key_from_value(self, value, values_dict): - return filter(lambda v: v[1] == value, values_dict.iteritems())[0][0] - - def load_extra_backends(self): - return { - "moonlog_api": LogManager(), - "tenant_api": TenantManager(), - "admin_api": IntraExtensionAdminManager(), - "authz_api": IntraExtensionAuthzManager(), - "configuration_api": ConfigurationManager(), - # "resource_api": resource.Manager(), - } - - def config_overrides(self): - super(TestIntraExtensionAuthzManagerAuthzOK, self).config_overrides() - self.policy_directory = '/etc/keystone/policies' - self.config_fixture.config( - group='moon', - intraextension_driver='keystone.contrib.moon.backends.sql.IntraExtensionConnector') - self.config_fixture.config( - group='moon', - policy_directory=self.policy_directory) - - def delete_admin_intra_extension(self): - self.authz_manager.del_intra_extension(self.ref["id"]) - - def test_subjects(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, 
admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - subjects = self.authz_manager.get_subjects_dict(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(subjects, dict) - for key, value in subjects.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIn("description", value) - self.assertIn("keystone_name", value) - self.assertIn("keystone_id", value) - - create_user(self, "subject_test") - new_subject = {"name": "subject_test", "description": "subject_test"} - - subjects = self.admin_manager.add_subject_dict(admin_subject_id, authz_ie_dict["id"], new_subject) - _subjects = dict(subjects) - self.assertEqual(len(_subjects.keys()), 1) - new_subject["id"] = _subjects.keys()[0] - value = subjects[new_subject["id"]] - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertEqual(value["name"], new_subject["name"]) - self.assertIn("description", value) - self.assertEqual(value["description"], new_subject["description"]) - - # Delete the new subject - self.admin_manager.del_subject(admin_subject_id, authz_ie_dict["id"], new_subject["id"]) - subjects = self.authz_manager.get_subjects_dict(admin_subject_id, authz_ie_dict["id"]) - for key, value in subjects.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIsNot(new_subject["name"], value["name"]) - self.assertIn("description", value) - - def test_objects(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - 
self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - objects = self.authz_manager.get_objects_dict(admin_subject_id, authz_ie_dict["id"]) - objects_id_list = [] - self.assertIsInstance(objects, dict) - for key, value in objects.iteritems(): - objects_id_list.append(key) - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIn("description", value) - - def test_actions(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - actions = self.authz_manager.get_actions_dict(admin_subject_id, authz_ie_dict["id"]) - actions_id_list = [] - self.assertIsInstance(actions, dict) - for key, value in actions.iteritems(): - actions_id_list.append(key) - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIn("description", value) - - def test_subject_categories(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - 
self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - subject_categories = self.authz_manager.get_subject_categories_dict(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(subject_categories, dict) - for key, value in subject_categories.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIn("description", value) - - new_subject_category = {"name": "subject_category_test", "description": "subject_category_test"} - - subject_categories = self.admin_manager.add_subject_category_dict(admin_subject_id, authz_ie_dict["id"], new_subject_category) - _subject_categories = dict(subject_categories) - self.assertEqual(len(_subject_categories.keys()), 1) - new_subject_category["id"] = _subject_categories.keys()[0] - value = subject_categories[new_subject_category["id"]] - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertEqual(value["name"], new_subject_category["name"]) - self.assertIn("description", value) - self.assertEqual(value["description"], new_subject_category["description"]) - - # Delete the new subject_category - self.admin_manager.del_subject_category(admin_subject_id, authz_ie_dict["id"], new_subject_category["id"]) - subject_categories = self.authz_manager.get_subject_categories_dict(admin_subject_id, authz_ie_dict["id"]) - for key, value in subject_categories.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIsNot(new_subject_category["name"], value["name"]) - self.assertIn("description", value) - - def test_object_categories(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], 
admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - object_categories = self.authz_manager.get_object_categories_dict(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(object_categories, dict) - for key, value in object_categories.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIn("description", value) - - new_object_category = {"name": "object_category_test", "description": "object_category_test"} - - object_categories = self.admin_manager.add_object_category_dict(admin_subject_id, authz_ie_dict["id"], new_object_category) - _object_categories = dict(object_categories) - self.assertEqual(len(_object_categories.keys()), 1) - new_object_category["id"] = _object_categories.keys()[0] - value = object_categories[new_object_category["id"]] - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertEqual(value["name"], new_object_category["name"]) - self.assertIn("description", value) - self.assertEqual(value["description"], new_object_category["description"]) - - # Delete the new object_category - - self.admin_manager.del_object_category(admin_subject_id, authz_ie_dict["id"], new_object_category["id"]) - object_categories = self.authz_manager.get_object_categories_dict(admin_subject_id, authz_ie_dict["id"]) - for key, value in object_categories.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIsNot(new_object_category["name"], value["name"]) - self.assertIn("description", value) - - def test_action_categories(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - 
self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - action_categories = self.authz_manager.get_action_categories_dict(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(action_categories, dict) - for key, value in action_categories.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIn("description", value) - - new_action_category = {"name": "action_category_test", "description": "action_category_test"} - - action_categories = self.admin_manager.add_action_category_dict(admin_subject_id, authz_ie_dict["id"], new_action_category) - _action_categories = dict(action_categories) - self.assertEqual(len(_action_categories.keys()), 1) - new_action_category["id"] = _action_categories.keys()[0] - value = action_categories[new_action_category["id"]] - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertEqual(value["name"], new_action_category["name"]) - self.assertIn("description", value) - self.assertEqual(value["description"], new_action_category["description"]) - - # Delete the new action_category - - self.admin_manager.del_action_category(admin_subject_id, authz_ie_dict["id"], new_action_category["id"]) - action_categories = self.authz_manager.get_action_categories_dict(admin_subject_id, authz_ie_dict["id"]) - for key, value in action_categories.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIsNot(new_action_category["name"], value["name"]) - self.assertIn("description", value) - - def test_subject_category_scope(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], 
admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - subject_categories = self.admin_manager.add_subject_category_dict( - admin_subject_id, - authz_ie_dict["id"], - { - "name": "country", - "description": "country", - } - ) - - for subject_category_id in subject_categories: - - subject_category_scope = self.authz_manager.get_subject_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id) - self.assertIsInstance(subject_category_scope, dict) - self.assertEqual({}, subject_category_scope) - - new_subject_category_scope = { - "name": "france", - "description": "france", - } - - subject_category_scope = self.admin_manager.add_subject_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id, - new_subject_category_scope) - self.assertIsInstance(subject_category_scope, dict) - self.assertEqual(len(subject_category_scope.keys()), 1) - subject_category_scope_id = subject_category_scope.keys()[0] - subject_category_scope_value = subject_category_scope[subject_category_scope_id] - self.assertIn("name", subject_category_scope_value) - self.assertEqual(new_subject_category_scope["name"], subject_category_scope_value["name"]) - self.assertIn("description", subject_category_scope_value) - self.assertEqual(new_subject_category_scope["description"], subject_category_scope_value["description"]) - - # Delete the new subject_category_scope - - self.admin_manager.del_subject_scope( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id, - subject_category_scope_id) - subject_category_scope = self.admin_manager.get_subject_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id) - self.assertIsInstance(subject_category_scope, 
dict) - self.assertNotIn(subject_category_scope_id, subject_category_scope.keys()) - - def test_object_category_scope(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - object_categories = self.admin_manager.add_object_category_dict( - admin_subject_id, - authz_ie_dict["id"], - { - "name": "country", - "description": "country", - } - ) - - for object_category_id in object_categories: - - object_category_scope = self.authz_manager.get_object_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - object_category_id) - self.assertIsInstance(object_category_scope, dict) - self.assertEqual({}, object_category_scope) - - new_object_category_scope = { - "name": "france", - "description": "france", - } - - object_category_scope = self.admin_manager.add_object_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - object_category_id, - new_object_category_scope) - self.assertIsInstance(object_category_scope, dict) - self.assertEqual(len(object_category_scope.keys()), 1) - object_category_scope_id = object_category_scope.keys()[0] - object_category_scope_value = object_category_scope[object_category_scope_id] - self.assertIn("name", object_category_scope_value) - self.assertEqual(new_object_category_scope["name"], object_category_scope_value["name"]) - self.assertIn("description", object_category_scope_value) - self.assertEqual(new_object_category_scope["description"], object_category_scope_value["description"]) - - # Delete the new object_category_scope - - 
self.admin_manager.del_object_scope( - admin_subject_id, - authz_ie_dict["id"], - object_category_id, - object_category_scope_id) - object_category_scope = self.admin_manager.get_object_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - object_category_id) - self.assertIsInstance(object_category_scope, dict) - self.assertNotIn(object_category_scope_id, object_category_scope.keys()) - - def test_action_category_scope(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - action_categories = self.admin_manager.add_action_category_dict( - admin_subject_id, - authz_ie_dict["id"], - { - "name": "swift", - "description": "swift actions", - } - ) - - for action_category_id in action_categories: - - action_category_scope = self.authz_manager.get_action_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - action_category_id) - self.assertIsInstance(action_category_scope, dict) - self.assertEqual({}, action_category_scope) - - new_action_category_scope = { - "name": "get", - "description": "get swift files", - } - - action_category_scope = self.admin_manager.add_action_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - action_category_id, - new_action_category_scope) - self.assertIsInstance(action_category_scope, dict) - self.assertEqual(len(action_category_scope.keys()), 1) - action_category_scope_id = action_category_scope.keys()[0] - action_category_scope_value = action_category_scope[action_category_scope_id] - self.assertIn("name", action_category_scope_value) - 
self.assertEqual(new_action_category_scope["name"], action_category_scope_value["name"]) - self.assertIn("description", action_category_scope_value) - self.assertEqual(new_action_category_scope["description"], action_category_scope_value["description"]) - - # Delete the new action_category_scope - - self.admin_manager.del_action_scope( - admin_subject_id, - authz_ie_dict["id"], - action_category_id, - action_category_scope_id) - action_category_scope = self.admin_manager.get_action_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - action_category_id) - self.assertIsInstance(action_category_scope, dict) - self.assertNotIn(action_category_scope_id, action_category_scope.keys()) - - def test_subject_category_assignment(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - admin_authz_subject_id, admin_authz_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], authz_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - demo_authz_subject_id, demo_authz_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], authz_ie_dict['id'], 'demo').iteritems().next() - - subjects_dict = self.authz_manager.get_subjects_dict(admin_subject_id, authz_ie_dict["id"]) - - subject_categories = self.admin_manager.add_subject_category_dict( - admin_subject_id, - authz_ie_dict["id"], - { - "name": "country", - "description": "country", - } - ) - - for subject_category_id in subject_categories: - subject_category_scope = 
self.authz_manager.get_subject_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id) - self.assertIsInstance(subject_category_scope, dict) - self.assertEqual({}, subject_category_scope) - - new_subject_category_scope_1 = { - "name": "france", - "description": "france", - } - - subject_category_scope_1 = self.admin_manager.add_subject_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id, - new_subject_category_scope_1) - subject_category_scope_1_id = subject_category_scope_1.keys()[0] - - new_subject_category_scope_2 = { - "name": "china", - "description": "china", - } - - subject_category_scope_2 = self.admin_manager.add_subject_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id, - new_subject_category_scope_2) - subject_category_scope_2_id = subject_category_scope_2.keys()[0] - - subject_category_assignments = self.authz_manager.get_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - admin_authz_subject_id, - subject_category_id - ) - self.assertIsInstance(subject_category_assignments, list) - self.assertEqual([], subject_category_assignments) - - subject_category_assignments = self.authz_manager.get_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - demo_authz_subject_id, - subject_category_id - ) - self.assertIsInstance(subject_category_assignments, list) - self.assertEqual([], subject_category_assignments) - - subject_category_assignments = self.admin_manager.add_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - admin_authz_subject_id, subject_category_id, subject_category_scope_1_id - ) - self.assertIsInstance(subject_category_assignments, list) - - self.assertEqual(len(subject_category_assignments), 1) - - subject_category_assignments = self.admin_manager.add_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - admin_authz_subject_id, subject_category_id, subject_category_scope_2_id - ) - 
self.assertIsInstance(subject_category_assignments, list) - self.assertEqual(len(subject_category_assignments), 2) - - subject_category_assignments = self.admin_manager.add_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - demo_authz_subject_id, subject_category_id, subject_category_scope_2_id - ) - self.assertIsInstance(subject_category_assignments, list) - self.assertEqual(len(subject_category_assignments), 1) - - subject_category_assignments = self.admin_manager.get_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - admin_authz_subject_id, subject_category_id - ) - self.assertIsInstance(subject_category_assignments, list) - self.assertEqual(len(subject_category_assignments), 2) - - self.admin_manager.del_subject_assignment( - admin_subject_id, - authz_ie_dict["id"], - admin_authz_subject_id, subject_category_id, subject_category_scope_2_id - ) - subject_category_assignments = self.admin_manager.get_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - admin_authz_subject_id, subject_category_id - ) - self.assertIsInstance(subject_category_assignments, list) - self.assertEqual(len(subject_category_assignments), 1) - - def test_object_category_assignment(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - objects_dict = self.authz_manager.get_objects_dict(admin_subject_id, authz_ie_dict["id"]) - - object_vm1 = self.admin_manager.add_object_dict(admin_subject_id, authz_ie_dict["id"], {"name": "vm1", "description": "vm1"}) 
- object_vm2 = self.admin_manager.add_object_dict(admin_subject_id, authz_ie_dict["id"], {"name": "vm2", "description": "vm2"}) - object_vm1_id = object_vm1.keys()[0] - object_vm2_id = object_vm2.keys()[0] - if not object_vm1_id or not object_vm2_id: - raise Exception("Cannot run tests, database is corrupted ? (need upload and list in objects)") - - object_categories = self.admin_manager.add_object_category_dict( - admin_subject_id, - authz_ie_dict["id"], - { - "name": "location", - "description": "location", - } - ) - - for object_category_id in object_categories: - object_category_scope = self.authz_manager.get_object_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - object_category_id) - self.assertIsInstance(object_category_scope, dict) - self.assertEqual({}, object_category_scope) - - new_object_category_scope_1 = { - "name": "france", - "description": "france", - } - - object_category_scope_1 = self.admin_manager.add_object_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - object_category_id, - new_object_category_scope_1) - object_category_scope_1_id = object_category_scope_1.keys()[0] - - new_object_category_scope_2 = { - "name": "china", - "description": "china", - } - - object_category_scope_2 = self.admin_manager.add_object_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - object_category_id, - new_object_category_scope_2) - object_category_scope_2_id = object_category_scope_2.keys()[0] - - object_category_assignments = self.authz_manager.get_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm1_id, - object_category_id - ) - self.assertIsInstance(object_category_assignments, list) - self.assertEqual([], object_category_assignments) - - object_category_assignments = self.authz_manager.get_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm2_id, - object_category_id - ) - self.assertIsInstance(object_category_assignments, list) - self.assertEqual([], object_category_assignments) - 
- object_category_assignments = self.admin_manager.add_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm1_id, object_category_id, object_category_scope_1_id - ) - self.assertIsInstance(object_category_assignments, list) - - self.assertEqual(len(object_category_assignments), 1) - - object_category_assignments = self.admin_manager.add_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm1_id, object_category_id, object_category_scope_2_id - ) - self.assertIsInstance(object_category_assignments, list) - self.assertEqual(len(object_category_assignments), 2) - - object_category_assignments = self.admin_manager.add_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm2_id, object_category_id, object_category_scope_2_id - ) - self.assertIsInstance(object_category_assignments, list) - self.assertEqual(len(object_category_assignments), 1) - - object_category_assignments = self.admin_manager.get_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm1_id, object_category_id - ) - self.assertIsInstance(object_category_assignments, list) - self.assertEqual(len(object_category_assignments), 2) - - self.admin_manager.del_object_assignment( - admin_subject_id, - authz_ie_dict["id"], - object_vm1_id, object_category_id, object_category_scope_2_id - ) - object_category_assignments = self.admin_manager.get_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm1_id, object_category_id - ) - self.assertIsInstance(object_category_assignments, list) - self.assertEqual(len(object_category_assignments), 1) - - def test_action_category_assignment(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - 
self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - actions_dict = self.authz_manager.get_actions_dict(admin_subject_id, authz_ie_dict["id"]) - - action_upload_id = None - action_list_id = None - for _action_id in actions_dict: - if actions_dict[_action_id]['name'] == 'upload': - action_upload_id = _action_id - if actions_dict[_action_id]['name'] == 'list': - action_list_id = _action_id - if not action_upload_id or not action_list_id: - raise Exception("Cannot run tests, database is corrupted ? (need upload and list in actions)") - - action_categories = self.admin_manager.add_action_category_dict( - admin_subject_id, - authz_ie_dict["id"], - { - "name": "swift", - "description": "swift actions", - } - ) - - for action_category_id in action_categories: - action_category_scope = self.authz_manager.get_action_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - action_category_id) - self.assertIsInstance(action_category_scope, dict) - self.assertEqual({}, action_category_scope) - - new_action_category_scope_1 = { - "name": "swift_admin", - "description": "action require admin rights", - } - - action_category_scope_1 = self.admin_manager.add_action_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - action_category_id, - new_action_category_scope_1) - action_category_scope_1_id = action_category_scope_1.keys()[0] - - new_action_category_scope_2 = { - "name": "swift_anonymous", - "description": "action require no right", - } - - action_category_scope_2 = self.admin_manager.add_action_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - action_category_id, - new_action_category_scope_2) - action_category_scope_2_id = action_category_scope_2.keys()[0] - - action_category_assignments = self.authz_manager.get_action_assignment_list( - 
admin_subject_id, - authz_ie_dict["id"], - action_upload_id, - action_category_id - ) - self.assertIsInstance(action_category_assignments, list) - self.assertEqual([], action_category_assignments) - - action_category_assignments = self.authz_manager.get_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_list_id, - action_category_id - ) - self.assertIsInstance(action_category_assignments, list) - self.assertEqual([], action_category_assignments) - - action_category_assignments = self.admin_manager.add_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_upload_id, action_category_id, action_category_scope_1_id - ) - self.assertIsInstance(action_category_assignments, list) - - self.assertEqual(len(action_category_assignments), 1) - - action_category_assignments = self.admin_manager.add_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_upload_id, action_category_id, action_category_scope_2_id - ) - self.assertIsInstance(action_category_assignments, list) - self.assertEqual(len(action_category_assignments), 2) - - action_category_assignments = self.admin_manager.add_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_list_id, action_category_id, action_category_scope_2_id - ) - self.assertIsInstance(action_category_assignments, list) - self.assertEqual(len(action_category_assignments), 1) - - action_category_assignments = self.admin_manager.get_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_upload_id, action_category_id - ) - self.assertIsInstance(action_category_assignments, list) - self.assertEqual(len(action_category_assignments), 2) - - self.admin_manager.del_action_assignment( - admin_subject_id, - authz_ie_dict["id"], - action_upload_id, action_category_id, action_category_scope_2_id - ) - action_category_assignments = self.admin_manager.get_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_upload_id, 
action_category_id - ) - self.assertIsInstance(action_category_assignments, list) - self.assertEqual(len(action_category_assignments), 1) - - def test_sub_meta_rules(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - aggregation_algorithm = self.admin_manager.get_aggregation_algorithm_id(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(aggregation_algorithm, dict) - - # TODO: need more tests on aggregation_algorithms (set and del) - - sub_meta_rules = self.admin_manager.get_sub_meta_rules_dict(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(sub_meta_rules, dict) - categories = { - "subject_categories": self.admin_manager.get_subject_categories_dict(admin_subject_id, authz_ie_dict["id"]), - "object_categories": self.admin_manager.get_object_categories_dict(admin_subject_id, authz_ie_dict["id"]), - "action_categories": self.admin_manager.get_action_categories_dict(admin_subject_id, authz_ie_dict["id"]) - } - for key, value in sub_meta_rules.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("action_categories", value) - self.assertIn("object_categories", value) - self.assertIn("subject_categories", value) - self.assertIn("algorithm", value) - self.assertIn("name", value) - for action_category_id in value["action_categories"]: - self.assertIn(action_category_id, categories["action_categories"]) - for object_category_id in value["object_categories"]: - self.assertIn(object_category_id, categories["object_categories"]) - for 
subject_category_id in value["subject_categories"]: - self.assertIn(subject_category_id, categories["subject_categories"]) - # TODO: need more tests (set and del) - - def test_sub_rules(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - sub_meta_rules = self.admin_manager.get_sub_meta_rules_dict(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(sub_meta_rules, dict) - - for relation_id in sub_meta_rules: - rules = self.admin_manager.get_rules_dict(admin_subject_id, authz_ie_dict["id"], relation_id) - rule_length = len(sub_meta_rules[relation_id]["subject_categories"]) + \ - len(sub_meta_rules[relation_id]["object_categories"]) + \ - len(sub_meta_rules[relation_id]["action_categories"]) + 1 - for rule_id in rules: - self.assertEqual(rule_length, len(rules[rule_id])) - rule = list(rules[rule_id]) - for cat, cat_func, func_name in ( - ("subject_categories", self.admin_manager.get_subject_scopes_dict, "subject_scope"), - ("action_categories", self.admin_manager.get_action_scopes_dict, "action_scope"), - ("object_categories", self.admin_manager.get_object_scopes_dict, "object_scope"), - ): - for cat_value in sub_meta_rules[relation_id][cat]: - scope = cat_func( - admin_subject_id, - authz_ie_dict["id"], - cat_value - ) - a_scope = rule.pop(0) - if type(a_scope) is not bool: - self.assertIn(a_scope, scope.keys()) - - # add a new subrule - - sub_rule = [] - for cat, cat_func, func_name in ( - ("subject_categories", self.admin_manager.get_subject_scopes_dict, 
"subject_scope"), - ("action_categories", self.admin_manager.get_action_scopes_dict, "action_scope"), - ("object_categories", self.admin_manager.get_object_scopes_dict, "object_scope"), - ): - for cat_value in sub_meta_rules[relation_id][cat]: - scope = cat_func( - admin_subject_id, - authz_ie_dict["id"], - cat_value - ) - sub_rule.append(scope.keys()[0]) - - sub_rule.append(False) - - sub_rules = self.admin_manager.add_rule_dict(admin_subject_id, authz_ie_dict["id"], relation_id, sub_rule) - self.assertIsInstance(sub_rules, dict) - self.assertIn(sub_rule, sub_rules.values()) - - for rule_id, rule_value in sub_rules.iteritems(): - for cat, cat_func, func_name in ( - ("subject_categories", self.admin_manager.get_subject_scopes_dict, "subject_category_scope"), - ("action_categories", self.admin_manager.get_action_scopes_dict, "action_category_scope"), - ("object_categories", self.admin_manager.get_object_scopes_dict, "object_category_scope"), - ): - for cat_value in sub_meta_rules[relation_id][cat]: - scope = cat_func( - admin_subject_id, - authz_ie_dict["id"], - cat_value - ) - a_scope = rule_value.pop(0) - self.assertIn(a_scope, scope.keys()) - - # TODO: add test for the delete function - - -class TestIntraExtensionAuthzManagerAuthzKO(tests.TestCase): - - def setUp(self): - self.useFixture(database.Database()) - super(TestIntraExtensionAuthzManagerAuthzKO, self).setUp() - self.load_fixtures(default_fixtures) - self.load_backends() - domain = {'id': "default", 'name': "default"} - self.resource_api.create_domain(domain['id'], domain) - self.admin = create_user(self, username="admin") - self.demo = create_user(self, username="demo") - ref = self.root_api.load_root_intra_extension_dict() - self.root_api.populate_default_data(ref) - self.root_intra_extension = self.root_api.get_root_extension_dict() - self.root_intra_extension_id = self.root_intra_extension.keys()[0] - self.ADMIN_ID = self.root_api.root_admin_id - self.authz_manager = self.authz_api - 
self.admin_manager = self.admin_api - - def tearDown(self): - # self.admin_manager.del_intra_extension(self.ADMIN_ID, self.root_intra_extension["id"]) - tests.TestCase.tearDown(self) - - def __get_key_from_value(self, value, values_dict): - return filter(lambda v: v[1] == value, values_dict.iteritems())[0][0] - - def load_extra_backends(self): - return { - "moonlog_api": LogManager(), - "tenant_api": TenantManager(), - "configuration_api": ConfigurationManager(), - "admin_api": IntraExtensionAdminManager(), - "authz_api": IntraExtensionAuthzManager(), - "root_api": IntraExtensionRootManager(), - # "resource_api": resource.Manager(), - } - - def config_overrides(self): - super(TestIntraExtensionAuthzManagerAuthzKO, self).config_overrides() - self.policy_directory = '/etc/keystone/policies' - self.root_policy_directory = 'policy_root' - self.config_fixture.config( - group='moon', - intraextension_driver='keystone.contrib.moon.backends.sql.IntraExtensionConnector') - self.config_fixture.config( - group='moon', - policy_directory=self.policy_directory) - self.config_fixture.config( - group='moon', - root_policy_directory=self.root_policy_directory) - - def test_delete_admin_intra_extension(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - self.assertRaises( - SubjectUnknown, - self.authz_manager.del_intra_extension, - uuid.uuid4().hex, - admin_ie_dict["id"]) - - def test_authz_exceptions(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, 
admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - # Test when subject is unknown - self.assertRaises( - SubjectUnknown, - self.authz_manager.authz, - tenant["id"], uuid.uuid4().hex, uuid.uuid4().hex, uuid.uuid4().hex - ) - - # Test when subject is known but not the object - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], authz_ie_dict['id'], 'demo').iteritems().next() - - # self.manager.add_subject_dict( - # admin_subject_id, - # ie_authz["id"], - # demo_user["id"] - # ) - - self.assertRaises( - ObjectUnknown, - self.authz_manager.authz, - tenant["id"], demo_subject_dict["keystone_id"], uuid.uuid4().hex, uuid.uuid4().hex - ) - - # Test when subject and object are known but not the action - my_object = {"name": "my_object", "description": "my_object description"} - _tmp = self.admin_manager.add_object_dict( - admin_subject_id, - authz_ie_dict["id"], - my_object - ) - my_object["id"] = _tmp.keys()[0] - - self.assertRaises( - ActionUnknown, - self.authz_manager.authz, - tenant["id"], demo_subject_dict["keystone_id"], my_object["name"], uuid.uuid4().hex - ) - - # Test when subject and object and action are known - my_action = {"name": "my_action", "description": "my_action description"} - _tmp = self.admin_manager.add_action_dict( - admin_subject_id, - authz_ie_dict["id"], - my_action - ) - my_action["id"] = _tmp.keys()[0] - - self.assertRaises( - AuthzException, - self.authz_manager.authz, - tenant["id"], demo_subject_dict["keystone_id"], my_object["name"], my_action["name"] - ) - - # Add a subject scope and test ObjectCategoryAssignmentOutOfScope - my_subject_category = {"name": "my_subject_category", "description": "my_subject_category description"} - _tmp = self.admin_manager.add_subject_category_dict( - admin_subject_id, - authz_ie_dict["id"], - my_subject_category - ) - my_subject_category["id"] = _tmp.keys()[0] - - 
my_subject_scope = {"name": "my_subject_scope", "description": "my_subject_scope description"} - _tmp = self.admin_manager.add_subject_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - my_subject_category["id"], - my_subject_scope - ) - my_subject_scope["id"] = _tmp.keys()[0] - - self.assertRaises( - AuthzException, - self.authz_manager.authz, - tenant["id"], demo_subject_dict["keystone_id"], my_object["name"], my_action["name"] - ) - - # Add an object scope and test ActionCategoryAssignmentOutOfScope - my_object_category = {"name": "my_object_category", "description": "my_object_category description"} - _tmp = self.admin_manager.add_object_category_dict( - admin_subject_id, - authz_ie_dict["id"], - my_object_category - ) - my_object_category["id"] = _tmp.keys()[0] - - my_object_scope = {"name": "my_object_scope", "description": "my_object_scope description"} - _tmp = self.admin_manager.add_object_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - my_object_category["id"], - my_object_scope - ) - my_object_scope["id"] = _tmp.keys()[0] - - self.assertRaises( - AuthzException, - self.authz_manager.authz, - tenant["id"], demo_subject_dict["keystone_id"], my_object["name"], my_action["name"] - ) - - # Add an action scope and test SubjectCategoryAssignmentUnknown - my_action_category = {"name": "my_action_category", "description": "my_action_category description"} - _tmp = self.admin_manager.add_action_category_dict( - admin_subject_id, - authz_ie_dict["id"], - my_action_category - ) - my_action_category["id"] = _tmp.keys()[0] - - my_action_scope = {"name": "my_action_scope", "description": "my_action_scope description"} - _tmp = self.admin_manager.add_action_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - my_action_category["id"], - my_action_scope - ) - my_action_scope["id"] = _tmp.keys()[0] - - self.assertRaises( - AuthzException, - self.authz_manager.authz, - tenant["id"], demo_subject_dict["keystone_id"], my_object["name"], my_action["name"] - 
) - - # Add a subject assignment and test ObjectCategoryAssignmentUnknown - self.admin_manager.add_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - demo_subject_id, - my_subject_category["id"], - my_subject_scope["id"] - ) - - self.assertRaises( - AuthzException, - self.authz_manager.authz, - tenant["id"], demo_subject_dict["keystone_id"], my_object["name"], my_action["name"] - ) - - # Add an object assignment and test ActionCategoryAssignmentUnknown - self.admin_manager.add_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - my_object["id"], - my_object_category["id"], - my_object_scope["id"] - ) - - self.assertRaises( - AuthzException, - self.authz_manager.authz, - tenant["id"], demo_subject_dict["keystone_id"], my_object["name"], my_action["name"] - ) - - # Add an action assignment and test RuleUnknown - self.admin_manager.add_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - my_action["id"], - my_action_category["id"], - my_action_scope["id"] - ) - - self.assertRaises( - AuthzException, - self.authz_manager.authz, - tenant["id"], admin_subject_dict["keystone_id"], my_object["name"], my_action["name"] - ) - - # Add the correct rule and test that no exception is raised - my_meta_rule = { - "name": "my_meta_rule", - "algorithm": "test", - "subject_categories": [my_subject_category["id"], ], - "action_categories": [my_action_category["id"], ], - "object_categories": [my_object_category["id"], ] - } - sub_meta_rules_dict = self.authz_manager.get_sub_meta_rules_dict( - admin_subject_id, - authz_ie_dict["id"] - ) - - self.assertRaises( - SubMetaRuleAlgorithmNotExisting, - self.admin_manager.add_sub_meta_rule_dict, - admin_subject_id, - authz_ie_dict["id"], - my_meta_rule - ) - - # TODO: the next request should be called with demo_subject_id - # but the demo user has no right in the root intra_extension - # algorithms = self.configuration_api.get_sub_meta_rule_algorithms_dict(admin_subject_id) - # for 
algorithm_id in algorithms: - # if algorithms[algorithm_id]["name"] == "inclusion": - # my_meta_rule["algorithm"] = algorithm_id - my_meta_rule['algorithm'] = 'inclusion' - - sub_meta_rule = self.admin_manager.add_sub_meta_rule_dict( - admin_subject_id, - authz_ie_dict["id"], - my_meta_rule - ) - sub_meta_rule_id, sub_meta_rule_dict = None, None - for key, value in sub_meta_rule.iteritems(): - if value["name"] == my_meta_rule["name"]: - sub_meta_rule_id, sub_meta_rule_dict = key, value - break - - aggregation_algorithms = self.configuration_api.get_aggregation_algorithms_dict(admin_subject_id) - for _id in aggregation_algorithms: - if aggregation_algorithms[_id]["name"] == "one_true": - agg = self.admin_manager.set_aggregation_algorithm_id(admin_subject_id, authz_ie_dict["id"], _id) - - rule = self.admin_manager.add_rule_dict( - admin_subject_id, - authz_ie_dict["id"], - sub_meta_rule_id, - [my_subject_scope["id"], my_action_scope["id"], my_object_scope["id"], True] - ) - - self.assertRaises( - AuthzException, - self.authz_manager.authz, - tenant["id"], admin_subject_dict["keystone_id"], my_object["name"], my_action["name"] - ) - - result = self.authz_manager.authz(tenant["id"], demo_subject_dict["keystone_id"], my_object["name"], my_action["name"]) - self.assertIsInstance(result, dict) - self.assertIn('authz', result) - self.assertEquals(result['authz'], True) - - def test_subjects(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - subjects = 
self.authz_manager.get_subjects_dict(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(subjects, dict) - for key, value in subjects.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIn("description", value) - self.assertIn("keystone_name", value) - self.assertIn("keystone_id", value) - - create_user(self, "subject_test") - new_subject = {"name": "subject_test", "description": "subject_test"} - self.assertRaises( - AuthzException, - self.admin_manager.add_subject_dict, - demo_subject_id, admin_ie_dict["id"], new_subject) - - subjects = self.admin_manager.add_subject_dict(admin_subject_id, authz_ie_dict["id"], new_subject) - _subjects = dict(subjects) - self.assertEqual(len(_subjects.keys()), 1) - new_subject["id"] = _subjects.keys()[0] - value = subjects[new_subject["id"]] - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertEqual(value["name"], new_subject["name"]) - self.assertIn("description", value) - self.assertEqual(value["description"], new_subject["description"]) - - # Delete the new subject - self.assertRaises( - AuthzException, - self.authz_manager.del_subject, - demo_subject_id, authz_ie_dict["id"], new_subject["id"]) - - self.admin_manager.del_subject(admin_subject_id, authz_ie_dict["id"], new_subject["id"]) - subjects = self.authz_manager.get_subjects_dict(admin_subject_id, authz_ie_dict["id"]) - for key, value in subjects.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIsNot(new_subject["name"], value["name"]) - self.assertIn("description", value) - - def test_objects(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 
'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - objects = self.authz_manager.get_objects_dict(admin_subject_id, authz_ie_dict["id"]) - objects_id_list = [] - self.assertIsInstance(objects, dict) - for key, value in objects.iteritems(): - objects_id_list.append(key) - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIn("description", value) - - # create_user(self, "subject_test") - new_object = {"name": "object_test", "description": "object_test"} - self.assertRaises( - AuthzException, - self.authz_manager.add_object_dict, - demo_subject_id, admin_ie_dict["id"], new_object) - - self.assertRaises( - ObjectsWriteNoAuthorized, - self.admin_manager.add_object_dict, - admin_subject_id, admin_ie_dict["id"], new_object - ) - - # Delete the new object - for key in objects_id_list: - self.assertRaises( - AuthzException, - self.authz_manager.del_object, - demo_subject_id, authz_ie_dict["id"], key) - self.assertRaises( - AuthzException, - self.authz_manager.del_object, - admin_subject_id, authz_ie_dict["id"], key) - - for key in objects_id_list: - self.assertRaises( - ObjectsWriteNoAuthorized, - self.admin_manager.del_object, - demo_subject_id, admin_ie_dict["id"], key) - self.assertRaises( - ObjectsWriteNoAuthorized, - self.admin_manager.del_object, - admin_subject_id, admin_ie_dict["id"], key) - - def test_actions(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], 
admin_ie_dict['id'], 'demo').iteritems().next() - actions = self.authz_manager.get_actions_dict(admin_subject_id, authz_ie_dict["id"]) - actions_id_list = [] - self.assertIsInstance(actions, dict) - for key, value in actions.iteritems(): - actions_id_list.append(key) - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIn("description", value) - - create_user(self, "subject_test") - new_action = {"name": "action_test", "description": "action_test"} - self.assertRaises( - AuthzException, - self.authz_manager.add_action_dict, - demo_subject_id, admin_ie_dict["id"], new_action) - - self.assertRaises( - ActionsWriteNoAuthorized, - self.admin_manager.add_action_dict, - admin_subject_id, admin_ie_dict["id"], new_action - ) - - # Delete all actions - for key in actions_id_list: - self.assertRaises( - AuthzException, - self.authz_manager.del_action, - demo_subject_id, authz_ie_dict["id"], key) - self.assertRaises( - AuthzException, - self.authz_manager.del_action, - admin_subject_id, authz_ie_dict["id"], key) - - for key in actions_id_list: - self.assertRaises( - ActionsWriteNoAuthorized, - self.admin_manager.del_action, - demo_subject_id, admin_ie_dict["id"], key) - self.assertRaises( - ActionsWriteNoAuthorized, - self.admin_manager.del_action, - admin_subject_id, admin_ie_dict["id"], key) - - def test_subject_categories(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - subject_categories = self.authz_manager.get_subject_categories_dict(admin_subject_id, 
authz_ie_dict["id"]) - self.assertIsInstance(subject_categories, dict) - for key, value in subject_categories.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIn("description", value) - - new_subject_category = {"name": "subject_category_test", "description": "subject_category_test"} - self.assertRaises( - AuthzException, - self.authz_manager.add_subject_category_dict, - demo_subject_id, admin_ie_dict["id"], new_subject_category) - - subject_categories = self.admin_manager.add_subject_category_dict(admin_subject_id, authz_ie_dict["id"], new_subject_category) - _subject_categories = dict(subject_categories) - self.assertEqual(len(_subject_categories.keys()), 1) - new_subject_category["id"] = _subject_categories.keys()[0] - value = subject_categories[new_subject_category["id"]] - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertEqual(value["name"], new_subject_category["name"]) - self.assertIn("description", value) - self.assertEqual(value["description"], new_subject_category["description"]) - - # Delete the new subject_category - self.assertRaises( - AuthzException, - self.authz_manager.del_subject_category, - demo_subject_id, authz_ie_dict["id"], new_subject_category["id"]) - - self.admin_manager.del_subject_category(admin_subject_id, authz_ie_dict["id"], new_subject_category["id"]) - subject_categories = self.authz_manager.get_subject_categories_dict(admin_subject_id, authz_ie_dict["id"]) - for key, value in subject_categories.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIsNot(new_subject_category["name"], value["name"]) - self.assertIn("description", value) - - def test_object_categories(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, 
admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - object_categories = self.authz_manager.get_object_categories_dict(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(object_categories, dict) - for key, value in object_categories.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIn("description", value) - - new_object_category = {"name": "object_category_test", "description": "object_category_test"} - self.assertRaises( - AuthzException, - self.authz_manager.add_object_category_dict, - demo_subject_id, admin_ie_dict["id"], new_object_category) - - object_categories = self.admin_manager.add_object_category_dict(admin_subject_id, authz_ie_dict["id"], new_object_category) - _object_categories = dict(object_categories) - self.assertEqual(len(_object_categories.keys()), 1) - new_object_category["id"] = _object_categories.keys()[0] - value = object_categories[new_object_category["id"]] - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertEqual(value["name"], new_object_category["name"]) - self.assertIn("description", value) - self.assertEqual(value["description"], new_object_category["description"]) - - # Delete the new object_category - self.assertRaises( - AuthzException, - self.authz_manager.del_object_category, - demo_subject_id, authz_ie_dict["id"], new_object_category["id"]) - - self.admin_manager.del_object_category(admin_subject_id, authz_ie_dict["id"], new_object_category["id"]) - object_categories = self.authz_manager.get_object_categories_dict(admin_subject_id, authz_ie_dict["id"]) - for key, value in object_categories.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - 
self.assertIsNot(new_object_category["name"], value["name"]) - self.assertIn("description", value) - - def test_action_categories(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - action_categories = self.authz_manager.get_action_categories_dict(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(action_categories, dict) - for key, value in action_categories.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIn("description", value) - - new_action_category = {"name": "action_category_test", "description": "action_category_test"} - self.assertRaises( - AuthzException, - self.authz_manager.add_action_category_dict, - demo_subject_id, admin_ie_dict["id"], new_action_category) - - action_categories = self.admin_manager.add_action_category_dict(admin_subject_id, authz_ie_dict["id"], new_action_category) - _action_categories = dict(action_categories) - self.assertEqual(len(_action_categories.keys()), 1) - new_action_category["id"] = _action_categories.keys()[0] - value = action_categories[new_action_category["id"]] - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertEqual(value["name"], new_action_category["name"]) - self.assertIn("description", value) - self.assertEqual(value["description"], new_action_category["description"]) - - # Delete the new action_category - self.assertRaises( - AuthzException, - self.authz_manager.del_action_category, - demo_subject_id, authz_ie_dict["id"], 
new_action_category["id"]) - - self.admin_manager.del_action_category(admin_subject_id, authz_ie_dict["id"], new_action_category["id"]) - action_categories = self.authz_manager.get_action_categories_dict(admin_subject_id, authz_ie_dict["id"]) - for key, value in action_categories.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("name", value) - self.assertIsNot(new_action_category["name"], value["name"]) - self.assertIn("description", value) - - def test_subject_category_scope(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - subject_categories = self.admin_manager.add_subject_category_dict( - admin_subject_id, - authz_ie_dict["id"], - { - "name": "country", - "description": "country", - } - ) - - for subject_category_id in subject_categories: - - subject_category_scope = self.authz_manager.get_subject_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id) - self.assertIsInstance(subject_category_scope, dict) - self.assertEqual({}, subject_category_scope) - - new_subject_category_scope = { - "name": "france", - "description": "france", - } - - self.assertRaises( - AuthzException, - self.admin_manager.add_subject_scope_dict, - demo_subject_id, authz_ie_dict["id"], subject_category_id, new_subject_category_scope) - - subject_category_scope = self.admin_manager.add_subject_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id, - new_subject_category_scope) - self.assertIsInstance(subject_category_scope, dict) - 
self.assertEqual(len(subject_category_scope.keys()), 1) - subject_category_scope_id = subject_category_scope.keys()[0] - subject_category_scope_value = subject_category_scope[subject_category_scope_id] - self.assertIn("name", subject_category_scope_value) - self.assertEqual(new_subject_category_scope["name"], subject_category_scope_value["name"]) - self.assertIn("description", subject_category_scope_value) - self.assertEqual(new_subject_category_scope["description"], subject_category_scope_value["description"]) - - # Delete the new subject_category_scope - self.assertRaises( - AuthzException, - self.admin_manager.del_subject_scope, - demo_subject_id, authz_ie_dict["id"], subject_category_id, subject_category_scope_id) - - self.admin_manager.del_subject_scope( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id, - subject_category_scope_id) - subject_category_scope = self.admin_manager.get_subject_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id) - self.assertIsInstance(subject_category_scope, dict) - self.assertNotIn(subject_category_scope_id, subject_category_scope.keys()) - - def test_object_category_scope(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - object_categories = self.admin_manager.add_object_category_dict( - admin_subject_id, - authz_ie_dict["id"], - { - "name": "country", - "description": "country", - } - ) - - for object_category_id in object_categories: - - object_category_scope = 
self.authz_manager.get_object_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - object_category_id) - self.assertIsInstance(object_category_scope, dict) - self.assertEqual({}, object_category_scope) - - new_object_category_scope = { - "name": "france", - "description": "france", - } - - self.assertRaises( - AuthzException, - self.admin_manager.add_object_scope_dict, - demo_subject_id, authz_ie_dict["id"], object_category_id, new_object_category_scope) - - object_category_scope = self.admin_manager.add_object_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - object_category_id, - new_object_category_scope) - self.assertIsInstance(object_category_scope, dict) - self.assertEqual(len(object_category_scope.keys()), 1) - object_category_scope_id = object_category_scope.keys()[0] - object_category_scope_value = object_category_scope[object_category_scope_id] - self.assertIn("name", object_category_scope_value) - self.assertEqual(new_object_category_scope["name"], object_category_scope_value["name"]) - self.assertIn("description", object_category_scope_value) - self.assertEqual(new_object_category_scope["description"], object_category_scope_value["description"]) - - # Delete the new object_category_scope - self.assertRaises( - AuthzException, - self.admin_manager.del_object_scope, - demo_subject_id, authz_ie_dict["id"], object_category_id, object_category_scope_id) - - self.admin_manager.del_object_scope( - admin_subject_id, - authz_ie_dict["id"], - object_category_id, - object_category_scope_id) - object_category_scope = self.admin_manager.get_object_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - object_category_id) - self.assertIsInstance(object_category_scope, dict) - self.assertNotIn(object_category_scope_id, object_category_scope.keys()) - - def test_action_category_scope(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = 
create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - action_categories = self.admin_manager.add_action_category_dict( - admin_subject_id, - authz_ie_dict["id"], - { - "name": "swift", - "description": "swift actions", - } - ) - - for action_category_id in action_categories: - - action_category_scope = self.authz_manager.get_action_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - action_category_id) - self.assertIsInstance(action_category_scope, dict) - self.assertEqual({}, action_category_scope) - - new_action_category_scope = { - "name": "get", - "description": "get swift files", - } - - self.assertRaises( - AuthzException, - self.admin_manager.add_action_scope_dict, - demo_subject_id, authz_ie_dict["id"], action_category_id, new_action_category_scope) - - action_category_scope = self.admin_manager.add_action_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - action_category_id, - new_action_category_scope) - self.assertIsInstance(action_category_scope, dict) - self.assertEqual(len(action_category_scope.keys()), 1) - action_category_scope_id = action_category_scope.keys()[0] - action_category_scope_value = action_category_scope[action_category_scope_id] - self.assertIn("name", action_category_scope_value) - self.assertEqual(new_action_category_scope["name"], action_category_scope_value["name"]) - self.assertIn("description", action_category_scope_value) - self.assertEqual(new_action_category_scope["description"], action_category_scope_value["description"]) - - # Delete the new action_category_scope - self.assertRaises( - AuthzException, - self.admin_manager.del_action_scope, - demo_subject_id, authz_ie_dict["id"], 
action_category_id, action_category_scope_id) - - self.admin_manager.del_action_scope( - admin_subject_id, - authz_ie_dict["id"], - action_category_id, - action_category_scope_id) - action_category_scope = self.admin_manager.get_action_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - action_category_id) - self.assertIsInstance(action_category_scope, dict) - self.assertNotIn(action_category_scope_id, action_category_scope.keys()) - - def test_subject_category_assignment(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - admin_authz_subject_id, admin_authz_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], authz_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - demo_authz_subject_id, demo_authz_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], authz_ie_dict['id'], 'demo').iteritems().next() - - subjects_dict = self.authz_manager.get_subjects_dict(admin_subject_id, authz_ie_dict["id"]) - - subject_categories = self.admin_manager.add_subject_category_dict( - admin_subject_id, - authz_ie_dict["id"], - { - "name": "country", - "description": "country", - } - ) - - for subject_category_id in subject_categories: - subject_category_scope = self.authz_manager.get_subject_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id) - self.assertIsInstance(subject_category_scope, dict) - self.assertEqual({}, subject_category_scope) - - new_subject_category_scope_1 = { - "name": "france", - 
"description": "france", - } - - subject_category_scope_1 = self.admin_manager.add_subject_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id, - new_subject_category_scope_1) - subject_category_scope_1_id = subject_category_scope_1.keys()[0] - - new_subject_category_scope_2 = { - "name": "china", - "description": "china", - } - - subject_category_scope_2 = self.admin_manager.add_subject_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - subject_category_id, - new_subject_category_scope_2) - subject_category_scope_2_id = subject_category_scope_2.keys()[0] - - subject_category_assignments = self.authz_manager.get_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - admin_authz_subject_id, - subject_category_id - ) - self.assertIsInstance(subject_category_assignments, list) - self.assertEqual([], subject_category_assignments) - - subject_category_assignments = self.authz_manager.get_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - demo_authz_subject_id, - subject_category_id - ) - self.assertIsInstance(subject_category_assignments, list) - self.assertEqual([], subject_category_assignments) - - self.assertRaises( - AuthzException, - self.authz_manager.add_subject_assignment_list, - demo_subject_id, authz_ie_dict["id"], - admin_authz_subject_id, subject_category_id, subject_category_scope_1_id - ) - - self.assertRaises( - AuthzException, - self.authz_manager.add_subject_assignment_list, - demo_subject_id, authz_ie_dict["id"], - demo_authz_subject_id, subject_category_id, subject_category_scope_2_id - ) - - subject_category_assignments = self.admin_manager.add_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - admin_authz_subject_id, subject_category_id, subject_category_scope_1_id - ) - self.assertIsInstance(subject_category_assignments, list) - - self.assertEqual(len(subject_category_assignments), 1) - - subject_category_assignments = self.admin_manager.add_subject_assignment_list( 
- admin_subject_id, - authz_ie_dict["id"], - admin_authz_subject_id, subject_category_id, subject_category_scope_2_id - ) - self.assertIsInstance(subject_category_assignments, list) - self.assertEqual(len(subject_category_assignments), 2) - - subject_category_assignments = self.admin_manager.add_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - demo_authz_subject_id, subject_category_id, subject_category_scope_2_id - ) - self.assertIsInstance(subject_category_assignments, list) - self.assertEqual(len(subject_category_assignments), 1) - - subject_category_assignments = self.admin_manager.get_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - admin_authz_subject_id, subject_category_id - ) - self.assertIsInstance(subject_category_assignments, list) - self.assertEqual(len(subject_category_assignments), 2) - - self.assertRaises( - AuthzException, - self.admin_manager.del_subject_assignment, - demo_subject_id, authz_ie_dict["id"], - demo_authz_subject_id, subject_category_id, subject_category_scope_2_id - ) - - self.admin_manager.del_subject_assignment( - admin_subject_id, - authz_ie_dict["id"], - admin_authz_subject_id, subject_category_id, subject_category_scope_2_id - ) - subject_category_assignments = self.admin_manager.get_subject_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - admin_authz_subject_id, subject_category_id - ) - self.assertIsInstance(subject_category_assignments, list) - self.assertEqual(len(subject_category_assignments), 1) - - self.assertRaises( - SubjectAssignmentUnknown, - self.admin_manager.del_subject_assignment, - admin_subject_id, - authz_ie_dict["id"], - admin_authz_subject_id, subject_category_id, subject_category_scope_2_id - ) - - def test_object_category_assignment(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], 
admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - objects_dict = self.authz_manager.get_objects_dict(admin_subject_id, authz_ie_dict["id"]) - - object_vm1 = self.admin_manager.add_object_dict(admin_subject_id, authz_ie_dict["id"], {"name": "vm1", "description": "vm1"}) - object_vm2 = self.admin_manager.add_object_dict(admin_subject_id, authz_ie_dict["id"], {"name": "vm2", "description": "vm2"}) - object_vm1_id = object_vm1.keys()[0] - object_vm2_id = object_vm2.keys()[0] - if not object_vm1_id or not object_vm2_id: - raise Exception("Cannot run tests, database is corrupted ? (need upload and list in objects)") - - object_categories = self.admin_manager.add_object_category_dict( - admin_subject_id, - authz_ie_dict["id"], - { - "name": "location", - "description": "location", - } - ) - - for object_category_id in object_categories: - object_category_scope = self.authz_manager.get_object_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - object_category_id) - self.assertIsInstance(object_category_scope, dict) - self.assertEqual({}, object_category_scope) - - new_object_category_scope_1 = { - "name": "france", - "description": "france", - } - - object_category_scope_1 = self.admin_manager.add_object_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - object_category_id, - new_object_category_scope_1) - object_category_scope_1_id = object_category_scope_1.keys()[0] - - new_object_category_scope_2 = { - "name": "china", - "description": "china", - } - - object_category_scope_2 = self.admin_manager.add_object_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - object_category_id, - new_object_category_scope_2) - object_category_scope_2_id = 
object_category_scope_2.keys()[0] - - object_category_assignments = self.authz_manager.get_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm1_id, - object_category_id - ) - self.assertIsInstance(object_category_assignments, list) - self.assertEqual([], object_category_assignments) - - object_category_assignments = self.authz_manager.get_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm2_id, - object_category_id - ) - self.assertIsInstance(object_category_assignments, list) - self.assertEqual([], object_category_assignments) - - self.assertRaises( - AuthzException, - self.authz_manager.add_object_assignment_list, - demo_subject_id, authz_ie_dict["id"], - object_vm1_id, object_category_id, object_category_scope_1_id - ) - - self.assertRaises( - AuthzException, - self.authz_manager.add_object_assignment_list, - demo_subject_id, authz_ie_dict["id"], - object_vm2_id, object_category_id, object_category_scope_2_id - ) - - object_category_assignments = self.admin_manager.add_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm1_id, object_category_id, object_category_scope_1_id - ) - self.assertIsInstance(object_category_assignments, list) - - self.assertEqual(len(object_category_assignments), 1) - - object_category_assignments = self.admin_manager.add_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm1_id, object_category_id, object_category_scope_2_id - ) - self.assertIsInstance(object_category_assignments, list) - self.assertEqual(len(object_category_assignments), 2) - - object_category_assignments = self.admin_manager.add_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm2_id, object_category_id, object_category_scope_2_id - ) - self.assertIsInstance(object_category_assignments, list) - self.assertEqual(len(object_category_assignments), 1) - - object_category_assignments = self.admin_manager.get_object_assignment_list( - 
admin_subject_id, - authz_ie_dict["id"], - object_vm1_id, object_category_id - ) - self.assertIsInstance(object_category_assignments, list) - self.assertEqual(len(object_category_assignments), 2) - - self.assertRaises( - AuthzException, - self.admin_manager.del_object_assignment, - demo_subject_id, authz_ie_dict["id"], - object_vm2_id, object_category_id, object_category_scope_2_id - ) - - self.admin_manager.del_object_assignment( - admin_subject_id, - authz_ie_dict["id"], - object_vm1_id, object_category_id, object_category_scope_2_id - ) - object_category_assignments = self.admin_manager.get_object_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - object_vm1_id, object_category_id - ) - self.assertIsInstance(object_category_assignments, list) - self.assertEqual(len(object_category_assignments), 1) - - self.assertRaises( - ObjectAssignmentUnknown, - self.admin_manager.del_object_assignment, - admin_subject_id, - authz_ie_dict["id"], - object_vm1_id, object_category_id, object_category_scope_2_id - ) - - def test_action_category_assignment(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - actions_dict = self.authz_manager.get_actions_dict(admin_subject_id, authz_ie_dict["id"]) - - action_upload_id = None - action_list_id = None - for _action_id in actions_dict: - if actions_dict[_action_id]['name'] == 'upload': - action_upload_id = _action_id - if actions_dict[_action_id]['name'] == 'list': - action_list_id = _action_id - if not action_upload_id or not 
action_list_id: - raise Exception("Cannot run tests, database is corrupted ? (need upload and list in actions)") - - action_categories = self.admin_manager.add_action_category_dict( - admin_subject_id, - authz_ie_dict["id"], - { - "name": "swift", - "description": "swift actions", - } - ) - - for action_category_id in action_categories: - action_category_scope = self.authz_manager.get_action_scopes_dict( - admin_subject_id, - authz_ie_dict["id"], - action_category_id) - self.assertIsInstance(action_category_scope, dict) - self.assertEqual({}, action_category_scope) - - new_action_category_scope_1 = { - "name": "swift_admin", - "description": "action require admin rights", - } - - action_category_scope_1 = self.admin_manager.add_action_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - action_category_id, - new_action_category_scope_1) - action_category_scope_1_id = action_category_scope_1.keys()[0] - - new_action_category_scope_2 = { - "name": "swift_anonymous", - "description": "action require no right", - } - - action_category_scope_2 = self.admin_manager.add_action_scope_dict( - admin_subject_id, - authz_ie_dict["id"], - action_category_id, - new_action_category_scope_2) - action_category_scope_2_id = action_category_scope_2.keys()[0] - - action_category_assignments = self.authz_manager.get_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_upload_id, - action_category_id - ) - self.assertIsInstance(action_category_assignments, list) - self.assertEqual([], action_category_assignments) - - action_category_assignments = self.authz_manager.get_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_list_id, - action_category_id - ) - self.assertIsInstance(action_category_assignments, list) - self.assertEqual([], action_category_assignments) - - self.assertRaises( - AuthzException, - self.authz_manager.add_action_assignment_list, - demo_subject_id, authz_ie_dict["id"], - action_upload_id, action_category_id, 
action_category_scope_1_id - ) - - self.assertRaises( - AuthzException, - self.authz_manager.add_action_assignment_list, - demo_subject_id, authz_ie_dict["id"], - action_list_id, action_category_id, action_category_scope_2_id - ) - - action_category_assignments = self.admin_manager.add_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_upload_id, action_category_id, action_category_scope_1_id - ) - self.assertIsInstance(action_category_assignments, list) - - self.assertEqual(len(action_category_assignments), 1) - - action_category_assignments = self.admin_manager.add_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_upload_id, action_category_id, action_category_scope_2_id - ) - self.assertIsInstance(action_category_assignments, list) - self.assertEqual(len(action_category_assignments), 2) - - action_category_assignments = self.admin_manager.add_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_list_id, action_category_id, action_category_scope_2_id - ) - self.assertIsInstance(action_category_assignments, list) - self.assertEqual(len(action_category_assignments), 1) - - action_category_assignments = self.admin_manager.get_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_upload_id, action_category_id - ) - self.assertIsInstance(action_category_assignments, list) - self.assertEqual(len(action_category_assignments), 2) - - self.assertRaises( - AuthzException, - self.admin_manager.del_action_assignment, - demo_subject_id, authz_ie_dict["id"], - action_list_id, action_category_id, action_category_scope_2_id - ) - - self.admin_manager.del_action_assignment( - admin_subject_id, - authz_ie_dict["id"], - action_upload_id, action_category_id, action_category_scope_2_id - ) - action_category_assignments = self.admin_manager.get_action_assignment_list( - admin_subject_id, - authz_ie_dict["id"], - action_upload_id, action_category_id - ) - 
self.assertIsInstance(action_category_assignments, list) - self.assertEqual(len(action_category_assignments), 1) - - self.assertRaises( - ActionAssignmentUnknown, - self.admin_manager.del_action_assignment, - admin_subject_id, - authz_ie_dict["id"], - action_upload_id, action_category_id, action_category_scope_2_id - ) - - def test_sub_meta_rules(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - aggregation_algorithm = self.admin_manager.get_aggregation_algorithm_id(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(aggregation_algorithm, dict) - - # TODO: need more tests on aggregation_algorithms (set and del) - - sub_meta_rules = self.admin_manager.get_sub_meta_rules_dict(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(sub_meta_rules, dict) - categories = { - "subject_categories": self.admin_manager.get_subject_categories_dict(admin_subject_id, authz_ie_dict["id"]), - "object_categories": self.admin_manager.get_object_categories_dict(admin_subject_id, authz_ie_dict["id"]), - "action_categories": self.admin_manager.get_action_categories_dict(admin_subject_id, authz_ie_dict["id"]) - } - for key, value in sub_meta_rules.iteritems(): - self.assertIsInstance(value, dict) - self.assertIn("action_categories", value) - self.assertIn("object_categories", value) - self.assertIn("subject_categories", value) - self.assertIn("algorithm", value) - self.assertIn("name", value) - for action_category_id in value["action_categories"]: - 
self.assertIn(action_category_id, categories["action_categories"]) - for object_category_id in value["object_categories"]: - self.assertIn(object_category_id, categories["object_categories"]) - for subject_category_id in value["subject_categories"]: - self.assertIn(subject_category_id, categories["subject_categories"]) - # TODO: need more tests (set and del) - - def test_sub_rules(self): - authz_ie_dict = create_intra_extension(self, "policy_authz") - admin_ie_dict = create_intra_extension(self, "policy_rbac_admin") - tenant, mapping = create_mapping(self, "demo", authz_ie_dict['id'], admin_ie_dict['id']) - - admin_subject_id, admin_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'admin').iteritems().next() - demo_subject_id, demo_subject_dict = \ - self.admin_api.get_subject_dict_from_keystone_name(tenant['id'], admin_ie_dict['id'], 'demo').iteritems().next() - - sub_meta_rules = self.admin_manager.get_sub_meta_rules_dict(admin_subject_id, authz_ie_dict["id"]) - self.assertIsInstance(sub_meta_rules, dict) - - for relation_id in sub_meta_rules: - rules = self.admin_manager.get_rules_dict(admin_subject_id, authz_ie_dict["id"], relation_id) - rule_length = len(sub_meta_rules[relation_id]["subject_categories"]) + \ - len(sub_meta_rules[relation_id]["object_categories"]) + \ - len(sub_meta_rules[relation_id]["action_categories"]) + 1 - for rule_id in rules: - self.assertEqual(rule_length, len(rules[rule_id])) - rule = list(rules[rule_id]) - for cat, cat_func, func_name in ( - ("subject_categories", self.admin_manager.get_subject_scopes_dict, "subject_scope"), - ("action_categories", self.admin_manager.get_action_scopes_dict, "action_scope"), - ("object_categories", self.admin_manager.get_object_scopes_dict, "object_scope"), - ): - for cat_value in sub_meta_rules[relation_id][cat]: - scope = cat_func( - admin_subject_id, - authz_ie_dict["id"], - cat_value - ) - a_scope = rule.pop(0) - if type(a_scope) is not bool: 
- self.assertIn(a_scope, scope.keys()) - - # add a new subrule - - sub_rule = [] - for cat, cat_func, func_name in ( - ("subject_categories", self.admin_manager.get_subject_scopes_dict, "subject_scope"), - ("action_categories", self.admin_manager.get_action_scopes_dict, "action_scope"), - ("object_categories", self.admin_manager.get_object_scopes_dict, "object_scope"), - ): - for cat_value in sub_meta_rules[relation_id][cat]: - scope = cat_func( - admin_subject_id, - authz_ie_dict["id"], - cat_value - ) - sub_rule.append(scope.keys()[0]) - - sub_rule.append(False) - self.assertRaises( - AuthzException, - self.admin_manager.add_rule_dict, - demo_subject_id, authz_ie_dict["id"], relation_id, sub_rule - ) - - sub_rules = self.admin_manager.add_rule_dict(admin_subject_id, authz_ie_dict["id"], relation_id, sub_rule) - self.assertIsInstance(sub_rules, dict) - self.assertIn(sub_rule, sub_rules.values()) - - for rule_id, rule_value in sub_rules.iteritems(): - for cat, cat_func, func_name in ( - ("subject_categories", self.admin_manager.get_subject_scopes_dict, "subject_category_scope"), - ("action_categories", self.admin_manager.get_action_scopes_dict, "action_category_scope"), - ("object_categories", self.admin_manager.get_object_scopes_dict, "object_category_scope"), - ): - for cat_value in sub_meta_rules[relation_id][cat]: - scope = cat_func( - admin_subject_id, - authz_ie_dict["id"], - cat_value - ) - a_scope = rule_value.pop(0) - self.assertIn(a_scope, scope.keys()) - - # TODO: add test for the delete function diff --git a/keystone-moon/keystone/tests/moon/unit/test_unit_core_log.py b/keystone-moon/keystone/tests/moon/unit/test_unit_core_log.py deleted file mode 100644 index 49886d32..00000000 --- a/keystone-moon/keystone/tests/moon/unit/test_unit_core_log.py +++ /dev/null @@ -1,145 +0,0 @@ -# Copyright 2015 Open Platform for NFV Project, Inc. 
and its contributors -# This software is distributed under the terms and conditions of the 'Apache-2.0' -# license which can be found in the file 'LICENSE' in this package distribution -# or at 'http://www.apache.org/licenses/LICENSE-2.0'. - -"""Unit tests for LogManager""" - -import time -from oslo_config import cfg -from keystone.tests import unit as tests -from keystone.contrib.moon.core import ConfigurationManager -from keystone.contrib.moon.core import IntraExtensionAuthzManager -from keystone.tests.unit.ksfixtures import database -from keystone.contrib.moon.exception import * -from keystone.tests.unit import default_fixtures -from keystone.contrib.moon.core import LogManager, TenantManager -from keystone.tests.moon.unit import * - -CONF = cfg.CONF - -USER_ADMIN = { - 'name': 'admin', - 'domain_id': "default", - 'password': 'admin' -} - -IE = { - "name": "test IE", - "policymodel": "policy_rbac_authz", - "description": "a simple description." -} - -TIME_FORMAT = '%Y-%m-%d-%H:%M:%S' - - -class TestIntraExtensionAdminManager(tests.TestCase): - - def setUp(self): - self.useFixture(database.Database()) - super(TestIntraExtensionAdminManager, self).setUp() - self.load_fixtures(default_fixtures) - self.load_backends() - domain = {'id': "default", 'name': "default"} - self.resource_api.create_domain(domain['id'], domain) - self.admin = create_user(self, username="admin") - self.demo = create_user(self, username="demo") - ref = self.root_api.load_root_intra_extension_dict() - self.root_api.populate_default_data(ref) - self.root_intra_extension = self.root_api.get_root_extension_dict() - self.root_intra_extension_id = self.root_intra_extension.keys()[0] - self.ADMIN_ID = self.root_api.root_admin_id - self.authz_manager = self.authz_api - self.admin_manager = self.admin_api - self.tenant_manager = self.tenant_api - - def __get_key_from_value(self, value, values_dict): - return filter(lambda v: v[1] == value, values_dict.iteritems())[0][0] - - def 
load_extra_backends(self): - return { - "moonlog_api": LogManager(), - "authz_api": IntraExtensionAuthzManager(), - "tenant_api": TenantManager(), - "configuration_api": ConfigurationManager(), - } - - def config_overrides(self): - super(TestIntraExtensionAdminManager, self).config_overrides() - self.policy_directory = '/etc/keystone/policies' - self.config_fixture.config( - group='moon', - intraextension_driver='keystone.contrib.moon.backends.sql.IntraExtensionConnector') - self.config_fixture.config( - group='moon', - policy_directory=self.policy_directory) - - def send_logs(self): - log_authz = "Test for authz " + uuid.uuid4().hex - logs = [] - self.moonlog_api.authz(log_authz) - logs.append("Test for critical " + uuid.uuid4().hex) - self.moonlog_api.critical(logs[-1]) - logs.append("Test for error " + uuid.uuid4().hex) - self.moonlog_api.error(logs[-1]) - logs.append("Test for warning " + uuid.uuid4().hex) - self.moonlog_api.warning(logs[-1]) - logs.append("Test for info " + uuid.uuid4().hex) - self.moonlog_api.info(logs[-1]) - logs.append("Test for debug " + uuid.uuid4().hex) - self.moonlog_api.debug(logs[-1]) - return log_authz, logs - - def test_get_set_logs(self): - previous_authz_logs = self.moonlog_api.get_logs(logger="authz") - previous_sys_logs = self.moonlog_api.get_logs(logger="sys") - - log_authz, logs = self.send_logs() - time.sleep(1) - - authz_logs = self.moonlog_api.get_logs(logger="authz") - sys_logs = self.moonlog_api.get_logs(logger="sys") - - self.assertIsInstance(authz_logs, list) - self.assertIsInstance(sys_logs, list) - - self.assertIn(log_authz, " ".join(authz_logs)) - - self.assertEqual(len(authz_logs), len(previous_authz_logs)+1) - self.assertTrue(len(sys_logs) >= len(previous_sys_logs)+5) - for log in logs: - self.assertIn(log, " ".join(sys_logs)) - - def test_get_syslogger_with_options(self): - - all_logs = self.moonlog_api.get_logs(logger="sys") - - time_1 = time.strftime(TIME_FORMAT) - time.sleep(1) - - log_authz, logs = 
self.send_logs() - - NUMBER_OF_LOG = 5 - sys_logs = self.moonlog_api.get_logs(logger="sys", event_number=NUMBER_OF_LOG) - self.assertIsInstance(sys_logs, list) - self.assertEqual(len(sys_logs), NUMBER_OF_LOG) - - sys_logs = self.moonlog_api.get_logs(logger="sys", time_from=time_1) - self.assertIsInstance(sys_logs, list) - self.assertEqual(len(sys_logs), NUMBER_OF_LOG) - - log_authz, logs = self.send_logs() - - time.sleep(1) - time_2 = time.strftime(TIME_FORMAT) - - log_authz, logs = self.send_logs() - - sys_logs = self.moonlog_api.get_logs(logger="sys", time_to=time_2) - self.assertIsInstance(sys_logs, list) - self.assertEqual(len(sys_logs), len(all_logs)+3*NUMBER_OF_LOG) - - sys_logs = self.moonlog_api.get_logs(logger="sys", time_from=time_1, time_to=time_2) - self.assertIsInstance(sys_logs, list) - self.assertEqual(len(sys_logs), 3*NUMBER_OF_LOG) - diff --git a/keystone-moon/keystone/tests/moon/unit/test_unit_core_tenant.py b/keystone-moon/keystone/tests/moon/unit/test_unit_core_tenant.py deleted file mode 100644 index 47b0df8f..00000000 --- a/keystone-moon/keystone/tests/moon/unit/test_unit_core_tenant.py +++ /dev/null @@ -1,191 +0,0 @@ -# Copyright 2015 Open Platform for NFV Project, Inc. and its contributors -# This software is distributed under the terms and conditions of the 'Apache-2.0' -# license which can be found in the file 'LICENSE' in this package distribution -# or at 'http://www.apache.org/licenses/LICENSE-2.0'. 
- -"""Unit tests for core tenant.""" - -from oslo_config import cfg -from keystone.tests import unit as tests -from keystone.contrib.moon.core import ConfigurationManager -from keystone.tests.unit.ksfixtures import database -from keystone.contrib.moon.exception import * -from keystone.tests.unit import default_fixtures -from keystone.contrib.moon.core import LogManager -from keystone.contrib.moon.core import IntraExtensionRootManager -from keystone.contrib.moon.core import IntraExtensionAdminManager -from keystone.contrib.moon.core import IntraExtensionAuthzManager -from keystone.tests.moon.unit import * - - -CONF = cfg.CONF -USER = { - 'name': 'admin', - 'domain_id': "default", - 'password': 'admin' -} -IE = { - "name": "test IE", - "policymodel": "policy_authz", - "description": "a simple description." -} - - -class TestTenantManager(tests.TestCase): - ADMIN_ID = None - - def setUp(self): - self.useFixture(database.Database()) - super(TestTenantManager, self).setUp() - self.load_fixtures(default_fixtures) - self.load_backends() - domain = {'id': "default", 'name': "default"} - self.resource_api.create_domain(domain['id'], domain) - self.admin = create_user(self, username="admin") - self.demo = create_user(self, username="demo") - ref = self.root_api.load_root_intra_extension_dict() - self.root_api.populate_default_data(ref) - self.root_intra_extension = self.root_api.get_root_extension_dict() - self.root_intra_extension_id = self.root_intra_extension.keys()[0] - self.ADMIN_ID = self.root_api.root_admin_id - self.authz_manager = self.authz_api - self.admin_manager = self.admin_api - self.tenant_manager = self.tenant_api - - def load_extra_backends(self): - return { - "moonlog_api": LogManager(), - "admin_api": IntraExtensionAdminManager(), - "authz_api": IntraExtensionAuthzManager(), - "configuration_api": ConfigurationManager(), - "root_api": IntraExtensionRootManager(), - } - - def config_overrides(self): - super(TestTenantManager, self).config_overrides() - 
self.config_fixture.config( - group='moon', - tenant_driver='keystone.contrib.moon.backends.sql.TenantConnector') - self.policy_directory = '/etc/keystone/policies' - self.config_fixture.config( - group='moon', - intraextension_driver='keystone.contrib.moon.backends.sql.IntraExtensionConnector') - self.config_fixture.config( - group='moon', - policy_directory=self.policy_directory) - - def test_add_tenant(self): - authz_intra_extension = create_intra_extension(self, policy_model="policy_authz") - admin_intra_extension = create_intra_extension(self, policy_model="policy_rbac_admin") - new_tenant = { - "id": uuid.uuid4().hex, - "name": "demo", - "description": uuid.uuid4().hex, - "intra_authz_extension_id": authz_intra_extension['id'], - "intra_admin_extension_id": admin_intra_extension['id'], - } - data = self.tenant_manager.add_tenant_dict(user_id=self.ADMIN_ID, tenant_id=new_tenant['id'], tenant_dict=new_tenant) - data_id = data.keys()[0] - self.assertEquals(new_tenant["id"], data_id) - self.assertEquals(new_tenant["name"], data[data_id]["name"]) - self.assertEquals(new_tenant["intra_authz_extension_id"], data[data_id]["intra_authz_extension_id"]) - self.assertEquals(new_tenant["intra_admin_extension_id"], data[data_id]["intra_admin_extension_id"]) - data = self.tenant_manager.get_tenants_dict(self.ADMIN_ID) - self.assertNotEqual(data, {}) - data = self.admin_api.get_intra_extension_dict(self.ADMIN_ID, new_tenant["intra_authz_extension_id"]) - data_id = data["id"] - self.assertEquals(new_tenant["intra_authz_extension_id"], data_id) - data = self.admin_api.get_intra_extension_dict(self.ADMIN_ID, new_tenant["intra_admin_extension_id"]) - data_id = data["id"] - self.assertEquals(new_tenant["intra_admin_extension_id"], data_id) - - def test_del_tenant(self): - authz_intra_extension = create_intra_extension(self, policy_model="policy_authz") - admin_intra_extension = create_intra_extension(self, policy_model="policy_rbac_admin") - new_tenant = { - "id": 
uuid.uuid4().hex, - "name": "demo", - "description": uuid.uuid4().hex, - "intra_authz_extension_id": authz_intra_extension['id'], - "intra_admin_extension_id": admin_intra_extension['id'], - } - data = self.tenant_manager.add_tenant_dict(user_id=self.ADMIN_ID, tenant_id=new_tenant['id'], tenant_dict=new_tenant) - data_id = data.keys()[0] - self.assertEquals(new_tenant["name"], data[data_id]["name"]) - self.assertEquals(new_tenant["intra_authz_extension_id"], data[data_id]["intra_authz_extension_id"]) - self.assertEquals(new_tenant["intra_admin_extension_id"], data[data_id]["intra_admin_extension_id"]) - data = self.tenant_manager.get_tenants_dict(self.ADMIN_ID) - self.assertNotEqual(data, {}) - self.tenant_manager.del_tenant(self.ADMIN_ID, data_id) - data = self.tenant_manager.get_tenants_dict(self.ADMIN_ID) - self.assertEqual(data, {}) - - def test_set_tenant(self): - authz_intra_extension = create_intra_extension(self, policy_model="policy_authz") - admin_intra_extension = create_intra_extension(self, policy_model="policy_rbac_admin") - new_tenant = { - "id": uuid.uuid4().hex, - "name": "demo", - "description": uuid.uuid4().hex, - "intra_authz_extension_id": authz_intra_extension['id'], - "intra_admin_extension_id": admin_intra_extension['id'], - } - data = self.tenant_manager.add_tenant_dict(user_id=self.ADMIN_ID, tenant_id=new_tenant['id'], tenant_dict=new_tenant) - data_id = data.keys()[0] - self.assertEquals(new_tenant["name"], data[data_id]["name"]) - self.assertEquals(new_tenant["intra_authz_extension_id"], data[data_id]["intra_authz_extension_id"]) - self.assertEquals(new_tenant["intra_admin_extension_id"], data[data_id]["intra_admin_extension_id"]) - data = self.tenant_manager.get_tenants_dict(self.ADMIN_ID) - self.assertNotEqual(data, {}) - - new_tenant["name"] = "demo2" - print(new_tenant) - data = self.tenant_manager.set_tenant_dict(user_id=self.ADMIN_ID, tenant_id=data_id, tenant_dict=new_tenant) - data_id = data.keys()[0] - 
self.assertEquals(new_tenant["name"], data[data_id]["name"]) - self.assertEquals(new_tenant["intra_authz_extension_id"], data[data_id]["intra_authz_extension_id"]) - self.assertEquals(new_tenant["intra_admin_extension_id"], data[data_id]["intra_admin_extension_id"]) - - def test_exception_tenant_unknown(self): - self.assertRaises(TenantUnknown, self.tenant_manager.get_tenant_dict, self.ADMIN_ID, uuid.uuid4().hex) - self.assertRaises(TenantUnknown, self.tenant_manager.del_tenant, self.ADMIN_ID, uuid.uuid4().hex) - self.assertRaises(TenantUnknown, self.tenant_manager.set_tenant_dict, self.ADMIN_ID, uuid.uuid4().hex, {}) - - authz_intra_extension = create_intra_extension(self, policy_model="policy_authz") - admin_intra_extension = create_intra_extension(self, policy_model="policy_rbac_admin") - new_tenant = { - "id": uuid.uuid4().hex, - "name": "demo", - "description": uuid.uuid4().hex, - "intra_authz_extension_id": authz_intra_extension['id'], - "intra_admin_extension_id": admin_intra_extension['id'], - } - data = self.tenant_manager.add_tenant_dict(user_id=self.ADMIN_ID, tenant_id=new_tenant['id'], tenant_dict=new_tenant) - data_id = data.keys()[0] - self.assertEquals(new_tenant["name"], data[data_id]["name"]) - self.assertEquals(new_tenant["intra_authz_extension_id"], data[data_id]["intra_authz_extension_id"]) - self.assertEquals(new_tenant["intra_admin_extension_id"], data[data_id]["intra_admin_extension_id"]) - data = self.tenant_manager.get_tenants_dict(self.ADMIN_ID) - self.assertNotEqual(data, {}) - - self.assertRaises(TenantUnknown, self.tenant_manager.get_tenant_dict, self.ADMIN_ID, uuid.uuid4().hex) - - def test_exception_tenant_added_name_existing(self): - authz_intra_extension = create_intra_extension(self, policy_model="policy_authz") - admin_intra_extension = create_intra_extension(self, policy_model="policy_rbac_admin") - new_tenant = { - "id": uuid.uuid4().hex, - "name": "demo", - "description": uuid.uuid4().hex, - "intra_authz_extension_id": 
authz_intra_extension['id'], - "intra_admin_extension_id": admin_intra_extension['id'], - } - data = self.tenant_manager.add_tenant_dict(user_id=self.ADMIN_ID, tenant_id=new_tenant['id'], tenant_dict=new_tenant) - data_id = data.keys()[0] - self.assertEquals(new_tenant["name"], data[data_id]["name"]) - self.assertEquals(new_tenant["intra_authz_extension_id"], data[data_id]["intra_authz_extension_id"]) - self.assertEquals(new_tenant["intra_admin_extension_id"], data[data_id]["intra_admin_extension_id"]) - data = self.tenant_manager.get_tenants_dict(self.ADMIN_ID) - self.assertNotEqual(data, {}) - - self.assertRaises(TenantAddedNameExisting, self.tenant_manager.add_tenant_dict, self.ADMIN_ID, new_tenant['id'], new_tenant) diff --git a/keystone-moon/keystone/tests/unit/__init__.py b/keystone-moon/keystone/tests/unit/__init__.py deleted file mode 100644 index 0e92ca65..00000000 --- a/keystone-moon/keystone/tests/unit/__init__.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import oslo_i18n -import six - - -if six.PY3: - # NOTE(dstanek): This block will monkey patch libraries that are not - # yet supported in Python3. We do this that that it is possible to - # execute any tests at all. Without monkey patching modules the - # tests will fail with import errors. - - import sys - from unittest import mock # noqa: our import detection is naive? 
- - sys.modules['ldap'] = mock.Mock() - sys.modules['ldap.controls'] = mock.Mock() - sys.modules['ldap.dn'] = mock.Mock() - sys.modules['ldap.filter'] = mock.Mock() - sys.modules['ldap.modlist'] = mock.Mock() - sys.modules['ldappool'] = mock.Mock() - - -# NOTE(dstanek): oslo_i18n.enable_lazy() must be called before -# keystone.i18n._() is called to ensure it has the desired lazy lookup -# behavior. This includes cases, like keystone.exceptions, where -# keystone.i18n._() is called at import time. -oslo_i18n.enable_lazy() - -from keystone.tests.unit.core import * # noqa diff --git a/keystone-moon/keystone/tests/unit/assignment/__init__.py b/keystone-moon/keystone/tests/unit/assignment/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/assignment/role_backends/__init__.py b/keystone-moon/keystone/tests/unit/assignment/role_backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/assignment/role_backends/test_sql.py b/keystone-moon/keystone/tests/unit/assignment/role_backends/test_sql.py deleted file mode 100644 index 37e2d924..00000000 --- a/keystone-moon/keystone/tests/unit/assignment/role_backends/test_sql.py +++ /dev/null @@ -1,112 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import uuid - -from keystone.common import sql -from keystone import exception -from keystone.tests import unit -from keystone.tests.unit.assignment import test_core -from keystone.tests.unit.backend import core_sql - - -class SqlRoleModels(core_sql.BaseBackendSqlModels): - - def test_role_model(self): - cols = (('id', sql.String, 64), - ('name', sql.String, 255), - ('domain_id', sql.String, 64)) - self.assertExpectedSchema('role', cols) - - -class SqlRole(core_sql.BaseBackendSqlTests, test_core.RoleTests): - - def test_create_null_role_name(self): - role = unit.new_role_ref(name=None) - self.assertRaises(exception.UnexpectedError, - self.role_api.create_role, - role['id'], - role) - self.assertRaises(exception.RoleNotFound, - self.role_api.get_role, - role['id']) - - def test_create_duplicate_role_domain_specific_name_fails(self): - domain = unit.new_domain_ref() - role1 = unit.new_role_ref(domain_id=domain['id']) - self.role_api.create_role(role1['id'], role1) - role2 = unit.new_role_ref(name=role1['name'], - domain_id=domain['id']) - self.assertRaises(exception.Conflict, - self.role_api.create_role, - role2['id'], - role2) - - def test_update_domain_id_of_role_fails(self): - # Create a global role - role1 = unit.new_role_ref() - role1 = self.role_api.create_role(role1['id'], role1) - # Try and update it to be domain specific - domainA = unit.new_domain_ref() - role1['domain_id'] = domainA['id'] - self.assertRaises(exception.ValidationError, - self.role_api.update_role, - role1['id'], - role1) - - # Create a domain specific role from scratch - role2 = unit.new_role_ref(domain_id=domainA['id']) - self.role_api.create_role(role2['id'], role2) - # Try to "move" it to another domain - domainB = unit.new_domain_ref() - role2['domain_id'] = domainB['id'] - self.assertRaises(exception.ValidationError, - self.role_api.update_role, - role2['id'], - role2) - # Now try to make it global - role2['domain_id'] = None - self.assertRaises(exception.ValidationError, - 
self.role_api.update_role, - role2['id'], - role2) - - def test_domain_specific_separation(self): - domain1 = unit.new_domain_ref() - role1 = unit.new_role_ref(domain_id=domain1['id']) - role_ref1 = self.role_api.create_role(role1['id'], role1) - self.assertDictEqual(role1, role_ref1) - # Check we can have the same named role in a different domain - domain2 = unit.new_domain_ref() - role2 = unit.new_role_ref(name=role1['name'], domain_id=domain2['id']) - role_ref2 = self.role_api.create_role(role2['id'], role2) - self.assertDictEqual(role2, role_ref2) - # ...and in fact that you can have the same named role as a global role - role3 = unit.new_role_ref(name=role1['name']) - role_ref3 = self.role_api.create_role(role3['id'], role3) - self.assertDictEqual(role3, role_ref3) - # Check that updating one doesn't change the others - role1['name'] = uuid.uuid4().hex - self.role_api.update_role(role1['id'], role1) - role_ref1 = self.role_api.get_role(role1['id']) - self.assertDictEqual(role1, role_ref1) - role_ref2 = self.role_api.get_role(role2['id']) - self.assertDictEqual(role2, role_ref2) - role_ref3 = self.role_api.get_role(role3['id']) - self.assertDictEqual(role3, role_ref3) - # Check that deleting one of these, doesn't affect the others - self.role_api.delete_role(role1['id']) - self.assertRaises(exception.RoleNotFound, - self.role_api.get_role, - role1['id']) - self.role_api.get_role(role2['id']) - self.role_api.get_role(role3['id']) diff --git a/keystone-moon/keystone/tests/unit/assignment/test_backends.py b/keystone-moon/keystone/tests/unit/assignment/test_backends.py deleted file mode 100644 index eb40e569..00000000 --- a/keystone-moon/keystone/tests/unit/assignment/test_backends.py +++ /dev/null @@ -1,3755 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import uuid - -import mock -from oslo_config import cfg -from six.moves import range -from testtools import matchers - -from keystone import exception -from keystone.tests import unit - - -CONF = cfg.CONF - - -class AssignmentTestHelperMixin(object): - """Mixin class to aid testing of assignments. - - This class supports data driven test plans that enable: - - - Creation of initial entities, such as domains, users, groups, projects - and roles - - Creation of assignments referencing the above entities - - A set of input parameters and expected outputs to list_role_assignments - based on the above test data - - A test plan is a dict of the form: - - test_plan = { - entities: details and number of entities, - group_memberships: group-user entity memberships, - assignments: list of assignments to create, - tests: list of pairs of input params and expected outputs} - - An example test plan: - - test_plan = { - # First, create the entities required. Entities are specified by - # a dict with the key being the entity type and the value an - # entity specification which can be one of: - # - # - a simple number, e.g. {'users': 3} creates 3 users - # - a dict where more information regarding the contents of the entity - # is required, e.g. {'domains' : {'users : 3}} creates a domain - # with three users - # - a list of entity specifications if multiple are required - # - # The following creates a domain that contains a single user, group and - # project, as well as creating three roles. 
- - 'entities': {'domains': {'users': 1, 'groups': 1, 'projects': 1}, - 'roles': 3}, - - # If it is required that an existing domain be used for the new - # entities, then the id of that domain can be included in the - # domain dict. For example, if alternatively we wanted to add 3 users - # to the default domain, add a second domain containing 3 projects as - # well as 5 additional empty domains, the entities would be defined as: - # - # 'entities': {'domains': [{'id': DEFAULT_DOMAIN, 'users': 3}, - # {'projects': 3}, 5]}, - # - # A project hierarchy can be specified within the 'projects' section by - # nesting the 'project' key, for example to create a project with three - # sub-projects you would use: - - 'projects': {'project': 3} - - # A more complex hierarchy can also be defined, for example the - # following would define three projects each containing a - # sub-project, each of which contain a further three sub-projects. - - 'projects': [{'project': {'project': 3}}, - {'project': {'project': 3}}, - {'project': {'project': 3}}] - - # If the 'roles' entity count is defined as top level key in 'entities' - # dict then these are global roles. If it is placed within the - # 'domain' dict, then they will be domain specific roles. A mix of - # domain specific and global roles are allowed, with the role index - # being calculated in the order they are defined in the 'entities' - # dict. - - # A set of implied role specifications. In this case, prior role - # index 0 implies role index 1, and role 1 implies roles 2 and 3. - - 'roles': [{'role': 0, 'implied_roles': [1]}, - {'role': 1, 'implied_roles': [2, 3]}] - - # A list of groups and their members. In this case make users with - # index 0 and 1 members of group with index 0. Users and Groups are - # indexed in the order they appear in the 'entities' key above. - - 'group_memberships': [{'group': 0, 'users': [0, 1]}] - - # Next, create assignments between the entities, referencing the - # entities by index, i.e. 
'user': 0 refers to user[0]. Entities are - # indexed in the order they appear in the 'entities' key above within - # their entity type. - - 'assignments': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}, - {'group': 0, 'role': 2, 'domain': 0}, - {'user': 0, 'role': 2, 'project': 0}], - - # Finally, define an array of tests where list_role_assignment() is - # called with the given input parameters and the results are then - # confirmed to be as given in 'results'. Again, all entities are - # referenced by index. - - 'tests': [ - {'params': {}, - 'results': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}, - {'group': 0, 'role': 2, 'domain': 0}, - {'user': 0, 'role': 2, 'project': 0}]}, - {'params': {'role': 2}, - 'results': [{'group': 0, 'role': 2, 'domain': 0}, - {'user': 0, 'role': 2, 'project': 0}]}] - - # The 'params' key also supports the 'effective', - # 'inherited_to_projects' and 'source_from_group_ids' options to - # list_role_assignments.} - - """ - - def _handle_project_spec(self, test_data, domain_id, project_spec, - parent_id=None): - """Handle the creation of a project or hierarchy of projects. - - project_spec may either be a count of the number of projects to - create, or it may be a list of the form: - - [{'project': project_spec}, {'project': project_spec}, ...] - - This method is called recursively to handle the creation of a - hierarchy of projects. 
- - """ - def _create_project(domain_id, parent_id): - new_project = unit.new_project_ref(domain_id=domain_id, - parent_id=parent_id) - new_project = self.resource_api.create_project(new_project['id'], - new_project) - return new_project - - if isinstance(project_spec, list): - for this_spec in project_spec: - self._handle_project_spec( - test_data, domain_id, this_spec, parent_id=parent_id) - elif isinstance(project_spec, dict): - new_proj = _create_project(domain_id, parent_id) - test_data['projects'].append(new_proj) - self._handle_project_spec( - test_data, domain_id, project_spec['project'], - parent_id=new_proj['id']) - else: - for _ in range(project_spec): - test_data['projects'].append( - _create_project(domain_id, parent_id)) - - def _create_role(self, domain_id=None): - new_role = unit.new_role_ref(domain_id=domain_id) - return self.role_api.create_role(new_role['id'], new_role) - - def _handle_domain_spec(self, test_data, domain_spec): - """Handle the creation of domains and their contents. - - domain_spec may either be a count of the number of empty domains to - create, a dict describing the domain contents, or a list of - domain_specs. - - In the case when a list is provided, this method calls itself - recursively to handle the list elements. 
- - This method will insert any entities created into test_data - - """ - def _create_domain(domain_id=None): - if domain_id is None: - new_domain = unit.new_domain_ref() - self.resource_api.create_domain(new_domain['id'], - new_domain) - return new_domain - else: - # The test plan specified an existing domain to use - return self.resource_api.get_domain(domain_id) - - def _create_entity_in_domain(entity_type, domain_id): - """Create a user or group entity in the domain.""" - if entity_type == 'users': - new_entity = unit.new_user_ref(domain_id=domain_id) - new_entity = self.identity_api.create_user(new_entity) - elif entity_type == 'groups': - new_entity = unit.new_group_ref(domain_id=domain_id) - new_entity = self.identity_api.create_group(new_entity) - elif entity_type == 'roles': - new_entity = self._create_role(domain_id=domain_id) - else: - # Must be a bad test plan - raise exception.NotImplemented() - return new_entity - - if isinstance(domain_spec, list): - for x in domain_spec: - self._handle_domain_spec(test_data, x) - elif isinstance(domain_spec, dict): - # If there is a domain ID specified, then use it - the_domain = _create_domain(domain_spec.get('id')) - test_data['domains'].append(the_domain) - for entity_type, value in domain_spec.items(): - if entity_type == 'id': - # We already used this above to determine whether to - # use and existing domain - continue - if entity_type == 'projects': - # If it's projects, we need to handle the potential - # specification of a project hierarchy - self._handle_project_spec( - test_data, the_domain['id'], value) - else: - # It's a count of number of entities - for _ in range(value): - test_data[entity_type].append( - _create_entity_in_domain( - entity_type, the_domain['id'])) - else: - for _ in range(domain_spec): - test_data['domains'].append(_create_domain()) - - def create_entities(self, entity_pattern): - """Create the entities specified in the test plan. 
- - Process the 'entities' key in the test plan, creating the requested - entities. Each created entity will be added to the array of entities - stored in the returned test_data object, e.g.: - - test_data['users'] = [user[0], user[1]....] - - """ - test_data = {} - for entity in ['users', 'groups', 'domains', 'projects', 'roles']: - test_data[entity] = [] - - # Create any domains requested and, if specified, any entities within - # those domains - if 'domains' in entity_pattern: - self._handle_domain_spec(test_data, entity_pattern['domains']) - - # Create any roles requested - if 'roles' in entity_pattern: - for _ in range(entity_pattern['roles']): - test_data['roles'].append(self._create_role()) - - return test_data - - def _convert_entity_shorthand(self, key, shorthand_data, reference_data): - """Convert a shorthand entity description into a full ID reference. - - In test plan definitions, we allow a shorthand for referencing to an - entity of the form: - - 'user': 0 - - which is actually shorthand for: - - 'user_id': reference_data['users'][0]['id'] - - This method converts the shorthand version into the full reference. 
- - """ - expanded_key = '%s_id' % key - reference_index = '%ss' % key - index_value = ( - reference_data[reference_index][shorthand_data[key]]['id']) - return expanded_key, index_value - - def create_implied_roles(self, implied_pattern, test_data): - """Create the implied roles specified in the test plan.""" - for implied_spec in implied_pattern: - # Each implied role specification is a dict of the form: - # - # {'role': 0, 'implied_roles': list of roles} - - prior_role = test_data['roles'][implied_spec['role']]['id'] - if isinstance(implied_spec['implied_roles'], list): - for this_role in implied_spec['implied_roles']: - implied_role = test_data['roles'][this_role]['id'] - self.role_api.create_implied_role(prior_role, implied_role) - else: - implied_role = ( - test_data['roles'][implied_spec['implied_roles']]['id']) - self.role_api.create_implied_role(prior_role, implied_role) - - def create_group_memberships(self, group_pattern, test_data): - """Create the group memberships specified in the test plan.""" - for group_spec in group_pattern: - # Each membership specification is a dict of the form: - # - # {'group': 0, 'users': [list of user indexes]} - # - # Add all users in the list to the specified group, first - # converting from index to full entity ID. - group_value = test_data['groups'][group_spec['group']]['id'] - for user_index in group_spec['users']: - user_value = test_data['users'][user_index]['id'] - self.identity_api.add_user_to_group(user_value, group_value) - return test_data - - def create_assignments(self, assignment_pattern, test_data): - """Create the assignments specified in the test plan.""" - # First store how many assignments are already in the system, - # so during the tests we can check the number of new assignments - # created. 
- test_data['initial_assignment_count'] = ( - len(self.assignment_api.list_role_assignments())) - - # Now create the new assignments in the test plan - for assignment in assignment_pattern: - # Each assignment is a dict of the form: - # - # { 'user': 0, 'project':1, 'role': 6} - # - # where the value of each item is the index into the array of - # entities created earlier. - # - # We process the assignment dict to create the args required to - # make the create_grant() call. - args = {} - for param in assignment: - if param == 'inherited_to_projects': - args[param] = assignment[param] - else: - # Turn 'entity : 0' into 'entity_id = ac6736ba873d' - # where entity in user, group, project or domain - key, value = self._convert_entity_shorthand( - param, assignment, test_data) - args[key] = value - self.assignment_api.create_grant(**args) - return test_data - - def execute_assignment_cases(self, test_plan, test_data): - """Execute the test plan, based on the created test_data.""" - def check_results(expected, actual, param_arg_count): - if param_arg_count == 0: - # It was an unfiltered call, so default fixture assignments - # might be polluting our answer - so we take into account - # how many assignments there were before the test. - self.assertEqual( - len(expected) + test_data['initial_assignment_count'], - len(actual)) - else: - self.assertThat(actual, matchers.HasLength(len(expected))) - - for each_expected in expected: - expected_assignment = {} - for param in each_expected: - if param == 'inherited_to_projects': - expected_assignment[param] = each_expected[param] - elif param == 'indirect': - # We're expecting the result to contain an indirect - # dict with the details how the role came to be placed - # on this entity - so convert the key/value pairs of - # that dict into real entity references. 
- indirect_term = {} - for indirect_param in each_expected[param]: - key, value = self._convert_entity_shorthand( - indirect_param, each_expected[param], - test_data) - indirect_term[key] = value - expected_assignment[param] = indirect_term - else: - # Convert a simple shorthand entry into a full - # entity reference - key, value = self._convert_entity_shorthand( - param, each_expected, test_data) - expected_assignment[key] = value - self.assertIn(expected_assignment, actual) - - def convert_group_ids_sourced_from_list(index_list, reference_data): - value_list = [] - for group_index in index_list: - value_list.append( - reference_data['groups'][group_index]['id']) - return value_list - - # Go through each test in the array, processing the input params, which - # we build into an args dict, and then call list_role_assignments. Then - # check the results against those specified in the test plan. - for test in test_plan.get('tests', []): - args = {} - for param in test['params']: - if param in ['effective', 'inherited', 'include_subtree']: - # Just pass the value into the args - args[param] = test['params'][param] - elif param == 'source_from_group_ids': - # Convert the list of indexes into a list of IDs - args[param] = convert_group_ids_sourced_from_list( - test['params']['source_from_group_ids'], test_data) - else: - # Turn 'entity : 0' into 'entity_id = ac6736ba873d' - # where entity in user, group, project or domain - key, value = self._convert_entity_shorthand( - param, test['params'], test_data) - args[key] = value - results = self.assignment_api.list_role_assignments(**args) - check_results(test['results'], results, len(args)) - - def execute_assignment_plan(self, test_plan): - """Create entities, assignments and execute the test plan. - - The standard method to call to create entities and assignments and - execute the tests as specified in the test_plan. 
The test_data - dict is returned so that, if required, the caller can execute - additional manual tests with the entities and assignments created. - - """ - test_data = self.create_entities(test_plan['entities']) - if 'implied_roles' in test_plan: - self.create_implied_roles(test_plan['implied_roles'], test_data) - if 'group_memberships' in test_plan: - self.create_group_memberships(test_plan['group_memberships'], - test_data) - if 'assignments' in test_plan: - test_data = self.create_assignments(test_plan['assignments'], - test_data) - self.execute_assignment_cases(test_plan, test_data) - return test_data - - -class AssignmentTests(AssignmentTestHelperMixin): - - def _get_domain_fixture(self): - domain = unit.new_domain_ref() - self.resource_api.create_domain(domain['id'], domain) - return domain - - def test_project_add_and_remove_user_role(self): - user_ids = self.assignment_api.list_user_ids_for_project( - self.tenant_bar['id']) - self.assertNotIn(self.user_two['id'], user_ids) - - self.assignment_api.add_role_to_user_and_project( - tenant_id=self.tenant_bar['id'], - user_id=self.user_two['id'], - role_id=self.role_other['id']) - user_ids = self.assignment_api.list_user_ids_for_project( - self.tenant_bar['id']) - self.assertIn(self.user_two['id'], user_ids) - - self.assignment_api.remove_role_from_user_and_project( - tenant_id=self.tenant_bar['id'], - user_id=self.user_two['id'], - role_id=self.role_other['id']) - - user_ids = self.assignment_api.list_user_ids_for_project( - self.tenant_bar['id']) - self.assertNotIn(self.user_two['id'], user_ids) - - def test_remove_user_role_not_assigned(self): - # Expect failure if attempt to remove a role that was never assigned to - # the user. - self.assertRaises(exception.RoleNotFound, - self.assignment_api. 
- remove_role_from_user_and_project, - tenant_id=self.tenant_bar['id'], - user_id=self.user_two['id'], - role_id=self.role_other['id']) - - def test_list_user_ids_for_project(self): - user_ids = self.assignment_api.list_user_ids_for_project( - self.tenant_baz['id']) - self.assertEqual(2, len(user_ids)) - self.assertIn(self.user_two['id'], user_ids) - self.assertIn(self.user_badguy['id'], user_ids) - - def test_list_user_ids_for_project_no_duplicates(self): - # Create user - user_ref = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - user_ref = self.identity_api.create_user(user_ref) - # Create project - project_ref = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - self.resource_api.create_project( - project_ref['id'], project_ref) - # Create 2 roles and give user each role in project - for i in range(2): - role_ref = unit.new_role_ref() - self.role_api.create_role(role_ref['id'], role_ref) - self.assignment_api.add_role_to_user_and_project( - user_id=user_ref['id'], - tenant_id=project_ref['id'], - role_id=role_ref['id']) - # Get the list of user_ids in project - user_ids = self.assignment_api.list_user_ids_for_project( - project_ref['id']) - # Ensure the user is only returned once - self.assertEqual(1, len(user_ids)) - - def test_get_project_user_ids_returns_not_found(self): - self.assertRaises(exception.ProjectNotFound, - self.assignment_api.list_user_ids_for_project, - uuid.uuid4().hex) - - def test_list_role_assignments_unfiltered(self): - """Test unfiltered listing of role assignments.""" - test_plan = { - # Create a domain, with a user, group & project - 'entities': {'domains': {'users': 1, 'groups': 1, 'projects': 1}, - 'roles': 3}, - # Create a grant of each type (user/group on project/domain) - 'assignments': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}, - {'group': 0, 'role': 2, 'domain': 0}, - {'group': 0, 'role': 2, 'project': 0}], - 'tests': [ - # Check that we get back the 4 
assignments - {'params': {}, - 'results': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}, - {'group': 0, 'role': 2, 'domain': 0}, - {'group': 0, 'role': 2, 'project': 0}]} - ] - } - self.execute_assignment_plan(test_plan) - - def test_list_role_assignments_filtered_by_role(self): - """Test listing of role assignments filtered by role ID.""" - test_plan = { - # Create a user, group & project in the default domain - 'entities': {'domains': {'id': CONF.identity.default_domain_id, - 'users': 1, 'groups': 1, 'projects': 1}, - 'roles': 3}, - # Create a grant of each type (user/group on project/domain) - 'assignments': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}, - {'group': 0, 'role': 2, 'domain': 0}, - {'group': 0, 'role': 2, 'project': 0}], - 'tests': [ - # Check that when filtering by role, we only get back those - # that match - {'params': {'role': 2}, - 'results': [{'group': 0, 'role': 2, 'domain': 0}, - {'group': 0, 'role': 2, 'project': 0}]} - ] - } - self.execute_assignment_plan(test_plan) - - def test_list_group_role_assignment(self): - # When a group role assignment is created and the role assignments are - # listed then the group role assignment is included in the list. 
- - test_plan = { - 'entities': {'domains': {'id': CONF.identity.default_domain_id, - 'groups': 1, 'projects': 1}, - 'roles': 1}, - 'assignments': [{'group': 0, 'role': 0, 'project': 0}], - 'tests': [ - {'params': {}, - 'results': [{'group': 0, 'role': 0, 'project': 0}]} - ] - } - self.execute_assignment_plan(test_plan) - - def test_list_role_assignments_bad_role(self): - assignment_list = self.assignment_api.list_role_assignments( - role_id=uuid.uuid4().hex) - self.assertEqual([], assignment_list) - - def test_add_duplicate_role_grant(self): - roles_ref = self.assignment_api.get_roles_for_user_and_project( - self.user_foo['id'], self.tenant_bar['id']) - self.assertNotIn(self.role_admin['id'], roles_ref) - self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], self.tenant_bar['id'], self.role_admin['id']) - self.assertRaises(exception.Conflict, - self.assignment_api.add_role_to_user_and_project, - self.user_foo['id'], - self.tenant_bar['id'], - self.role_admin['id']) - - def test_get_role_by_user_and_project_with_user_in_group(self): - """Test for get role by user and project, user was added into a group. 
- - Test Plan: - - - Create a user, a project & a group, add this user to group - - Create roles and grant them to user and project - - Check the role list get by the user and project was as expected - - """ - user_ref = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - user_ref = self.identity_api.create_user(user_ref) - - project_ref = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - self.resource_api.create_project(project_ref['id'], project_ref) - - group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - group_id = self.identity_api.create_group(group)['id'] - self.identity_api.add_user_to_group(user_ref['id'], group_id) - - role_ref_list = [] - for i in range(2): - role_ref = unit.new_role_ref() - self.role_api.create_role(role_ref['id'], role_ref) - role_ref_list.append(role_ref) - - self.assignment_api.add_role_to_user_and_project( - user_id=user_ref['id'], - tenant_id=project_ref['id'], - role_id=role_ref['id']) - - role_list = self.assignment_api.get_roles_for_user_and_project( - user_ref['id'], - project_ref['id']) - - self.assertEqual(set([r['id'] for r in role_ref_list]), - set(role_list)) - - def test_get_role_by_user_and_project(self): - roles_ref = self.assignment_api.get_roles_for_user_and_project( - self.user_foo['id'], self.tenant_bar['id']) - self.assertNotIn(self.role_admin['id'], roles_ref) - self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], self.tenant_bar['id'], self.role_admin['id']) - roles_ref = self.assignment_api.get_roles_for_user_and_project( - self.user_foo['id'], self.tenant_bar['id']) - self.assertIn(self.role_admin['id'], roles_ref) - self.assertNotIn('member', roles_ref) - - self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], self.tenant_bar['id'], 'member') - roles_ref = self.assignment_api.get_roles_for_user_and_project( - self.user_foo['id'], self.tenant_bar['id']) - self.assertIn(self.role_admin['id'], roles_ref) - 
self.assertIn('member', roles_ref) - - def test_get_roles_for_user_and_domain(self): - """Test for getting roles for user on a domain. - - Test Plan: - - - Create a domain, with 2 users - - Check no roles yet exit - - Give user1 two roles on the domain, user2 one role - - Get roles on user1 and the domain - maybe sure we only - get back the 2 roles on user1 - - Delete both roles from user1 - - Check we get no roles back for user1 on domain - - """ - new_domain = unit.new_domain_ref() - self.resource_api.create_domain(new_domain['id'], new_domain) - new_user1 = unit.new_user_ref(domain_id=new_domain['id']) - new_user1 = self.identity_api.create_user(new_user1) - new_user2 = unit.new_user_ref(domain_id=new_domain['id']) - new_user2 = self.identity_api.create_user(new_user2) - roles_ref = self.assignment_api.list_grants( - user_id=new_user1['id'], - domain_id=new_domain['id']) - self.assertEqual(0, len(roles_ref)) - # Now create the grants (roles are defined in default_fixtures) - self.assignment_api.create_grant(user_id=new_user1['id'], - domain_id=new_domain['id'], - role_id='member') - self.assignment_api.create_grant(user_id=new_user1['id'], - domain_id=new_domain['id'], - role_id='other') - self.assignment_api.create_grant(user_id=new_user2['id'], - domain_id=new_domain['id'], - role_id='admin') - # Read back the roles for user1 on domain - roles_ids = self.assignment_api.get_roles_for_user_and_domain( - new_user1['id'], new_domain['id']) - self.assertEqual(2, len(roles_ids)) - self.assertIn(self.role_member['id'], roles_ids) - self.assertIn(self.role_other['id'], roles_ids) - - # Now delete both grants for user1 - self.assignment_api.delete_grant(user_id=new_user1['id'], - domain_id=new_domain['id'], - role_id='member') - self.assignment_api.delete_grant(user_id=new_user1['id'], - domain_id=new_domain['id'], - role_id='other') - roles_ref = self.assignment_api.list_grants( - user_id=new_user1['id'], - domain_id=new_domain['id']) - self.assertEqual(0, 
len(roles_ref)) - - def test_get_roles_for_user_and_domain_returns_not_found(self): - """Test errors raised when getting roles for user on a domain. - - Test Plan: - - - Check non-existing user gives UserNotFound - - Check non-existing domain gives DomainNotFound - - """ - new_domain = self._get_domain_fixture() - new_user1 = unit.new_user_ref(domain_id=new_domain['id']) - new_user1 = self.identity_api.create_user(new_user1) - - self.assertRaises(exception.UserNotFound, - self.assignment_api.get_roles_for_user_and_domain, - uuid.uuid4().hex, - new_domain['id']) - - self.assertRaises(exception.DomainNotFound, - self.assignment_api.get_roles_for_user_and_domain, - new_user1['id'], - uuid.uuid4().hex) - - def test_get_roles_for_user_and_project_returns_not_found(self): - self.assertRaises(exception.UserNotFound, - self.assignment_api.get_roles_for_user_and_project, - uuid.uuid4().hex, - self.tenant_bar['id']) - - self.assertRaises(exception.ProjectNotFound, - self.assignment_api.get_roles_for_user_and_project, - self.user_foo['id'], - uuid.uuid4().hex) - - def test_add_role_to_user_and_project_returns_not_found(self): - self.assertRaises(exception.ProjectNotFound, - self.assignment_api.add_role_to_user_and_project, - self.user_foo['id'], - uuid.uuid4().hex, - self.role_admin['id']) - - self.assertRaises(exception.RoleNotFound, - self.assignment_api.add_role_to_user_and_project, - self.user_foo['id'], - self.tenant_bar['id'], - uuid.uuid4().hex) - - def test_add_role_to_user_and_project_no_user(self): - # If add_role_to_user_and_project and the user doesn't exist, then - # no error. 
- user_id_not_exist = uuid.uuid4().hex - self.assignment_api.add_role_to_user_and_project( - user_id_not_exist, self.tenant_bar['id'], self.role_admin['id']) - - def test_remove_role_from_user_and_project(self): - self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], self.tenant_bar['id'], 'member') - self.assignment_api.remove_role_from_user_and_project( - self.user_foo['id'], self.tenant_bar['id'], 'member') - roles_ref = self.assignment_api.get_roles_for_user_and_project( - self.user_foo['id'], self.tenant_bar['id']) - self.assertNotIn('member', roles_ref) - self.assertRaises(exception.NotFound, - self.assignment_api. - remove_role_from_user_and_project, - self.user_foo['id'], - self.tenant_bar['id'], - 'member') - - def test_get_role_grant_by_user_and_project(self): - roles_ref = self.assignment_api.list_grants( - user_id=self.user_foo['id'], - project_id=self.tenant_bar['id']) - self.assertEqual(1, len(roles_ref)) - self.assignment_api.create_grant(user_id=self.user_foo['id'], - project_id=self.tenant_bar['id'], - role_id=self.role_admin['id']) - roles_ref = self.assignment_api.list_grants( - user_id=self.user_foo['id'], - project_id=self.tenant_bar['id']) - self.assertIn(self.role_admin['id'], - [role_ref['id'] for role_ref in roles_ref]) - - self.assignment_api.create_grant(user_id=self.user_foo['id'], - project_id=self.tenant_bar['id'], - role_id='member') - roles_ref = self.assignment_api.list_grants( - user_id=self.user_foo['id'], - project_id=self.tenant_bar['id']) - - roles_ref_ids = [] - for ref in roles_ref: - roles_ref_ids.append(ref['id']) - self.assertIn(self.role_admin['id'], roles_ref_ids) - self.assertIn('member', roles_ref_ids) - - def test_remove_role_grant_from_user_and_project(self): - self.assignment_api.create_grant(user_id=self.user_foo['id'], - project_id=self.tenant_baz['id'], - role_id='member') - roles_ref = self.assignment_api.list_grants( - user_id=self.user_foo['id'], - project_id=self.tenant_baz['id']) - 
self.assertDictEqual(self.role_member, roles_ref[0]) - - self.assignment_api.delete_grant(user_id=self.user_foo['id'], - project_id=self.tenant_baz['id'], - role_id='member') - roles_ref = self.assignment_api.list_grants( - user_id=self.user_foo['id'], - project_id=self.tenant_baz['id']) - self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.delete_grant, - user_id=self.user_foo['id'], - project_id=self.tenant_baz['id'], - role_id='member') - - def test_get_role_assignment_by_project_not_found(self): - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.check_grant_role_id, - user_id=self.user_foo['id'], - project_id=self.tenant_baz['id'], - role_id='member') - - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.check_grant_role_id, - group_id=uuid.uuid4().hex, - project_id=self.tenant_baz['id'], - role_id='member') - - def test_get_role_assignment_by_domain_not_found(self): - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.check_grant_role_id, - user_id=self.user_foo['id'], - domain_id=self.domain_default['id'], - role_id='member') - - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.check_grant_role_id, - group_id=uuid.uuid4().hex, - domain_id=self.domain_default['id'], - role_id='member') - - def test_del_role_assignment_by_project_not_found(self): - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.delete_grant, - user_id=self.user_foo['id'], - project_id=self.tenant_baz['id'], - role_id='member') - - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.delete_grant, - group_id=uuid.uuid4().hex, - project_id=self.tenant_baz['id'], - role_id='member') - - def test_del_role_assignment_by_domain_not_found(self): - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.delete_grant, - user_id=self.user_foo['id'], - 
domain_id=self.domain_default['id'], - role_id='member') - - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.delete_grant, - group_id=uuid.uuid4().hex, - domain_id=self.domain_default['id'], - role_id='member') - - def test_get_and_remove_role_grant_by_group_and_project(self): - new_domain = unit.new_domain_ref() - self.resource_api.create_domain(new_domain['id'], new_domain) - new_group = unit.new_group_ref(domain_id=new_domain['id']) - new_group = self.identity_api.create_group(new_group) - new_user = unit.new_user_ref(domain_id=new_domain['id']) - new_user = self.identity_api.create_user(new_user) - self.identity_api.add_user_to_group(new_user['id'], - new_group['id']) - roles_ref = self.assignment_api.list_grants( - group_id=new_group['id'], - project_id=self.tenant_bar['id']) - self.assertEqual(0, len(roles_ref)) - self.assignment_api.create_grant(group_id=new_group['id'], - project_id=self.tenant_bar['id'], - role_id='member') - roles_ref = self.assignment_api.list_grants( - group_id=new_group['id'], - project_id=self.tenant_bar['id']) - self.assertDictEqual(self.role_member, roles_ref[0]) - - self.assignment_api.delete_grant(group_id=new_group['id'], - project_id=self.tenant_bar['id'], - role_id='member') - roles_ref = self.assignment_api.list_grants( - group_id=new_group['id'], - project_id=self.tenant_bar['id']) - self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.delete_grant, - group_id=new_group['id'], - project_id=self.tenant_bar['id'], - role_id='member') - - def test_get_and_remove_role_grant_by_group_and_domain(self): - new_domain = unit.new_domain_ref() - self.resource_api.create_domain(new_domain['id'], new_domain) - new_group = unit.new_group_ref(domain_id=new_domain['id']) - new_group = self.identity_api.create_group(new_group) - new_user = unit.new_user_ref(domain_id=new_domain['id']) - new_user = self.identity_api.create_user(new_user) - 
self.identity_api.add_user_to_group(new_user['id'], - new_group['id']) - - roles_ref = self.assignment_api.list_grants( - group_id=new_group['id'], - domain_id=new_domain['id']) - self.assertEqual(0, len(roles_ref)) - - self.assignment_api.create_grant(group_id=new_group['id'], - domain_id=new_domain['id'], - role_id='member') - - roles_ref = self.assignment_api.list_grants( - group_id=new_group['id'], - domain_id=new_domain['id']) - self.assertDictEqual(self.role_member, roles_ref[0]) - - self.assignment_api.delete_grant(group_id=new_group['id'], - domain_id=new_domain['id'], - role_id='member') - roles_ref = self.assignment_api.list_grants( - group_id=new_group['id'], - domain_id=new_domain['id']) - self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.delete_grant, - group_id=new_group['id'], - domain_id=new_domain['id'], - role_id='member') - - def test_get_and_remove_correct_role_grant_from_a_mix(self): - new_domain = unit.new_domain_ref() - self.resource_api.create_domain(new_domain['id'], new_domain) - new_project = unit.new_project_ref(domain_id=new_domain['id']) - self.resource_api.create_project(new_project['id'], new_project) - new_group = unit.new_group_ref(domain_id=new_domain['id']) - new_group = self.identity_api.create_group(new_group) - new_group2 = unit.new_group_ref(domain_id=new_domain['id']) - new_group2 = self.identity_api.create_group(new_group2) - new_user = unit.new_user_ref(domain_id=new_domain['id']) - new_user = self.identity_api.create_user(new_user) - new_user2 = unit.new_user_ref(domain_id=new_domain['id']) - new_user2 = self.identity_api.create_user(new_user2) - self.identity_api.add_user_to_group(new_user['id'], - new_group['id']) - # First check we have no grants - roles_ref = self.assignment_api.list_grants( - group_id=new_group['id'], - domain_id=new_domain['id']) - self.assertEqual(0, len(roles_ref)) - # Now add the grant we are going to test for, and some others as - # 
well just to make sure we get back the right one - self.assignment_api.create_grant(group_id=new_group['id'], - domain_id=new_domain['id'], - role_id='member') - - self.assignment_api.create_grant(group_id=new_group2['id'], - domain_id=new_domain['id'], - role_id=self.role_admin['id']) - self.assignment_api.create_grant(user_id=new_user2['id'], - domain_id=new_domain['id'], - role_id=self.role_admin['id']) - self.assignment_api.create_grant(group_id=new_group['id'], - project_id=new_project['id'], - role_id=self.role_admin['id']) - - roles_ref = self.assignment_api.list_grants( - group_id=new_group['id'], - domain_id=new_domain['id']) - self.assertDictEqual(self.role_member, roles_ref[0]) - - self.assignment_api.delete_grant(group_id=new_group['id'], - domain_id=new_domain['id'], - role_id='member') - roles_ref = self.assignment_api.list_grants( - group_id=new_group['id'], - domain_id=new_domain['id']) - self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.delete_grant, - group_id=new_group['id'], - domain_id=new_domain['id'], - role_id='member') - - def test_get_and_remove_role_grant_by_user_and_domain(self): - new_domain = unit.new_domain_ref() - self.resource_api.create_domain(new_domain['id'], new_domain) - new_user = unit.new_user_ref(domain_id=new_domain['id']) - new_user = self.identity_api.create_user(new_user) - roles_ref = self.assignment_api.list_grants( - user_id=new_user['id'], - domain_id=new_domain['id']) - self.assertEqual(0, len(roles_ref)) - self.assignment_api.create_grant(user_id=new_user['id'], - domain_id=new_domain['id'], - role_id='member') - roles_ref = self.assignment_api.list_grants( - user_id=new_user['id'], - domain_id=new_domain['id']) - self.assertDictEqual(self.role_member, roles_ref[0]) - - self.assignment_api.delete_grant(user_id=new_user['id'], - domain_id=new_domain['id'], - role_id='member') - roles_ref = self.assignment_api.list_grants( - user_id=new_user['id'], - 
domain_id=new_domain['id']) - self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.delete_grant, - user_id=new_user['id'], - domain_id=new_domain['id'], - role_id='member') - - def test_get_and_remove_role_grant_by_group_and_cross_domain(self): - group1_domain1_role = unit.new_role_ref() - self.role_api.create_role(group1_domain1_role['id'], - group1_domain1_role) - group1_domain2_role = unit.new_role_ref() - self.role_api.create_role(group1_domain2_role['id'], - group1_domain2_role) - domain1 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = unit.new_domain_ref() - self.resource_api.create_domain(domain2['id'], domain2) - group1 = unit.new_group_ref(domain_id=domain1['id']) - group1 = self.identity_api.create_group(group1) - roles_ref = self.assignment_api.list_grants( - group_id=group1['id'], - domain_id=domain1['id']) - self.assertEqual(0, len(roles_ref)) - roles_ref = self.assignment_api.list_grants( - group_id=group1['id'], - domain_id=domain2['id']) - self.assertEqual(0, len(roles_ref)) - self.assignment_api.create_grant(group_id=group1['id'], - domain_id=domain1['id'], - role_id=group1_domain1_role['id']) - self.assignment_api.create_grant(group_id=group1['id'], - domain_id=domain2['id'], - role_id=group1_domain2_role['id']) - roles_ref = self.assignment_api.list_grants( - group_id=group1['id'], - domain_id=domain1['id']) - self.assertDictEqual(group1_domain1_role, roles_ref[0]) - roles_ref = self.assignment_api.list_grants( - group_id=group1['id'], - domain_id=domain2['id']) - self.assertDictEqual(group1_domain2_role, roles_ref[0]) - - self.assignment_api.delete_grant(group_id=group1['id'], - domain_id=domain2['id'], - role_id=group1_domain2_role['id']) - roles_ref = self.assignment_api.list_grants( - group_id=group1['id'], - domain_id=domain2['id']) - self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.RoleAssignmentNotFound, - 
self.assignment_api.delete_grant, - group_id=group1['id'], - domain_id=domain2['id'], - role_id=group1_domain2_role['id']) - - def test_get_and_remove_role_grant_by_user_and_cross_domain(self): - user1_domain1_role = unit.new_role_ref() - self.role_api.create_role(user1_domain1_role['id'], user1_domain1_role) - user1_domain2_role = unit.new_role_ref() - self.role_api.create_role(user1_domain2_role['id'], user1_domain2_role) - domain1 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = unit.new_domain_ref() - self.resource_api.create_domain(domain2['id'], domain2) - user1 = unit.new_user_ref(domain_id=domain1['id']) - user1 = self.identity_api.create_user(user1) - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - domain_id=domain1['id']) - self.assertEqual(0, len(roles_ref)) - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - domain_id=domain2['id']) - self.assertEqual(0, len(roles_ref)) - self.assignment_api.create_grant(user_id=user1['id'], - domain_id=domain1['id'], - role_id=user1_domain1_role['id']) - self.assignment_api.create_grant(user_id=user1['id'], - domain_id=domain2['id'], - role_id=user1_domain2_role['id']) - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - domain_id=domain1['id']) - self.assertDictEqual(user1_domain1_role, roles_ref[0]) - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - domain_id=domain2['id']) - self.assertDictEqual(user1_domain2_role, roles_ref[0]) - - self.assignment_api.delete_grant(user_id=user1['id'], - domain_id=domain2['id'], - role_id=user1_domain2_role['id']) - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - domain_id=domain2['id']) - self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.delete_grant, - user_id=user1['id'], - domain_id=domain2['id'], - role_id=user1_domain2_role['id']) - - def 
test_role_grant_by_group_and_cross_domain_project(self): - role1 = unit.new_role_ref() - self.role_api.create_role(role1['id'], role1) - role2 = unit.new_role_ref() - self.role_api.create_role(role2['id'], role2) - domain1 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = unit.new_domain_ref() - self.resource_api.create_domain(domain2['id'], domain2) - group1 = unit.new_group_ref(domain_id=domain1['id']) - group1 = self.identity_api.create_group(group1) - project1 = unit.new_project_ref(domain_id=domain2['id']) - self.resource_api.create_project(project1['id'], project1) - roles_ref = self.assignment_api.list_grants( - group_id=group1['id'], - project_id=project1['id']) - self.assertEqual(0, len(roles_ref)) - self.assignment_api.create_grant(group_id=group1['id'], - project_id=project1['id'], - role_id=role1['id']) - self.assignment_api.create_grant(group_id=group1['id'], - project_id=project1['id'], - role_id=role2['id']) - roles_ref = self.assignment_api.list_grants( - group_id=group1['id'], - project_id=project1['id']) - - roles_ref_ids = [] - for ref in roles_ref: - roles_ref_ids.append(ref['id']) - self.assertIn(role1['id'], roles_ref_ids) - self.assertIn(role2['id'], roles_ref_ids) - - self.assignment_api.delete_grant(group_id=group1['id'], - project_id=project1['id'], - role_id=role1['id']) - roles_ref = self.assignment_api.list_grants( - group_id=group1['id'], - project_id=project1['id']) - self.assertEqual(1, len(roles_ref)) - self.assertDictEqual(role2, roles_ref[0]) - - def test_role_grant_by_user_and_cross_domain_project(self): - role1 = unit.new_role_ref() - self.role_api.create_role(role1['id'], role1) - role2 = unit.new_role_ref() - self.role_api.create_role(role2['id'], role2) - domain1 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = unit.new_domain_ref() - self.resource_api.create_domain(domain2['id'], domain2) - user1 = 
unit.new_user_ref(domain_id=domain1['id']) - user1 = self.identity_api.create_user(user1) - project1 = unit.new_project_ref(domain_id=domain2['id']) - self.resource_api.create_project(project1['id'], project1) - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) - self.assertEqual(0, len(roles_ref)) - self.assignment_api.create_grant(user_id=user1['id'], - project_id=project1['id'], - role_id=role1['id']) - self.assignment_api.create_grant(user_id=user1['id'], - project_id=project1['id'], - role_id=role2['id']) - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) - - roles_ref_ids = [] - for ref in roles_ref: - roles_ref_ids.append(ref['id']) - self.assertIn(role1['id'], roles_ref_ids) - self.assertIn(role2['id'], roles_ref_ids) - - self.assignment_api.delete_grant(user_id=user1['id'], - project_id=project1['id'], - role_id=role1['id']) - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) - self.assertEqual(1, len(roles_ref)) - self.assertDictEqual(role2, roles_ref[0]) - - def test_delete_user_grant_no_user(self): - # Can delete a grant where the user doesn't exist. - role = unit.new_role_ref() - role_id = role['id'] - self.role_api.create_role(role_id, role) - - user_id = uuid.uuid4().hex - - self.assignment_api.create_grant(role_id, user_id=user_id, - project_id=self.tenant_bar['id']) - - self.assignment_api.delete_grant(role_id, user_id=user_id, - project_id=self.tenant_bar['id']) - - def test_delete_group_grant_no_group(self): - # Can delete a grant where the group doesn't exist. 
- role = unit.new_role_ref() - role_id = role['id'] - self.role_api.create_role(role_id, role) - - group_id = uuid.uuid4().hex - - self.assignment_api.create_grant(role_id, group_id=group_id, - project_id=self.tenant_bar['id']) - - self.assignment_api.delete_grant(role_id, group_id=group_id, - project_id=self.tenant_bar['id']) - - def test_grant_crud_throws_exception_if_invalid_role(self): - """Ensure RoleNotFound thrown if role does not exist.""" - def assert_role_not_found_exception(f, **kwargs): - self.assertRaises(exception.RoleNotFound, f, - role_id=uuid.uuid4().hex, **kwargs) - - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - user_resp = self.identity_api.create_user(user) - group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - group_resp = self.identity_api.create_group(group) - project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - project_resp = self.resource_api.create_project(project['id'], project) - - for manager_call in [self.assignment_api.create_grant, - self.assignment_api.get_grant, - self.assignment_api.delete_grant]: - assert_role_not_found_exception( - manager_call, - user_id=user_resp['id'], project_id=project_resp['id']) - assert_role_not_found_exception( - manager_call, - group_id=group_resp['id'], project_id=project_resp['id']) - assert_role_not_found_exception( - manager_call, - user_id=user_resp['id'], - domain_id=CONF.identity.default_domain_id) - assert_role_not_found_exception( - manager_call, - group_id=group_resp['id'], - domain_id=CONF.identity.default_domain_id) - - def test_multi_role_grant_by_user_group_on_project_domain(self): - role_list = [] - for _ in range(10): - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - role_list.append(role) - domain1 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - user1 = unit.new_user_ref(domain_id=domain1['id']) - user1 = self.identity_api.create_user(user1) - group1 = 
unit.new_group_ref(domain_id=domain1['id']) - group1 = self.identity_api.create_group(group1) - group2 = unit.new_group_ref(domain_id=domain1['id']) - group2 = self.identity_api.create_group(group2) - project1 = unit.new_project_ref(domain_id=domain1['id']) - self.resource_api.create_project(project1['id'], project1) - - self.identity_api.add_user_to_group(user1['id'], - group1['id']) - self.identity_api.add_user_to_group(user1['id'], - group2['id']) - - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) - self.assertEqual(0, len(roles_ref)) - self.assignment_api.create_grant(user_id=user1['id'], - domain_id=domain1['id'], - role_id=role_list[0]['id']) - self.assignment_api.create_grant(user_id=user1['id'], - domain_id=domain1['id'], - role_id=role_list[1]['id']) - self.assignment_api.create_grant(group_id=group1['id'], - domain_id=domain1['id'], - role_id=role_list[2]['id']) - self.assignment_api.create_grant(group_id=group1['id'], - domain_id=domain1['id'], - role_id=role_list[3]['id']) - self.assignment_api.create_grant(user_id=user1['id'], - project_id=project1['id'], - role_id=role_list[4]['id']) - self.assignment_api.create_grant(user_id=user1['id'], - project_id=project1['id'], - role_id=role_list[5]['id']) - self.assignment_api.create_grant(group_id=group1['id'], - project_id=project1['id'], - role_id=role_list[6]['id']) - self.assignment_api.create_grant(group_id=group1['id'], - project_id=project1['id'], - role_id=role_list[7]['id']) - roles_ref = self.assignment_api.list_grants(user_id=user1['id'], - domain_id=domain1['id']) - self.assertEqual(2, len(roles_ref)) - self.assertIn(role_list[0], roles_ref) - self.assertIn(role_list[1], roles_ref) - roles_ref = self.assignment_api.list_grants(group_id=group1['id'], - domain_id=domain1['id']) - self.assertEqual(2, len(roles_ref)) - self.assertIn(role_list[2], roles_ref) - self.assertIn(role_list[3], roles_ref) - roles_ref = 
self.assignment_api.list_grants(user_id=user1['id'], - project_id=project1['id']) - self.assertEqual(2, len(roles_ref)) - self.assertIn(role_list[4], roles_ref) - self.assertIn(role_list[5], roles_ref) - roles_ref = self.assignment_api.list_grants(group_id=group1['id'], - project_id=project1['id']) - self.assertEqual(2, len(roles_ref)) - self.assertIn(role_list[6], roles_ref) - self.assertIn(role_list[7], roles_ref) - - # Now test the alternate way of getting back lists of grants, - # where user and group roles are combined. These should match - # the above results. - combined_list = self.assignment_api.get_roles_for_user_and_project( - user1['id'], project1['id']) - self.assertEqual(4, len(combined_list)) - self.assertIn(role_list[4]['id'], combined_list) - self.assertIn(role_list[5]['id'], combined_list) - self.assertIn(role_list[6]['id'], combined_list) - self.assertIn(role_list[7]['id'], combined_list) - - combined_role_list = self.assignment_api.get_roles_for_user_and_domain( - user1['id'], domain1['id']) - self.assertEqual(4, len(combined_role_list)) - self.assertIn(role_list[0]['id'], combined_role_list) - self.assertIn(role_list[1]['id'], combined_role_list) - self.assertIn(role_list[2]['id'], combined_role_list) - self.assertIn(role_list[3]['id'], combined_role_list) - - def test_multi_group_grants_on_project_domain(self): - """Test multiple group roles for user on project and domain. 
- - Test Plan: - - - Create 6 roles - - Create a domain, with a project, user and two groups - - Make the user a member of both groups - - Check no roles yet exit - - Assign a role to each user and both groups on both the - project and domain - - Get a list of effective roles for the user on both the - project and domain, checking we get back the correct three - roles - - """ - role_list = [] - for _ in range(6): - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - role_list.append(role) - domain1 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - user1 = unit.new_user_ref(domain_id=domain1['id']) - user1 = self.identity_api.create_user(user1) - group1 = unit.new_group_ref(domain_id=domain1['id']) - group1 = self.identity_api.create_group(group1) - group2 = unit.new_group_ref(domain_id=domain1['id']) - group2 = self.identity_api.create_group(group2) - project1 = unit.new_project_ref(domain_id=domain1['id']) - self.resource_api.create_project(project1['id'], project1) - - self.identity_api.add_user_to_group(user1['id'], - group1['id']) - self.identity_api.add_user_to_group(user1['id'], - group2['id']) - - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) - self.assertEqual(0, len(roles_ref)) - self.assignment_api.create_grant(user_id=user1['id'], - domain_id=domain1['id'], - role_id=role_list[0]['id']) - self.assignment_api.create_grant(group_id=group1['id'], - domain_id=domain1['id'], - role_id=role_list[1]['id']) - self.assignment_api.create_grant(group_id=group2['id'], - domain_id=domain1['id'], - role_id=role_list[2]['id']) - self.assignment_api.create_grant(user_id=user1['id'], - project_id=project1['id'], - role_id=role_list[3]['id']) - self.assignment_api.create_grant(group_id=group1['id'], - project_id=project1['id'], - role_id=role_list[4]['id']) - self.assignment_api.create_grant(group_id=group2['id'], - project_id=project1['id'], - 
role_id=role_list[5]['id']) - - # Read by the roles, ensuring we get the correct 3 roles for - # both project and domain - combined_list = self.assignment_api.get_roles_for_user_and_project( - user1['id'], project1['id']) - self.assertEqual(3, len(combined_list)) - self.assertIn(role_list[3]['id'], combined_list) - self.assertIn(role_list[4]['id'], combined_list) - self.assertIn(role_list[5]['id'], combined_list) - - combined_role_list = self.assignment_api.get_roles_for_user_and_domain( - user1['id'], domain1['id']) - self.assertEqual(3, len(combined_role_list)) - self.assertIn(role_list[0]['id'], combined_role_list) - self.assertIn(role_list[1]['id'], combined_role_list) - self.assertIn(role_list[2]['id'], combined_role_list) - - def test_delete_role_with_user_and_group_grants(self): - role1 = unit.new_role_ref() - self.role_api.create_role(role1['id'], role1) - domain1 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - project1 = unit.new_project_ref(domain_id=domain1['id']) - self.resource_api.create_project(project1['id'], project1) - user1 = unit.new_user_ref(domain_id=domain1['id']) - user1 = self.identity_api.create_user(user1) - group1 = unit.new_group_ref(domain_id=domain1['id']) - group1 = self.identity_api.create_group(group1) - self.assignment_api.create_grant(user_id=user1['id'], - project_id=project1['id'], - role_id=role1['id']) - self.assignment_api.create_grant(user_id=user1['id'], - domain_id=domain1['id'], - role_id=role1['id']) - self.assignment_api.create_grant(group_id=group1['id'], - project_id=project1['id'], - role_id=role1['id']) - self.assignment_api.create_grant(group_id=group1['id'], - domain_id=domain1['id'], - role_id=role1['id']) - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) - self.assertEqual(1, len(roles_ref)) - roles_ref = self.assignment_api.list_grants( - group_id=group1['id'], - project_id=project1['id']) - self.assertEqual(1, len(roles_ref)) 
- roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - domain_id=domain1['id']) - self.assertEqual(1, len(roles_ref)) - roles_ref = self.assignment_api.list_grants( - group_id=group1['id'], - domain_id=domain1['id']) - self.assertEqual(1, len(roles_ref)) - self.role_api.delete_role(role1['id']) - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) - self.assertEqual(0, len(roles_ref)) - roles_ref = self.assignment_api.list_grants( - group_id=group1['id'], - project_id=project1['id']) - self.assertEqual(0, len(roles_ref)) - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - domain_id=domain1['id']) - self.assertEqual(0, len(roles_ref)) - roles_ref = self.assignment_api.list_grants( - group_id=group1['id'], - domain_id=domain1['id']) - self.assertEqual(0, len(roles_ref)) - - def test_list_role_assignment_by_domain(self): - """Test listing of role assignment filtered by domain.""" - test_plan = { - # A domain with 3 users, 1 group, a spoiler domain and 2 roles. - 'entities': {'domains': [{'users': 3, 'groups': 1}, 1], - 'roles': 2}, - # Users 1 & 2 are in the group - 'group_memberships': [{'group': 0, 'users': [1, 2]}], - # Assign a role for user 0 and the group - 'assignments': [{'user': 0, 'role': 0, 'domain': 0}, - {'group': 0, 'role': 1, 'domain': 0}], - 'tests': [ - # List all effective assignments for domain[0]. - # Should get one direct user role and user roles for each of - # the users in the group. 
- {'params': {'domain': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 1, 'role': 1, 'domain': 0, - 'indirect': {'group': 0}}, - {'user': 2, 'role': 1, 'domain': 0, - 'indirect': {'group': 0}} - ]}, - # Using domain[1] should return nothing - {'params': {'domain': 1, 'effective': True}, - 'results': []}, - ] - } - self.execute_assignment_plan(test_plan) - - def test_list_role_assignment_by_user_with_domain_group_roles(self): - """Test listing assignments by user, with group roles on a domain.""" - test_plan = { - # A domain with 3 users, 3 groups, a spoiler domain - # plus 3 roles. - 'entities': {'domains': [{'users': 3, 'groups': 3}, 1], - 'roles': 3}, - # Users 1 & 2 are in the group 0, User 1 also in group 1 - 'group_memberships': [{'group': 0, 'users': [0, 1]}, - {'group': 1, 'users': [0]}], - 'assignments': [{'user': 0, 'role': 0, 'domain': 0}, - {'group': 0, 'role': 1, 'domain': 0}, - {'group': 1, 'role': 2, 'domain': 0}, - # ...and two spoiler assignments - {'user': 1, 'role': 1, 'domain': 0}, - {'group': 2, 'role': 2, 'domain': 0}], - 'tests': [ - # List all effective assignments for user[0]. 
- # Should get one direct user role and a user roles for each of - # groups 0 and 1 - {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'domain': 0, - 'indirect': {'group': 0}}, - {'user': 0, 'role': 2, 'domain': 0, - 'indirect': {'group': 1}} - ]}, - # Adding domain[0] as a filter should return the same data - {'params': {'user': 0, 'domain': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'domain': 0, - 'indirect': {'group': 0}}, - {'user': 0, 'role': 2, 'domain': 0, - 'indirect': {'group': 1}} - ]}, - # Using domain[1] should return nothing - {'params': {'user': 0, 'domain': 1, 'effective': True}, - 'results': []}, - # Using user[2] should return nothing - {'params': {'user': 2, 'domain': 0, 'effective': True}, - 'results': []}, - ] - } - self.execute_assignment_plan(test_plan) - - def test_list_role_assignment_using_sourced_groups(self): - """Test listing assignments when restricted by source groups.""" - test_plan = { - # The default domain with 3 users, 3 groups, 3 projects, - # plus 3 roles. 
- 'entities': {'domains': {'id': CONF.identity.default_domain_id, - 'users': 3, 'groups': 3, 'projects': 3}, - 'roles': 3}, - # Users 0 & 1 are in the group 0, User 0 also in group 1 - 'group_memberships': [{'group': 0, 'users': [0, 1]}, - {'group': 1, 'users': [0]}], - # Spread the assignments around - we want to be able to show that - # if sourced by group, assignments from other sources are excluded - 'assignments': [{'user': 0, 'role': 0, 'project': 0}, - {'group': 0, 'role': 1, 'project': 1}, - {'group': 1, 'role': 2, 'project': 0}, - {'group': 1, 'role': 2, 'project': 1}, - {'user': 2, 'role': 1, 'project': 1}, - {'group': 2, 'role': 2, 'project': 2} - ], - 'tests': [ - # List all effective assignments sourced from groups 0 and 1 - {'params': {'source_from_group_ids': [0, 1], - 'effective': True}, - 'results': [{'group': 0, 'role': 1, 'project': 1}, - {'group': 1, 'role': 2, 'project': 0}, - {'group': 1, 'role': 2, 'project': 1} - ]}, - # Adding a role a filter should further restrict the entries - {'params': {'source_from_group_ids': [0, 1], 'role': 2, - 'effective': True}, - 'results': [{'group': 1, 'role': 2, 'project': 0}, - {'group': 1, 'role': 2, 'project': 1} - ]}, - ] - } - self.execute_assignment_plan(test_plan) - - def test_list_role_assignment_using_sourced_groups_with_domains(self): - """Test listing domain assignments when restricted by source groups.""" - test_plan = { - # A domain with 3 users, 3 groups, 3 projects, a second domain, - # plus 3 roles. 
- 'entities': {'domains': [{'users': 3, 'groups': 3, 'projects': 3}, - 1], - 'roles': 3}, - # Users 0 & 1 are in the group 0, User 0 also in group 1 - 'group_memberships': [{'group': 0, 'users': [0, 1]}, - {'group': 1, 'users': [0]}], - # Spread the assignments around - we want to be able to show that - # if sourced by group, assignments from other sources are excluded - 'assignments': [{'user': 0, 'role': 0, 'domain': 0}, - {'group': 0, 'role': 1, 'domain': 1}, - {'group': 1, 'role': 2, 'project': 0}, - {'group': 1, 'role': 2, 'project': 1}, - {'user': 2, 'role': 1, 'project': 1}, - {'group': 2, 'role': 2, 'project': 2} - ], - 'tests': [ - # List all effective assignments sourced from groups 0 and 1 - {'params': {'source_from_group_ids': [0, 1], - 'effective': True}, - 'results': [{'group': 0, 'role': 1, 'domain': 1}, - {'group': 1, 'role': 2, 'project': 0}, - {'group': 1, 'role': 2, 'project': 1} - ]}, - # Adding a role a filter should further restrict the entries - {'params': {'source_from_group_ids': [0, 1], 'role': 1, - 'effective': True}, - 'results': [{'group': 0, 'role': 1, 'domain': 1}, - ]}, - ] - } - self.execute_assignment_plan(test_plan) - - def test_list_role_assignment_fails_with_userid_and_source_groups(self): - """Show we trap this unsupported internal combination of params.""" - group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - group = self.identity_api.create_group(group) - self.assertRaises(exception.UnexpectedError, - self.assignment_api.list_role_assignments, - effective=True, - user_id=self.user_foo['id'], - source_from_group_ids=[group['id']]) - - def test_add_user_to_project(self): - self.assignment_api.add_user_to_project(self.tenant_baz['id'], - self.user_foo['id']) - tenants = self.assignment_api.list_projects_for_user( - self.user_foo['id']) - self.assertIn(self.tenant_baz, tenants) - - def test_add_user_to_project_missing_default_role(self): - self.role_api.delete_role(CONF.member_role_id) - 
self.assertRaises(exception.RoleNotFound, - self.role_api.get_role, - CONF.member_role_id) - self.assignment_api.add_user_to_project(self.tenant_baz['id'], - self.user_foo['id']) - tenants = ( - self.assignment_api.list_projects_for_user(self.user_foo['id'])) - self.assertIn(self.tenant_baz, tenants) - default_role = self.role_api.get_role(CONF.member_role_id) - self.assertIsNotNone(default_role) - - def test_add_user_to_project_returns_not_found(self): - self.assertRaises(exception.ProjectNotFound, - self.assignment_api.add_user_to_project, - uuid.uuid4().hex, - self.user_foo['id']) - - def test_add_user_to_project_no_user(self): - # If add_user_to_project and the user doesn't exist, then - # no error. - user_id_not_exist = uuid.uuid4().hex - self.assignment_api.add_user_to_project(self.tenant_bar['id'], - user_id_not_exist) - - def test_remove_user_from_project(self): - self.assignment_api.add_user_to_project(self.tenant_baz['id'], - self.user_foo['id']) - self.assignment_api.remove_user_from_project(self.tenant_baz['id'], - self.user_foo['id']) - tenants = self.assignment_api.list_projects_for_user( - self.user_foo['id']) - self.assertNotIn(self.tenant_baz, tenants) - - def test_remove_user_from_project_race_delete_role(self): - self.assignment_api.add_user_to_project(self.tenant_baz['id'], - self.user_foo['id']) - self.assignment_api.add_role_to_user_and_project( - tenant_id=self.tenant_baz['id'], - user_id=self.user_foo['id'], - role_id=self.role_other['id']) - - # Mock a race condition, delete a role after - # get_roles_for_user_and_project() is called in - # remove_user_from_project(). 
- roles = self.assignment_api.get_roles_for_user_and_project( - self.user_foo['id'], self.tenant_baz['id']) - self.role_api.delete_role(self.role_other['id']) - self.assignment_api.get_roles_for_user_and_project = mock.Mock( - return_value=roles) - self.assignment_api.remove_user_from_project(self.tenant_baz['id'], - self.user_foo['id']) - tenants = self.assignment_api.list_projects_for_user( - self.user_foo['id']) - self.assertNotIn(self.tenant_baz, tenants) - - def test_remove_user_from_project_returns_not_found(self): - self.assertRaises(exception.ProjectNotFound, - self.assignment_api.remove_user_from_project, - uuid.uuid4().hex, - self.user_foo['id']) - - self.assertRaises(exception.UserNotFound, - self.assignment_api.remove_user_from_project, - self.tenant_bar['id'], - uuid.uuid4().hex) - - self.assertRaises(exception.NotFound, - self.assignment_api.remove_user_from_project, - self.tenant_baz['id'], - self.user_foo['id']) - - def test_list_user_project_ids_returns_not_found(self): - self.assertRaises(exception.UserNotFound, - self.assignment_api.list_projects_for_user, - uuid.uuid4().hex) - - def test_delete_user_with_project_association(self): - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - user = self.identity_api.create_user(user) - self.assignment_api.add_user_to_project(self.tenant_bar['id'], - user['id']) - self.identity_api.delete_user(user['id']) - self.assertRaises(exception.UserNotFound, - self.assignment_api.list_projects_for_user, - user['id']) - - def test_delete_user_with_project_roles(self): - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - user = self.identity_api.create_user(user) - self.assignment_api.add_role_to_user_and_project( - user['id'], - self.tenant_bar['id'], - self.role_member['id']) - self.identity_api.delete_user(user['id']) - self.assertRaises(exception.UserNotFound, - self.assignment_api.list_projects_for_user, - user['id']) - - def test_delete_role_returns_not_found(self): - 
self.assertRaises(exception.RoleNotFound, - self.role_api.delete_role, - uuid.uuid4().hex) - - def test_delete_project_with_role_assignments(self): - project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - self.resource_api.create_project(project['id'], project) - self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], project['id'], 'member') - self.resource_api.delete_project(project['id']) - self.assertRaises(exception.ProjectNotFound, - self.assignment_api.list_user_ids_for_project, - project['id']) - - def test_delete_role_check_role_grant(self): - role = unit.new_role_ref() - alt_role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - self.role_api.create_role(alt_role['id'], alt_role) - self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], self.tenant_bar['id'], role['id']) - self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], self.tenant_bar['id'], alt_role['id']) - self.role_api.delete_role(role['id']) - roles_ref = self.assignment_api.get_roles_for_user_and_project( - self.user_foo['id'], self.tenant_bar['id']) - self.assertNotIn(role['id'], roles_ref) - self.assertIn(alt_role['id'], roles_ref) - - def test_list_projects_for_user(self): - domain = unit.new_domain_ref() - self.resource_api.create_domain(domain['id'], domain) - user1 = unit.new_user_ref(domain_id=domain['id']) - user1 = self.identity_api.create_user(user1) - user_projects = self.assignment_api.list_projects_for_user(user1['id']) - self.assertEqual(0, len(user_projects)) - self.assignment_api.create_grant(user_id=user1['id'], - project_id=self.tenant_bar['id'], - role_id=self.role_member['id']) - self.assignment_api.create_grant(user_id=user1['id'], - project_id=self.tenant_baz['id'], - role_id=self.role_member['id']) - user_projects = self.assignment_api.list_projects_for_user(user1['id']) - self.assertEqual(2, len(user_projects)) - - def test_list_projects_for_user_with_grants(self): - # 
Create two groups each with a role on a different project, and - # make user1 a member of both groups. Both these new projects - # should now be included, along with any direct user grants. - domain = unit.new_domain_ref() - self.resource_api.create_domain(domain['id'], domain) - user1 = unit.new_user_ref(domain_id=domain['id']) - user1 = self.identity_api.create_user(user1) - group1 = unit.new_group_ref(domain_id=domain['id']) - group1 = self.identity_api.create_group(group1) - group2 = unit.new_group_ref(domain_id=domain['id']) - group2 = self.identity_api.create_group(group2) - project1 = unit.new_project_ref(domain_id=domain['id']) - self.resource_api.create_project(project1['id'], project1) - project2 = unit.new_project_ref(domain_id=domain['id']) - self.resource_api.create_project(project2['id'], project2) - self.identity_api.add_user_to_group(user1['id'], group1['id']) - self.identity_api.add_user_to_group(user1['id'], group2['id']) - - # Create 3 grants, one user grant, the other two as group grants - self.assignment_api.create_grant(user_id=user1['id'], - project_id=self.tenant_bar['id'], - role_id=self.role_member['id']) - self.assignment_api.create_grant(group_id=group1['id'], - project_id=project1['id'], - role_id=self.role_admin['id']) - self.assignment_api.create_grant(group_id=group2['id'], - project_id=project2['id'], - role_id=self.role_admin['id']) - user_projects = self.assignment_api.list_projects_for_user(user1['id']) - self.assertEqual(3, len(user_projects)) - - def test_create_grant_no_user(self): - # If call create_grant with a user that doesn't exist, doesn't fail. - self.assignment_api.create_grant( - self.role_other['id'], - user_id=uuid.uuid4().hex, - project_id=self.tenant_bar['id']) - - def test_create_grant_no_group(self): - # If call create_grant with a group that doesn't exist, doesn't fail. 
- self.assignment_api.create_grant( - self.role_other['id'], - group_id=uuid.uuid4().hex, - project_id=self.tenant_bar['id']) - - def test_delete_group_removes_role_assignments(self): - # When a group is deleted any role assignments for the group are - # removed. - - MEMBER_ROLE_ID = 'member' - - def get_member_assignments(): - assignments = self.assignment_api.list_role_assignments() - return [x for x in assignments if x['role_id'] == MEMBER_ROLE_ID] - - orig_member_assignments = get_member_assignments() - - # Create a group. - new_group = unit.new_group_ref( - domain_id=CONF.identity.default_domain_id) - new_group = self.identity_api.create_group(new_group) - - # Create a project. - new_project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - self.resource_api.create_project(new_project['id'], new_project) - - # Assign a role to the group. - self.assignment_api.create_grant( - group_id=new_group['id'], project_id=new_project['id'], - role_id=MEMBER_ROLE_ID) - - # Delete the group. - self.identity_api.delete_group(new_group['id']) - - # Check that the role assignment for the group is gone - member_assignments = get_member_assignments() - - self.assertThat(member_assignments, - matchers.Equals(orig_member_assignments)) - - def test_get_roles_for_groups_on_domain(self): - """Test retrieving group domain roles. 
- - Test Plan: - - - Create a domain, three groups and three roles - - Assign one an inherited and the others a non-inherited group role - to the domain - - Ensure that only the non-inherited roles are returned on the domain - - """ - domain1 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - group_list = [] - group_id_list = [] - role_list = [] - for _ in range(3): - group = unit.new_group_ref(domain_id=domain1['id']) - group = self.identity_api.create_group(group) - group_list.append(group) - group_id_list.append(group['id']) - - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - role_list.append(role) - - # Assign the roles - one is inherited - self.assignment_api.create_grant(group_id=group_list[0]['id'], - domain_id=domain1['id'], - role_id=role_list[0]['id']) - self.assignment_api.create_grant(group_id=group_list[1]['id'], - domain_id=domain1['id'], - role_id=role_list[1]['id']) - self.assignment_api.create_grant(group_id=group_list[2]['id'], - domain_id=domain1['id'], - role_id=role_list[2]['id'], - inherited_to_projects=True) - - # Now get the effective roles for the groups on the domain project. We - # shouldn't get back the inherited role. - - role_refs = self.assignment_api.get_roles_for_groups( - group_id_list, domain_id=domain1['id']) - - self.assertThat(role_refs, matchers.HasLength(2)) - self.assertIn(role_list[0], role_refs) - self.assertIn(role_list[1], role_refs) - - def test_get_roles_for_groups_on_project(self): - """Test retrieving group project roles. 
- - Test Plan: - - - Create two domains, two projects, six groups and six roles - - Project1 is in Domain1, Project2 is in Domain2 - - Domain2/Project2 are spoilers - - Assign a different direct group role to each project as well - as both an inherited and non-inherited role to each domain - - Get the group roles for Project 1 - depending on whether we have - enabled inheritance, we should either get back just the direct role - or both the direct one plus the inherited domain role from Domain 1 - - """ - domain1 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = unit.new_domain_ref() - self.resource_api.create_domain(domain2['id'], domain2) - project1 = unit.new_project_ref(domain_id=domain1['id']) - self.resource_api.create_project(project1['id'], project1) - project2 = unit.new_project_ref(domain_id=domain2['id']) - self.resource_api.create_project(project2['id'], project2) - group_list = [] - group_id_list = [] - role_list = [] - for _ in range(6): - group = unit.new_group_ref(domain_id=domain1['id']) - group = self.identity_api.create_group(group) - group_list.append(group) - group_id_list.append(group['id']) - - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - role_list.append(role) - - # Assign the roles - one inherited and one non-inherited on Domain1, - # plus one on Project1 - self.assignment_api.create_grant(group_id=group_list[0]['id'], - domain_id=domain1['id'], - role_id=role_list[0]['id']) - self.assignment_api.create_grant(group_id=group_list[1]['id'], - domain_id=domain1['id'], - role_id=role_list[1]['id'], - inherited_to_projects=True) - self.assignment_api.create_grant(group_id=group_list[2]['id'], - project_id=project1['id'], - role_id=role_list[2]['id']) - - # ...and a duplicate set of spoiler assignments to Domain2/Project2 - self.assignment_api.create_grant(group_id=group_list[3]['id'], - domain_id=domain2['id'], - role_id=role_list[3]['id']) - 
self.assignment_api.create_grant(group_id=group_list[4]['id'], - domain_id=domain2['id'], - role_id=role_list[4]['id'], - inherited_to_projects=True) - self.assignment_api.create_grant(group_id=group_list[5]['id'], - project_id=project2['id'], - role_id=role_list[5]['id']) - - # Now get the effective roles for all groups on the Project1. With - # inheritance off, we should only get back the direct role. - - self.config_fixture.config(group='os_inherit', enabled=False) - role_refs = self.assignment_api.get_roles_for_groups( - group_id_list, project_id=project1['id']) - - self.assertThat(role_refs, matchers.HasLength(1)) - self.assertIn(role_list[2], role_refs) - - # With inheritance on, we should also get back the inherited role from - # its owning domain. - - self.config_fixture.config(group='os_inherit', enabled=True) - role_refs = self.assignment_api.get_roles_for_groups( - group_id_list, project_id=project1['id']) - - self.assertThat(role_refs, matchers.HasLength(2)) - self.assertIn(role_list[1], role_refs) - self.assertIn(role_list[2], role_refs) - - def test_list_domains_for_groups(self): - """Test retrieving domains for a list of groups. 
- - Test Plan: - - - Create three domains, three groups and one role - - Assign a non-inherited group role to two domains, and an inherited - group role to the third - - Ensure only the domains with non-inherited roles are returned - - """ - domain_list = [] - group_list = [] - group_id_list = [] - for _ in range(3): - domain = unit.new_domain_ref() - self.resource_api.create_domain(domain['id'], domain) - domain_list.append(domain) - - group = unit.new_group_ref(domain_id=domain['id']) - group = self.identity_api.create_group(group) - group_list.append(group) - group_id_list.append(group['id']) - - role1 = unit.new_role_ref() - self.role_api.create_role(role1['id'], role1) - - # Assign the roles - one is inherited - self.assignment_api.create_grant(group_id=group_list[0]['id'], - domain_id=domain_list[0]['id'], - role_id=role1['id']) - self.assignment_api.create_grant(group_id=group_list[1]['id'], - domain_id=domain_list[1]['id'], - role_id=role1['id']) - self.assignment_api.create_grant(group_id=group_list[2]['id'], - domain_id=domain_list[2]['id'], - role_id=role1['id'], - inherited_to_projects=True) - - # Now list the domains that have roles for any of the 3 groups - # We shouldn't get back domain[2] since that had an inherited role. - - domain_refs = ( - self.assignment_api.list_domains_for_groups(group_id_list)) - - self.assertThat(domain_refs, matchers.HasLength(2)) - self.assertIn(domain_list[0], domain_refs) - self.assertIn(domain_list[1], domain_refs) - - def test_list_projects_for_groups(self): - """Test retrieving projects for a list of groups. - - Test Plan: - - - Create two domains, four projects, seven groups and seven roles - - Project1-3 are in Domain1, Project4 is in Domain2 - - Domain2/Project4 are spoilers - - Project1 and 2 have direct group roles, Project3 has no direct - roles but should inherit a group role from Domain1 - - Get the projects for the group roles that are assigned to Project1 - Project2 and the inherited one on Domain1. 
Depending on whether we - have enabled inheritance, we should either get back just the projects - with direct roles (Project 1 and 2) or also Project3 due to its - inherited role from Domain1. - - """ - domain1 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = unit.new_domain_ref() - self.resource_api.create_domain(domain2['id'], domain2) - project1 = unit.new_project_ref(domain_id=domain1['id']) - project1 = self.resource_api.create_project(project1['id'], project1) - project2 = unit.new_project_ref(domain_id=domain1['id']) - project2 = self.resource_api.create_project(project2['id'], project2) - project3 = unit.new_project_ref(domain_id=domain1['id']) - project3 = self.resource_api.create_project(project3['id'], project3) - project4 = unit.new_project_ref(domain_id=domain2['id']) - project4 = self.resource_api.create_project(project4['id'], project4) - group_list = [] - role_list = [] - for _ in range(7): - group = unit.new_group_ref(domain_id=domain1['id']) - group = self.identity_api.create_group(group) - group_list.append(group) - - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - role_list.append(role) - - # Assign the roles - one inherited and one non-inherited on Domain1, - # plus one on Project1 and Project2 - self.assignment_api.create_grant(group_id=group_list[0]['id'], - domain_id=domain1['id'], - role_id=role_list[0]['id']) - self.assignment_api.create_grant(group_id=group_list[1]['id'], - domain_id=domain1['id'], - role_id=role_list[1]['id'], - inherited_to_projects=True) - self.assignment_api.create_grant(group_id=group_list[2]['id'], - project_id=project1['id'], - role_id=role_list[2]['id']) - self.assignment_api.create_grant(group_id=group_list[3]['id'], - project_id=project2['id'], - role_id=role_list[3]['id']) - - # ...and a few of spoiler assignments to Domain2/Project4 - self.assignment_api.create_grant(group_id=group_list[4]['id'], - domain_id=domain2['id'], - 
role_id=role_list[4]['id']) - self.assignment_api.create_grant(group_id=group_list[5]['id'], - domain_id=domain2['id'], - role_id=role_list[5]['id'], - inherited_to_projects=True) - self.assignment_api.create_grant(group_id=group_list[6]['id'], - project_id=project4['id'], - role_id=role_list[6]['id']) - - # Now get the projects for the groups that have roles on Project1, - # Project2 and the inherited role on Domain!. With inheritance off, - # we should only get back the projects with direct role. - - self.config_fixture.config(group='os_inherit', enabled=False) - group_id_list = [group_list[1]['id'], group_list[2]['id'], - group_list[3]['id']] - project_refs = ( - self.assignment_api.list_projects_for_groups(group_id_list)) - - self.assertThat(project_refs, matchers.HasLength(2)) - self.assertIn(project1, project_refs) - self.assertIn(project2, project_refs) - - # With inheritance on, we should also get back the Project3 due to the - # inherited role from its owning domain. - - self.config_fixture.config(group='os_inherit', enabled=True) - project_refs = ( - self.assignment_api.list_projects_for_groups(group_id_list)) - - self.assertThat(project_refs, matchers.HasLength(3)) - self.assertIn(project1, project_refs) - self.assertIn(project2, project_refs) - self.assertIn(project3, project_refs) - - def test_update_role_no_name(self): - # A user can update a role and not include the name. - - # description is picked just because it's not name. - self.role_api.update_role(self.role_member['id'], - {'description': uuid.uuid4().hex}) - # If the previous line didn't raise an exception then the test passes. - - def test_update_role_same_name(self): - # A user can update a role and set the name to be the same as it was. - - self.role_api.update_role(self.role_member['id'], - {'name': self.role_member['name']}) - # If the previous line didn't raise an exception then the test passes. 
- - def test_list_role_assignment_containing_names(self): - # Create Refs - new_role = unit.new_role_ref() - new_domain = self._get_domain_fixture() - new_user = unit.new_user_ref(domain_id=new_domain['id']) - new_project = unit.new_project_ref(domain_id=new_domain['id']) - new_group = unit.new_group_ref(domain_id=new_domain['id']) - # Create entities - new_role = self.role_api.create_role(new_role['id'], new_role) - new_user = self.identity_api.create_user(new_user) - new_group = self.identity_api.create_group(new_group) - self.resource_api.create_project(new_project['id'], new_project) - self.assignment_api.create_grant(user_id=new_user['id'], - project_id=new_project['id'], - role_id=new_role['id']) - self.assignment_api.create_grant(group_id=new_group['id'], - project_id=new_project['id'], - role_id=new_role['id']) - self.assignment_api.create_grant(domain_id=new_domain['id'], - user_id=new_user['id'], - role_id=new_role['id']) - # Get the created assignments with the include_names flag - _asgmt_prj = self.assignment_api.list_role_assignments( - user_id=new_user['id'], - project_id=new_project['id'], - include_names=True) - _asgmt_grp = self.assignment_api.list_role_assignments( - group_id=new_group['id'], - project_id=new_project['id'], - include_names=True) - _asgmt_dmn = self.assignment_api.list_role_assignments( - domain_id=new_domain['id'], - user_id=new_user['id'], - include_names=True) - # Make sure we can get back the correct number of assignments - self.assertThat(_asgmt_prj, matchers.HasLength(1)) - self.assertThat(_asgmt_grp, matchers.HasLength(1)) - self.assertThat(_asgmt_dmn, matchers.HasLength(1)) - # get the first assignment - first_asgmt_prj = _asgmt_prj[0] - first_asgmt_grp = _asgmt_grp[0] - first_asgmt_dmn = _asgmt_dmn[0] - # Assert the names are correct in the project response - self.assertEqual(new_project['name'], - first_asgmt_prj['project_name']) - self.assertEqual(new_project['domain_id'], - first_asgmt_prj['project_domain_id']) - 
self.assertEqual(new_user['name'], - first_asgmt_prj['user_name']) - self.assertEqual(new_user['domain_id'], - first_asgmt_prj['user_domain_id']) - self.assertEqual(new_role['name'], - first_asgmt_prj['role_name']) - # Assert the names are correct in the group response - self.assertEqual(new_group['name'], - first_asgmt_grp['group_name']) - self.assertEqual(new_group['domain_id'], - first_asgmt_grp['group_domain_id']) - self.assertEqual(new_project['name'], - first_asgmt_grp['project_name']) - self.assertEqual(new_project['domain_id'], - first_asgmt_grp['project_domain_id']) - self.assertEqual(new_role['name'], - first_asgmt_grp['role_name']) - # Assert the names are correct in the domain response - self.assertEqual(new_domain['name'], - first_asgmt_dmn['domain_name']) - self.assertEqual(new_user['name'], - first_asgmt_dmn['user_name']) - self.assertEqual(new_user['domain_id'], - first_asgmt_dmn['user_domain_id']) - self.assertEqual(new_role['name'], - first_asgmt_dmn['role_name']) - - def test_list_role_assignment_does_not_contain_names(self): - """Test names are not included with list role assignments. 
- - Scenario: - - names are NOT included by default - - names are NOT included when include_names=False - - """ - def assert_does_not_contain_names(assignment): - first_asgmt_prj = assignment[0] - self.assertNotIn('project_name', first_asgmt_prj) - self.assertNotIn('project_domain_id', first_asgmt_prj) - self.assertNotIn('user_name', first_asgmt_prj) - self.assertNotIn('user_domain_id', first_asgmt_prj) - self.assertNotIn('role_name', first_asgmt_prj) - - # Create Refs - new_role = unit.new_role_ref() - new_domain = self._get_domain_fixture() - new_user = unit.new_user_ref(domain_id=new_domain['id']) - new_project = unit.new_project_ref(domain_id=new_domain['id']) - # Create entities - new_role = self.role_api.create_role(new_role['id'], new_role) - new_user = self.identity_api.create_user(new_user) - self.resource_api.create_project(new_project['id'], new_project) - self.assignment_api.create_grant(user_id=new_user['id'], - project_id=new_project['id'], - role_id=new_role['id']) - # Get the created assignments with NO include_names flag - role_assign_without_names = self.assignment_api.list_role_assignments( - user_id=new_user['id'], - project_id=new_project['id']) - assert_does_not_contain_names(role_assign_without_names) - # Get the created assignments with include_names=False - role_assign_without_names = self.assignment_api.list_role_assignments( - user_id=new_user['id'], - project_id=new_project['id'], - include_names=False) - assert_does_not_contain_names(role_assign_without_names) - - def test_delete_user_assignments_user_same_id_as_group(self): - """Test deleting user assignments when user_id == group_id. - - In this scenario, only user assignments must be deleted (i.e. - USER_DOMAIN or USER_PROJECT). - - Test plan: - * Create a user and a group with the same ID; - * Create four roles and assign them to both user and group; - * Delete all user assignments; - * Group assignments must stay intact. 
- """ - # Create a common ID - common_id = uuid.uuid4().hex - # Create a project - project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - project = self.resource_api.create_project(project['id'], project) - # Create a user - user = unit.new_user_ref(id=common_id, - domain_id=CONF.identity.default_domain_id) - user = self.identity_api.driver.create_user(common_id, user) - self.assertEqual(common_id, user['id']) - # Create a group - group = unit.new_group_ref(id=common_id, - domain_id=CONF.identity.default_domain_id) - group = self.identity_api.driver.create_group(common_id, group) - self.assertEqual(common_id, group['id']) - # Create four roles - roles = [] - for _ in range(4): - role = unit.new_role_ref() - roles.append(self.role_api.create_role(role['id'], role)) - # Assign roles for user - self.assignment_api.driver.create_grant( - user_id=user['id'], domain_id=CONF.identity.default_domain_id, - role_id=roles[0]['id']) - self.assignment_api.driver.create_grant(user_id=user['id'], - project_id=project['id'], - role_id=roles[1]['id']) - # Assign roles for group - self.assignment_api.driver.create_grant( - group_id=group['id'], domain_id=CONF.identity.default_domain_id, - role_id=roles[2]['id']) - self.assignment_api.driver.create_grant(group_id=group['id'], - project_id=project['id'], - role_id=roles[3]['id']) - # Make sure they were assigned - user_assignments = self.assignment_api.list_role_assignments( - user_id=user['id']) - self.assertThat(user_assignments, matchers.HasLength(2)) - group_assignments = self.assignment_api.list_role_assignments( - group_id=group['id']) - self.assertThat(group_assignments, matchers.HasLength(2)) - # Delete user assignments - self.assignment_api.delete_user_assignments(user_id=user['id']) - # Assert only user assignments were deleted - user_assignments = self.assignment_api.list_role_assignments( - user_id=user['id']) - self.assertThat(user_assignments, matchers.HasLength(0)) - group_assignments = 
self.assignment_api.list_role_assignments( - group_id=group['id']) - self.assertThat(group_assignments, matchers.HasLength(2)) - # Make sure these remaining assignments are group-related - for assignment in group_assignments: - self.assertThat(assignment.keys(), matchers.Contains('group_id')) - - def test_delete_group_assignments_group_same_id_as_user(self): - """Test deleting group assignments when group_id == user_id. - - In this scenario, only group assignments must be deleted (i.e. - GROUP_DOMAIN or GROUP_PROJECT). - - Test plan: - * Create a group and a user with the same ID; - * Create four roles and assign them to both group and user; - * Delete all group assignments; - * User assignments must stay intact. - """ - # Create a common ID - common_id = uuid.uuid4().hex - # Create a project - project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - project = self.resource_api.create_project(project['id'], project) - # Create a user - user = unit.new_user_ref(id=common_id, - domain_id=CONF.identity.default_domain_id) - user = self.identity_api.driver.create_user(common_id, user) - self.assertEqual(common_id, user['id']) - # Create a group - group = unit.new_group_ref(id=common_id, - domain_id=CONF.identity.default_domain_id) - group = self.identity_api.driver.create_group(common_id, group) - self.assertEqual(common_id, group['id']) - # Create four roles - roles = [] - for _ in range(4): - role = unit.new_role_ref() - roles.append(self.role_api.create_role(role['id'], role)) - # Assign roles for user - self.assignment_api.driver.create_grant( - user_id=user['id'], domain_id=CONF.identity.default_domain_id, - role_id=roles[0]['id']) - self.assignment_api.driver.create_grant(user_id=user['id'], - project_id=project['id'], - role_id=roles[1]['id']) - # Assign roles for group - self.assignment_api.driver.create_grant( - group_id=group['id'], domain_id=CONF.identity.default_domain_id, - role_id=roles[2]['id']) - 
self.assignment_api.driver.create_grant(group_id=group['id'], - project_id=project['id'], - role_id=roles[3]['id']) - # Make sure they were assigned - user_assignments = self.assignment_api.list_role_assignments( - user_id=user['id']) - self.assertThat(user_assignments, matchers.HasLength(2)) - group_assignments = self.assignment_api.list_role_assignments( - group_id=group['id']) - self.assertThat(group_assignments, matchers.HasLength(2)) - # Delete group assignments - self.assignment_api.delete_group_assignments(group_id=group['id']) - # Assert only group assignments were deleted - group_assignments = self.assignment_api.list_role_assignments( - group_id=group['id']) - self.assertThat(group_assignments, matchers.HasLength(0)) - user_assignments = self.assignment_api.list_role_assignments( - user_id=user['id']) - self.assertThat(user_assignments, matchers.HasLength(2)) - # Make sure these remaining assignments are user-related - for assignment in group_assignments: - self.assertThat(assignment.keys(), matchers.Contains('user_id')) - - def test_remove_foreign_assignments_when_deleting_a_domain(self): - # A user and a group are in default domain and have assigned a role on - # two new domains. This test makes sure that when one of the new - # domains is deleted, the role assignments for the user and the group - # from the default domain are deleted only on that domain. 
- group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - group = self.identity_api.create_group(group) - - role = unit.new_role_ref() - role = self.role_api.create_role(role['id'], role) - - new_domains = [unit.new_domain_ref(), unit.new_domain_ref()] - for new_domain in new_domains: - self.resource_api.create_domain(new_domain['id'], new_domain) - - self.assignment_api.create_grant(group_id=group['id'], - domain_id=new_domain['id'], - role_id=role['id']) - self.assignment_api.create_grant(user_id=self.user_two['id'], - domain_id=new_domain['id'], - role_id=role['id']) - - # Check there are 4 role assignments for that role - role_assignments = self.assignment_api.list_role_assignments( - role_id=role['id']) - self.assertThat(role_assignments, matchers.HasLength(4)) - - # Delete first new domain and check only 2 assignments were left - self.resource_api.update_domain(new_domains[0]['id'], - {'enabled': False}) - self.resource_api.delete_domain(new_domains[0]['id']) - - role_assignments = self.assignment_api.list_role_assignments( - role_id=role['id']) - self.assertThat(role_assignments, matchers.HasLength(2)) - - # Delete second new domain and check no assignments were left - self.resource_api.update_domain(new_domains[1]['id'], - {'enabled': False}) - self.resource_api.delete_domain(new_domains[1]['id']) - - role_assignments = self.assignment_api.list_role_assignments( - role_id=role['id']) - self.assertEqual([], role_assignments) - - -class InheritanceTests(AssignmentTestHelperMixin): - - def test_role_assignments_user_domain_to_project_inheritance(self): - test_plan = { - 'entities': {'domains': {'users': 2, 'projects': 1}, - 'roles': 3}, - 'assignments': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}, - {'user': 0, 'role': 2, 'domain': 0, - 'inherited_to_projects': True}, - {'user': 1, 'role': 1, 'project': 0}], - 'tests': [ - # List all direct assignments for user[0] - {'params': {'user': 0}, - 'results': [{'user': 
0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}, - {'user': 0, 'role': 2, 'domain': 0, - 'inherited_to_projects': 'projects'}]}, - # Now the effective ones - so the domain role should turn into - # a project role - {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}, - {'user': 0, 'role': 2, 'project': 0, - 'indirect': {'domain': 0}}]}, - # Narrow down to effective roles for user[0] and project[0] - {'params': {'user': 0, 'project': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 1, 'project': 0}, - {'user': 0, 'role': 2, 'project': 0, - 'indirect': {'domain': 0}}]} - ] - } - self.config_fixture.config(group='os_inherit', enabled=True) - self.execute_assignment_plan(test_plan) - - def test_inherited_role_assignments_excluded_if_os_inherit_false(self): - test_plan = { - 'entities': {'domains': {'users': 2, 'groups': 1, 'projects': 1}, - 'roles': 4}, - 'group_memberships': [{'group': 0, 'users': [0]}], - 'assignments': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}, - {'user': 0, 'role': 2, 'domain': 0, - 'inherited_to_projects': True}, - {'user': 1, 'role': 1, 'project': 0}, - {'group': 0, 'role': 3, 'project': 0}], - 'tests': [ - # List all direct assignments for user[0], since os-inherit is - # disabled, we should not see the inherited role - {'params': {'user': 0}, - 'results': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}]}, - # Same in effective mode - inherited roles should not be - # included or expanded...but the group role should now - # turn up as a user role, since group expansion is not - # part of os-inherit. 
- {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}, - {'user': 0, 'role': 3, 'project': 0, - 'indirect': {'group': 0}}]}, - ] - } - self.config_fixture.config(group='os_inherit', enabled=False) - self.execute_assignment_plan(test_plan) - - def _test_crud_inherited_and_direct_assignment(self, **kwargs): - """Tests inherited and direct assignments for the actor and target - - Ensure it is possible to create both inherited and direct role - assignments for the same actor on the same target. The actor and the - target are specified in the kwargs as ('user_id' or 'group_id') and - ('project_id' or 'domain_id'), respectively. - - """ - self.config_fixture.config(group='os_inherit', enabled=True) - # Create a new role to avoid assignments loaded from default fixtures - role = unit.new_role_ref() - role = self.role_api.create_role(role['id'], role) - - # Define the common assignment entity - assignment_entity = {'role_id': role['id']} - assignment_entity.update(kwargs) - - # Define assignments under test - direct_assignment_entity = assignment_entity.copy() - inherited_assignment_entity = assignment_entity.copy() - inherited_assignment_entity['inherited_to_projects'] = 'projects' - - # Create direct assignment and check grants - self.assignment_api.create_grant(inherited_to_projects=False, - **assignment_entity) - - grants = self.assignment_api.list_role_assignments(role_id=role['id']) - self.assertThat(grants, matchers.HasLength(1)) - self.assertIn(direct_assignment_entity, grants) - - # Now add inherited assignment and check grants - self.assignment_api.create_grant(inherited_to_projects=True, - **assignment_entity) - - grants = self.assignment_api.list_role_assignments(role_id=role['id']) - self.assertThat(grants, matchers.HasLength(2)) - self.assertIn(direct_assignment_entity, grants) - self.assertIn(inherited_assignment_entity, grants) - - # Delete both and check grants - 
self.assignment_api.delete_grant(inherited_to_projects=False, - **assignment_entity) - self.assignment_api.delete_grant(inherited_to_projects=True, - **assignment_entity) - - grants = self.assignment_api.list_role_assignments(role_id=role['id']) - self.assertEqual([], grants) - - def test_crud_inherited_and_direct_assignment_for_user_on_domain(self): - self._test_crud_inherited_and_direct_assignment( - user_id=self.user_foo['id'], - domain_id=CONF.identity.default_domain_id) - - def test_crud_inherited_and_direct_assignment_for_group_on_domain(self): - group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - group = self.identity_api.create_group(group) - - self._test_crud_inherited_and_direct_assignment( - group_id=group['id'], domain_id=CONF.identity.default_domain_id) - - def test_crud_inherited_and_direct_assignment_for_user_on_project(self): - self._test_crud_inherited_and_direct_assignment( - user_id=self.user_foo['id'], project_id=self.tenant_baz['id']) - - def test_crud_inherited_and_direct_assignment_for_group_on_project(self): - group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - group = self.identity_api.create_group(group) - - self._test_crud_inherited_and_direct_assignment( - group_id=group['id'], project_id=self.tenant_baz['id']) - - def test_inherited_role_grants_for_user(self): - """Test inherited user roles. 
- - Test Plan: - - - Enable OS-INHERIT extension - - Create 3 roles - - Create a domain, with a project and a user - - Check no roles yet exit - - Assign a direct user role to the project and a (non-inherited) - user role to the domain - - Get a list of effective roles - should only get the one direct role - - Now add an inherited user role to the domain - - Get a list of effective roles - should have two roles, one - direct and one by virtue of the inherited user role - - Also get effective roles for the domain - the role marked as - inherited should not show up - - """ - self.config_fixture.config(group='os_inherit', enabled=True) - role_list = [] - for _ in range(3): - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - role_list.append(role) - domain1 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - user1 = unit.new_user_ref(domain_id=domain1['id']) - user1 = self.identity_api.create_user(user1) - project1 = unit.new_project_ref(domain_id=domain1['id']) - self.resource_api.create_project(project1['id'], project1) - - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) - self.assertEqual(0, len(roles_ref)) - - # Create the first two roles - the domain one is not inherited - self.assignment_api.create_grant(user_id=user1['id'], - project_id=project1['id'], - role_id=role_list[0]['id']) - self.assignment_api.create_grant(user_id=user1['id'], - domain_id=domain1['id'], - role_id=role_list[1]['id']) - - # Now get the effective roles for the user and project, this - # should only include the direct role assignment on the project - combined_list = self.assignment_api.get_roles_for_user_and_project( - user1['id'], project1['id']) - self.assertEqual(1, len(combined_list)) - self.assertIn(role_list[0]['id'], combined_list) - - # Now add an inherited role on the domain - self.assignment_api.create_grant(user_id=user1['id'], - domain_id=domain1['id'], - 
role_id=role_list[2]['id'], - inherited_to_projects=True) - - # Now get the effective roles for the user and project again, this - # should now include the inherited role on the domain - combined_list = self.assignment_api.get_roles_for_user_and_project( - user1['id'], project1['id']) - self.assertEqual(2, len(combined_list)) - self.assertIn(role_list[0]['id'], combined_list) - self.assertIn(role_list[2]['id'], combined_list) - - # Finally, check that the inherited role does not appear as a valid - # directly assigned role on the domain itself - combined_role_list = self.assignment_api.get_roles_for_user_and_domain( - user1['id'], domain1['id']) - self.assertEqual(1, len(combined_role_list)) - self.assertIn(role_list[1]['id'], combined_role_list) - - # TODO(henry-nash): The test above uses get_roles_for_user_and_project - # and get_roles_for_user_and_domain, which will, in a subsequent patch, - # be re-implemented to simply call list_role_assignments (see blueprint - # remove-role-metadata). - # - # The test plan below therefore mirrors this test, to ensure that - # list_role_assignments works the same. Once get_roles_for_user_and - # project/domain have been re-implemented then the manual tests above - # can be refactored to simply ensure it gives the same answers. - test_plan = { - # A domain with a user & project, plus 3 roles. - 'entities': {'domains': {'users': 1, 'projects': 1}, - 'roles': 3}, - 'assignments': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 1, 'domain': 0}, - {'user': 0, 'role': 2, 'domain': 0, - 'inherited_to_projects': True}], - 'tests': [ - # List all effective assignments for user[0] on project[0]. - # Should get one direct role and one inherited role. 
- {'params': {'user': 0, 'project': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 2, 'project': 0, - 'indirect': {'domain': 0}}]}, - # Ensure effective mode on the domain does not list the - # inherited role on that domain - {'params': {'user': 0, 'domain': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 1, 'domain': 0}]}, - # Ensure non-inherited mode also only returns the non-inherited - # role on the domain - {'params': {'user': 0, 'domain': 0, 'inherited': False}, - 'results': [{'user': 0, 'role': 1, 'domain': 0}]}, - ] - } - self.execute_assignment_plan(test_plan) - - def test_inherited_role_grants_for_group(self): - """Test inherited group roles. - - Test Plan: - - - Enable OS-INHERIT extension - - Create 4 roles - - Create a domain, with a project, user and two groups - - Make the user a member of both groups - - Check no roles yet exit - - Assign a direct user role to the project and a (non-inherited) - group role on the domain - - Get a list of effective roles - should only get the one direct role - - Now add two inherited group roles to the domain - - Get a list of effective roles - should have three roles, one - direct and two by virtue of inherited group roles - - """ - self.config_fixture.config(group='os_inherit', enabled=True) - role_list = [] - for _ in range(4): - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - role_list.append(role) - domain1 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - user1 = unit.new_user_ref(domain_id=domain1['id']) - user1 = self.identity_api.create_user(user1) - group1 = unit.new_group_ref(domain_id=domain1['id']) - group1 = self.identity_api.create_group(group1) - group2 = unit.new_group_ref(domain_id=domain1['id']) - group2 = self.identity_api.create_group(group2) - project1 = unit.new_project_ref(domain_id=domain1['id']) - self.resource_api.create_project(project1['id'], project1) - - 
self.identity_api.add_user_to_group(user1['id'], - group1['id']) - self.identity_api.add_user_to_group(user1['id'], - group2['id']) - - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) - self.assertEqual(0, len(roles_ref)) - - # Create two roles - the domain one is not inherited - self.assignment_api.create_grant(user_id=user1['id'], - project_id=project1['id'], - role_id=role_list[0]['id']) - self.assignment_api.create_grant(group_id=group1['id'], - domain_id=domain1['id'], - role_id=role_list[1]['id']) - - # Now get the effective roles for the user and project, this - # should only include the direct role assignment on the project - combined_list = self.assignment_api.get_roles_for_user_and_project( - user1['id'], project1['id']) - self.assertEqual(1, len(combined_list)) - self.assertIn(role_list[0]['id'], combined_list) - - # Now add to more group roles, both inherited, to the domain - self.assignment_api.create_grant(group_id=group2['id'], - domain_id=domain1['id'], - role_id=role_list[2]['id'], - inherited_to_projects=True) - self.assignment_api.create_grant(group_id=group2['id'], - domain_id=domain1['id'], - role_id=role_list[3]['id'], - inherited_to_projects=True) - - # Now get the effective roles for the user and project again, this - # should now include the inherited roles on the domain - combined_list = self.assignment_api.get_roles_for_user_and_project( - user1['id'], project1['id']) - self.assertEqual(3, len(combined_list)) - self.assertIn(role_list[0]['id'], combined_list) - self.assertIn(role_list[2]['id'], combined_list) - self.assertIn(role_list[3]['id'], combined_list) - - # TODO(henry-nash): The test above uses get_roles_for_user_and_project - # which will, in a subsequent patch, be re-implemented to simply call - # list_role_assignments (see blueprint remove-role-metadata). - # - # The test plan below therefore mirrors this test, to ensure that - # list_role_assignments works the same. 
Once - # get_roles_for_user_and_project has been re-implemented then the - # manual tests above can be refactored to simply ensure it gives - # the same answers. - test_plan = { - # A domain with a user and project, 2 groups, plus 4 roles. - 'entities': {'domains': {'users': 1, 'projects': 1, 'groups': 2}, - 'roles': 4}, - 'group_memberships': [{'group': 0, 'users': [0]}, - {'group': 1, 'users': [0]}], - 'assignments': [{'user': 0, 'role': 0, 'project': 0}, - {'group': 0, 'role': 1, 'domain': 0}, - {'group': 1, 'role': 2, 'domain': 0, - 'inherited_to_projects': True}, - {'group': 1, 'role': 3, 'domain': 0, - 'inherited_to_projects': True}], - 'tests': [ - # List all effective assignments for user[0] on project[0]. - # Should get one direct role and both inherited roles, but - # not the direct one on domain[0], even though user[0] is - # in group[0]. - {'params': {'user': 0, 'project': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 2, 'project': 0, - 'indirect': {'domain': 0, 'group': 1}}, - {'user': 0, 'role': 3, 'project': 0, - 'indirect': {'domain': 0, 'group': 1}}]} - ] - } - self.execute_assignment_plan(test_plan) - - def test_list_projects_for_user_with_inherited_grants(self): - """Test inherited user roles. 
- - Test Plan: - - - Enable OS-INHERIT extension - - Create a domain, with two projects and a user - - Assign an inherited user role on the domain, as well as a direct - user role to a separate project in a different domain - - Get a list of projects for user, should return all three projects - - """ - self.config_fixture.config(group='os_inherit', enabled=True) - domain = unit.new_domain_ref() - self.resource_api.create_domain(domain['id'], domain) - user1 = unit.new_user_ref(domain_id=domain['id']) - user1 = self.identity_api.create_user(user1) - project1 = unit.new_project_ref(domain_id=domain['id']) - self.resource_api.create_project(project1['id'], project1) - project2 = unit.new_project_ref(domain_id=domain['id']) - self.resource_api.create_project(project2['id'], project2) - - # Create 2 grants, one on a project and one inherited grant - # on the domain - self.assignment_api.create_grant(user_id=user1['id'], - project_id=self.tenant_bar['id'], - role_id=self.role_member['id']) - self.assignment_api.create_grant(user_id=user1['id'], - domain_id=domain['id'], - role_id=self.role_admin['id'], - inherited_to_projects=True) - # Should get back all three projects, one by virtue of the direct - # grant, plus both projects in the domain - user_projects = self.assignment_api.list_projects_for_user(user1['id']) - self.assertEqual(3, len(user_projects)) - - # TODO(henry-nash): The test above uses list_projects_for_user - # which may, in a subsequent patch, be re-implemented to call - # list_role_assignments and then report only the distinct projects. - # - # The test plan below therefore mirrors this test, to ensure that - # list_role_assignments works the same. Once list_projects_for_user - # has been re-implemented then the manual tests above can be - # refactored. - test_plan = { - # A domain with 1 project, plus a second domain with 2 projects, - # as well as a user. Also, create 2 roles. 
- 'entities': {'domains': [{'projects': 1}, - {'users': 1, 'projects': 2}], - 'roles': 2}, - 'assignments': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 1, 'domain': 1, - 'inherited_to_projects': True}], - 'tests': [ - # List all effective assignments for user[0] - # Should get one direct role plus one inherited role for each - # project in domain - {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 1, 'project': 1, - 'indirect': {'domain': 1}}, - {'user': 0, 'role': 1, 'project': 2, - 'indirect': {'domain': 1}}]} - ] - } - self.execute_assignment_plan(test_plan) - - def test_list_projects_for_user_with_inherited_user_project_grants(self): - """Test inherited role assignments for users on nested projects. - - Test Plan: - - - Enable OS-INHERIT extension - - Create a hierarchy of projects with one root and one leaf project - - Assign an inherited user role on root project - - Assign a non-inherited user role on root project - - Get a list of projects for user, should return both projects - - Disable OS-INHERIT extension - - Get a list of projects for user, should return only root project - - """ - # Enable OS-INHERIT extension - self.config_fixture.config(group='os_inherit', enabled=True) - root_project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - root_project = self.resource_api.create_project(root_project['id'], - root_project) - leaf_project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id, - parent_id=root_project['id']) - leaf_project = self.resource_api.create_project(leaf_project['id'], - leaf_project) - - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - user = self.identity_api.create_user(user) - - # Grant inherited user role - self.assignment_api.create_grant(user_id=user['id'], - project_id=root_project['id'], - role_id=self.role_admin['id'], - inherited_to_projects=True) - # Grant non-inherited user role - 
self.assignment_api.create_grant(user_id=user['id'], - project_id=root_project['id'], - role_id=self.role_member['id']) - # Should get back both projects: because the direct role assignment for - # the root project and inherited role assignment for leaf project - user_projects = self.assignment_api.list_projects_for_user(user['id']) - self.assertEqual(2, len(user_projects)) - self.assertIn(root_project, user_projects) - self.assertIn(leaf_project, user_projects) - - # Disable OS-INHERIT extension - self.config_fixture.config(group='os_inherit', enabled=False) - # Should get back just root project - due the direct role assignment - user_projects = self.assignment_api.list_projects_for_user(user['id']) - self.assertEqual(1, len(user_projects)) - self.assertIn(root_project, user_projects) - - # TODO(henry-nash): The test above uses list_projects_for_user - # which may, in a subsequent patch, be re-implemented to call - # list_role_assignments and then report only the distinct projects. - # - # The test plan below therefore mirrors this test, to ensure that - # list_role_assignments works the same. Once list_projects_for_user - # has been re-implemented then the manual tests above can be - # refactored. - test_plan = { - # A domain with a project and sub-project, plus a user. - # Also, create 2 roles. - 'entities': { - 'domains': {'id': CONF.identity.default_domain_id, 'users': 1, - 'projects': {'project': 1}}, - 'roles': 2}, - # A direct role and an inherited role on the parent - 'assignments': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 1, 'project': 0, - 'inherited_to_projects': True}], - 'tests': [ - # List all effective assignments for user[0] - should get back - # one direct role plus one inherited role. 
- {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 1, 'project': 1, - 'indirect': {'project': 0}}]} - ] - } - - test_plan_with_os_inherit_disabled = { - 'tests': [ - # List all effective assignments for user[0] - should only get - # back the one direct role. - {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0}]} - ] - } - self.config_fixture.config(group='os_inherit', enabled=True) - test_data = self.execute_assignment_plan(test_plan) - self.config_fixture.config(group='os_inherit', enabled=False) - # Pass the existing test data in to allow execution of 2nd test plan - self.execute_assignment_cases( - test_plan_with_os_inherit_disabled, test_data) - - def test_list_projects_for_user_with_inherited_group_grants(self): - """Test inherited group roles. - - Test Plan: - - - Enable OS-INHERIT extension - - Create two domains, each with two projects - - Create a user and group - - Make the user a member of the group - - Assign a user role two projects, an inherited - group role to one domain and an inherited regular role on - the other domain - - Get a list of projects for user, should return both pairs of projects - from the domain, plus the one separate project - - """ - self.config_fixture.config(group='os_inherit', enabled=True) - domain = unit.new_domain_ref() - self.resource_api.create_domain(domain['id'], domain) - domain2 = unit.new_domain_ref() - self.resource_api.create_domain(domain2['id'], domain2) - project1 = unit.new_project_ref(domain_id=domain['id']) - self.resource_api.create_project(project1['id'], project1) - project2 = unit.new_project_ref(domain_id=domain['id']) - self.resource_api.create_project(project2['id'], project2) - project3 = unit.new_project_ref(domain_id=domain2['id']) - self.resource_api.create_project(project3['id'], project3) - project4 = unit.new_project_ref(domain_id=domain2['id']) - 
self.resource_api.create_project(project4['id'], project4) - user1 = unit.new_user_ref(domain_id=domain['id']) - user1 = self.identity_api.create_user(user1) - group1 = unit.new_group_ref(domain_id=domain['id']) - group1 = self.identity_api.create_group(group1) - self.identity_api.add_user_to_group(user1['id'], group1['id']) - - # Create 4 grants: - # - one user grant on a project in domain2 - # - one user grant on a project in the default domain - # - one inherited user grant on domain - # - one inherited group grant on domain2 - self.assignment_api.create_grant(user_id=user1['id'], - project_id=project3['id'], - role_id=self.role_member['id']) - self.assignment_api.create_grant(user_id=user1['id'], - project_id=self.tenant_bar['id'], - role_id=self.role_member['id']) - self.assignment_api.create_grant(user_id=user1['id'], - domain_id=domain['id'], - role_id=self.role_admin['id'], - inherited_to_projects=True) - self.assignment_api.create_grant(group_id=group1['id'], - domain_id=domain2['id'], - role_id=self.role_admin['id'], - inherited_to_projects=True) - # Should get back all five projects, but without a duplicate for - # project3 (since it has both a direct user role and an inherited role) - user_projects = self.assignment_api.list_projects_for_user(user1['id']) - self.assertEqual(5, len(user_projects)) - - # TODO(henry-nash): The test above uses list_projects_for_user - # which may, in a subsequent patch, be re-implemented to call - # list_role_assignments and then report only the distinct projects. - # - # The test plan below therefore mirrors this test, to ensure that - # list_role_assignments works the same. Once list_projects_for_user - # has been re-implemented then the manual tests above can be - # refactored. - test_plan = { - # A domain with a 1 project, plus a second domain with 2 projects, - # as well as a user & group and a 3rd domain with 2 projects. - # Also, created 2 roles. 
- 'entities': {'domains': [{'projects': 1}, - {'users': 1, 'groups': 1, 'projects': 2}, - {'projects': 2}], - 'roles': 2}, - 'group_memberships': [{'group': 0, 'users': [0]}], - 'assignments': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 0, 'project': 3}, - {'user': 0, 'role': 1, 'domain': 1, - 'inherited_to_projects': True}, - {'user': 0, 'role': 1, 'domain': 2, - 'inherited_to_projects': True}], - 'tests': [ - # List all effective assignments for user[0] - # Should get back both direct roles plus roles on both projects - # from each domain. Duplicates should not be filtered out. - {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 3}, - {'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 1, 'project': 1, - 'indirect': {'domain': 1}}, - {'user': 0, 'role': 1, 'project': 2, - 'indirect': {'domain': 1}}, - {'user': 0, 'role': 1, 'project': 3, - 'indirect': {'domain': 2}}, - {'user': 0, 'role': 1, 'project': 4, - 'indirect': {'domain': 2}}]} - ] - } - self.execute_assignment_plan(test_plan) - - def test_list_projects_for_user_with_inherited_group_project_grants(self): - """Test inherited role assignments for groups on nested projects. 
- - Test Plan: - - - Enable OS-INHERIT extension - - Create a hierarchy of projects with one root and one leaf project - - Assign an inherited group role on root project - - Assign a non-inherited group role on root project - - Get a list of projects for user, should return both projects - - Disable OS-INHERIT extension - - Get a list of projects for user, should return only root project - - """ - self.config_fixture.config(group='os_inherit', enabled=True) - root_project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - root_project = self.resource_api.create_project(root_project['id'], - root_project) - leaf_project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id, - parent_id=root_project['id']) - leaf_project = self.resource_api.create_project(leaf_project['id'], - leaf_project) - - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - user = self.identity_api.create_user(user) - - group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - group = self.identity_api.create_group(group) - self.identity_api.add_user_to_group(user['id'], group['id']) - - # Grant inherited group role - self.assignment_api.create_grant(group_id=group['id'], - project_id=root_project['id'], - role_id=self.role_admin['id'], - inherited_to_projects=True) - # Grant non-inherited group role - self.assignment_api.create_grant(group_id=group['id'], - project_id=root_project['id'], - role_id=self.role_member['id']) - # Should get back both projects: because the direct role assignment for - # the root project and inherited role assignment for leaf project - user_projects = self.assignment_api.list_projects_for_user(user['id']) - self.assertEqual(2, len(user_projects)) - self.assertIn(root_project, user_projects) - self.assertIn(leaf_project, user_projects) - - # Disable OS-INHERIT extension - self.config_fixture.config(group='os_inherit', enabled=False) - # Should get back just root project - due the direct role assignment - 
user_projects = self.assignment_api.list_projects_for_user(user['id']) - self.assertEqual(1, len(user_projects)) - self.assertIn(root_project, user_projects) - - # TODO(henry-nash): The test above uses list_projects_for_user - # which may, in a subsequent patch, be re-implemented to call - # list_role_assignments and then report only the distinct projects. - # - # The test plan below therefore mirrors this test, to ensure that - # list_role_assignments works the same. Once list_projects_for_user - # has been re-implemented then the manual tests above can be - # refactored. - test_plan = { - # A domain with a project ans sub-project, plus a user. - # Also, create 2 roles. - 'entities': { - 'domains': {'id': CONF.identity.default_domain_id, 'users': 1, - 'groups': 1, - 'projects': {'project': 1}}, - 'roles': 2}, - 'group_memberships': [{'group': 0, 'users': [0]}], - # A direct role and an inherited role on the parent - 'assignments': [{'group': 0, 'role': 0, 'project': 0}, - {'group': 0, 'role': 1, 'project': 0, - 'inherited_to_projects': True}], - 'tests': [ - # List all effective assignments for user[0] - should get back - # one direct role plus one inherited role. - {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0, - 'indirect': {'group': 0}}, - {'user': 0, 'role': 1, 'project': 1, - 'indirect': {'group': 0, 'project': 0}}]} - ] - } - - test_plan_with_os_inherit_disabled = { - 'tests': [ - # List all effective assignments for user[0] - should only get - # back the one direct role. 
- {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0, - 'indirect': {'group': 0}}]} - ] - } - self.config_fixture.config(group='os_inherit', enabled=True) - test_data = self.execute_assignment_plan(test_plan) - self.config_fixture.config(group='os_inherit', enabled=False) - # Pass the existing test data in to allow execution of 2nd test plan - self.execute_assignment_cases( - test_plan_with_os_inherit_disabled, test_data) - - def test_list_assignments_for_tree(self): - """Test we correctly list direct assignments for a tree""" - # Enable OS-INHERIT extension - self.config_fixture.config(group='os_inherit', enabled=True) - - test_plan = { - # Create a domain with a project hierarchy 3 levels deep: - # - # project 0 - # ____________|____________ - # | | - # project 1 project 4 - # ______|_____ ______|_____ - # | | | | - # project 2 project 3 project 5 project 6 - # - # Also, create 1 user and 4 roles. - 'entities': { - 'domains': { - 'projects': {'project': [{'project': 2}, - {'project': 2}]}, - 'users': 1}, - 'roles': 4}, - 'assignments': [ - # Direct assignment to projects 1 and 2 - {'user': 0, 'role': 0, 'project': 1}, - {'user': 0, 'role': 1, 'project': 2}, - # Also an inherited assignment on project 1 - {'user': 0, 'role': 2, 'project': 1, - 'inherited_to_projects': True}, - # ...and two spoiler assignments, one to the root and one - # to project 4 - {'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 3, 'project': 4}], - 'tests': [ - # List all assignments for project 1 and its subtree. 
- {'params': {'project': 1, 'include_subtree': True}, - 'results': [ - # Only the actual assignments should be returned, no - # expansion of inherited assignments - {'user': 0, 'role': 0, 'project': 1}, - {'user': 0, 'role': 1, 'project': 2}, - {'user': 0, 'role': 2, 'project': 1, - 'inherited_to_projects': 'projects'}]} - ] - } - - self.execute_assignment_plan(test_plan) - - def test_list_effective_assignments_for_tree(self): - """Test we correctly list effective assignments for a tree""" - # Enable OS-INHERIT extension - self.config_fixture.config(group='os_inherit', enabled=True) - - test_plan = { - # Create a domain with a project hierarchy 3 levels deep: - # - # project 0 - # ____________|____________ - # | | - # project 1 project 4 - # ______|_____ ______|_____ - # | | | | - # project 2 project 3 project 5 project 6 - # - # Also, create 1 user and 4 roles. - 'entities': { - 'domains': { - 'projects': {'project': [{'project': 2}, - {'project': 2}]}, - 'users': 1}, - 'roles': 4}, - 'assignments': [ - # An inherited assignment on project 1 - {'user': 0, 'role': 1, 'project': 1, - 'inherited_to_projects': True}, - # A direct assignment to project 2 - {'user': 0, 'role': 2, 'project': 2}, - # ...and two spoiler assignments, one to the root and one - # to project 4 - {'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 3, 'project': 4}], - 'tests': [ - # List all effective assignments for project 1 and its subtree. 
- {'params': {'project': 1, 'effective': True, - 'include_subtree': True}, - 'results': [ - # The inherited assignment on project 1 should appear only - # on its children - {'user': 0, 'role': 1, 'project': 2, - 'indirect': {'project': 1}}, - {'user': 0, 'role': 1, 'project': 3, - 'indirect': {'project': 1}}, - # And finally the direct assignment on project 2 - {'user': 0, 'role': 2, 'project': 2}]} - ] - } - - self.execute_assignment_plan(test_plan) - - def test_list_effective_assignments_for_tree_with_mixed_assignments(self): - """Test that we correctly combine assignments for a tree. - - In this test we want to ensure that when asking for a list of - assignments in a subtree, any assignments inherited from above the - subtree are correctly combined with any assignments within the subtree - itself. - - """ - # Enable OS-INHERIT extension - self.config_fixture.config(group='os_inherit', enabled=True) - - test_plan = { - # Create a domain with a project hierarchy 3 levels deep: - # - # project 0 - # ____________|____________ - # | | - # project 1 project 4 - # ______|_____ ______|_____ - # | | | | - # project 2 project 3 project 5 project 6 - # - # Also, create 2 users, 1 group and 4 roles. - 'entities': { - 'domains': { - 'projects': {'project': [{'project': 2}, - {'project': 2}]}, - 'users': 2, 'groups': 1}, - 'roles': 4}, - # Both users are part of the same group - 'group_memberships': [{'group': 0, 'users': [0, 1]}], - # We are going to ask for listing of assignment on project 1 and - # it's subtree. So first we'll add two inherited assignments above - # this (one user and one for a group that contains this user). 
- 'assignments': [{'user': 0, 'role': 0, 'project': 0, - 'inherited_to_projects': True}, - {'group': 0, 'role': 1, 'project': 0, - 'inherited_to_projects': True}, - # Now an inherited assignment on project 1 itself, - # which should ONLY show up on its children - {'user': 0, 'role': 2, 'project': 1, - 'inherited_to_projects': True}, - # ...and a direct assignment on one of those - # children - {'user': 0, 'role': 3, 'project': 2}, - # The rest are spoiler assignments - {'user': 0, 'role': 2, 'project': 5}, - {'user': 0, 'role': 3, 'project': 4}], - 'tests': [ - # List all effective assignments for project 1 and its subtree. - {'params': {'project': 1, 'user': 0, 'effective': True, - 'include_subtree': True}, - 'results': [ - # First, we should see the inherited user assignment from - # project 0 on all projects in the subtree - {'user': 0, 'role': 0, 'project': 1, - 'indirect': {'project': 0}}, - {'user': 0, 'role': 0, 'project': 2, - 'indirect': {'project': 0}}, - {'user': 0, 'role': 0, 'project': 3, - 'indirect': {'project': 0}}, - # Also the inherited group assignment from project 0 on - # the subtree - {'user': 0, 'role': 1, 'project': 1, - 'indirect': {'project': 0, 'group': 0}}, - {'user': 0, 'role': 1, 'project': 2, - 'indirect': {'project': 0, 'group': 0}}, - {'user': 0, 'role': 1, 'project': 3, - 'indirect': {'project': 0, 'group': 0}}, - # The inherited assignment on project 1 should appear only - # on its children - {'user': 0, 'role': 2, 'project': 2, - 'indirect': {'project': 1}}, - {'user': 0, 'role': 2, 'project': 3, - 'indirect': {'project': 1}}, - # And finally the direct assignment on project 2 - {'user': 0, 'role': 3, 'project': 2}]} - ] - } - - self.execute_assignment_plan(test_plan) - - def test_list_effective_assignments_for_tree_with_domain_assignments(self): - """Test we correctly honor domain inherited assignments on the tree""" - # Enable OS-INHERIT extension - self.config_fixture.config(group='os_inherit', enabled=True) - - test_plan = { 
- # Create a domain with a project hierarchy 3 levels deep: - # - # project 0 - # ____________|____________ - # | | - # project 1 project 4 - # ______|_____ ______|_____ - # | | | | - # project 2 project 3 project 5 project 6 - # - # Also, create 1 user and 4 roles. - 'entities': { - 'domains': { - 'projects': {'project': [{'project': 2}, - {'project': 2}]}, - 'users': 1}, - 'roles': 4}, - 'assignments': [ - # An inherited assignment on the domain (which should be - # applied to all the projects) - {'user': 0, 'role': 1, 'domain': 0, - 'inherited_to_projects': True}, - # A direct assignment to project 2 - {'user': 0, 'role': 2, 'project': 2}, - # ...and two spoiler assignments, one to the root and one - # to project 4 - {'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 3, 'project': 4}], - 'tests': [ - # List all effective assignments for project 1 and its subtree. - {'params': {'project': 1, 'effective': True, - 'include_subtree': True}, - 'results': [ - # The inherited assignment from the domain should appear - # only on the part of the subtree we are interested in - {'user': 0, 'role': 1, 'project': 1, - 'indirect': {'domain': 0}}, - {'user': 0, 'role': 1, 'project': 2, - 'indirect': {'domain': 0}}, - {'user': 0, 'role': 1, 'project': 3, - 'indirect': {'domain': 0}}, - # And finally the direct assignment on project 2 - {'user': 0, 'role': 2, 'project': 2}]} - ] - } - - self.execute_assignment_plan(test_plan) - - def test_list_user_ids_for_project_with_inheritance(self): - test_plan = { - # A domain with a project and sub-project, plus four users, - # two groups, as well as 4 roles. 
- 'entities': { - 'domains': {'id': CONF.identity.default_domain_id, 'users': 4, - 'groups': 2, - 'projects': {'project': 1}}, - 'roles': 4}, - # Each group has a unique user member - 'group_memberships': [{'group': 0, 'users': [1]}, - {'group': 1, 'users': [3]}], - # Set up assignments so that there should end up with four - # effective assignments on project 1 - one direct, one due to - # group membership and one user assignment inherited from the - # parent and one group assignment inhertied from the parent. - 'assignments': [{'user': 0, 'role': 0, 'project': 1}, - {'group': 0, 'role': 1, 'project': 1}, - {'user': 2, 'role': 2, 'project': 0, - 'inherited_to_projects': True}, - {'group': 1, 'role': 3, 'project': 0, - 'inherited_to_projects': True}], - } - # Use assignment plan helper to create all the entities and - # assignments - then we'll run our own tests using the data - test_data = self.execute_assignment_plan(test_plan) - self.config_fixture.config(group='os_inherit', enabled=True) - user_ids = self.assignment_api.list_user_ids_for_project( - test_data['projects'][1]['id']) - self.assertThat(user_ids, matchers.HasLength(4)) - for x in range(0, 4): - self.assertIn(test_data['users'][x]['id'], user_ids) - - def test_list_role_assignment_using_inherited_sourced_groups(self): - """Test listing inherited assignments when restricted by groups.""" - test_plan = { - # A domain with 3 users, 3 groups, 3 projects, a second domain, - # plus 3 roles. 
- 'entities': {'domains': [{'users': 3, 'groups': 3, 'projects': 3}, - 1], - 'roles': 3}, - # Users 0 & 1 are in the group 0, User 0 also in group 1 - 'group_memberships': [{'group': 0, 'users': [0, 1]}, - {'group': 1, 'users': [0]}], - # Spread the assignments around - we want to be able to show that - # if sourced by group, assignments from other sources are excluded - 'assignments': [{'user': 0, 'role': 0, 'domain': 0}, - {'group': 0, 'role': 1, 'domain': 1}, - {'group': 1, 'role': 2, 'domain': 0, - 'inherited_to_projects': True}, - {'group': 1, 'role': 2, 'project': 1}, - {'user': 2, 'role': 1, 'project': 1, - 'inherited_to_projects': True}, - {'group': 2, 'role': 2, 'project': 2} - ], - 'tests': [ - # List all effective assignments sourced from groups 0 and 1. - # We should see the inherited group assigned on the 3 projects - # from domain 0, as well as the direct assignments. - {'params': {'source_from_group_ids': [0, 1], - 'effective': True}, - 'results': [{'group': 0, 'role': 1, 'domain': 1}, - {'group': 1, 'role': 2, 'project': 0, - 'indirect': {'domain': 0}}, - {'group': 1, 'role': 2, 'project': 1, - 'indirect': {'domain': 0}}, - {'group': 1, 'role': 2, 'project': 2, - 'indirect': {'domain': 0}}, - {'group': 1, 'role': 2, 'project': 1} - ]}, - ] - } - self.execute_assignment_plan(test_plan) - - -class ImpliedRoleTests(AssignmentTestHelperMixin): - - def test_implied_role_crd(self): - prior_role_ref = unit.new_role_ref() - self.role_api.create_role(prior_role_ref['id'], prior_role_ref) - implied_role_ref = unit.new_role_ref() - self.role_api.create_role(implied_role_ref['id'], implied_role_ref) - - self.role_api.create_implied_role( - prior_role_ref['id'], - implied_role_ref['id']) - implied_role = self.role_api.get_implied_role( - prior_role_ref['id'], - implied_role_ref['id']) - expected_implied_role_ref = { - 'prior_role_id': prior_role_ref['id'], - 'implied_role_id': implied_role_ref['id']} - self.assertDictContainsSubset( - expected_implied_role_ref, 
- implied_role) - - self.role_api.delete_implied_role( - prior_role_ref['id'], - implied_role_ref['id']) - self.assertRaises(exception.ImpliedRoleNotFound, - self.role_api.get_implied_role, - uuid.uuid4().hex, - uuid.uuid4().hex) - - def test_delete_implied_role_returns_not_found(self): - self.assertRaises(exception.ImpliedRoleNotFound, - self.role_api.delete_implied_role, - uuid.uuid4().hex, - uuid.uuid4().hex) - - def test_role_assignments_simple_tree_of_implied_roles(self): - """Test that implied roles are expanded out.""" - test_plan = { - 'entities': {'domains': {'users': 1, 'projects': 1}, - 'roles': 4}, - # Three level tree of implied roles - 'implied_roles': [{'role': 0, 'implied_roles': 1}, - {'role': 1, 'implied_roles': [2, 3]}], - 'assignments': [{'user': 0, 'role': 0, 'project': 0}], - 'tests': [ - # List all direct assignments for user[0], this should just - # show the one top level role assignment - {'params': {'user': 0}, - 'results': [{'user': 0, 'role': 0, 'project': 0}]}, - # Listing in effective mode should show the implied roles - # expanded out - {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 1, 'project': 0, - 'indirect': {'role': 0}}, - {'user': 0, 'role': 2, 'project': 0, - 'indirect': {'role': 1}}, - {'user': 0, 'role': 3, 'project': 0, - 'indirect': {'role': 1}}]}, - ] - } - self.execute_assignment_plan(test_plan) - - def test_circular_inferences(self): - """Test that implied roles are expanded out.""" - test_plan = { - 'entities': {'domains': {'users': 1, 'projects': 1}, - 'roles': 4}, - # Three level tree of implied roles - 'implied_roles': [{'role': 0, 'implied_roles': [1]}, - {'role': 1, 'implied_roles': [2, 3]}, - {'role': 3, 'implied_roles': [0]}], - 'assignments': [{'user': 0, 'role': 0, 'project': 0}], - 'tests': [ - # List all direct assignments for user[0], this should just - # show the one top level role assignment - {'params': {'user': 0}, - 'results': 
[{'user': 0, 'role': 0, 'project': 0}]}, - # Listing in effective mode should show the implied roles - # expanded out - {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 0, 'project': 0, - 'indirect': {'role': 3}}, - {'user': 0, 'role': 1, 'project': 0, - 'indirect': {'role': 0}}, - {'user': 0, 'role': 2, 'project': 0, - 'indirect': {'role': 1}}, - {'user': 0, 'role': 3, 'project': 0, - 'indirect': {'role': 1}}]}, - ] - } - self.execute_assignment_plan(test_plan) - - def test_role_assignments_directed_graph_of_implied_roles(self): - """Test that a role can have multiple, different prior roles.""" - test_plan = { - 'entities': {'domains': {'users': 1, 'projects': 1}, - 'roles': 6}, - # Three level tree of implied roles, where one of the roles at the - # bottom is implied by more than one top level role - 'implied_roles': [{'role': 0, 'implied_roles': [1, 2]}, - {'role': 1, 'implied_roles': [3, 4]}, - {'role': 5, 'implied_roles': 4}], - # The user gets both top level roles - 'assignments': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 5, 'project': 0}], - 'tests': [ - # The implied roles should be expanded out and there should be - # two entries for the role that had two different prior roles. 
- {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 5, 'project': 0}, - {'user': 0, 'role': 1, 'project': 0, - 'indirect': {'role': 0}}, - {'user': 0, 'role': 2, 'project': 0, - 'indirect': {'role': 0}}, - {'user': 0, 'role': 3, 'project': 0, - 'indirect': {'role': 1}}, - {'user': 0, 'role': 4, 'project': 0, - 'indirect': {'role': 1}}, - {'user': 0, 'role': 4, 'project': 0, - 'indirect': {'role': 5}}]}, - ] - } - test_data = self.execute_assignment_plan(test_plan) - - # We should also be able to get a similar (yet summarized) answer to - # the above by calling get_roles_for_user_and_project(), which should - # list the role_ids, yet remove any duplicates - role_ids = self.assignment_api.get_roles_for_user_and_project( - test_data['users'][0]['id'], test_data['projects'][0]['id']) - # We should see 6 entries, not 7, since role index 5 appeared twice in - # the answer from list_role_assignments - self.assertThat(role_ids, matchers.HasLength(6)) - for x in range(0, 5): - self.assertIn(test_data['roles'][x]['id'], role_ids) - - def test_role_assignments_implied_roles_filtered_by_role(self): - """Test that you can filter by role even if roles are implied.""" - test_plan = { - 'entities': {'domains': {'users': 1, 'projects': 2}, - 'roles': 4}, - # Three level tree of implied roles - 'implied_roles': [{'role': 0, 'implied_roles': 1}, - {'role': 1, 'implied_roles': [2, 3]}], - 'assignments': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 3, 'project': 1}], - 'tests': [ - # List effective roles filtering by one of the implied roles, - # showing that the filter was implied post expansion of - # implied roles (and that non impled roles are included in - # the filter - {'params': {'role': 3, 'effective': True}, - 'results': [{'user': 0, 'role': 3, 'project': 0, - 'indirect': {'role': 1}}, - {'user': 0, 'role': 3, 'project': 1}]}, - ] - } - self.execute_assignment_plan(test_plan) - - def 
test_role_assignments_simple_tree_of_implied_roles_on_domain(self): - """Test that implied roles are expanded out when placed on a domain.""" - test_plan = { - 'entities': {'domains': {'users': 1}, - 'roles': 4}, - # Three level tree of implied roles - 'implied_roles': [{'role': 0, 'implied_roles': 1}, - {'role': 1, 'implied_roles': [2, 3]}], - 'assignments': [{'user': 0, 'role': 0, 'domain': 0}], - 'tests': [ - # List all direct assignments for user[0], this should just - # show the one top level role assignment - {'params': {'user': 0}, - 'results': [{'user': 0, 'role': 0, 'domain': 0}]}, - # Listing in effective mode should how the implied roles - # expanded out - {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'domain': 0, - 'indirect': {'role': 0}}, - {'user': 0, 'role': 2, 'domain': 0, - 'indirect': {'role': 1}}, - {'user': 0, 'role': 3, 'domain': 0, - 'indirect': {'role': 1}}]}, - ] - } - self.execute_assignment_plan(test_plan) - - def test_role_assignments_inherited_implied_roles(self): - """Test that you can intermix inherited and implied roles.""" - test_plan = { - 'entities': {'domains': {'users': 1, 'projects': 1}, - 'roles': 4}, - # Simply one level of implied roles - 'implied_roles': [{'role': 0, 'implied_roles': 1}], - # Assign to top level role as an inherited assignment to the - # domain - 'assignments': [{'user': 0, 'role': 0, 'domain': 0, - 'inherited_to_projects': True}], - 'tests': [ - # List all direct assignments for user[0], this should just - # show the one top level role assignment - {'params': {'user': 0}, - 'results': [{'user': 0, 'role': 0, 'domain': 0, - 'inherited_to_projects': 'projects'}]}, - # List in effective mode - we should only see the initial and - # implied role on the project (since inherited roles are not - # active on their anchor point). 
- {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0, - 'indirect': {'domain': 0}}, - {'user': 0, 'role': 1, 'project': 0, - 'indirect': {'domain': 0, 'role': 0}}]}, - ] - } - self.config_fixture.config(group='os_inherit', enabled=True) - self.execute_assignment_plan(test_plan) - - def test_role_assignments_domain_specific_with_implied_roles(self): - test_plan = { - 'entities': {'domains': {'users': 1, 'projects': 1, 'roles': 2}, - 'roles': 2}, - # Two level tree of implied roles, with the top and 1st level being - # domain specific roles, and the bottom level being infered global - # roles. - 'implied_roles': [{'role': 0, 'implied_roles': [1]}, - {'role': 1, 'implied_roles': [2, 3]}], - 'assignments': [{'user': 0, 'role': 0, 'project': 0}], - 'tests': [ - # List all direct assignments for user[0], this should just - # show the one top level role assignment, even though this is a - # domain specific role (since we are in non-effective mode and - # we show any direct role assignment in that mode). - {'params': {'user': 0}, - 'results': [{'user': 0, 'role': 0, 'project': 0}]}, - # Now the effective ones - so the implied roles should be - # expanded out, as well as any domain specific roles should be - # removed. - {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 2, 'project': 0, - 'indirect': {'role': 1}}, - {'user': 0, 'role': 3, 'project': 0, - 'indirect': {'role': 1}}]}, - ] - } - self.execute_assignment_plan(test_plan) diff --git a/keystone-moon/keystone/tests/unit/assignment/test_core.py b/keystone-moon/keystone/tests/unit/assignment/test_core.py deleted file mode 100644 index 494e19c3..00000000 --- a/keystone-moon/keystone/tests/unit/assignment/test_core.py +++ /dev/null @@ -1,123 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import copy -import uuid - -from keystone import exception -from keystone.tests import unit -from keystone.tests.unit import default_fixtures - - -class RoleTests(object): - - def test_get_role_returns_not_found(self): - self.assertRaises(exception.RoleNotFound, - self.role_api.get_role, - uuid.uuid4().hex) - - def test_create_duplicate_role_name_fails(self): - role = unit.new_role_ref(id='fake1', name='fake1name') - self.role_api.create_role('fake1', role) - role['id'] = 'fake2' - self.assertRaises(exception.Conflict, - self.role_api.create_role, - 'fake2', - role) - - def test_rename_duplicate_role_name_fails(self): - role1 = unit.new_role_ref(id='fake1', name='fake1name') - role2 = unit.new_role_ref(id='fake2', name='fake2name') - self.role_api.create_role('fake1', role1) - self.role_api.create_role('fake2', role2) - role1['name'] = 'fake2name' - self.assertRaises(exception.Conflict, - self.role_api.update_role, - 'fake1', - role1) - - def test_role_crud(self): - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - role_ref = self.role_api.get_role(role['id']) - role_ref_dict = {x: role_ref[x] for x in role_ref} - self.assertDictEqual(role, role_ref_dict) - - role['name'] = uuid.uuid4().hex - updated_role_ref = self.role_api.update_role(role['id'], role) - role_ref = self.role_api.get_role(role['id']) - role_ref_dict = {x: role_ref[x] for x in role_ref} - self.assertDictEqual(role, role_ref_dict) - self.assertDictEqual(role_ref_dict, updated_role_ref) - - self.role_api.delete_role(role['id']) - 
self.assertRaises(exception.RoleNotFound, - self.role_api.get_role, - role['id']) - - def test_update_role_returns_not_found(self): - role = unit.new_role_ref() - self.assertRaises(exception.RoleNotFound, - self.role_api.update_role, - role['id'], - role) - - def test_list_roles(self): - roles = self.role_api.list_roles() - self.assertEqual(len(default_fixtures.ROLES), len(roles)) - role_ids = set(role['id'] for role in roles) - expected_role_ids = set(role['id'] for role in default_fixtures.ROLES) - self.assertEqual(expected_role_ids, role_ids) - - @unit.skip_if_cache_disabled('role') - def test_cache_layer_role_crud(self): - role = unit.new_role_ref() - role_id = role['id'] - # Create role - self.role_api.create_role(role_id, role) - role_ref = self.role_api.get_role(role_id) - updated_role_ref = copy.deepcopy(role_ref) - updated_role_ref['name'] = uuid.uuid4().hex - # Update role, bypassing the role api manager - self.role_api.driver.update_role(role_id, updated_role_ref) - # Verify get_role still returns old ref - self.assertDictEqual(role_ref, self.role_api.get_role(role_id)) - # Invalidate Cache - self.role_api.get_role.invalidate(self.role_api, role_id) - # Verify get_role returns the new role_ref - self.assertDictEqual(updated_role_ref, - self.role_api.get_role(role_id)) - # Update role back to original via the assignment api manager - self.role_api.update_role(role_id, role_ref) - # Verify get_role returns the original role ref - self.assertDictEqual(role_ref, self.role_api.get_role(role_id)) - # Delete role bypassing the role api manager - self.role_api.driver.delete_role(role_id) - # Verify get_role still returns the role_ref - self.assertDictEqual(role_ref, self.role_api.get_role(role_id)) - # Invalidate cache - self.role_api.get_role.invalidate(self.role_api, role_id) - # Verify RoleNotFound is now raised - self.assertRaises(exception.RoleNotFound, - self.role_api.get_role, - role_id) - # recreate role - self.role_api.create_role(role_id, role) - 
self.role_api.get_role(role_id) - # delete role via the assignment api manager - self.role_api.delete_role(role_id) - # verity RoleNotFound is now raised - self.assertRaises(exception.RoleNotFound, - self.role_api.get_role, - role_id) diff --git a/keystone-moon/keystone/tests/unit/auth/__init__.py b/keystone-moon/keystone/tests/unit/auth/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/auth/test_controllers.py b/keystone-moon/keystone/tests/unit/auth/test_controllers.py deleted file mode 100644 index 76f2776a..00000000 --- a/keystone-moon/keystone/tests/unit/auth/test_controllers.py +++ /dev/null @@ -1,98 +0,0 @@ -# Copyright 2015 IBM Corp. - -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import uuid - -import mock -from oslo_config import cfg -from oslo_config import fixture as config_fixture -from oslo_utils import importutils -from oslotest import mockpatch -import stevedore -from stevedore import extension - -from keystone.auth import controllers -from keystone.tests import unit - - -class TestLoadAuthMethod(unit.BaseTestCase): - def test_entrypoint_works(self): - method = uuid.uuid4().hex - plugin_name = self.getUniqueString() - - # Register the method using the given plugin - cf = self.useFixture(config_fixture.Config()) - cf.register_opt(cfg.StrOpt(method), group='auth') - cf.config(group='auth', **{method: plugin_name}) - - # Setup stevedore.DriverManager to return a driver for the plugin - extension_ = extension.Extension( - plugin_name, entry_point=mock.sentinel.entry_point, - plugin=mock.sentinel.plugin, - obj=mock.sentinel.driver) - auth_plugin_namespace = 'keystone.auth.%s' % method - fake_driver_manager = stevedore.DriverManager.make_test_instance( - extension_, namespace=auth_plugin_namespace) - - driver_manager_mock = self.useFixture(mockpatch.PatchObject( - stevedore, 'DriverManager', return_value=fake_driver_manager)).mock - - driver = controllers.load_auth_method(method) - - self.assertEqual(auth_plugin_namespace, fake_driver_manager.namespace) - driver_manager_mock.assert_called_once_with( - auth_plugin_namespace, plugin_name, invoke_on_load=True) - self.assertIs(driver, mock.sentinel.driver) - - def test_entrypoint_fails_import_works(self): - method = uuid.uuid4().hex - plugin_name = self.getUniqueString() - - # Register the method using the given plugin - cf = self.useFixture(config_fixture.Config()) - cf.register_opt(cfg.StrOpt(method), group='auth') - cf.config(group='auth', **{method: plugin_name}) - - # stevedore.DriverManager raises RuntimeError if it can't load the - # driver. 
- self.useFixture(mockpatch.PatchObject( - stevedore, 'DriverManager', side_effect=RuntimeError)) - - self.useFixture(mockpatch.PatchObject( - importutils, 'import_object', return_value=mock.sentinel.driver)) - - driver = controllers.load_auth_method(method) - self.assertIs(driver, mock.sentinel.driver) - - def test_entrypoint_fails_import_fails(self): - method = uuid.uuid4().hex - plugin_name = self.getUniqueString() - - # Register the method using the given plugin - cf = self.useFixture(config_fixture.Config()) - cf.register_opt(cfg.StrOpt(method), group='auth') - cf.config(group='auth', **{method: plugin_name}) - - # stevedore.DriverManager raises RuntimeError if it can't load the - # driver. - self.useFixture(mockpatch.PatchObject( - stevedore, 'DriverManager', side_effect=RuntimeError)) - - class TestException(Exception): - pass - - self.useFixture(mockpatch.PatchObject( - importutils, 'import_object', side_effect=TestException)) - - self.assertRaises(TestException, controllers.load_auth_method, method) diff --git a/keystone-moon/keystone/tests/unit/backend/__init__.py b/keystone-moon/keystone/tests/unit/backend/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/backend/core_ldap.py b/keystone-moon/keystone/tests/unit/backend/core_ldap.py deleted file mode 100644 index 8b72c62a..00000000 --- a/keystone-moon/keystone/tests/unit/backend/core_ldap.py +++ /dev/null @@ -1,146 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -import ldap - -from oslo_config import cfg - -from keystone.common import cache -from keystone.common import ldap as common_ldap -from keystone.common.ldap import core as common_ldap_core -from keystone.tests import unit -from keystone.tests.unit import default_fixtures -from keystone.tests.unit import fakeldap -from keystone.tests.unit.ksfixtures import database - - -CONF = cfg.CONF - - -def create_group_container(identity_api): - # Create the groups base entry (ou=Groups,cn=example,cn=com) - group_api = identity_api.driver.group - conn = group_api.get_connection() - dn = 'ou=Groups,cn=example,cn=com' - conn.add_s(dn, [('objectclass', ['organizationalUnit']), - ('ou', ['Groups'])]) - - -class BaseBackendLdapCommon(object): - """Mixin class to set up generic LDAP backends.""" - - def setUp(self): - super(BaseBackendLdapCommon, self).setUp() - - common_ldap.register_handler('fake://', fakeldap.FakeLdap) - self.load_backends() - self.load_fixtures(default_fixtures) - - self.addCleanup(common_ldap_core._HANDLERS.clear) - self.addCleanup(self.clear_database) - - def _get_domain_fixture(self): - """Domains in LDAP are read-only, so just return the static one.""" - return self.resource_api.get_domain(CONF.identity.default_domain_id) - - def clear_database(self): - for shelf in fakeldap.FakeShelves: - fakeldap.FakeShelves[shelf].clear() - - def get_config(self, domain_id): - # Only one conf structure unless we are using separate domain backends - return CONF - - def config_overrides(self): - super(BaseBackendLdapCommon, self).config_overrides() - self.config_fixture.config(group='identity', driver='ldap') - - def config_files(self): - config_files = super(BaseBackendLdapCommon, self).config_files() - config_files.append(unit.dirs.tests_conf('backend_ldap.conf')) - return config_files - - def get_user_enabled_vals(self, user): - user_dn = ( - 
self.identity_api.driver.user._id_to_dn_string(user['id'])) - enabled_attr_name = CONF.ldap.user_enabled_attribute - - ldap_ = self.identity_api.driver.user.get_connection() - res = ldap_.search_s(user_dn, - ldap.SCOPE_BASE, - u'(sn=%s)' % user['name']) - if enabled_attr_name in res[0][1]: - return res[0][1][enabled_attr_name] - else: - return None - - -class BaseBackendLdap(object): - """Mixin class to set up an all-LDAP configuration.""" - - def setUp(self): - # NOTE(dstanek): The database must be setup prior to calling the - # parent's setUp. The parent's setUp uses services (like - # credentials) that require a database. - self.useFixture(database.Database()) - super(BaseBackendLdap, self).setUp() - - def load_fixtures(self, fixtures): - # Override super impl since need to create group container. - create_group_container(self.identity_api) - super(BaseBackendLdap, self).load_fixtures(fixtures) - - -class BaseBackendLdapIdentitySqlEverythingElse(unit.SQLDriverOverrides): - """Mixin base for Identity LDAP, everything else SQL backend tests.""" - - def config_files(self): - config_files = super(BaseBackendLdapIdentitySqlEverythingElse, - self).config_files() - config_files.append(unit.dirs.tests_conf('backend_ldap_sql.conf')) - return config_files - - def setUp(self): - sqldb = self.useFixture(database.Database()) - super(BaseBackendLdapIdentitySqlEverythingElse, self).setUp() - self.clear_database() - self.load_backends() - cache.configure_cache() - - sqldb.recreate() - self.load_fixtures(default_fixtures) - # defaulted by the data load - self.user_foo['enabled'] = True - - def config_overrides(self): - super(BaseBackendLdapIdentitySqlEverythingElse, - self).config_overrides() - self.config_fixture.config(group='identity', driver='ldap') - self.config_fixture.config(group='resource', driver='sql') - self.config_fixture.config(group='assignment', driver='sql') - - -class BaseBackendLdapIdentitySqlEverythingElseWithMapping(object): - """Mixin base class to test 
mapping of default LDAP backend. - - The default configuration is not to enable mapping when using a single - backend LDAP driver. However, a cloud provider might want to enable - the mapping, hence hiding the LDAP IDs from any clients of keystone. - Setting backward_compatible_ids to False will enable this mapping. - - """ - - def config_overrides(self): - super(BaseBackendLdapIdentitySqlEverythingElseWithMapping, - self).config_overrides() - self.config_fixture.config(group='identity_mapping', - backward_compatible_ids=False) diff --git a/keystone-moon/keystone/tests/unit/backend/core_sql.py b/keystone-moon/keystone/tests/unit/backend/core_sql.py deleted file mode 100644 index 8c9f4957..00000000 --- a/keystone-moon/keystone/tests/unit/backend/core_sql.py +++ /dev/null @@ -1,53 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import sqlalchemy - -from keystone.common import sql -from keystone.tests import unit -from keystone.tests.unit import default_fixtures -from keystone.tests.unit.ksfixtures import database - - -class BaseBackendSqlTests(unit.SQLDriverOverrides, unit.TestCase): - - def setUp(self): - super(BaseBackendSqlTests, self).setUp() - self.useFixture(database.Database()) - self.load_backends() - - # populate the engine with tables & fixtures - self.load_fixtures(default_fixtures) - # defaulted by the data load - self.user_foo['enabled'] = True - - def config_files(self): - config_files = super(BaseBackendSqlTests, self).config_files() - config_files.append(unit.dirs.tests_conf('backend_sql.conf')) - return config_files - - -class BaseBackendSqlModels(BaseBackendSqlTests): - - def select_table(self, name): - table = sqlalchemy.Table(name, - sql.ModelBase.metadata, - autoload=True) - s = sqlalchemy.select([table]) - return s - - def assertExpectedSchema(self, table, cols): - table = self.select_table(table) - for col, type_, length in cols: - self.assertIsInstance(table.c[col].type, type_) - if length: - self.assertEqual(length, table.c[col].type.length) diff --git a/keystone-moon/keystone/tests/unit/backend/domain_config/__init__.py b/keystone-moon/keystone/tests/unit/backend/domain_config/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/backend/domain_config/core.py b/keystone-moon/keystone/tests/unit/backend/domain_config/core.py deleted file mode 100644 index 7bbbf313..00000000 --- a/keystone-moon/keystone/tests/unit/backend/domain_config/core.py +++ /dev/null @@ -1,601 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import copy -import uuid - -import mock -from testtools import matchers - -from keystone import exception -from keystone.tests import unit - - -class DomainConfigTests(object): - - def setUp(self): - self.domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(self.domain['id'], self.domain) - self.addCleanup(self.clean_up_domain) - - def clean_up_domain(self): - # NOTE(henry-nash): Deleting the domain will also delete any domain - # configs for this domain. - self.domain['enabled'] = False - self.resource_api.update_domain(self.domain['id'], self.domain) - self.resource_api.delete_domain(self.domain['id']) - del self.domain - - def _domain_config_crud(self, sensitive): - group = uuid.uuid4().hex - option = uuid.uuid4().hex - value = uuid.uuid4().hex - self.domain_config_api.create_config_option( - self.domain['id'], group, option, value, sensitive) - res = self.domain_config_api.get_config_option( - self.domain['id'], group, option, sensitive) - config = {'group': group, 'option': option, 'value': value} - self.assertEqual(config, res) - - value = uuid.uuid4().hex - self.domain_config_api.update_config_option( - self.domain['id'], group, option, value, sensitive) - res = self.domain_config_api.get_config_option( - self.domain['id'], group, option, sensitive) - config = {'group': group, 'option': option, 'value': value} - self.assertEqual(config, res) - - self.domain_config_api.delete_config_options( - self.domain['id'], group, option, sensitive) - self.assertRaises(exception.DomainConfigNotFound, - 
self.domain_config_api.get_config_option, - self.domain['id'], group, option, sensitive) - # ...and silent if we try to delete it again - self.domain_config_api.delete_config_options( - self.domain['id'], group, option, sensitive) - - def test_whitelisted_domain_config_crud(self): - self._domain_config_crud(sensitive=False) - - def test_sensitive_domain_config_crud(self): - self._domain_config_crud(sensitive=True) - - def _list_domain_config(self, sensitive): - """Test listing by combination of domain, group & option.""" - - config1 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex, - 'value': uuid.uuid4().hex} - # Put config2 in the same group as config1 - config2 = {'group': config1['group'], 'option': uuid.uuid4().hex, - 'value': uuid.uuid4().hex} - config3 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex, - 'value': 100} - for config in [config1, config2, config3]: - self.domain_config_api.create_config_option( - self.domain['id'], config['group'], config['option'], - config['value'], sensitive) - - # Try listing all items from a domain - res = self.domain_config_api.list_config_options( - self.domain['id'], sensitive=sensitive) - self.assertThat(res, matchers.HasLength(3)) - for res_entry in res: - self.assertIn(res_entry, [config1, config2, config3]) - - # Try listing by domain and group - res = self.domain_config_api.list_config_options( - self.domain['id'], group=config1['group'], sensitive=sensitive) - self.assertThat(res, matchers.HasLength(2)) - for res_entry in res: - self.assertIn(res_entry, [config1, config2]) - - # Try listing by domain, group and option - res = self.domain_config_api.list_config_options( - self.domain['id'], group=config2['group'], - option=config2['option'], sensitive=sensitive) - self.assertThat(res, matchers.HasLength(1)) - self.assertEqual(config2, res[0]) - - def test_list_whitelisted_domain_config_crud(self): - self._list_domain_config(False) - - def test_list_sensitive_domain_config_crud(self): - 
self._list_domain_config(True) - - def _delete_domain_configs(self, sensitive): - """Test deleting by combination of domain, group & option.""" - - config1 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex, - 'value': uuid.uuid4().hex} - # Put config2 and config3 in the same group as config1 - config2 = {'group': config1['group'], 'option': uuid.uuid4().hex, - 'value': uuid.uuid4().hex} - config3 = {'group': config1['group'], 'option': uuid.uuid4().hex, - 'value': uuid.uuid4().hex} - config4 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex, - 'value': uuid.uuid4().hex} - for config in [config1, config2, config3, config4]: - self.domain_config_api.create_config_option( - self.domain['id'], config['group'], config['option'], - config['value'], sensitive) - - # Try deleting by domain, group and option - res = self.domain_config_api.delete_config_options( - self.domain['id'], group=config2['group'], - option=config2['option'], sensitive=sensitive) - res = self.domain_config_api.list_config_options( - self.domain['id'], sensitive=sensitive) - self.assertThat(res, matchers.HasLength(3)) - for res_entry in res: - self.assertIn(res_entry, [config1, config3, config4]) - - # Try deleting by domain and group - res = self.domain_config_api.delete_config_options( - self.domain['id'], group=config4['group'], sensitive=sensitive) - res = self.domain_config_api.list_config_options( - self.domain['id'], sensitive=sensitive) - self.assertThat(res, matchers.HasLength(2)) - for res_entry in res: - self.assertIn(res_entry, [config1, config3]) - - # Try deleting all items from a domain - res = self.domain_config_api.delete_config_options( - self.domain['id'], sensitive=sensitive) - res = self.domain_config_api.list_config_options( - self.domain['id'], sensitive=sensitive) - self.assertThat(res, matchers.HasLength(0)) - - def test_delete_whitelisted_domain_configs(self): - self._delete_domain_configs(False) - - def test_delete_sensitive_domain_configs(self): - 
self._delete_domain_configs(True) - - def _create_domain_config_twice(self, sensitive): - """Test conflict error thrown if create the same option twice.""" - - config = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex, - 'value': uuid.uuid4().hex} - - self.domain_config_api.create_config_option( - self.domain['id'], config['group'], config['option'], - config['value'], sensitive=sensitive) - self.assertRaises(exception.Conflict, - self.domain_config_api.create_config_option, - self.domain['id'], config['group'], config['option'], - config['value'], sensitive=sensitive) - - def test_create_whitelisted_domain_config_twice(self): - self._create_domain_config_twice(False) - - def test_create_sensitive_domain_config_twice(self): - self._create_domain_config_twice(True) - - def test_delete_domain_deletes_configs(self): - """Test domain deletion clears the domain configs.""" - - domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain['id'], domain) - config1 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex, - 'value': uuid.uuid4().hex} - # Put config2 in the same group as config1 - config2 = {'group': config1['group'], 'option': uuid.uuid4().hex, - 'value': uuid.uuid4().hex} - self.domain_config_api.create_config_option( - domain['id'], config1['group'], config1['option'], - config1['value']) - self.domain_config_api.create_config_option( - domain['id'], config2['group'], config2['option'], - config2['value'], sensitive=True) - res = self.domain_config_api.list_config_options( - domain['id']) - self.assertThat(res, matchers.HasLength(1)) - res = self.domain_config_api.list_config_options( - domain['id'], sensitive=True) - self.assertThat(res, matchers.HasLength(1)) - - # Now delete the domain - domain['enabled'] = False - self.resource_api.update_domain(domain['id'], domain) - self.resource_api.delete_domain(domain['id']) - - # Check domain configs have also been deleted - res = 
self.domain_config_api.list_config_options( - domain['id']) - self.assertThat(res, matchers.HasLength(0)) - res = self.domain_config_api.list_config_options( - domain['id'], sensitive=True) - self.assertThat(res, matchers.HasLength(0)) - - def test_create_domain_config_including_sensitive_option(self): - config = {'ldap': {'url': uuid.uuid4().hex, - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}} - self.domain_config_api.create_config(self.domain['id'], config) - - # password is sensitive, so check that the whitelisted portion and - # the sensitive piece have been stored in the appropriate locations. - res = self.domain_config_api.get_config(self.domain['id']) - config_whitelisted = copy.deepcopy(config) - config_whitelisted['ldap'].pop('password') - self.assertEqual(config_whitelisted, res) - res = self.domain_config_api.get_config_option( - self.domain['id'], 'ldap', 'password', sensitive=True) - self.assertEqual(config['ldap']['password'], res['value']) - - # Finally, use the non-public API to get back the whole config - res = self.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) - self.assertEqual(config, res) - - def test_get_partial_domain_config(self): - config = {'ldap': {'url': uuid.uuid4().hex, - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} - self.domain_config_api.create_config(self.domain['id'], config) - - res = self.domain_config_api.get_config(self.domain['id'], - group='identity') - config_partial = copy.deepcopy(config) - config_partial.pop('ldap') - self.assertEqual(config_partial, res) - res = self.domain_config_api.get_config( - self.domain['id'], group='ldap', option='user_tree_dn') - self.assertEqual({'user_tree_dn': config['ldap']['user_tree_dn']}, res) - # ...but we should fail to get a sensitive option - self.assertRaises(exception.DomainConfigNotFound, - self.domain_config_api.get_config, self.domain['id'], - group='ldap', 
option='password') - - def test_delete_partial_domain_config(self): - config = {'ldap': {'url': uuid.uuid4().hex, - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} - self.domain_config_api.create_config(self.domain['id'], config) - - self.domain_config_api.delete_config( - self.domain['id'], group='identity') - config_partial = copy.deepcopy(config) - config_partial.pop('identity') - config_partial['ldap'].pop('password') - res = self.domain_config_api.get_config(self.domain['id']) - self.assertEqual(config_partial, res) - - self.domain_config_api.delete_config( - self.domain['id'], group='ldap', option='url') - config_partial = copy.deepcopy(config_partial) - config_partial['ldap'].pop('url') - res = self.domain_config_api.get_config(self.domain['id']) - self.assertEqual(config_partial, res) - - def test_get_options_not_in_domain_config(self): - self.assertRaises(exception.DomainConfigNotFound, - self.domain_config_api.get_config, self.domain['id']) - config = {'ldap': {'url': uuid.uuid4().hex}} - - self.domain_config_api.create_config(self.domain['id'], config) - - self.assertRaises(exception.DomainConfigNotFound, - self.domain_config_api.get_config, self.domain['id'], - group='identity') - self.assertRaises(exception.DomainConfigNotFound, - self.domain_config_api.get_config, self.domain['id'], - group='ldap', option='user_tree_dn') - - def test_get_sensitive_config(self): - config = {'ldap': {'url': uuid.uuid4().hex, - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} - res = self.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) - self.assertEqual({}, res) - self.domain_config_api.create_config(self.domain['id'], config) - res = self.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) - self.assertEqual(config, res) - - def test_update_partial_domain_config(self): - config = {'ldap': {'url': 
uuid.uuid4().hex, - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} - self.domain_config_api.create_config(self.domain['id'], config) - - # Try updating a group - new_config = {'ldap': {'url': uuid.uuid4().hex, - 'user_filter': uuid.uuid4().hex}} - res = self.domain_config_api.update_config( - self.domain['id'], new_config, group='ldap') - expected_config = copy.deepcopy(config) - expected_config['ldap']['url'] = new_config['ldap']['url'] - expected_config['ldap']['user_filter'] = ( - new_config['ldap']['user_filter']) - expected_full_config = copy.deepcopy(expected_config) - expected_config['ldap'].pop('password') - res = self.domain_config_api.get_config(self.domain['id']) - self.assertEqual(expected_config, res) - # The sensitive option should still existsss - res = self.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) - self.assertEqual(expected_full_config, res) - - # Try updating a single whitelisted option - self.domain_config_api.delete_config(self.domain['id']) - self.domain_config_api.create_config(self.domain['id'], config) - new_config = {'url': uuid.uuid4().hex} - res = self.domain_config_api.update_config( - self.domain['id'], new_config, group='ldap', option='url') - - # Make sure whitelisted and full config is updated - expected_whitelisted_config = copy.deepcopy(config) - expected_whitelisted_config['ldap']['url'] = new_config['url'] - expected_full_config = copy.deepcopy(expected_whitelisted_config) - expected_whitelisted_config['ldap'].pop('password') - self.assertEqual(expected_whitelisted_config, res) - res = self.domain_config_api.get_config(self.domain['id']) - self.assertEqual(expected_whitelisted_config, res) - res = self.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) - self.assertEqual(expected_full_config, res) - - # Try updating a single sensitive option - self.domain_config_api.delete_config(self.domain['id']) - 
self.domain_config_api.create_config(self.domain['id'], config) - new_config = {'password': uuid.uuid4().hex} - res = self.domain_config_api.update_config( - self.domain['id'], new_config, group='ldap', option='password') - # The whitelisted config should not have changed... - expected_whitelisted_config = copy.deepcopy(config) - expected_full_config = copy.deepcopy(config) - expected_whitelisted_config['ldap'].pop('password') - self.assertEqual(expected_whitelisted_config, res) - res = self.domain_config_api.get_config(self.domain['id']) - self.assertEqual(expected_whitelisted_config, res) - expected_full_config['ldap']['password'] = new_config['password'] - res = self.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) - # ...but the sensitive piece should have. - self.assertEqual(expected_full_config, res) - - def test_update_invalid_partial_domain_config(self): - config = {'ldap': {'url': uuid.uuid4().hex, - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} - # An extra group, when specifying one group should fail - self.assertRaises(exception.InvalidDomainConfig, - self.domain_config_api.update_config, - self.domain['id'], config, group='ldap') - # An extra option, when specifying one option should fail - self.assertRaises(exception.InvalidDomainConfig, - self.domain_config_api.update_config, - self.domain['id'], config['ldap'], - group='ldap', option='url') - - # Now try the right number of groups/options, but just not - # ones that are in the config provided - config = {'ldap': {'user_tree_dn': uuid.uuid4().hex}} - self.assertRaises(exception.InvalidDomainConfig, - self.domain_config_api.update_config, - self.domain['id'], config, group='identity') - self.assertRaises(exception.InvalidDomainConfig, - self.domain_config_api.update_config, - self.domain['id'], config['ldap'], group='ldap', - option='url') - - # Now some valid groups/options, but just not ones that are in the - # 
existing config - config = {'ldap': {'user_tree_dn': uuid.uuid4().hex}} - self.domain_config_api.create_config(self.domain['id'], config) - config_wrong_group = {'identity': {'driver': uuid.uuid4().hex}} - self.assertRaises(exception.DomainConfigNotFound, - self.domain_config_api.update_config, - self.domain['id'], config_wrong_group, - group='identity') - config_wrong_option = {'url': uuid.uuid4().hex} - self.assertRaises(exception.DomainConfigNotFound, - self.domain_config_api.update_config, - self.domain['id'], config_wrong_option, - group='ldap', option='url') - - # And finally just some bad groups/options - bad_group = uuid.uuid4().hex - config = {bad_group: {'user': uuid.uuid4().hex}} - self.assertRaises(exception.InvalidDomainConfig, - self.domain_config_api.update_config, - self.domain['id'], config, group=bad_group, - option='user') - bad_option = uuid.uuid4().hex - config = {'ldap': {bad_option: uuid.uuid4().hex}} - self.assertRaises(exception.InvalidDomainConfig, - self.domain_config_api.update_config, - self.domain['id'], config, group='ldap', - option=bad_option) - - def test_create_invalid_domain_config(self): - self.assertRaises(exception.InvalidDomainConfig, - self.domain_config_api.create_config, - self.domain['id'], {}) - config = {uuid.uuid4().hex: uuid.uuid4().hex} - self.assertRaises(exception.InvalidDomainConfig, - self.domain_config_api.create_config, - self.domain['id'], config) - config = {uuid.uuid4().hex: {uuid.uuid4().hex: uuid.uuid4().hex}} - self.assertRaises(exception.InvalidDomainConfig, - self.domain_config_api.create_config, - self.domain['id'], config) - config = {'ldap': {uuid.uuid4().hex: uuid.uuid4().hex}} - self.assertRaises(exception.InvalidDomainConfig, - self.domain_config_api.create_config, - self.domain['id'], config) - # Try an option that IS in the standard conf, but neither whitelisted - # or marked as sensitive - config = {'ldap': {'role_tree_dn': uuid.uuid4().hex}} - self.assertRaises(exception.InvalidDomainConfig, - 
self.domain_config_api.create_config, - self.domain['id'], config) - - def test_delete_invalid_partial_domain_config(self): - config = {'ldap': {'url': uuid.uuid4().hex}} - self.domain_config_api.create_config(self.domain['id'], config) - # Try deleting a group not in the config - self.assertRaises(exception.DomainConfigNotFound, - self.domain_config_api.delete_config, - self.domain['id'], group='identity') - # Try deleting an option not in the config - self.assertRaises(exception.DomainConfigNotFound, - self.domain_config_api.delete_config, - self.domain['id'], - group='ldap', option='user_tree_dn') - - def test_sensitive_substitution_in_domain_config(self): - # Create a config that contains a whitelisted option that requires - # substitution of a sensitive option. - config = {'ldap': {'url': 'my_url/%(password)s', - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} - self.domain_config_api.create_config(self.domain['id'], config) - - # Read back the config with the internal method and ensure that the - # substitution has taken place. 
- res = self.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) - expected_url = ( - config['ldap']['url'] % {'password': config['ldap']['password']}) - self.assertEqual(expected_url, res['ldap']['url']) - - def test_invalid_sensitive_substitution_in_domain_config(self): - """Check that invalid substitutions raise warnings.""" - - mock_log = mock.Mock() - - invalid_option_config = { - 'ldap': {'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} - - for invalid_option in ['my_url/%(passssword)s', - 'my_url/%(password', - 'my_url/%(password)', - 'my_url/%(password)d']: - invalid_option_config['ldap']['url'] = invalid_option - self.domain_config_api.create_config( - self.domain['id'], invalid_option_config) - - with mock.patch('keystone.resource.core.LOG', mock_log): - res = self.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) - mock_log.warn.assert_any_call(mock.ANY) - self.assertEqual( - invalid_option_config['ldap']['url'], res['ldap']['url']) - - def test_escaped_sequence_in_domain_config(self): - """Check that escaped '%(' doesn't get interpreted.""" - - mock_log = mock.Mock() - - escaped_option_config = { - 'ldap': {'url': 'my_url/%%(password)s', - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} - - self.domain_config_api.create_config( - self.domain['id'], escaped_option_config) - - with mock.patch('keystone.resource.core.LOG', mock_log): - res = self.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) - self.assertFalse(mock_log.warn.called) - # The escaping '%' should have been removed - self.assertEqual('my_url/%(password)s', res['ldap']['url']) - - @unit.skip_if_cache_disabled('domain_config') - def test_cache_layer_get_sensitive_config(self): - config = {'ldap': {'url': uuid.uuid4().hex, - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': 
{'driver': uuid.uuid4().hex}} - self.domain_config_api.create_config(self.domain['id'], config) - # cache the result - res = self.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) - self.assertEqual(config, res) - - # delete, bypassing domain config manager api - self.domain_config_api.delete_config_options(self.domain['id']) - self.domain_config_api.delete_config_options(self.domain['id'], - sensitive=True) - - self.assertDictEqual( - res, self.domain_config_api.get_config_with_sensitive_info( - self.domain['id'])) - self.domain_config_api.get_config_with_sensitive_info.invalidate( - self.domain_config_api, self.domain['id']) - self.assertDictEqual( - {}, - self.domain_config_api.get_config_with_sensitive_info( - self.domain['id'])) - - def test_config_registration(self): - type = uuid.uuid4().hex - self.domain_config_api.obtain_registration( - self.domain['id'], type) - self.domain_config_api.release_registration( - self.domain['id'], type=type) - - # Make sure that once someone has it, nobody else can get it. - # This includes the domain who already has it. 
- self.domain_config_api.obtain_registration( - self.domain['id'], type) - self.assertFalse( - self.domain_config_api.obtain_registration( - self.domain['id'], type)) - - # Make sure we can read who does have it - self.assertEqual( - self.domain['id'], - self.domain_config_api.read_registration(type)) - - # Make sure releasing it is silent if the domain specified doesn't - # have the registration - domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain2['id'], domain2) - self.domain_config_api.release_registration( - domain2['id'], type=type) - - # If nobody has the type registered, then trying to read it should - # raise ConfigRegistrationNotFound - self.domain_config_api.release_registration( - self.domain['id'], type=type) - self.assertRaises(exception.ConfigRegistrationNotFound, - self.domain_config_api.read_registration, - type) - - # Finally check multiple registrations are cleared if you free the - # registration without specifying the type - type2 = uuid.uuid4().hex - self.domain_config_api.obtain_registration( - self.domain['id'], type) - self.domain_config_api.obtain_registration( - self.domain['id'], type2) - self.domain_config_api.release_registration(self.domain['id']) - self.assertRaises(exception.ConfigRegistrationNotFound, - self.domain_config_api.read_registration, - type) - self.assertRaises(exception.ConfigRegistrationNotFound, - self.domain_config_api.read_registration, - type2) diff --git a/keystone-moon/keystone/tests/unit/backend/domain_config/test_sql.py b/keystone-moon/keystone/tests/unit/backend/domain_config/test_sql.py deleted file mode 100644 index 6459ede1..00000000 --- a/keystone-moon/keystone/tests/unit/backend/domain_config/test_sql.py +++ /dev/null @@ -1,41 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -from keystone.common import sql -from keystone.tests.unit.backend import core_sql -from keystone.tests.unit.backend.domain_config import core - - -class SqlDomainConfigModels(core_sql.BaseBackendSqlModels): - - def test_whitelisted_model(self): - cols = (('domain_id', sql.String, 64), - ('group', sql.String, 255), - ('option', sql.String, 255), - ('value', sql.JsonBlob, None)) - self.assertExpectedSchema('whitelisted_config', cols) - - def test_sensitive_model(self): - cols = (('domain_id', sql.String, 64), - ('group', sql.String, 255), - ('option', sql.String, 255), - ('value', sql.JsonBlob, None)) - self.assertExpectedSchema('sensitive_config', cols) - - -class SqlDomainConfig(core_sql.BaseBackendSqlTests, core.DomainConfigTests): - def setUp(self): - super(SqlDomainConfig, self).setUp() - # core.DomainConfigTests is effectively a mixin class, so make sure we - # call its setup - core.DomainConfigTests.setUp(self) diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/__init__.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/V8/__init__.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/V8/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/V8/sql.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/V8/sql.py deleted file mode 100644 index 
da1490a7..00000000 --- a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/V8/sql.py +++ /dev/null @@ -1,39 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.tests.unit import test_backend_sql - - -class SqlIdentityV8(test_backend_sql.SqlIdentity): - """Test that a V8 driver still passes the same tests. - - We use the SQL driver as an example of a V8 legacy driver. - - """ - - def config_overrides(self): - super(SqlIdentityV8, self).config_overrides() - # V8 SQL specific driver overrides - self.config_fixture.config( - group='assignment', - driver='keystone.assignment.V8_backends.sql.Assignment') - self.use_specific_sql_driver_version( - 'keystone.assignment', 'backends', 'V8_') - - def test_delete_project_assignments_same_id_as_domain(self): - self.skipTest("V8 doesn't support project acting as a domain.") - - def test_delete_user_assignments_user_same_id_as_group(self): - self.skipTest("Groups and users with the same ID are not supported.") - - def test_delete_group_assignments_group_same_id_as_user(self): - self.skipTest("Groups and users with the same ID are not supported.") diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/__init__.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/assignment/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/V8/__init__.py 
b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/V8/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/V8/api_v3.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/V8/api_v3.py deleted file mode 100644 index d5469768..00000000 --- a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/V8/api_v3.py +++ /dev/null @@ -1,108 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import uuid - -from six.moves import http_client - -from keystone.tests.unit import test_v3_federation - - -class FederatedSetupMixinV8(object): - def useV8driver(self): - # We use the SQL driver as an example V8 driver, so override - # the current driver with that version. 
- self.config_fixture.config( - group='federation', - driver='keystone.federation.V8_backends.sql.Federation') - self.use_specific_sql_driver_version( - 'keystone.federation', 'backends', 'V8_') - - -class FederatedIdentityProviderTestsV8( - test_v3_federation.FederatedIdentityProviderTests, - FederatedSetupMixinV8): - """Test that a V8 driver still passes the same tests.""" - - def config_overrides(self): - super(FederatedIdentityProviderTestsV8, self).config_overrides() - self.useV8driver() - - def test_create_idp_remote_repeated(self): - """Creates two IdentityProvider entities with some remote_ids - - A remote_id is the same for both so the second IdP is not - created because of the uniqueness of the remote_ids - - Expect HTTP 409 Conflict code for the latter call. - - Note: V9 drivers and later augment the conflict message with - additional information, which won't be present if we are running - a V8 driver - so override the newer tests to just ensure a - conflict message is raised. - """ - body = self.default_body.copy() - repeated_remote_id = uuid.uuid4().hex - body['remote_ids'] = [uuid.uuid4().hex, - uuid.uuid4().hex, - uuid.uuid4().hex, - repeated_remote_id] - self._create_default_idp(body=body) - - url = self.base_url(suffix=uuid.uuid4().hex) - body['remote_ids'] = [uuid.uuid4().hex, - repeated_remote_id] - self.put(url, body={'identity_provider': body}, - expected_status=http_client.CONFLICT) - - def test_check_idp_uniqueness(self): - """Add same IdP twice. - - Expect HTTP 409 Conflict code for the latter call. - - Note: V9 drivers and later augment the conflict message with - additional information, which won't be present if we are running - a V8 driver - so override the newer tests to just ensure a - conflict message is raised. 
- """ - url = self.base_url(suffix=uuid.uuid4().hex) - body = self._http_idp_input() - self.put(url, body={'identity_provider': body}, - expected_status=http_client.CREATED) - self.put(url, body={'identity_provider': body}, - expected_status=http_client.CONFLICT) - - -class MappingCRUDTestsV8( - test_v3_federation.MappingCRUDTests, - FederatedSetupMixinV8): - """Test that a V8 driver still passes the same tests.""" - - def config_overrides(self): - super(MappingCRUDTestsV8, self).config_overrides() - self.useV8driver() - - -class ServiceProviderTestsV8( - test_v3_federation.ServiceProviderTests, - FederatedSetupMixinV8): - """Test that a V8 driver still passes the same tests.""" - - def config_overrides(self): - super(ServiceProviderTestsV8, self).config_overrides() - self.useV8driver() - - def test_filter_list_sp_by_id(self): - self.skipTest('Operation not supported in v8 and earlier drivers') - - def test_filter_list_sp_by_enabled(self): - self.skipTest('Operation not supported in v8 and earlier drivers') diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/__init__.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/federation/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/V8/__init__.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/V8/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/V8/sql.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/V8/sql.py deleted file mode 100644 index 16acbdc3..00000000 --- a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/V8/sql.py +++ /dev/null @@ -1,71 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import unittest - -from keystone.resource.V8_backends import sql -from keystone.tests import unit -from keystone.tests.unit.ksfixtures import database -from keystone.tests.unit.resource import test_backends -from keystone.tests.unit import test_backend_sql - - -class SqlIdentityV8(test_backend_sql.SqlIdentity): - """Test that a V8 driver still passes the same tests. - - We use the SQL driver as an example of a V8 legacy driver. - - """ - - def config_overrides(self): - super(SqlIdentityV8, self).config_overrides() - # V8 SQL specific driver overrides - self.config_fixture.config( - group='resource', - driver='keystone.resource.V8_backends.sql.Resource') - self.use_specific_sql_driver_version( - 'keystone.resource', 'backends', 'V8_') - - def test_delete_projects_from_ids(self): - self.skipTest('Operation not supported in v8 and earlier drivers') - - def test_delete_projects_from_ids_with_no_existing_project_id(self): - self.skipTest('Operation not supported in v8 and earlier drivers') - - def test_delete_project_cascade(self): - self.skipTest('Operation not supported in v8 and earlier drivers') - - def test_delete_large_project_cascade(self): - self.skipTest('Operation not supported in v8 and earlier drivers') - - def test_hidden_project_domain_root_is_really_hidden(self): - self.skipTest('Operation not supported in v8 and earlier drivers') - - -class TestSqlResourceDriverV8(unit.BaseTestCase, - test_backends.ResourceDriverTests): - def setUp(self): - super(TestSqlResourceDriverV8, self).setUp() - - version_specifiers = { - 'keystone.resource': { - 
'versionless_backend': 'backends', - 'versioned_backend': 'V8_backends' - } - } - self.useFixture(database.Database(version_specifiers)) - - self.driver = sql.Resource() - - @unittest.skip('Null domain not allowed.') - def test_create_project_null_domain(self): - pass diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/__init__.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/resource/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/V8/__init__.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/V8/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/V8/sql.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/V8/sql.py deleted file mode 100644 index d9378c30..00000000 --- a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/V8/sql.py +++ /dev/null @@ -1,30 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.tests.unit import test_backend_sql - - -class SqlIdentityV8(test_backend_sql.SqlIdentity): - """Test that a V8 driver still passes the same tests. - - We use the SQL driver as an example of a V8 legacy driver. 
- - """ - - def config_overrides(self): - super(SqlIdentityV8, self).config_overrides() - # V8 SQL specific driver overrides - self.config_fixture.config( - group='role', - driver='keystone.assignment.V8_role_backends.sql.Role') - self.use_specific_sql_driver_version( - 'keystone.assignment', 'role_backends', 'V8_') diff --git a/keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/__init__.py b/keystone-moon/keystone/tests/unit/backend/legacy_drivers/role/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/backend/role/__init__.py b/keystone-moon/keystone/tests/unit/backend/role/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/backend/role/core.py b/keystone-moon/keystone/tests/unit/backend/role/core.py deleted file mode 100644 index d6e0d65c..00000000 --- a/keystone-moon/keystone/tests/unit/backend/role/core.py +++ /dev/null @@ -1,130 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import copy -import uuid - -from keystone import exception -from keystone.tests import unit -from keystone.tests.unit import default_fixtures - - -class RoleTests(object): - - def test_get_role_404(self): - self.assertRaises(exception.RoleNotFound, - self.role_api.get_role, - uuid.uuid4().hex) - - def test_create_duplicate_role_name_fails(self): - role = {'id': 'fake1', - 'name': 'fake1name'} - self.role_api.create_role('fake1', role) - role['id'] = 'fake2' - self.assertRaises(exception.Conflict, - self.role_api.create_role, - 'fake2', - role) - - def test_rename_duplicate_role_name_fails(self): - role1 = { - 'id': 'fake1', - 'name': 'fake1name' - } - role2 = { - 'id': 'fake2', - 'name': 'fake2name' - } - self.role_api.create_role('fake1', role1) - self.role_api.create_role('fake2', role2) - role1['name'] = 'fake2name' - self.assertRaises(exception.Conflict, - self.role_api.update_role, - 'fake1', - role1) - - def test_role_crud(self): - role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.role_api.create_role(role['id'], role) - role_ref = self.role_api.get_role(role['id']) - role_ref_dict = {x: role_ref[x] for x in role_ref} - self.assertDictEqual(role_ref_dict, role) - - role['name'] = uuid.uuid4().hex - updated_role_ref = self.role_api.update_role(role['id'], role) - role_ref = self.role_api.get_role(role['id']) - role_ref_dict = {x: role_ref[x] for x in role_ref} - self.assertDictEqual(role_ref_dict, role) - self.assertDictEqual(role_ref_dict, updated_role_ref) - - self.role_api.delete_role(role['id']) - self.assertRaises(exception.RoleNotFound, - self.role_api.get_role, - role['id']) - - def test_update_role_404(self): - role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.assertRaises(exception.RoleNotFound, - self.role_api.update_role, - role['id'], - role) - - def test_list_roles(self): - roles = self.role_api.list_roles() - self.assertEqual(len(default_fixtures.ROLES), len(roles)) - role_ids = set(role['id'] for role in roles) - 
expected_role_ids = set(role['id'] for role in default_fixtures.ROLES) - self.assertEqual(expected_role_ids, role_ids) - - @unit.skip_if_cache_disabled('role') - def test_cache_layer_role_crud(self): - role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - role_id = role['id'] - # Create role - self.role_api.create_role(role_id, role) - role_ref = self.role_api.get_role(role_id) - updated_role_ref = copy.deepcopy(role_ref) - updated_role_ref['name'] = uuid.uuid4().hex - # Update role, bypassing the role api manager - self.role_api.driver.update_role(role_id, updated_role_ref) - # Verify get_role still returns old ref - self.assertDictEqual(role_ref, self.role_api.get_role(role_id)) - # Invalidate Cache - self.role_api.get_role.invalidate(self.role_api, role_id) - # Verify get_role returns the new role_ref - self.assertDictEqual(updated_role_ref, - self.role_api.get_role(role_id)) - # Update role back to original via the assignment api manager - self.role_api.update_role(role_id, role_ref) - # Verify get_role returns the original role ref - self.assertDictEqual(role_ref, self.role_api.get_role(role_id)) - # Delete role bypassing the role api manager - self.role_api.driver.delete_role(role_id) - # Verify get_role still returns the role_ref - self.assertDictEqual(role_ref, self.role_api.get_role(role_id)) - # Invalidate cache - self.role_api.get_role.invalidate(self.role_api, role_id) - # Verify RoleNotFound is now raised - self.assertRaises(exception.RoleNotFound, - self.role_api.get_role, - role_id) - # recreate role - self.role_api.create_role(role_id, role) - self.role_api.get_role(role_id) - # delete role via the assignment api manager - self.role_api.delete_role(role_id) - # verity RoleNotFound is now raised - self.assertRaises(exception.RoleNotFound, - self.role_api.get_role, - role_id) diff --git a/keystone-moon/keystone/tests/unit/backend/role/test_ldap.py b/keystone-moon/keystone/tests/unit/backend/role/test_ldap.py deleted file mode 100644 index 
44f2b612..00000000 --- a/keystone-moon/keystone/tests/unit/backend/role/test_ldap.py +++ /dev/null @@ -1,161 +0,0 @@ -# -*- coding: utf-8 -*- -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import uuid - -from oslo_config import cfg - -from keystone import exception -from keystone.tests import unit -from keystone.tests.unit.backend import core_ldap -from keystone.tests.unit.backend.role import core as core_role -from keystone.tests.unit import default_fixtures - - -CONF = cfg.CONF - - -class LdapRoleCommon(core_ldap.BaseBackendLdapCommon, core_role.RoleTests): - """Tests that should be run in every LDAP configuration. - - Include additional tests that are unique to LDAP (or need to be overridden) - which should be run for all the various LDAP configurations we test. - - """ - pass - - -class LdapRole(LdapRoleCommon, core_ldap.BaseBackendLdap, unit.TestCase): - """Test in an all-LDAP configuration. - - Include additional tests that are unique to LDAP (or need to be overridden) - which only need to be run in a basic LDAP configurations. 
- - """ - def test_configurable_allowed_role_actions(self): - role = {'id': u'fäké1', 'name': u'fäké1'} - self.role_api.create_role(u'fäké1', role) - role_ref = self.role_api.get_role(u'fäké1') - self.assertEqual(u'fäké1', role_ref['id']) - - role['name'] = u'fäké2' - self.role_api.update_role(u'fäké1', role) - - self.role_api.delete_role(u'fäké1') - self.assertRaises(exception.RoleNotFound, - self.role_api.get_role, - u'fäké1') - - def test_configurable_forbidden_role_actions(self): - self.config_fixture.config( - group='ldap', role_allow_create=False, role_allow_update=False, - role_allow_delete=False) - self.load_backends() - - role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.assertRaises(exception.ForbiddenAction, - self.role_api.create_role, - role['id'], - role) - - self.role_member['name'] = uuid.uuid4().hex - self.assertRaises(exception.ForbiddenAction, - self.role_api.update_role, - self.role_member['id'], - self.role_member) - - self.assertRaises(exception.ForbiddenAction, - self.role_api.delete_role, - self.role_member['id']) - - def test_role_filter(self): - role_ref = self.role_api.get_role(self.role_member['id']) - self.assertDictEqual(role_ref, self.role_member) - - self.config_fixture.config(group='ldap', - role_filter='(CN=DOES_NOT_MATCH)') - self.load_backends() - # NOTE(morganfainberg): CONF.ldap.role_filter will not be - # dynamically changed at runtime. This invalidate is a work-around for - # the expectation that it is safe to change config values in tests that - # could affect what the drivers would return up to the manager. This - # solves this assumption when working with aggressive (on-create) - # cache population. 
- self.role_api.get_role.invalidate(self.role_api, - self.role_member['id']) - self.assertRaises(exception.RoleNotFound, - self.role_api.get_role, - self.role_member['id']) - - def test_role_attribute_mapping(self): - self.config_fixture.config(group='ldap', role_name_attribute='ou') - self.clear_database() - self.load_backends() - self.load_fixtures(default_fixtures) - # NOTE(morganfainberg): CONF.ldap.role_name_attribute will not be - # dynamically changed at runtime. This invalidate is a work-around for - # the expectation that it is safe to change config values in tests that - # could affect what the drivers would return up to the manager. This - # solves this assumption when working with aggressive (on-create) - # cache population. - self.role_api.get_role.invalidate(self.role_api, - self.role_member['id']) - role_ref = self.role_api.get_role(self.role_member['id']) - self.assertEqual(self.role_member['id'], role_ref['id']) - self.assertEqual(self.role_member['name'], role_ref['name']) - - self.config_fixture.config(group='ldap', role_name_attribute='sn') - self.load_backends() - # NOTE(morganfainberg): CONF.ldap.role_name_attribute will not be - # dynamically changed at runtime. This invalidate is a work-around for - # the expectation that it is safe to change config values in tests that - # could affect what the drivers would return up to the manager. This - # solves this assumption when working with aggressive (on-create) - # cache population. 
- self.role_api.get_role.invalidate(self.role_api, - self.role_member['id']) - role_ref = self.role_api.get_role(self.role_member['id']) - self.assertEqual(self.role_member['id'], role_ref['id']) - self.assertNotIn('name', role_ref) - - def test_role_attribute_ignore(self): - self.config_fixture.config(group='ldap', - role_attribute_ignore=['name']) - self.clear_database() - self.load_backends() - self.load_fixtures(default_fixtures) - # NOTE(morganfainberg): CONF.ldap.role_attribute_ignore will not be - # dynamically changed at runtime. This invalidate is a work-around for - # the expectation that it is safe to change config values in tests that - # could affect what the drivers would return up to the manager. This - # solves this assumption when working with aggressive (on-create) - # cache population. - self.role_api.get_role.invalidate(self.role_api, - self.role_member['id']) - role_ref = self.role_api.get_role(self.role_member['id']) - self.assertEqual(self.role_member['id'], role_ref['id']) - self.assertNotIn('name', role_ref) - - -class LdapIdentitySqlEverythingElseRole( - core_ldap.BaseBackendLdapIdentitySqlEverythingElse, LdapRoleCommon, - unit.TestCase): - """Test Identity in LDAP, Everything else in SQL.""" - pass - - -class LdapIdentitySqlEverythingElseWithMappingRole( - LdapIdentitySqlEverythingElseRole, - core_ldap.BaseBackendLdapIdentitySqlEverythingElseWithMapping): - """Test ID mapping of default LDAP backend.""" - pass diff --git a/keystone-moon/keystone/tests/unit/backend/role/test_sql.py b/keystone-moon/keystone/tests/unit/backend/role/test_sql.py deleted file mode 100644 index 79ff148a..00000000 --- a/keystone-moon/keystone/tests/unit/backend/role/test_sql.py +++ /dev/null @@ -1,40 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import uuid - -from keystone.common import sql -from keystone import exception -from keystone.tests.unit.backend import core_sql -from keystone.tests.unit.backend.role import core - - -class SqlRoleModels(core_sql.BaseBackendSqlModels): - - def test_role_model(self): - cols = (('id', sql.String, 64), - ('name', sql.String, 255)) - self.assertExpectedSchema('role', cols) - - -class SqlRole(core_sql.BaseBackendSqlTests, core.RoleTests): - - def test_create_null_role_name(self): - role = {'id': uuid.uuid4().hex, - 'name': None} - self.assertRaises(exception.UnexpectedError, - self.role_api.create_role, - role['id'], - role) - self.assertRaises(exception.RoleNotFound, - self.role_api.get_role, - role['id']) diff --git a/keystone-moon/keystone/tests/unit/catalog/__init__.py b/keystone-moon/keystone/tests/unit/catalog/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/catalog/test_backends.py b/keystone-moon/keystone/tests/unit/catalog/test_backends.py deleted file mode 100644 index 55898015..00000000 --- a/keystone-moon/keystone/tests/unit/catalog/test_backends.py +++ /dev/null @@ -1,588 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import copy -import uuid - -import mock -from six.moves import range -from testtools import matchers - -from keystone.catalog import core -from keystone.common import driver_hints -from keystone import exception -from keystone.tests import unit - - -class CatalogTests(object): - - _legacy_endpoint_id_in_endpoint = True - _enabled_default_to_true_when_creating_endpoint = False - - def test_region_crud(self): - # create - region_id = '0' * 255 - new_region = unit.new_region_ref(id=region_id) - res = self.catalog_api.create_region(new_region) - - # Ensure that we don't need to have a - # parent_region_id in the original supplied - # ref dict, but that it will be returned from - # the endpoint, with None value. - expected_region = new_region.copy() - expected_region['parent_region_id'] = None - self.assertDictEqual(expected_region, res) - - # Test adding another region with the one above - # as its parent. We will check below whether deleting - # the parent successfully deletes any child regions. 
- parent_region_id = region_id - new_region = unit.new_region_ref(parent_region_id=parent_region_id) - region_id = new_region['id'] - res = self.catalog_api.create_region(new_region) - self.assertDictEqual(new_region, res) - - # list - regions = self.catalog_api.list_regions() - self.assertThat(regions, matchers.HasLength(2)) - region_ids = [x['id'] for x in regions] - self.assertIn(parent_region_id, region_ids) - self.assertIn(region_id, region_ids) - - # update - region_desc_update = {'description': uuid.uuid4().hex} - res = self.catalog_api.update_region(region_id, region_desc_update) - expected_region = new_region.copy() - expected_region['description'] = region_desc_update['description'] - self.assertDictEqual(expected_region, res) - - # delete - self.catalog_api.delete_region(parent_region_id) - self.assertRaises(exception.RegionNotFound, - self.catalog_api.delete_region, - parent_region_id) - self.assertRaises(exception.RegionNotFound, - self.catalog_api.get_region, - parent_region_id) - # Ensure the child is also gone... 
- self.assertRaises(exception.RegionNotFound, - self.catalog_api.get_region, - region_id) - - def _create_region_with_parent_id(self, parent_id=None): - new_region = unit.new_region_ref(parent_region_id=parent_id) - self.catalog_api.create_region(new_region) - return new_region - - def test_list_regions_filtered_by_parent_region_id(self): - new_region = self._create_region_with_parent_id() - parent_id = new_region['id'] - new_region = self._create_region_with_parent_id(parent_id) - new_region = self._create_region_with_parent_id(parent_id) - - # filter by parent_region_id - hints = driver_hints.Hints() - hints.add_filter('parent_region_id', parent_id) - regions = self.catalog_api.list_regions(hints) - for region in regions: - self.assertEqual(parent_id, region['parent_region_id']) - - @unit.skip_if_cache_disabled('catalog') - def test_cache_layer_region_crud(self): - new_region = unit.new_region_ref() - region_id = new_region['id'] - self.catalog_api.create_region(new_region.copy()) - updated_region = copy.deepcopy(new_region) - updated_region['description'] = uuid.uuid4().hex - # cache the result - self.catalog_api.get_region(region_id) - # update the region bypassing catalog_api - self.catalog_api.driver.update_region(region_id, updated_region) - self.assertDictContainsSubset(new_region, - self.catalog_api.get_region(region_id)) - self.catalog_api.get_region.invalidate(self.catalog_api, region_id) - self.assertDictContainsSubset(updated_region, - self.catalog_api.get_region(region_id)) - # delete the region - self.catalog_api.driver.delete_region(region_id) - # still get the old region - self.assertDictContainsSubset(updated_region, - self.catalog_api.get_region(region_id)) - self.catalog_api.get_region.invalidate(self.catalog_api, region_id) - self.assertRaises(exception.RegionNotFound, - self.catalog_api.get_region, region_id) - - @unit.skip_if_cache_disabled('catalog') - def test_invalidate_cache_when_updating_region(self): - new_region = unit.new_region_ref() 
- region_id = new_region['id'] - self.catalog_api.create_region(new_region) - - # cache the region - self.catalog_api.get_region(region_id) - - # update the region via catalog_api - new_description = {'description': uuid.uuid4().hex} - self.catalog_api.update_region(region_id, new_description) - - # assert that we can get the new region - current_region = self.catalog_api.get_region(region_id) - self.assertEqual(new_description['description'], - current_region['description']) - - def test_create_region_with_duplicate_id(self): - new_region = unit.new_region_ref() - self.catalog_api.create_region(new_region) - # Create region again with duplicate id - self.assertRaises(exception.Conflict, - self.catalog_api.create_region, - new_region) - - def test_get_region_returns_not_found(self): - self.assertRaises(exception.RegionNotFound, - self.catalog_api.get_region, - uuid.uuid4().hex) - - def test_delete_region_returns_not_found(self): - self.assertRaises(exception.RegionNotFound, - self.catalog_api.delete_region, - uuid.uuid4().hex) - - def test_create_region_invalid_parent_region_returns_not_found(self): - new_region = unit.new_region_ref(parent_region_id='nonexisting') - self.assertRaises(exception.RegionNotFound, - self.catalog_api.create_region, - new_region) - - def test_avoid_creating_circular_references_in_regions_update(self): - region_one = self._create_region_with_parent_id() - - # self circle: region_one->region_one - self.assertRaises(exception.CircularRegionHierarchyError, - self.catalog_api.update_region, - region_one['id'], - {'parent_region_id': region_one['id']}) - - # region_one->region_two->region_one - region_two = self._create_region_with_parent_id(region_one['id']) - self.assertRaises(exception.CircularRegionHierarchyError, - self.catalog_api.update_region, - region_one['id'], - {'parent_region_id': region_two['id']}) - - # region_one region_two->region_three->region_four->region_two - region_three = 
self._create_region_with_parent_id(region_two['id']) - region_four = self._create_region_with_parent_id(region_three['id']) - self.assertRaises(exception.CircularRegionHierarchyError, - self.catalog_api.update_region, - region_two['id'], - {'parent_region_id': region_four['id']}) - - @mock.patch.object(core.CatalogDriverV8, - "_ensure_no_circle_in_hierarchical_regions") - def test_circular_regions_can_be_deleted(self, mock_ensure_on_circle): - # turn off the enforcement so that cycles can be created for the test - mock_ensure_on_circle.return_value = None - - region_one = self._create_region_with_parent_id() - - # self circle: region_one->region_one - self.catalog_api.update_region( - region_one['id'], - {'parent_region_id': region_one['id']}) - self.catalog_api.delete_region(region_one['id']) - self.assertRaises(exception.RegionNotFound, - self.catalog_api.get_region, - region_one['id']) - - # region_one->region_two->region_one - region_one = self._create_region_with_parent_id() - region_two = self._create_region_with_parent_id(region_one['id']) - self.catalog_api.update_region( - region_one['id'], - {'parent_region_id': region_two['id']}) - self.catalog_api.delete_region(region_one['id']) - self.assertRaises(exception.RegionNotFound, - self.catalog_api.get_region, - region_one['id']) - self.assertRaises(exception.RegionNotFound, - self.catalog_api.get_region, - region_two['id']) - - # region_one->region_two->region_three->region_one - region_one = self._create_region_with_parent_id() - region_two = self._create_region_with_parent_id(region_one['id']) - region_three = self._create_region_with_parent_id(region_two['id']) - self.catalog_api.update_region( - region_one['id'], - {'parent_region_id': region_three['id']}) - self.catalog_api.delete_region(region_two['id']) - self.assertRaises(exception.RegionNotFound, - self.catalog_api.get_region, - region_two['id']) - self.assertRaises(exception.RegionNotFound, - self.catalog_api.get_region, - region_one['id']) - 
self.assertRaises(exception.RegionNotFound, - self.catalog_api.get_region, - region_three['id']) - - def test_service_crud(self): - # create - new_service = unit.new_service_ref() - service_id = new_service['id'] - res = self.catalog_api.create_service(service_id, new_service) - self.assertDictEqual(new_service, res) - - # list - services = self.catalog_api.list_services() - self.assertIn(service_id, [x['id'] for x in services]) - - # update - service_name_update = {'name': uuid.uuid4().hex} - res = self.catalog_api.update_service(service_id, service_name_update) - expected_service = new_service.copy() - expected_service['name'] = service_name_update['name'] - self.assertDictEqual(expected_service, res) - - # delete - self.catalog_api.delete_service(service_id) - self.assertRaises(exception.ServiceNotFound, - self.catalog_api.delete_service, - service_id) - self.assertRaises(exception.ServiceNotFound, - self.catalog_api.get_service, - service_id) - - def _create_random_service(self): - new_service = unit.new_service_ref() - service_id = new_service['id'] - return self.catalog_api.create_service(service_id, new_service) - - def test_service_filtering(self): - target_service = self._create_random_service() - unrelated_service1 = self._create_random_service() - unrelated_service2 = self._create_random_service() - - # filter by type - hint_for_type = driver_hints.Hints() - hint_for_type.add_filter(name="type", value=target_service['type']) - services = self.catalog_api.list_services(hint_for_type) - - self.assertEqual(1, len(services)) - filtered_service = services[0] - self.assertEqual(target_service['type'], filtered_service['type']) - self.assertEqual(target_service['id'], filtered_service['id']) - - # filter should have been removed, since it was already used by the - # backend - self.assertEqual(0, len(hint_for_type.filters)) - - # the backend shouldn't filter by name, since this is handled by the - # front end - hint_for_name = driver_hints.Hints() - 
hint_for_name.add_filter(name="name", value=target_service['name']) - services = self.catalog_api.list_services(hint_for_name) - - self.assertEqual(3, len(services)) - - # filter should still be there, since it wasn't used by the backend - self.assertEqual(1, len(hint_for_name.filters)) - - self.catalog_api.delete_service(target_service['id']) - self.catalog_api.delete_service(unrelated_service1['id']) - self.catalog_api.delete_service(unrelated_service2['id']) - - @unit.skip_if_cache_disabled('catalog') - def test_cache_layer_service_crud(self): - new_service = unit.new_service_ref() - service_id = new_service['id'] - res = self.catalog_api.create_service(service_id, new_service) - self.assertDictEqual(new_service, res) - self.catalog_api.get_service(service_id) - updated_service = copy.deepcopy(new_service) - updated_service['description'] = uuid.uuid4().hex - # update bypassing catalog api - self.catalog_api.driver.update_service(service_id, updated_service) - self.assertDictContainsSubset(new_service, - self.catalog_api.get_service(service_id)) - self.catalog_api.get_service.invalidate(self.catalog_api, service_id) - self.assertDictContainsSubset(updated_service, - self.catalog_api.get_service(service_id)) - - # delete bypassing catalog api - self.catalog_api.driver.delete_service(service_id) - self.assertDictContainsSubset(updated_service, - self.catalog_api.get_service(service_id)) - self.catalog_api.get_service.invalidate(self.catalog_api, service_id) - self.assertRaises(exception.ServiceNotFound, - self.catalog_api.delete_service, - service_id) - self.assertRaises(exception.ServiceNotFound, - self.catalog_api.get_service, - service_id) - - @unit.skip_if_cache_disabled('catalog') - def test_invalidate_cache_when_updating_service(self): - new_service = unit.new_service_ref() - service_id = new_service['id'] - self.catalog_api.create_service(service_id, new_service) - - # cache the service - self.catalog_api.get_service(service_id) - - # update the service via 
catalog api - new_type = {'type': uuid.uuid4().hex} - self.catalog_api.update_service(service_id, new_type) - - # assert that we can get the new service - current_service = self.catalog_api.get_service(service_id) - self.assertEqual(new_type['type'], current_service['type']) - - def test_delete_service_with_endpoint(self): - # create a service - service = unit.new_service_ref() - self.catalog_api.create_service(service['id'], service) - - # create an endpoint attached to the service - endpoint = unit.new_endpoint_ref(service_id=service['id'], - region_id=None) - self.catalog_api.create_endpoint(endpoint['id'], endpoint) - - # deleting the service should also delete the endpoint - self.catalog_api.delete_service(service['id']) - self.assertRaises(exception.EndpointNotFound, - self.catalog_api.get_endpoint, - endpoint['id']) - self.assertRaises(exception.EndpointNotFound, - self.catalog_api.delete_endpoint, - endpoint['id']) - - def test_cache_layer_delete_service_with_endpoint(self): - service = unit.new_service_ref() - self.catalog_api.create_service(service['id'], service) - - # create an endpoint attached to the service - endpoint = unit.new_endpoint_ref(service_id=service['id'], - region_id=None) - self.catalog_api.create_endpoint(endpoint['id'], endpoint) - # cache the result - self.catalog_api.get_service(service['id']) - self.catalog_api.get_endpoint(endpoint['id']) - # delete the service bypassing catalog api - self.catalog_api.driver.delete_service(service['id']) - self.assertDictContainsSubset(endpoint, - self.catalog_api. - get_endpoint(endpoint['id'])) - self.assertDictContainsSubset(service, - self.catalog_api. 
- get_service(service['id'])) - self.catalog_api.get_endpoint.invalidate(self.catalog_api, - endpoint['id']) - self.assertRaises(exception.EndpointNotFound, - self.catalog_api.get_endpoint, - endpoint['id']) - self.assertRaises(exception.EndpointNotFound, - self.catalog_api.delete_endpoint, - endpoint['id']) - # multiple endpoints associated with a service - second_endpoint = unit.new_endpoint_ref(service_id=service['id'], - region_id=None) - self.catalog_api.create_service(service['id'], service) - self.catalog_api.create_endpoint(endpoint['id'], endpoint) - self.catalog_api.create_endpoint(second_endpoint['id'], - second_endpoint) - self.catalog_api.delete_service(service['id']) - self.assertRaises(exception.EndpointNotFound, - self.catalog_api.get_endpoint, - endpoint['id']) - self.assertRaises(exception.EndpointNotFound, - self.catalog_api.delete_endpoint, - endpoint['id']) - self.assertRaises(exception.EndpointNotFound, - self.catalog_api.get_endpoint, - second_endpoint['id']) - self.assertRaises(exception.EndpointNotFound, - self.catalog_api.delete_endpoint, - second_endpoint['id']) - - def test_get_service_returns_not_found(self): - self.assertRaises(exception.ServiceNotFound, - self.catalog_api.get_service, - uuid.uuid4().hex) - - def test_delete_service_returns_not_found(self): - self.assertRaises(exception.ServiceNotFound, - self.catalog_api.delete_service, - uuid.uuid4().hex) - - def test_create_endpoint_nonexistent_service(self): - endpoint = unit.new_endpoint_ref(service_id=uuid.uuid4().hex, - region_id=None) - self.assertRaises(exception.ValidationError, - self.catalog_api.create_endpoint, - endpoint['id'], - endpoint) - - def test_update_endpoint_nonexistent_service(self): - dummy_service, enabled_endpoint, dummy_disabled_endpoint = ( - self._create_endpoints()) - new_endpoint = unit.new_endpoint_ref(service_id=uuid.uuid4().hex) - self.assertRaises(exception.ValidationError, - self.catalog_api.update_endpoint, - enabled_endpoint['id'], - 
new_endpoint) - - def test_create_endpoint_nonexistent_region(self): - service = unit.new_service_ref() - self.catalog_api.create_service(service['id'], service) - - endpoint = unit.new_endpoint_ref(service_id=service['id']) - self.assertRaises(exception.ValidationError, - self.catalog_api.create_endpoint, - endpoint['id'], - endpoint) - - def test_update_endpoint_nonexistent_region(self): - dummy_service, enabled_endpoint, dummy_disabled_endpoint = ( - self._create_endpoints()) - new_endpoint = unit.new_endpoint_ref(service_id=uuid.uuid4().hex) - self.assertRaises(exception.ValidationError, - self.catalog_api.update_endpoint, - enabled_endpoint['id'], - new_endpoint) - - def test_get_endpoint_returns_not_found(self): - self.assertRaises(exception.EndpointNotFound, - self.catalog_api.get_endpoint, - uuid.uuid4().hex) - - def test_delete_endpoint_returns_not_found(self): - self.assertRaises(exception.EndpointNotFound, - self.catalog_api.delete_endpoint, - uuid.uuid4().hex) - - def test_create_endpoint(self): - service = unit.new_service_ref() - self.catalog_api.create_service(service['id'], service) - - endpoint = unit.new_endpoint_ref(service_id=service['id'], - region_id=None) - self.catalog_api.create_endpoint(endpoint['id'], endpoint.copy()) - - def test_update_endpoint(self): - dummy_service_ref, endpoint_ref, dummy_disabled_endpoint_ref = ( - self._create_endpoints()) - res = self.catalog_api.update_endpoint(endpoint_ref['id'], - {'interface': 'private'}) - expected_endpoint = endpoint_ref.copy() - expected_endpoint['enabled'] = True - expected_endpoint['interface'] = 'private' - if self._legacy_endpoint_id_in_endpoint: - expected_endpoint['legacy_endpoint_id'] = None - if self._enabled_default_to_true_when_creating_endpoint: - expected_endpoint['enabled'] = True - self.assertDictEqual(expected_endpoint, res) - - def _create_endpoints(self): - # Creates a service and 2 endpoints for the service in the same region. 
- # The 'public' interface is enabled and the 'internal' interface is - # disabled. - - def create_endpoint(service_id, region, **kwargs): - ref = unit.new_endpoint_ref( - service_id=service_id, - region_id=region, - url='http://localhost/%s' % uuid.uuid4().hex, - **kwargs) - - self.catalog_api.create_endpoint(ref['id'], ref) - return ref - - # Create a service for use with the endpoints. - service_ref = unit.new_service_ref() - service_id = service_ref['id'] - self.catalog_api.create_service(service_id, service_ref) - - region = unit.new_region_ref() - self.catalog_api.create_region(region) - - # Create endpoints - enabled_endpoint_ref = create_endpoint(service_id, region['id']) - disabled_endpoint_ref = create_endpoint( - service_id, region['id'], enabled=False, interface='internal') - - return service_ref, enabled_endpoint_ref, disabled_endpoint_ref - - def test_list_endpoints(self): - service = unit.new_service_ref() - self.catalog_api.create_service(service['id'], service) - - expected_ids = set([uuid.uuid4().hex for _ in range(3)]) - for endpoint_id in expected_ids: - endpoint = unit.new_endpoint_ref(service_id=service['id'], - id=endpoint_id, - region_id=None) - self.catalog_api.create_endpoint(endpoint['id'], endpoint) - - endpoints = self.catalog_api.list_endpoints() - self.assertEqual(expected_ids, set(e['id'] for e in endpoints)) - - def test_get_catalog_endpoint_disabled(self): - """Get back only enabled endpoints when get the v2 catalog.""" - service_ref, enabled_endpoint_ref, dummy_disabled_endpoint_ref = ( - self._create_endpoints()) - - user_id = uuid.uuid4().hex - project_id = uuid.uuid4().hex - catalog = self.catalog_api.get_catalog(user_id, project_id) - - exp_entry = { - 'id': enabled_endpoint_ref['id'], - 'name': service_ref['name'], - 'publicURL': enabled_endpoint_ref['url'], - } - - region = enabled_endpoint_ref['region_id'] - self.assertEqual(exp_entry, catalog[region][service_ref['type']]) - - def 
test_get_v3_catalog_endpoint_disabled(self): - """Get back only enabled endpoints when get the v3 catalog.""" - enabled_endpoint_ref = self._create_endpoints()[1] - - user_id = uuid.uuid4().hex - project_id = uuid.uuid4().hex - catalog = self.catalog_api.get_v3_catalog(user_id, project_id) - - endpoint_ids = [x['id'] for x in catalog[0]['endpoints']] - self.assertEqual([enabled_endpoint_ref['id']], endpoint_ids) - - @unit.skip_if_cache_disabled('catalog') - def test_invalidate_cache_when_updating_endpoint(self): - service = unit.new_service_ref() - self.catalog_api.create_service(service['id'], service) - - # create an endpoint attached to the service - endpoint = unit.new_endpoint_ref(service_id=service['id'], - region_id=None) - self.catalog_api.create_endpoint(endpoint['id'], endpoint) - - # cache the endpoint - self.catalog_api.get_endpoint(endpoint['id']) - - # update the endpoint via catalog api - new_url = {'url': uuid.uuid4().hex} - self.catalog_api.update_endpoint(endpoint['id'], new_url) - - # assert that we can get the new endpoint - current_endpoint = self.catalog_api.get_endpoint(endpoint['id']) - self.assertEqual(new_url['url'], current_endpoint['url']) diff --git a/keystone-moon/keystone/tests/unit/catalog/test_core.py b/keystone-moon/keystone/tests/unit/catalog/test_core.py deleted file mode 100644 index b04b0bb7..00000000 --- a/keystone-moon/keystone/tests/unit/catalog/test_core.py +++ /dev/null @@ -1,100 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -import uuid - -from keystone.catalog import core -from keystone import exception -from keystone.tests import unit - - -class FormatUrlTests(unit.BaseTestCase): - - def test_successful_formatting(self): - url_template = ('http://$(public_bind_host)s:$(admin_port)d/' - '$(tenant_id)s/$(user_id)s/$(project_id)s') - project_id = uuid.uuid4().hex - values = {'public_bind_host': 'server', 'admin_port': 9090, - 'tenant_id': 'A', 'user_id': 'B', 'project_id': project_id} - actual_url = core.format_url(url_template, values) - - expected_url = 'http://server:9090/A/B/%s' % (project_id,) - self.assertEqual(expected_url, actual_url) - - def test_raises_malformed_on_missing_key(self): - self.assertRaises(exception.MalformedEndpoint, - core.format_url, - "http://$(public_bind_host)s/$(public_port)d", - {"public_bind_host": "1"}) - - def test_raises_malformed_on_wrong_type(self): - self.assertRaises(exception.MalformedEndpoint, - core.format_url, - "http://$(public_bind_host)d", - {"public_bind_host": "something"}) - - def test_raises_malformed_on_incomplete_format(self): - self.assertRaises(exception.MalformedEndpoint, - core.format_url, - "http://$(public_bind_host)", - {"public_bind_host": "1"}) - - def test_formatting_a_non_string(self): - def _test(url_template): - self.assertRaises(exception.MalformedEndpoint, - core.format_url, - url_template, - {}) - - _test(None) - _test(object()) - - def test_substitution_with_key_not_allowed(self): - # If the url template contains a substitution that's not in the allowed - # list then MalformedEndpoint is raised. - # For example, admin_token isn't allowed. 
- url_template = ('http://$(public_bind_host)s:$(public_port)d/' - '$(tenant_id)s/$(user_id)s/$(admin_token)s') - values = {'public_bind_host': 'server', 'public_port': 9090, - 'tenant_id': 'A', 'user_id': 'B', 'admin_token': 'C'} - self.assertRaises(exception.MalformedEndpoint, - core.format_url, - url_template, - values) - - def test_substitution_with_allowed_tenant_keyerror(self): - # No value of 'tenant_id' is passed into url_template. - # mod: format_url will return None instead of raising - # "MalformedEndpoint" exception. - # This is intentional behavior since we don't want to skip - # all the later endpoints once there is an URL of endpoint - # trying to replace 'tenant_id' with None. - url_template = ('http://$(public_bind_host)s:$(admin_port)d/' - '$(tenant_id)s/$(user_id)s') - values = {'public_bind_host': 'server', 'admin_port': 9090, - 'user_id': 'B'} - self.assertIsNone(core.format_url(url_template, values, - silent_keyerror_failures=['tenant_id'])) - - def test_substitution_with_allowed_project_keyerror(self): - # No value of 'project_id' is passed into url_template. - # mod: format_url will return None instead of raising - # "MalformedEndpoint" exception. - # This is intentional behavior since we don't want to skip - # all the later endpoints once there is an URL of endpoint - # trying to replace 'project_id' with None. 
- url_template = ('http://$(public_bind_host)s:$(admin_port)d/' - '$(project_id)s/$(user_id)s') - values = {'public_bind_host': 'server', 'admin_port': 9090, - 'user_id': 'B'} - self.assertIsNone(core.format_url(url_template, values, - silent_keyerror_failures=['project_id'])) diff --git a/keystone-moon/keystone/tests/unit/common/__init__.py b/keystone-moon/keystone/tests/unit/common/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/common/test_authorization.py b/keystone-moon/keystone/tests/unit/common/test_authorization.py deleted file mode 100644 index 73ddbc61..00000000 --- a/keystone-moon/keystone/tests/unit/common/test_authorization.py +++ /dev/null @@ -1,161 +0,0 @@ -# Copyright 2015 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -import copy -import uuid - -from keystone.common import authorization -from keystone import exception -from keystone.federation import constants as federation_constants -from keystone.models import token_model -from keystone.tests import unit -from keystone.tests.unit import test_token_provider - - -class TestTokenToAuthContext(unit.BaseTestCase): - def test_token_is_project_scoped_with_trust(self): - # Check auth_context result when the token is project-scoped and has - # trust info. - - # SAMPLE_V3_TOKEN has OS-TRUST:trust in it. 
- token_data = test_token_provider.SAMPLE_V3_TOKEN - token = token_model.KeystoneToken(token_id=uuid.uuid4().hex, - token_data=token_data) - - auth_context = authorization.token_to_auth_context(token) - - self.assertEqual(token, auth_context['token']) - self.assertTrue(auth_context['is_delegated_auth']) - self.assertEqual(token_data['token']['user']['id'], - auth_context['user_id']) - self.assertEqual(token_data['token']['user']['domain']['id'], - auth_context['user_domain_id']) - self.assertEqual(token_data['token']['project']['id'], - auth_context['project_id']) - self.assertEqual(token_data['token']['project']['domain']['id'], - auth_context['project_domain_id']) - self.assertNotIn('domain_id', auth_context) - self.assertNotIn('domain_name', auth_context) - self.assertEqual(token_data['token']['OS-TRUST:trust']['id'], - auth_context['trust_id']) - self.assertEqual( - token_data['token']['OS-TRUST:trust']['trustor_user_id'], - auth_context['trustor_id']) - self.assertEqual( - token_data['token']['OS-TRUST:trust']['trustee_user_id'], - auth_context['trustee_id']) - self.assertItemsEqual( - [r['name'] for r in token_data['token']['roles']], - auth_context['roles']) - self.assertIsNone(auth_context['consumer_id']) - self.assertIsNone(auth_context['access_token_id']) - self.assertNotIn('group_ids', auth_context) - - def test_token_is_domain_scoped(self): - # Check contents of auth_context when token is domain-scoped. 
- token_data = copy.deepcopy(test_token_provider.SAMPLE_V3_TOKEN) - del token_data['token']['project'] - - domain_id = uuid.uuid4().hex - domain_name = uuid.uuid4().hex - token_data['token']['domain'] = {'id': domain_id, 'name': domain_name} - - token = token_model.KeystoneToken(token_id=uuid.uuid4().hex, - token_data=token_data) - - auth_context = authorization.token_to_auth_context(token) - - self.assertNotIn('project_id', auth_context) - self.assertNotIn('project_domain_id', auth_context) - - self.assertEqual(domain_id, auth_context['domain_id']) - self.assertEqual(domain_name, auth_context['domain_name']) - - def test_token_is_unscoped(self): - # Check contents of auth_context when the token is unscoped. - token_data = copy.deepcopy(test_token_provider.SAMPLE_V3_TOKEN) - del token_data['token']['project'] - - token = token_model.KeystoneToken(token_id=uuid.uuid4().hex, - token_data=token_data) - - auth_context = authorization.token_to_auth_context(token) - - self.assertNotIn('project_id', auth_context) - self.assertNotIn('project_domain_id', auth_context) - self.assertNotIn('domain_id', auth_context) - self.assertNotIn('domain_name', auth_context) - - def test_token_is_for_federated_user(self): - # When the token is for a federated user then group_ids is in - # auth_context. 
- token_data = copy.deepcopy(test_token_provider.SAMPLE_V3_TOKEN) - - group_ids = [uuid.uuid4().hex for x in range(1, 5)] - - federation_data = {'identity_provider': {'id': uuid.uuid4().hex}, - 'protocol': {'id': 'saml2'}, - 'groups': [{'id': gid} for gid in group_ids]} - token_data['token']['user'][federation_constants.FEDERATION] = ( - federation_data) - - token = token_model.KeystoneToken(token_id=uuid.uuid4().hex, - token_data=token_data) - - auth_context = authorization.token_to_auth_context(token) - - self.assertItemsEqual(group_ids, auth_context['group_ids']) - - def test_oauth_variables_set_for_oauth_token(self): - token_data = copy.deepcopy(test_token_provider.SAMPLE_V3_TOKEN) - access_token_id = uuid.uuid4().hex - consumer_id = uuid.uuid4().hex - token_data['token']['OS-OAUTH1'] = {'access_token_id': access_token_id, - 'consumer_id': consumer_id} - token = token_model.KeystoneToken(token_id=uuid.uuid4().hex, - token_data=token_data) - - auth_context = authorization.token_to_auth_context(token) - - self.assertEqual(access_token_id, auth_context['access_token_id']) - self.assertEqual(consumer_id, auth_context['consumer_id']) - - def test_oauth_variables_not_set(self): - token_data = copy.deepcopy(test_token_provider.SAMPLE_V3_TOKEN) - token = token_model.KeystoneToken(token_id=uuid.uuid4().hex, - token_data=token_data) - - auth_context = authorization.token_to_auth_context(token) - - self.assertIsNone(auth_context['access_token_id']) - self.assertIsNone(auth_context['consumer_id']) - - def test_token_is_not_KeystoneToken_raises_exception(self): - # If the token isn't a KeystoneToken then an UnexpectedError exception - # is raised. - self.assertRaises(exception.UnexpectedError, - authorization.token_to_auth_context, {}) - - def test_user_id_missing_in_token_raises_exception(self): - # If there's no user ID in the token then an Unauthorized - # exception is raised. 
- token_data = copy.deepcopy(test_token_provider.SAMPLE_V3_TOKEN) - del token_data['token']['user']['id'] - - token = token_model.KeystoneToken(token_id=uuid.uuid4().hex, - token_data=token_data) - - self.assertRaises(exception.Unauthorized, - authorization.token_to_auth_context, token) diff --git a/keystone-moon/keystone/tests/unit/common/test_base64utils.py b/keystone-moon/keystone/tests/unit/common/test_base64utils.py deleted file mode 100644 index 355a2e03..00000000 --- a/keystone-moon/keystone/tests/unit/common/test_base64utils.py +++ /dev/null @@ -1,208 +0,0 @@ -# Copyright 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone.common import base64utils -from keystone.tests import unit - -base64_alphabet = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ' - 'abcdefghijklmnopqrstuvwxyz' - '0123456789' - '+/=') # includes pad char - -base64url_alphabet = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ' - 'abcdefghijklmnopqrstuvwxyz' - '0123456789' - '-_=') # includes pad char - - -class TestValid(unit.BaseTestCase): - def test_valid_base64(self): - self.assertTrue(base64utils.is_valid_base64('+/==')) - self.assertTrue(base64utils.is_valid_base64('+/+=')) - self.assertTrue(base64utils.is_valid_base64('+/+/')) - - self.assertFalse(base64utils.is_valid_base64('-_==')) - self.assertFalse(base64utils.is_valid_base64('-_-=')) - self.assertFalse(base64utils.is_valid_base64('-_-_')) - - self.assertTrue(base64utils.is_valid_base64('abcd')) - self.assertFalse(base64utils.is_valid_base64('abcde')) - self.assertFalse(base64utils.is_valid_base64('abcde==')) - self.assertFalse(base64utils.is_valid_base64('abcdef')) - self.assertTrue(base64utils.is_valid_base64('abcdef==')) - self.assertFalse(base64utils.is_valid_base64('abcdefg')) - self.assertTrue(base64utils.is_valid_base64('abcdefg=')) - self.assertTrue(base64utils.is_valid_base64('abcdefgh')) - - self.assertFalse(base64utils.is_valid_base64('-_==')) - - def test_valid_base64url(self): - self.assertFalse(base64utils.is_valid_base64url('+/==')) - self.assertFalse(base64utils.is_valid_base64url('+/+=')) - self.assertFalse(base64utils.is_valid_base64url('+/+/')) - - self.assertTrue(base64utils.is_valid_base64url('-_==')) - self.assertTrue(base64utils.is_valid_base64url('-_-=')) - self.assertTrue(base64utils.is_valid_base64url('-_-_')) - - self.assertTrue(base64utils.is_valid_base64url('abcd')) - self.assertFalse(base64utils.is_valid_base64url('abcde')) - self.assertFalse(base64utils.is_valid_base64url('abcde==')) - self.assertFalse(base64utils.is_valid_base64url('abcdef')) - self.assertTrue(base64utils.is_valid_base64url('abcdef==')) - 
self.assertFalse(base64utils.is_valid_base64url('abcdefg')) - self.assertTrue(base64utils.is_valid_base64url('abcdefg=')) - self.assertTrue(base64utils.is_valid_base64url('abcdefgh')) - - self.assertTrue(base64utils.is_valid_base64url('-_==')) - - -class TestBase64Padding(unit.BaseTestCase): - - def test_filter(self): - self.assertEqual('', base64utils.filter_formatting('')) - self.assertEqual('', base64utils.filter_formatting(' ')) - self.assertEqual('a', base64utils.filter_formatting('a')) - self.assertEqual('a', base64utils.filter_formatting(' a')) - self.assertEqual('a', base64utils.filter_formatting('a ')) - self.assertEqual('ab', base64utils.filter_formatting('ab')) - self.assertEqual('ab', base64utils.filter_formatting(' ab')) - self.assertEqual('ab', base64utils.filter_formatting('ab ')) - self.assertEqual('ab', base64utils.filter_formatting('a b')) - self.assertEqual('ab', base64utils.filter_formatting(' a b')) - self.assertEqual('ab', base64utils.filter_formatting('a b ')) - self.assertEqual('ab', base64utils.filter_formatting('a\nb\n ')) - - text = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ' - 'abcdefghijklmnopqrstuvwxyz' - '0123456789' - '+/=') - self.assertEqual(base64_alphabet, - base64utils.filter_formatting(text)) - - text = (' ABCDEFGHIJKLMNOPQRSTUVWXYZ\n' - ' abcdefghijklmnopqrstuvwxyz\n' - '\t\f\r' - ' 0123456789\n' - ' +/=') - self.assertEqual(base64_alphabet, - base64utils.filter_formatting(text)) - self.assertEqual(base64url_alphabet, - base64utils.base64_to_base64url(base64_alphabet)) - - text = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ' - 'abcdefghijklmnopqrstuvwxyz' - '0123456789' - '-_=') - self.assertEqual(base64url_alphabet, - base64utils.filter_formatting(text)) - - text = (' ABCDEFGHIJKLMNOPQRSTUVWXYZ\n' - ' abcdefghijklmnopqrstuvwxyz\n' - '\t\f\r' - ' 0123456789\n' - '-_=') - self.assertEqual(base64url_alphabet, - base64utils.filter_formatting(text)) - - def test_alphabet_conversion(self): - self.assertEqual(base64url_alphabet, - 
base64utils.base64_to_base64url(base64_alphabet)) - - self.assertEqual(base64_alphabet, - base64utils.base64url_to_base64(base64url_alphabet)) - - def test_is_padded(self): - self.assertTrue(base64utils.base64_is_padded('ABCD')) - self.assertTrue(base64utils.base64_is_padded('ABC=')) - self.assertTrue(base64utils.base64_is_padded('AB==')) - - self.assertTrue(base64utils.base64_is_padded('1234ABCD')) - self.assertTrue(base64utils.base64_is_padded('1234ABC=')) - self.assertTrue(base64utils.base64_is_padded('1234AB==')) - - self.assertFalse(base64utils.base64_is_padded('ABC')) - self.assertFalse(base64utils.base64_is_padded('AB')) - self.assertFalse(base64utils.base64_is_padded('A')) - self.assertFalse(base64utils.base64_is_padded('')) - - self.assertRaises(base64utils.InvalidBase64Error, - base64utils.base64_is_padded, '=') - - self.assertRaises(base64utils.InvalidBase64Error, - base64utils.base64_is_padded, 'AB=C') - - self.assertRaises(base64utils.InvalidBase64Error, - base64utils.base64_is_padded, 'AB=') - - self.assertRaises(base64utils.InvalidBase64Error, - base64utils.base64_is_padded, 'ABCD=') - - self.assertRaises(ValueError, base64utils.base64_is_padded, - 'ABC', pad='==') - self.assertRaises(base64utils.InvalidBase64Error, - base64utils.base64_is_padded, 'A=BC') - - def test_strip_padding(self): - self.assertEqual('ABCD', base64utils.base64_strip_padding('ABCD')) - self.assertEqual('ABC', base64utils.base64_strip_padding('ABC=')) - self.assertEqual('AB', base64utils.base64_strip_padding('AB==')) - self.assertRaises(ValueError, base64utils.base64_strip_padding, - 'ABC=', pad='==') - self.assertEqual('ABC', base64utils.base64_strip_padding('ABC')) - - def test_assure_padding(self): - self.assertEqual('ABCD', base64utils.base64_assure_padding('ABCD')) - self.assertEqual('ABC=', base64utils.base64_assure_padding('ABC')) - self.assertEqual('ABC=', base64utils.base64_assure_padding('ABC=')) - self.assertEqual('AB==', base64utils.base64_assure_padding('AB')) - 
self.assertEqual('AB==', base64utils.base64_assure_padding('AB==')) - self.assertRaises(ValueError, base64utils.base64_assure_padding, - 'ABC', pad='==') - - def test_base64_percent_encoding(self): - self.assertEqual('ABCD', base64utils.base64url_percent_encode('ABCD')) - self.assertEqual('ABC%3D', - base64utils.base64url_percent_encode('ABC=')) - self.assertEqual('AB%3D%3D', - base64utils.base64url_percent_encode('AB==')) - - self.assertEqual('ABCD', base64utils.base64url_percent_decode('ABCD')) - self.assertEqual('ABC=', - base64utils.base64url_percent_decode('ABC%3D')) - self.assertEqual('AB==', - base64utils.base64url_percent_decode('AB%3D%3D')) - self.assertRaises(base64utils.InvalidBase64Error, - base64utils.base64url_percent_encode, 'chars') - self.assertRaises(base64utils.InvalidBase64Error, - base64utils.base64url_percent_decode, 'AB%3D%3') - - -class TestTextWrap(unit.BaseTestCase): - - def test_wrapping(self): - raw_text = 'abcdefgh' - wrapped_text = 'abc\ndef\ngh\n' - - self.assertEqual(wrapped_text, - base64utils.base64_wrap(raw_text, width=3)) - - t = '\n'.join(base64utils.base64_wrap_iter(raw_text, width=3)) + '\n' - self.assertEqual(wrapped_text, t) - - raw_text = 'abcdefgh' - wrapped_text = 'abcd\nefgh\n' - - self.assertEqual(wrapped_text, - base64utils.base64_wrap(raw_text, width=4)) diff --git a/keystone-moon/keystone/tests/unit/common/test_connection_pool.py b/keystone-moon/keystone/tests/unit/common/test_connection_pool.py deleted file mode 100644 index 3813e033..00000000 --- a/keystone-moon/keystone/tests/unit/common/test_connection_pool.py +++ /dev/null @@ -1,135 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import threading -import time - -import mock -import six -from six.moves import queue -import testtools -from testtools import matchers - -from keystone.common.cache import _memcache_pool -from keystone import exception -from keystone.tests.unit import core - - -class _TestConnectionPool(_memcache_pool.ConnectionPool): - destroyed_value = 'destroyed' - - def _create_connection(self): - return mock.MagicMock() - - def _destroy_connection(self, conn): - conn(self.destroyed_value) - - -class TestConnectionPool(core.TestCase): - def setUp(self): - super(TestConnectionPool, self).setUp() - self.unused_timeout = 10 - self.maxsize = 2 - self.connection_pool = _TestConnectionPool( - maxsize=self.maxsize, - unused_timeout=self.unused_timeout) - self.addCleanup(self.cleanup_instance('connection_pool')) - - def test_get_context_manager(self): - self.assertThat(self.connection_pool.queue, matchers.HasLength(0)) - with self.connection_pool.acquire() as conn: - self.assertEqual(1, self.connection_pool._acquired) - self.assertEqual(0, self.connection_pool._acquired) - self.assertThat(self.connection_pool.queue, matchers.HasLength(1)) - self.assertEqual(conn, self.connection_pool.queue[0].connection) - - def test_cleanup_pool(self): - self.test_get_context_manager() - newtime = time.time() + self.unused_timeout * 2 - non_expired_connection = _memcache_pool._PoolItem( - ttl=(newtime * 2), - connection=mock.MagicMock()) - self.connection_pool.queue.append(non_expired_connection) - self.assertThat(self.connection_pool.queue, matchers.HasLength(2)) - with 
mock.patch.object(time, 'time', return_value=newtime): - conn = self.connection_pool.queue[0].connection - with self.connection_pool.acquire(): - pass - conn.assert_has_calls( - [mock.call(self.connection_pool.destroyed_value)]) - self.assertThat(self.connection_pool.queue, matchers.HasLength(1)) - self.assertEqual(0, non_expired_connection.connection.call_count) - - def test_acquire_conn_exception_returns_acquired_count(self): - class TestException(Exception): - pass - - with mock.patch.object(_TestConnectionPool, '_create_connection', - side_effect=TestException): - with testtools.ExpectedException(TestException): - with self.connection_pool.acquire(): - pass - self.assertThat(self.connection_pool.queue, - matchers.HasLength(0)) - self.assertEqual(0, self.connection_pool._acquired) - - def test_connection_pool_limits_maximum_connections(self): - # NOTE(morganfainberg): To ensure we don't lockup tests until the - # job limit, explicitly call .get_nowait() and .put_nowait() in this - # case. - conn1 = self.connection_pool.get_nowait() - conn2 = self.connection_pool.get_nowait() - - # Use a nowait version to raise an Empty exception indicating we would - # not get another connection until one is placed back into the queue. - self.assertRaises(queue.Empty, self.connection_pool.get_nowait) - - # Place the connections back into the pool. - self.connection_pool.put_nowait(conn1) - self.connection_pool.put_nowait(conn2) - - # Make sure we can get a connection out of the pool again. 
- self.connection_pool.get_nowait() - - def test_connection_pool_maximum_connection_get_timeout(self): - connection_pool = _TestConnectionPool( - maxsize=1, - unused_timeout=self.unused_timeout, - conn_get_timeout=0) - - def _acquire_connection(): - with connection_pool.acquire(): - pass - - # Make sure we've consumed the only available connection from the pool - conn = connection_pool.get_nowait() - - self.assertRaises(exception.UnexpectedError, _acquire_connection) - - # Put the connection back and ensure we can acquire the connection - # after it is available. - connection_pool.put_nowait(conn) - _acquire_connection() - - -class TestMemcacheClientOverrides(core.BaseTestCase): - - def test_client_stripped_of_threading_local(self): - """threading.local overrides are restored for _MemcacheClient""" - client_class = _memcache_pool._MemcacheClient - # get the genuine thread._local from MRO - thread_local = client_class.__mro__[2] - self.assertTrue(thread_local is threading.local) - for field in six.iterkeys(thread_local.__dict__): - if field not in ('__dict__', '__weakref__'): - self.assertNotEqual(id(getattr(thread_local, field, None)), - id(getattr(client_class, field, None))) diff --git a/keystone-moon/keystone/tests/unit/common/test_injection.py b/keystone-moon/keystone/tests/unit/common/test_injection.py deleted file mode 100644 index 9a5d1e7d..00000000 --- a/keystone-moon/keystone/tests/unit/common/test_injection.py +++ /dev/null @@ -1,238 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -import uuid - -from keystone.common import dependency -from keystone.tests import unit - - -class TestDependencyInjection(unit.BaseTestCase): - def setUp(self): - super(TestDependencyInjection, self).setUp() - dependency.reset() - self.addCleanup(dependency.reset) - - def test_dependency_injection(self): - class Interface(object): - def do_work(self): - assert False - - @dependency.provider('first_api') - class FirstImplementation(Interface): - def do_work(self): - return True - - @dependency.provider('second_api') - class SecondImplementation(Interface): - def do_work(self): - return True - - @dependency.requires('first_api', 'second_api') - class Consumer(object): - def do_work_with_dependencies(self): - assert self.first_api.do_work() - assert self.second_api.do_work() - - # initialize dependency providers - first_api = FirstImplementation() - second_api = SecondImplementation() - - # ... sometime later, initialize a dependency consumer - consumer = Consumer() - - # the expected dependencies should be available to the consumer - self.assertIs(consumer.first_api, first_api) - self.assertIs(consumer.second_api, second_api) - self.assertIsInstance(consumer.first_api, Interface) - self.assertIsInstance(consumer.second_api, Interface) - consumer.do_work_with_dependencies() - - def test_dependency_provider_configuration(self): - @dependency.provider('api') - class Configurable(object): - def __init__(self, value=None): - self.value = value - - def get_value(self): - return self.value - - @dependency.requires('api') - class Consumer(object): - def get_value(self): - return self.api.get_value() - - # initialize dependency providers - api = Configurable(value=True) - - # ... 
sometime later, initialize a dependency consumer - consumer = Consumer() - - # the expected dependencies should be available to the consumer - self.assertIs(consumer.api, api) - self.assertIsInstance(consumer.api, Configurable) - self.assertTrue(consumer.get_value()) - - def test_dependency_consumer_configuration(self): - @dependency.provider('api') - class Provider(object): - def get_value(self): - return True - - @dependency.requires('api') - class Configurable(object): - def __init__(self, value=None): - self.value = value - - def get_value(self): - if self.value: - return self.api.get_value() - - # initialize dependency providers - api = Provider() - - # ... sometime later, initialize a dependency consumer - consumer = Configurable(value=True) - - # the expected dependencies should be available to the consumer - self.assertIs(consumer.api, api) - self.assertIsInstance(consumer.api, Provider) - self.assertTrue(consumer.get_value()) - - def test_inherited_dependency(self): - class Interface(object): - def do_work(self): - assert False - - @dependency.provider('first_api') - class FirstImplementation(Interface): - def do_work(self): - return True - - @dependency.provider('second_api') - class SecondImplementation(Interface): - def do_work(self): - return True - - @dependency.requires('first_api') - class ParentConsumer(object): - def do_work_with_dependencies(self): - assert self.first_api.do_work() - - @dependency.requires('second_api') - class ChildConsumer(ParentConsumer): - def do_work_with_dependencies(self): - assert self.second_api.do_work() - super(ChildConsumer, self).do_work_with_dependencies() - - # initialize dependency providers - first_api = FirstImplementation() - second_api = SecondImplementation() - - # ... 
sometime later, initialize a dependency consumer - consumer = ChildConsumer() - - # dependencies should be naturally inherited - self.assertEqual( - set(['first_api']), - ParentConsumer._dependencies) - self.assertEqual( - set(['first_api', 'second_api']), - ChildConsumer._dependencies) - self.assertEqual( - set(['first_api', 'second_api']), - consumer._dependencies) - - # the expected dependencies should be available to the consumer - self.assertIs(consumer.first_api, first_api) - self.assertIs(consumer.second_api, second_api) - self.assertIsInstance(consumer.first_api, Interface) - self.assertIsInstance(consumer.second_api, Interface) - consumer.do_work_with_dependencies() - - def test_unresolvable_dependency(self): - @dependency.requires(uuid.uuid4().hex) - class Consumer(object): - pass - - def for_test(): - Consumer() - dependency.resolve_future_dependencies() - - self.assertRaises(dependency.UnresolvableDependencyException, for_test) - - def test_circular_dependency(self): - p1_name = uuid.uuid4().hex - p2_name = uuid.uuid4().hex - - @dependency.provider(p1_name) - @dependency.requires(p2_name) - class P1(object): - pass - - @dependency.provider(p2_name) - @dependency.requires(p1_name) - class P2(object): - pass - - p1 = P1() - p2 = P2() - - dependency.resolve_future_dependencies() - - self.assertIs(getattr(p1, p2_name), p2) - self.assertIs(getattr(p2, p1_name), p1) - - def test_reset(self): - # Can reset the registry of providers. 
- - p_id = uuid.uuid4().hex - - @dependency.provider(p_id) - class P(object): - pass - - p_inst = P() - - self.assertIs(dependency.get_provider(p_id), p_inst) - - dependency.reset() - - self.assertFalse(dependency._REGISTRY) - - def test_get_provider(self): - # Can get the instance of a provider using get_provider - - provider_name = uuid.uuid4().hex - - @dependency.provider(provider_name) - class P(object): - pass - - provider_instance = P() - retrieved_provider_instance = dependency.get_provider(provider_name) - self.assertIs(provider_instance, retrieved_provider_instance) - - def test_get_provider_not_provided_error(self): - # If no provider and provider is required then fails. - - provider_name = uuid.uuid4().hex - self.assertRaises(KeyError, dependency.get_provider, provider_name) - - def test_get_provider_not_provided_optional(self): - # If no provider and provider is optional then returns None. - - provider_name = uuid.uuid4().hex - self.assertIsNone(dependency.get_provider(provider_name, - dependency.GET_OPTIONAL)) diff --git a/keystone-moon/keystone/tests/unit/common/test_json_home.py b/keystone-moon/keystone/tests/unit/common/test_json_home.py deleted file mode 100644 index 94e2d138..00000000 --- a/keystone-moon/keystone/tests/unit/common/test_json_home.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright 2014 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- - -import copy - -from testtools import matchers - -from keystone.common import json_home -from keystone.tests import unit - - -class JsonHomeTest(unit.BaseTestCase): - def test_build_v3_resource_relation(self): - resource_name = self.getUniqueString() - relation = json_home.build_v3_resource_relation(resource_name) - exp_relation = ( - 'http://docs.openstack.org/api/openstack-identity/3/rel/%s' % - resource_name) - self.assertThat(relation, matchers.Equals(exp_relation)) - - def test_build_v3_extension_resource_relation(self): - extension_name = self.getUniqueString() - extension_version = self.getUniqueString() - resource_name = self.getUniqueString() - relation = json_home.build_v3_extension_resource_relation( - extension_name, extension_version, resource_name) - exp_relation = ( - 'http://docs.openstack.org/api/openstack-identity/3/ext/%s/%s/rel/' - '%s' % (extension_name, extension_version, resource_name)) - self.assertThat(relation, matchers.Equals(exp_relation)) - - def test_build_v3_parameter_relation(self): - parameter_name = self.getUniqueString() - relation = json_home.build_v3_parameter_relation(parameter_name) - exp_relation = ( - 'http://docs.openstack.org/api/openstack-identity/3/param/%s' % - parameter_name) - self.assertThat(relation, matchers.Equals(exp_relation)) - - def test_build_v3_extension_parameter_relation(self): - extension_name = self.getUniqueString() - extension_version = self.getUniqueString() - parameter_name = self.getUniqueString() - relation = json_home.build_v3_extension_parameter_relation( - extension_name, extension_version, parameter_name) - exp_relation = ( - 'http://docs.openstack.org/api/openstack-identity/3/ext/%s/%s/' - 'param/%s' % (extension_name, extension_version, parameter_name)) - self.assertThat(relation, matchers.Equals(exp_relation)) - - def test_translate_urls(self): - href_rel = self.getUniqueString() - href = self.getUniqueString() - href_template_rel = self.getUniqueString() - href_template = 
self.getUniqueString() - href_vars = {self.getUniqueString(): self.getUniqueString()} - original_json_home = { - 'resources': { - href_rel: {'href': href}, - href_template_rel: { - 'href-template': href_template, - 'href-vars': href_vars} - } - } - - new_json_home = copy.deepcopy(original_json_home) - new_prefix = self.getUniqueString() - json_home.translate_urls(new_json_home, new_prefix) - - exp_json_home = { - 'resources': { - href_rel: {'href': new_prefix + href}, - href_template_rel: { - 'href-template': new_prefix + href_template, - 'href-vars': href_vars} - } - } - - self.assertThat(new_json_home, matchers.Equals(exp_json_home)) diff --git a/keystone-moon/keystone/tests/unit/common/test_ldap.py b/keystone-moon/keystone/tests/unit/common/test_ldap.py deleted file mode 100644 index eed77286..00000000 --- a/keystone-moon/keystone/tests/unit/common/test_ldap.py +++ /dev/null @@ -1,584 +0,0 @@ -# -*- coding: utf-8 -*- -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import os -import tempfile -import uuid - -import fixtures -import ldap.dn -import mock -from oslo_config import cfg -from testtools import matchers - -from keystone.common import driver_hints -from keystone.common import ldap as ks_ldap -from keystone.common.ldap import core as common_ldap_core -from keystone.tests import unit -from keystone.tests.unit import default_fixtures -from keystone.tests.unit import fakeldap -from keystone.tests.unit.ksfixtures import database - - -CONF = cfg.CONF - - -class DnCompareTest(unit.BaseTestCase): - """Tests for the DN comparison functions in keystone.common.ldap.core.""" - - def test_prep(self): - # prep_case_insensitive returns the string with spaces at the front and - # end if it's already lowercase and no insignificant characters. - value = 'lowercase value' - self.assertEqual(value, ks_ldap.prep_case_insensitive(value)) - - def test_prep_lowercase(self): - # prep_case_insensitive returns the string with spaces at the front and - # end and lowercases the value. - value = 'UPPERCASE VALUE' - exp_value = value.lower() - self.assertEqual(exp_value, ks_ldap.prep_case_insensitive(value)) - - def test_prep_insignificant(self): - # prep_case_insensitive remove insignificant spaces. - value = 'before after' - exp_value = 'before after' - self.assertEqual(exp_value, ks_ldap.prep_case_insensitive(value)) - - def test_prep_insignificant_pre_post(self): - # prep_case_insensitive remove insignificant spaces. - value = ' value ' - exp_value = 'value' - self.assertEqual(exp_value, ks_ldap.prep_case_insensitive(value)) - - def test_ava_equal_same(self): - # is_ava_value_equal returns True if the two values are the same. - value = 'val1' - self.assertTrue(ks_ldap.is_ava_value_equal('cn', value, value)) - - def test_ava_equal_complex(self): - # is_ava_value_equal returns True if the two values are the same using - # a value that's got different capitalization and insignificant chars. 
- val1 = 'before after' - val2 = ' BEFORE afTer ' - self.assertTrue(ks_ldap.is_ava_value_equal('cn', val1, val2)) - - def test_ava_different(self): - # is_ava_value_equal returns False if the values aren't the same. - self.assertFalse(ks_ldap.is_ava_value_equal('cn', 'val1', 'val2')) - - def test_rdn_same(self): - # is_rdn_equal returns True if the two values are the same. - rdn = ldap.dn.str2dn('cn=val1')[0] - self.assertTrue(ks_ldap.is_rdn_equal(rdn, rdn)) - - def test_rdn_diff_length(self): - # is_rdn_equal returns False if the RDNs have a different number of - # AVAs. - rdn1 = ldap.dn.str2dn('cn=cn1')[0] - rdn2 = ldap.dn.str2dn('cn=cn1+ou=ou1')[0] - self.assertFalse(ks_ldap.is_rdn_equal(rdn1, rdn2)) - - def test_rdn_multi_ava_same_order(self): - # is_rdn_equal returns True if the RDNs have the same number of AVAs - # and the values are the same. - rdn1 = ldap.dn.str2dn('cn=cn1+ou=ou1')[0] - rdn2 = ldap.dn.str2dn('cn=CN1+ou=OU1')[0] - self.assertTrue(ks_ldap.is_rdn_equal(rdn1, rdn2)) - - def test_rdn_multi_ava_diff_order(self): - # is_rdn_equal returns True if the RDNs have the same number of AVAs - # and the values are the same, even if in a different order - rdn1 = ldap.dn.str2dn('cn=cn1+ou=ou1')[0] - rdn2 = ldap.dn.str2dn('ou=OU1+cn=CN1')[0] - self.assertTrue(ks_ldap.is_rdn_equal(rdn1, rdn2)) - - def test_rdn_multi_ava_diff_type(self): - # is_rdn_equal returns False if the RDNs have the same number of AVAs - # and the attribute types are different. - rdn1 = ldap.dn.str2dn('cn=cn1+ou=ou1')[0] - rdn2 = ldap.dn.str2dn('cn=cn1+sn=sn1')[0] - self.assertFalse(ks_ldap.is_rdn_equal(rdn1, rdn2)) - - def test_rdn_attr_type_case_diff(self): - # is_rdn_equal returns True for same RDNs even when attr type case is - # different. 
- rdn1 = ldap.dn.str2dn('cn=cn1')[0] - rdn2 = ldap.dn.str2dn('CN=cn1')[0] - self.assertTrue(ks_ldap.is_rdn_equal(rdn1, rdn2)) - - def test_rdn_attr_type_alias(self): - # is_rdn_equal returns False for same RDNs even when attr type alias is - # used. Note that this is a limitation since an LDAP server should - # consider them equal. - rdn1 = ldap.dn.str2dn('cn=cn1')[0] - rdn2 = ldap.dn.str2dn('2.5.4.3=cn1')[0] - self.assertFalse(ks_ldap.is_rdn_equal(rdn1, rdn2)) - - def test_dn_same(self): - # is_dn_equal returns True if the DNs are the same. - dn = 'cn=Babs Jansen,ou=OpenStack' - self.assertTrue(ks_ldap.is_dn_equal(dn, dn)) - - def test_dn_equal_unicode(self): - # is_dn_equal can accept unicode - dn = u'cn=fäké,ou=OpenStack' - self.assertTrue(ks_ldap.is_dn_equal(dn, dn)) - - def test_dn_diff_length(self): - # is_dn_equal returns False if the DNs don't have the same number of - # RDNs - dn1 = 'cn=Babs Jansen,ou=OpenStack' - dn2 = 'cn=Babs Jansen,ou=OpenStack,dc=example.com' - self.assertFalse(ks_ldap.is_dn_equal(dn1, dn2)) - - def test_dn_equal_rdns(self): - # is_dn_equal returns True if the DNs have the same number of RDNs - # and each RDN is the same. - dn1 = 'cn=Babs Jansen,ou=OpenStack+cn=OpenSource' - dn2 = 'CN=Babs Jansen,cn=OpenSource+ou=OpenStack' - self.assertTrue(ks_ldap.is_dn_equal(dn1, dn2)) - - def test_dn_parsed_dns(self): - # is_dn_equal can also accept parsed DNs. - dn_str1 = ldap.dn.str2dn('cn=Babs Jansen,ou=OpenStack+cn=OpenSource') - dn_str2 = ldap.dn.str2dn('CN=Babs Jansen,cn=OpenSource+ou=OpenStack') - self.assertTrue(ks_ldap.is_dn_equal(dn_str1, dn_str2)) - - def test_startswith_under_child(self): - # dn_startswith returns True if descendant_dn is a child of dn. - child = 'cn=Babs Jansen,ou=OpenStack' - parent = 'ou=OpenStack' - self.assertTrue(ks_ldap.dn_startswith(child, parent)) - - def test_startswith_parent(self): - # dn_startswith returns False if descendant_dn is a parent of dn. 
- child = 'cn=Babs Jansen,ou=OpenStack' - parent = 'ou=OpenStack' - self.assertFalse(ks_ldap.dn_startswith(parent, child)) - - def test_startswith_same(self): - # dn_startswith returns False if DNs are the same. - dn = 'cn=Babs Jansen,ou=OpenStack' - self.assertFalse(ks_ldap.dn_startswith(dn, dn)) - - def test_startswith_not_parent(self): - # dn_startswith returns False if descendant_dn is not under the dn - child = 'cn=Babs Jansen,ou=OpenStack' - parent = 'dc=example.com' - self.assertFalse(ks_ldap.dn_startswith(child, parent)) - - def test_startswith_descendant(self): - # dn_startswith returns True if descendant_dn is a descendant of dn. - descendant = 'cn=Babs Jansen,ou=Keystone,ou=OpenStack,dc=example.com' - dn = 'ou=OpenStack,dc=example.com' - self.assertTrue(ks_ldap.dn_startswith(descendant, dn)) - - descendant = 'uid=12345,ou=Users,dc=example,dc=com' - dn = 'ou=Users,dc=example,dc=com' - self.assertTrue(ks_ldap.dn_startswith(descendant, dn)) - - def test_startswith_parsed_dns(self): - # dn_startswith also accepts parsed DNs. - descendant = ldap.dn.str2dn('cn=Babs Jansen,ou=OpenStack') - dn = ldap.dn.str2dn('ou=OpenStack') - self.assertTrue(ks_ldap.dn_startswith(descendant, dn)) - - def test_startswith_unicode(self): - # dn_startswith accepts unicode. 
- child = u'cn=fäké,ou=OpenStäck' - parent = u'ou=OpenStäck' - self.assertTrue(ks_ldap.dn_startswith(child, parent)) - - -class LDAPDeleteTreeTest(unit.TestCase): - - def setUp(self): - super(LDAPDeleteTreeTest, self).setUp() - - ks_ldap.register_handler('fake://', - fakeldap.FakeLdapNoSubtreeDelete) - self.useFixture(database.Database(self.sql_driver_version_overrides)) - - self.load_backends() - self.load_fixtures(default_fixtures) - - self.addCleanup(self.clear_database) - self.addCleanup(common_ldap_core._HANDLERS.clear) - - def clear_database(self): - for shelf in fakeldap.FakeShelves: - fakeldap.FakeShelves[shelf].clear() - - def config_overrides(self): - super(LDAPDeleteTreeTest, self).config_overrides() - self.config_fixture.config(group='identity', driver='ldap') - - def config_files(self): - config_files = super(LDAPDeleteTreeTest, self).config_files() - config_files.append(unit.dirs.tests_conf('backend_ldap.conf')) - return config_files - - def test_delete_tree(self): - """Test manually deleting a tree. - - Few LDAP servers support CONTROL_DELETETREE. This test - exercises the alternate code paths in BaseLdap.delete_tree. 
- - """ - conn = self.identity_api.user.get_connection() - id_attr = self.identity_api.user.id_attr - objclass = self.identity_api.user.object_class.lower() - tree_dn = self.identity_api.user.tree_dn - - def create_entry(name, parent_dn=None): - if not parent_dn: - parent_dn = tree_dn - dn = '%s=%s,%s' % (id_attr, name, parent_dn) - attrs = [('objectclass', [objclass, 'ldapsubentry']), - (id_attr, [name])] - conn.add_s(dn, attrs) - return dn - - # create 3 entries like this: - # cn=base - # cn=child,cn=base - # cn=grandchild,cn=child,cn=base - # then attempt to delete_tree(cn=base) - base_id = 'base' - base_dn = create_entry(base_id) - child_dn = create_entry('child', base_dn) - grandchild_dn = create_entry('grandchild', child_dn) - - # verify that the three entries were created - scope = ldap.SCOPE_SUBTREE - filt = '(|(objectclass=*)(objectclass=ldapsubentry))' - entries = conn.search_s(base_dn, scope, filt, - attrlist=common_ldap_core.DN_ONLY) - self.assertThat(entries, matchers.HasLength(3)) - sort_ents = sorted([e[0] for e in entries], key=len, reverse=True) - self.assertEqual([grandchild_dn, child_dn, base_dn], sort_ents) - - # verify that a non-leaf node can't be deleted directly by the - # LDAP server - self.assertRaises(ldap.NOT_ALLOWED_ON_NONLEAF, - conn.delete_s, base_dn) - self.assertRaises(ldap.NOT_ALLOWED_ON_NONLEAF, - conn.delete_s, child_dn) - - # call our delete_tree implementation - self.identity_api.user.delete_tree(base_id) - self.assertRaises(ldap.NO_SUCH_OBJECT, - conn.search_s, base_dn, ldap.SCOPE_BASE) - self.assertRaises(ldap.NO_SUCH_OBJECT, - conn.search_s, child_dn, ldap.SCOPE_BASE) - self.assertRaises(ldap.NO_SUCH_OBJECT, - conn.search_s, grandchild_dn, ldap.SCOPE_BASE) - - -class MultiURLTests(unit.TestCase): - """Tests for setting multiple LDAP URLs.""" - - def test_multiple_urls_with_comma_no_conn_pool(self): - urls = 'ldap://localhost,ldap://backup.localhost' - self.config_fixture.config(group='ldap', url=urls, use_pool=False) - 
base_ldap = ks_ldap.BaseLdap(CONF) - ldap_connection = base_ldap.get_connection() - self.assertEqual(urls, ldap_connection.conn.conn._uri) - - def test_multiple_urls_with_comma_with_conn_pool(self): - urls = 'ldap://localhost,ldap://backup.localhost' - self.config_fixture.config(group='ldap', url=urls, use_pool=True) - base_ldap = ks_ldap.BaseLdap(CONF) - ldap_connection = base_ldap.get_connection() - self.assertEqual(urls, ldap_connection.conn.conn_pool.uri) - - -class SslTlsTest(unit.TestCase): - """Tests for the SSL/TLS functionality in keystone.common.ldap.core.""" - - @mock.patch.object(ks_ldap.core.KeystoneLDAPHandler, 'simple_bind_s') - @mock.patch.object(ldap.ldapobject.LDAPObject, 'start_tls_s') - def _init_ldap_connection(self, config, mock_ldap_one, mock_ldap_two): - # Attempt to connect to initialize python-ldap. - base_ldap = ks_ldap.BaseLdap(config) - base_ldap.get_connection() - - def test_certfile_trust_tls(self): - # We need this to actually exist, so we create a tempfile. - (handle, certfile) = tempfile.mkstemp() - self.addCleanup(os.unlink, certfile) - self.addCleanup(os.close, handle) - self.config_fixture.config(group='ldap', - url='ldap://localhost', - use_tls=True, - tls_cacertfile=certfile) - - self._init_ldap_connection(CONF) - - # Ensure the cert trust option is set. - self.assertEqual(certfile, ldap.get_option(ldap.OPT_X_TLS_CACERTFILE)) - - def test_certdir_trust_tls(self): - # We need this to actually exist, so we create a tempdir. - certdir = self.useFixture(fixtures.TempDir()).path - self.config_fixture.config(group='ldap', - url='ldap://localhost', - use_tls=True, - tls_cacertdir=certdir) - - self._init_ldap_connection(CONF) - - # Ensure the cert trust option is set. - self.assertEqual(certdir, ldap.get_option(ldap.OPT_X_TLS_CACERTDIR)) - - def test_certfile_trust_ldaps(self): - # We need this to actually exist, so we create a tempfile. 
- (handle, certfile) = tempfile.mkstemp() - self.addCleanup(os.unlink, certfile) - self.addCleanup(os.close, handle) - self.config_fixture.config(group='ldap', - url='ldaps://localhost', - use_tls=False, - tls_cacertfile=certfile) - - self._init_ldap_connection(CONF) - - # Ensure the cert trust option is set. - self.assertEqual(certfile, ldap.get_option(ldap.OPT_X_TLS_CACERTFILE)) - - def test_certdir_trust_ldaps(self): - # We need this to actually exist, so we create a tempdir. - certdir = self.useFixture(fixtures.TempDir()).path - self.config_fixture.config(group='ldap', - url='ldaps://localhost', - use_tls=False, - tls_cacertdir=certdir) - - self._init_ldap_connection(CONF) - - # Ensure the cert trust option is set. - self.assertEqual(certdir, ldap.get_option(ldap.OPT_X_TLS_CACERTDIR)) - - -class LDAPPagedResultsTest(unit.TestCase): - """Tests the paged results functionality in keystone.common.ldap.core.""" - - def setUp(self): - super(LDAPPagedResultsTest, self).setUp() - self.clear_database() - - ks_ldap.register_handler('fake://', fakeldap.FakeLdap) - self.addCleanup(common_ldap_core._HANDLERS.clear) - self.useFixture(database.Database(self.sql_driver_version_overrides)) - - self.load_backends() - self.load_fixtures(default_fixtures) - - def clear_database(self): - for shelf in fakeldap.FakeShelves: - fakeldap.FakeShelves[shelf].clear() - - def config_overrides(self): - super(LDAPPagedResultsTest, self).config_overrides() - self.config_fixture.config(group='identity', driver='ldap') - - def config_files(self): - config_files = super(LDAPPagedResultsTest, self).config_files() - config_files.append(unit.dirs.tests_conf('backend_ldap.conf')) - return config_files - - @mock.patch.object(fakeldap.FakeLdap, 'search_ext') - @mock.patch.object(fakeldap.FakeLdap, 'result3') - def test_paged_results_control_api(self, mock_result3, mock_search_ext): - mock_result3.return_value = ('', [], 1, []) - - self.config_fixture.config(group='ldap', - page_size=1) - - conn = 
self.identity_api.user.get_connection() - conn._paged_search_s('dc=example,dc=test', - ldap.SCOPE_SUBTREE, - 'objectclass=*') - - -class CommonLdapTestCase(unit.BaseTestCase): - """These test cases call functions in keystone.common.ldap.""" - - def test_binary_attribute_values(self): - result = [( - 'cn=junk,dc=example,dc=com', - { - 'cn': ['junk'], - 'sn': [uuid.uuid4().hex], - 'mail': [uuid.uuid4().hex], - 'binary_attr': ['\x00\xFF\x00\xFF'] - } - ), ] - py_result = ks_ldap.convert_ldap_result(result) - # The attribute containing the binary value should - # not be present in the converted result. - self.assertNotIn('binary_attr', py_result[0][1]) - - def test_utf8_conversion(self): - value_unicode = u'fäké1' - value_utf8 = value_unicode.encode('utf-8') - - result_utf8 = ks_ldap.utf8_encode(value_unicode) - self.assertEqual(value_utf8, result_utf8) - - result_utf8 = ks_ldap.utf8_encode(value_utf8) - self.assertEqual(value_utf8, result_utf8) - - result_unicode = ks_ldap.utf8_decode(value_utf8) - self.assertEqual(value_unicode, result_unicode) - - result_unicode = ks_ldap.utf8_decode(value_unicode) - self.assertEqual(value_unicode, result_unicode) - - self.assertRaises(TypeError, - ks_ldap.utf8_encode, - 100) - - result_unicode = ks_ldap.utf8_decode(100) - self.assertEqual(u'100', result_unicode) - - def test_user_id_begins_with_0(self): - user_id = '0123456' - result = [( - 'cn=dummy,dc=example,dc=com', - { - 'user_id': [user_id], - 'enabled': ['TRUE'] - } - ), ] - py_result = ks_ldap.convert_ldap_result(result) - # The user id should be 0123456, and the enabled - # flag should be True - self.assertIs(py_result[0][1]['enabled'][0], True) - self.assertEqual(user_id, py_result[0][1]['user_id'][0]) - - def test_user_id_begins_with_0_and_enabled_bit_mask(self): - user_id = '0123456' - bitmask = '225' - expected_bitmask = 225 - result = [( - 'cn=dummy,dc=example,dc=com', - { - 'user_id': [user_id], - 'enabled': [bitmask] - } - ), ] - py_result = 
ks_ldap.convert_ldap_result(result) - # The user id should be 0123456, and the enabled - # flag should be 225 - self.assertEqual(expected_bitmask, py_result[0][1]['enabled'][0]) - self.assertEqual(user_id, py_result[0][1]['user_id'][0]) - - def test_user_id_and_bitmask_begins_with_0(self): - user_id = '0123456' - bitmask = '0225' - expected_bitmask = 225 - result = [( - 'cn=dummy,dc=example,dc=com', - { - 'user_id': [user_id], - 'enabled': [bitmask] - } - ), ] - py_result = ks_ldap.convert_ldap_result(result) - # The user id should be 0123456, and the enabled - # flag should be 225, the 0 is dropped. - self.assertEqual(expected_bitmask, py_result[0][1]['enabled'][0]) - self.assertEqual(user_id, py_result[0][1]['user_id'][0]) - - def test_user_id_and_user_name_with_boolean_string(self): - boolean_strings = ['TRUE', 'FALSE', 'true', 'false', 'True', 'False', - 'TrUe' 'FaLse'] - for user_name in boolean_strings: - user_id = uuid.uuid4().hex - result = [( - 'cn=dummy,dc=example,dc=com', - { - 'user_id': [user_id], - 'user_name': [user_name] - } - ), ] - py_result = ks_ldap.convert_ldap_result(result) - # The user name should still be a string value. - self.assertEqual(user_name, py_result[0][1]['user_name'][0]) - - -class LDAPFilterQueryCompositionTest(unit.TestCase): - """These test cases test LDAP filter generation.""" - - def setUp(self): - super(LDAPFilterQueryCompositionTest, self).setUp() - - self.base_ldap = ks_ldap.BaseLdap(self.config_fixture.conf) - - # The tests need an attribute mapping to use. - self.attribute_name = uuid.uuid4().hex - self.filter_attribute_name = uuid.uuid4().hex - self.base_ldap.attribute_mapping = { - self.attribute_name: self.filter_attribute_name - } - - def test_return_query_with_no_hints(self): - hints = driver_hints.Hints() - # NOTE: doesn't have to be a real query, we just need to make sure the - # same string is returned if there are no hints. 
- query = uuid.uuid4().hex - self.assertEqual(query, - self.base_ldap.filter_query(hints=hints, query=query)) - - # make sure the default query is an empty string - self.assertEqual('', self.base_ldap.filter_query(hints=hints)) - - def test_filter_with_empty_query_and_hints_set(self): - hints = driver_hints.Hints() - username = uuid.uuid4().hex - hints.add_filter(name=self.attribute_name, - value=username, - comparator='equals', - case_sensitive=False) - expected_ldap_filter = '(&(%s=%s))' % ( - self.filter_attribute_name, username) - self.assertEqual(expected_ldap_filter, - self.base_ldap.filter_query(hints=hints)) - - def test_filter_with_both_query_and_hints_set(self): - hints = driver_hints.Hints() - # NOTE: doesn't have to be a real query, we just need to make sure the - # filter string is concatenated correctly - query = uuid.uuid4().hex - username = uuid.uuid4().hex - expected_result = '(&%(query)s(%(user_name_attr)s=%(username)s))' % ( - {'query': query, - 'user_name_attr': self.filter_attribute_name, - 'username': username}) - hints.add_filter(self.attribute_name, username) - self.assertEqual(expected_result, - self.base_ldap.filter_query(hints=hints, query=query)) - - def test_filter_with_hints_and_query_is_none(self): - hints = driver_hints.Hints() - username = uuid.uuid4().hex - hints.add_filter(name=self.attribute_name, - value=username, - comparator='equals', - case_sensitive=False) - expected_ldap_filter = '(&(%s=%s))' % ( - self.filter_attribute_name, username) - self.assertEqual(expected_ldap_filter, - self.base_ldap.filter_query(hints=hints, query=None)) diff --git a/keystone-moon/keystone/tests/unit/common/test_manager.py b/keystone-moon/keystone/tests/unit/common/test_manager.py deleted file mode 100644 index 7ef91e15..00000000 --- a/keystone-moon/keystone/tests/unit/common/test_manager.py +++ /dev/null @@ -1,40 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the 
License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import mock - -from keystone import catalog -from keystone.common import manager -from keystone.tests import unit - - -class TestCreateLegacyDriver(unit.BaseTestCase): - - @mock.patch('oslo_log.versionutils.report_deprecated_feature') - def test_class_is_properly_deprecated(self, mock_reporter): - Driver = manager.create_legacy_driver(catalog.CatalogDriverV8) - - # NOTE(dstanek): I want to subvert the requirement for this - # class to implement all of the abstract methods. - Driver.__abstractmethods__ = set() - impl = Driver() - - details = { - 'as_of': 'Liberty', - 'what': 'keystone.catalog.core.Driver', - 'in_favor_of': 'keystone.catalog.core.CatalogDriverV8', - 'remove_in': mock.ANY, - } - mock_reporter.assert_called_with(mock.ANY, mock.ANY, details) - self.assertEqual('N', mock_reporter.call_args[0][2]['remove_in'][0]) - - self.assertIsInstance(impl, catalog.CatalogDriverV8) diff --git a/keystone-moon/keystone/tests/unit/common/test_notifications.py b/keystone-moon/keystone/tests/unit/common/test_notifications.py deleted file mode 100644 index aa2e6f72..00000000 --- a/keystone-moon/keystone/tests/unit/common/test_notifications.py +++ /dev/null @@ -1,1248 +0,0 @@ -# Copyright 2013 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import logging -import uuid - -import mock -from oslo_config import cfg -from oslo_config import fixture as config_fixture -from oslotest import mockpatch -from pycadf import cadftaxonomy -from pycadf import cadftype -from pycadf import eventfactory -from pycadf import resource as cadfresource - -from keystone import notifications -from keystone.tests import unit -from keystone.tests.unit import test_v3 - - -CONF = cfg.CONF - -EXP_RESOURCE_TYPE = uuid.uuid4().hex -CREATED_OPERATION = notifications.ACTIONS.created -UPDATED_OPERATION = notifications.ACTIONS.updated -DELETED_OPERATION = notifications.ACTIONS.deleted -DISABLED_OPERATION = notifications.ACTIONS.disabled - - -class ArbitraryException(Exception): - pass - - -def register_callback(operation, resource_type=EXP_RESOURCE_TYPE): - """Helper for creating and registering a mock callback.""" - callback = mock.Mock(__name__='callback', - im_class=mock.Mock(__name__='class')) - notifications.register_event_callback(operation, resource_type, callback) - return callback - - -class AuditNotificationsTestCase(unit.BaseTestCase): - def setUp(self): - super(AuditNotificationsTestCase, self).setUp() - self.config_fixture = self.useFixture(config_fixture.Config(CONF)) - self.addCleanup(notifications.clear_subscribers) - - def _test_notification_operation(self, notify_function, operation): - exp_resource_id = uuid.uuid4().hex - callback = register_callback(operation) - notify_function(EXP_RESOURCE_TYPE, exp_resource_id) - callback.assert_called_once_with('identity', EXP_RESOURCE_TYPE, - operation, - 
{'resource_info': exp_resource_id}) - self.config_fixture.config(notification_format='cadf') - with mock.patch( - 'keystone.notifications._create_cadf_payload') as cadf_notify: - notify_function(EXP_RESOURCE_TYPE, exp_resource_id) - initiator = None - cadf_notify.assert_called_once_with( - operation, EXP_RESOURCE_TYPE, exp_resource_id, - notifications.taxonomy.OUTCOME_SUCCESS, initiator) - notify_function(EXP_RESOURCE_TYPE, exp_resource_id, public=False) - cadf_notify.assert_called_once_with( - operation, EXP_RESOURCE_TYPE, exp_resource_id, - notifications.taxonomy.OUTCOME_SUCCESS, initiator) - - def test_resource_created_notification(self): - self._test_notification_operation(notifications.Audit.created, - CREATED_OPERATION) - - def test_resource_updated_notification(self): - self._test_notification_operation(notifications.Audit.updated, - UPDATED_OPERATION) - - def test_resource_deleted_notification(self): - self._test_notification_operation(notifications.Audit.deleted, - DELETED_OPERATION) - - def test_resource_disabled_notification(self): - self._test_notification_operation(notifications.Audit.disabled, - DISABLED_OPERATION) - - -class NotificationsTestCase(unit.BaseTestCase): - - def test_send_notification(self): - """Test _send_notification. - - Test the private method _send_notification to ensure event_type, - payload, and context are built and passed properly. - - """ - resource = uuid.uuid4().hex - resource_type = EXP_RESOURCE_TYPE - operation = CREATED_OPERATION - - # NOTE(ldbragst): Even though notifications._send_notification doesn't - # contain logic that creates cases, this is supposed to test that - # context is always empty and that we ensure the resource ID of the - # resource in the notification is contained in the payload. It was - # agreed that context should be empty in Keystone's case, which is - # also noted in the /keystone/notifications.py module. This test - # ensures and maintains these conditions. 
- expected_args = [ - {}, # empty context - 'identity.%s.created' % resource_type, # event_type - {'resource_info': resource}, # payload - 'INFO', # priority is always INFO... - ] - - with mock.patch.object(notifications._get_notifier(), - '_notify') as mocked: - notifications._send_notification(operation, resource_type, - resource) - mocked.assert_called_once_with(*expected_args) - - def test_send_notification_with_opt_out(self): - """Test the private method _send_notification with opt-out. - - Test that _send_notification does not notify when a valid - notification_opt_out configuration is provided. - """ - resource = uuid.uuid4().hex - resource_type = EXP_RESOURCE_TYPE - operation = CREATED_OPERATION - event_type = 'identity.%s.created' % resource_type - - # NOTE(diazjf): Here we add notification_opt_out to the - # configuration so that we should return before _get_notifer is - # called. This is because we are opting out notifications for the - # passed resource_type and operation. - conf = self.useFixture(config_fixture.Config(CONF)) - conf.config(notification_opt_out=event_type) - - with mock.patch.object(notifications._get_notifier(), - '_notify') as mocked: - - notifications._send_notification(operation, resource_type, - resource) - mocked.assert_not_called() - - def test_send_audit_notification_with_opt_out(self): - """Test the private method _send_audit_notification with opt-out. - - Test that _send_audit_notification does not notify when a valid - notification_opt_out configuration is provided. - """ - resource_type = EXP_RESOURCE_TYPE - - action = CREATED_OPERATION + '.' 
+ resource_type - initiator = mock - target = mock - outcome = 'success' - event_type = 'identity.%s.created' % resource_type - - conf = self.useFixture(config_fixture.Config(CONF)) - conf.config(notification_opt_out=event_type) - - with mock.patch.object(notifications._get_notifier(), - '_notify') as mocked: - - notifications._send_audit_notification(action, - initiator, - outcome, - target, - event_type) - mocked.assert_not_called() - - def test_opt_out_authenticate_event(self): - """Test that authenticate events are successfully opted out.""" - resource_type = EXP_RESOURCE_TYPE - - action = CREATED_OPERATION + '.' + resource_type - initiator = mock - target = mock - outcome = 'success' - event_type = 'identity.authenticate' - meter_name = '%s.%s' % (event_type, outcome) - - conf = self.useFixture(config_fixture.Config(CONF)) - conf.config(notification_opt_out=meter_name) - - with mock.patch.object(notifications._get_notifier(), - '_notify') as mocked: - - notifications._send_audit_notification(action, - initiator, - outcome, - target, - event_type) - mocked.assert_not_called() - - -class BaseNotificationTest(test_v3.RestfulTestCase): - - def setUp(self): - super(BaseNotificationTest, self).setUp() - - self._notifications = [] - self._audits = [] - - def fake_notify(operation, resource_type, resource_id, - actor_dict=None, public=True): - note = { - 'resource_id': resource_id, - 'operation': operation, - 'resource_type': resource_type, - 'send_notification_called': True, - 'public': public} - if actor_dict: - note['actor_id'] = actor_dict.get('id') - note['actor_type'] = actor_dict.get('type') - note['actor_operation'] = actor_dict.get('actor_operation') - self._notifications.append(note) - - self.useFixture(mockpatch.PatchObject( - notifications, '_send_notification', fake_notify)) - - def fake_audit(action, initiator, outcome, target, - event_type, **kwargs): - service_security = cadftaxonomy.SERVICE_SECURITY - - event = eventfactory.EventFactory().new_event( - 
eventType=cadftype.EVENTTYPE_ACTIVITY, - outcome=outcome, - action=action, - initiator=initiator, - target=target, - observer=cadfresource.Resource(typeURI=service_security)) - - for key, value in kwargs.items(): - setattr(event, key, value) - - audit = { - 'payload': event.as_dict(), - 'event_type': event_type, - 'send_notification_called': True} - self._audits.append(audit) - - self.useFixture(mockpatch.PatchObject( - notifications, '_send_audit_notification', fake_audit)) - - def _assert_last_note(self, resource_id, operation, resource_type, - actor_id=None, actor_type=None, - actor_operation=None): - # NOTE(stevemar): If 'basic' format is not used, then simply - # return since this assertion is not valid. - if CONF.notification_format != 'basic': - return - self.assertTrue(len(self._notifications) > 0) - note = self._notifications[-1] - self.assertEqual(operation, note['operation']) - self.assertEqual(resource_id, note['resource_id']) - self.assertEqual(resource_type, note['resource_type']) - self.assertTrue(note['send_notification_called']) - if actor_id: - self.assertEqual(actor_id, note['actor_id']) - self.assertEqual(actor_type, note['actor_type']) - self.assertEqual(actor_operation, note['actor_operation']) - - def _assert_last_audit(self, resource_id, operation, resource_type, - target_uri): - # NOTE(stevemar): If 'cadf' format is not used, then simply - # return since this assertion is not valid. 
- if CONF.notification_format != 'cadf': - return - self.assertTrue(len(self._audits) > 0) - audit = self._audits[-1] - payload = audit['payload'] - self.assertEqual(resource_id, payload['resource_info']) - action = '%s.%s' % (operation, resource_type) - self.assertEqual(action, payload['action']) - self.assertEqual(target_uri, payload['target']['typeURI']) - self.assertEqual(resource_id, payload['target']['id']) - event_type = '%s.%s.%s' % ('identity', resource_type, operation) - self.assertEqual(event_type, audit['event_type']) - self.assertTrue(audit['send_notification_called']) - - def _assert_initiator_data_is_set(self, operation, resource_type, typeURI): - self.assertTrue(len(self._audits) > 0) - audit = self._audits[-1] - payload = audit['payload'] - self.assertEqual(self.user_id, payload['initiator']['id']) - self.assertEqual(self.project_id, payload['initiator']['project_id']) - self.assertEqual(typeURI, payload['target']['typeURI']) - action = '%s.%s' % (operation, resource_type) - self.assertEqual(action, payload['action']) - - def _assert_notify_not_sent(self, resource_id, operation, resource_type, - public=True): - unexpected = { - 'resource_id': resource_id, - 'operation': operation, - 'resource_type': resource_type, - 'send_notification_called': True, - 'public': public} - for note in self._notifications: - self.assertNotEqual(unexpected, note) - - def _assert_notify_sent(self, resource_id, operation, resource_type, - public=True): - expected = { - 'resource_id': resource_id, - 'operation': operation, - 'resource_type': resource_type, - 'send_notification_called': True, - 'public': public} - for note in self._notifications: - if expected == note: - break - else: - self.fail("Notification not sent.") - - -class NotificationsForEntities(BaseNotificationTest): - - def test_create_group(self): - group_ref = unit.new_group_ref(domain_id=self.domain_id) - group_ref = self.identity_api.create_group(group_ref) - self._assert_last_note(group_ref['id'], 
CREATED_OPERATION, 'group') - self._assert_last_audit(group_ref['id'], CREATED_OPERATION, 'group', - cadftaxonomy.SECURITY_GROUP) - - def test_create_project(self): - project_ref = unit.new_project_ref(domain_id=self.domain_id) - self.resource_api.create_project(project_ref['id'], project_ref) - self._assert_last_note( - project_ref['id'], CREATED_OPERATION, 'project') - self._assert_last_audit(project_ref['id'], CREATED_OPERATION, - 'project', cadftaxonomy.SECURITY_PROJECT) - - def test_create_role(self): - role_ref = unit.new_role_ref() - self.role_api.create_role(role_ref['id'], role_ref) - self._assert_last_note(role_ref['id'], CREATED_OPERATION, 'role') - self._assert_last_audit(role_ref['id'], CREATED_OPERATION, 'role', - cadftaxonomy.SECURITY_ROLE) - - def test_create_user(self): - user_ref = unit.new_user_ref(domain_id=self.domain_id) - user_ref = self.identity_api.create_user(user_ref) - self._assert_last_note(user_ref['id'], CREATED_OPERATION, 'user') - self._assert_last_audit(user_ref['id'], CREATED_OPERATION, 'user', - cadftaxonomy.SECURITY_ACCOUNT_USER) - - def test_create_trust(self): - trustor = unit.new_user_ref(domain_id=self.domain_id) - trustor = self.identity_api.create_user(trustor) - trustee = unit.new_user_ref(domain_id=self.domain_id) - trustee = self.identity_api.create_user(trustee) - role_ref = unit.new_role_ref() - self.role_api.create_role(role_ref['id'], role_ref) - trust_ref = unit.new_trust_ref(trustor['id'], - trustee['id']) - self.trust_api.create_trust(trust_ref['id'], - trust_ref, - [role_ref]) - self._assert_last_note( - trust_ref['id'], CREATED_OPERATION, 'OS-TRUST:trust') - self._assert_last_audit(trust_ref['id'], CREATED_OPERATION, - 'OS-TRUST:trust', cadftaxonomy.SECURITY_TRUST) - - def test_delete_group(self): - group_ref = unit.new_group_ref(domain_id=self.domain_id) - group_ref = self.identity_api.create_group(group_ref) - self.identity_api.delete_group(group_ref['id']) - self._assert_last_note(group_ref['id'], 
DELETED_OPERATION, 'group') - self._assert_last_audit(group_ref['id'], DELETED_OPERATION, 'group', - cadftaxonomy.SECURITY_GROUP) - - def test_delete_project(self): - project_ref = unit.new_project_ref(domain_id=self.domain_id) - self.resource_api.create_project(project_ref['id'], project_ref) - self.resource_api.delete_project(project_ref['id']) - self._assert_last_note( - project_ref['id'], DELETED_OPERATION, 'project') - self._assert_last_audit(project_ref['id'], DELETED_OPERATION, - 'project', cadftaxonomy.SECURITY_PROJECT) - - def test_delete_role(self): - role_ref = unit.new_role_ref() - self.role_api.create_role(role_ref['id'], role_ref) - self.role_api.delete_role(role_ref['id']) - self._assert_last_note(role_ref['id'], DELETED_OPERATION, 'role') - self._assert_last_audit(role_ref['id'], DELETED_OPERATION, 'role', - cadftaxonomy.SECURITY_ROLE) - - def test_delete_user(self): - user_ref = unit.new_user_ref(domain_id=self.domain_id) - user_ref = self.identity_api.create_user(user_ref) - self.identity_api.delete_user(user_ref['id']) - self._assert_last_note(user_ref['id'], DELETED_OPERATION, 'user') - self._assert_last_audit(user_ref['id'], DELETED_OPERATION, 'user', - cadftaxonomy.SECURITY_ACCOUNT_USER) - - def test_create_domain(self): - domain_ref = unit.new_domain_ref() - self.resource_api.create_domain(domain_ref['id'], domain_ref) - self._assert_last_note(domain_ref['id'], CREATED_OPERATION, 'domain') - self._assert_last_audit(domain_ref['id'], CREATED_OPERATION, 'domain', - cadftaxonomy.SECURITY_DOMAIN) - - def test_update_domain(self): - domain_ref = unit.new_domain_ref() - self.resource_api.create_domain(domain_ref['id'], domain_ref) - domain_ref['description'] = uuid.uuid4().hex - self.resource_api.update_domain(domain_ref['id'], domain_ref) - self._assert_last_note(domain_ref['id'], UPDATED_OPERATION, 'domain') - self._assert_last_audit(domain_ref['id'], UPDATED_OPERATION, 'domain', - cadftaxonomy.SECURITY_DOMAIN) - - def test_delete_domain(self): - 
domain_ref = unit.new_domain_ref() - self.resource_api.create_domain(domain_ref['id'], domain_ref) - domain_ref['enabled'] = False - self.resource_api.update_domain(domain_ref['id'], domain_ref) - self.resource_api.delete_domain(domain_ref['id']) - self._assert_last_note(domain_ref['id'], DELETED_OPERATION, 'domain') - self._assert_last_audit(domain_ref['id'], DELETED_OPERATION, 'domain', - cadftaxonomy.SECURITY_DOMAIN) - - def test_delete_trust(self): - trustor = unit.new_user_ref(domain_id=self.domain_id) - trustor = self.identity_api.create_user(trustor) - trustee = unit.new_user_ref(domain_id=self.domain_id) - trustee = self.identity_api.create_user(trustee) - role_ref = unit.new_role_ref() - trust_ref = unit.new_trust_ref(trustor['id'], trustee['id']) - self.trust_api.create_trust(trust_ref['id'], - trust_ref, - [role_ref]) - self.trust_api.delete_trust(trust_ref['id']) - self._assert_last_note( - trust_ref['id'], DELETED_OPERATION, 'OS-TRUST:trust') - self._assert_last_audit(trust_ref['id'], DELETED_OPERATION, - 'OS-TRUST:trust', cadftaxonomy.SECURITY_TRUST) - - def test_create_endpoint(self): - endpoint_ref = unit.new_endpoint_ref(service_id=self.service_id, - interface='public', - region_id=self.region_id) - self.catalog_api.create_endpoint(endpoint_ref['id'], endpoint_ref) - self._assert_notify_sent(endpoint_ref['id'], CREATED_OPERATION, - 'endpoint') - self._assert_last_audit(endpoint_ref['id'], CREATED_OPERATION, - 'endpoint', cadftaxonomy.SECURITY_ENDPOINT) - - def test_update_endpoint(self): - endpoint_ref = unit.new_endpoint_ref(service_id=self.service_id, - interface='public', - region_id=self.region_id) - self.catalog_api.create_endpoint(endpoint_ref['id'], endpoint_ref) - self.catalog_api.update_endpoint(endpoint_ref['id'], endpoint_ref) - self._assert_notify_sent(endpoint_ref['id'], UPDATED_OPERATION, - 'endpoint') - self._assert_last_audit(endpoint_ref['id'], UPDATED_OPERATION, - 'endpoint', cadftaxonomy.SECURITY_ENDPOINT) - - def 
test_delete_endpoint(self): - endpoint_ref = unit.new_endpoint_ref(service_id=self.service_id, - interface='public', - region_id=self.region_id) - self.catalog_api.create_endpoint(endpoint_ref['id'], endpoint_ref) - self.catalog_api.delete_endpoint(endpoint_ref['id']) - self._assert_notify_sent(endpoint_ref['id'], DELETED_OPERATION, - 'endpoint') - self._assert_last_audit(endpoint_ref['id'], DELETED_OPERATION, - 'endpoint', cadftaxonomy.SECURITY_ENDPOINT) - - def test_create_service(self): - service_ref = unit.new_service_ref() - self.catalog_api.create_service(service_ref['id'], service_ref) - self._assert_notify_sent(service_ref['id'], CREATED_OPERATION, - 'service') - self._assert_last_audit(service_ref['id'], CREATED_OPERATION, - 'service', cadftaxonomy.SECURITY_SERVICE) - - def test_update_service(self): - service_ref = unit.new_service_ref() - self.catalog_api.create_service(service_ref['id'], service_ref) - self.catalog_api.update_service(service_ref['id'], service_ref) - self._assert_notify_sent(service_ref['id'], UPDATED_OPERATION, - 'service') - self._assert_last_audit(service_ref['id'], UPDATED_OPERATION, - 'service', cadftaxonomy.SECURITY_SERVICE) - - def test_delete_service(self): - service_ref = unit.new_service_ref() - self.catalog_api.create_service(service_ref['id'], service_ref) - self.catalog_api.delete_service(service_ref['id']) - self._assert_notify_sent(service_ref['id'], DELETED_OPERATION, - 'service') - self._assert_last_audit(service_ref['id'], DELETED_OPERATION, - 'service', cadftaxonomy.SECURITY_SERVICE) - - def test_create_region(self): - region_ref = unit.new_region_ref() - self.catalog_api.create_region(region_ref) - self._assert_notify_sent(region_ref['id'], CREATED_OPERATION, - 'region') - self._assert_last_audit(region_ref['id'], CREATED_OPERATION, - 'region', cadftaxonomy.SECURITY_REGION) - - def test_update_region(self): - region_ref = unit.new_region_ref() - self.catalog_api.create_region(region_ref) - 
self.catalog_api.update_region(region_ref['id'], region_ref) - self._assert_notify_sent(region_ref['id'], UPDATED_OPERATION, - 'region') - self._assert_last_audit(region_ref['id'], UPDATED_OPERATION, - 'region', cadftaxonomy.SECURITY_REGION) - - def test_delete_region(self): - region_ref = unit.new_region_ref() - self.catalog_api.create_region(region_ref) - self.catalog_api.delete_region(region_ref['id']) - self._assert_notify_sent(region_ref['id'], DELETED_OPERATION, - 'region') - self._assert_last_audit(region_ref['id'], DELETED_OPERATION, - 'region', cadftaxonomy.SECURITY_REGION) - - def test_create_policy(self): - policy_ref = unit.new_policy_ref() - self.policy_api.create_policy(policy_ref['id'], policy_ref) - self._assert_notify_sent(policy_ref['id'], CREATED_OPERATION, - 'policy') - self._assert_last_audit(policy_ref['id'], CREATED_OPERATION, - 'policy', cadftaxonomy.SECURITY_POLICY) - - def test_update_policy(self): - policy_ref = unit.new_policy_ref() - self.policy_api.create_policy(policy_ref['id'], policy_ref) - self.policy_api.update_policy(policy_ref['id'], policy_ref) - self._assert_notify_sent(policy_ref['id'], UPDATED_OPERATION, - 'policy') - self._assert_last_audit(policy_ref['id'], UPDATED_OPERATION, - 'policy', cadftaxonomy.SECURITY_POLICY) - - def test_delete_policy(self): - policy_ref = unit.new_policy_ref() - self.policy_api.create_policy(policy_ref['id'], policy_ref) - self.policy_api.delete_policy(policy_ref['id']) - self._assert_notify_sent(policy_ref['id'], DELETED_OPERATION, - 'policy') - self._assert_last_audit(policy_ref['id'], DELETED_OPERATION, - 'policy', cadftaxonomy.SECURITY_POLICY) - - def test_disable_domain(self): - domain_ref = unit.new_domain_ref() - self.resource_api.create_domain(domain_ref['id'], domain_ref) - domain_ref['enabled'] = False - self.resource_api.update_domain(domain_ref['id'], domain_ref) - self._assert_notify_sent(domain_ref['id'], 'disabled', 'domain', - public=False) - - def 
test_disable_of_disabled_domain_does_not_notify(self): - domain_ref = unit.new_domain_ref(enabled=False) - self.resource_api.create_domain(domain_ref['id'], domain_ref) - # The domain_ref above is not changed during the create process. We - # can use the same ref to perform the update. - self.resource_api.update_domain(domain_ref['id'], domain_ref) - self._assert_notify_not_sent(domain_ref['id'], 'disabled', 'domain', - public=False) - - def test_update_group(self): - group_ref = unit.new_group_ref(domain_id=self.domain_id) - group_ref = self.identity_api.create_group(group_ref) - self.identity_api.update_group(group_ref['id'], group_ref) - self._assert_last_note(group_ref['id'], UPDATED_OPERATION, 'group') - self._assert_last_audit(group_ref['id'], UPDATED_OPERATION, 'group', - cadftaxonomy.SECURITY_GROUP) - - def test_update_project(self): - project_ref = unit.new_project_ref(domain_id=self.domain_id) - self.resource_api.create_project(project_ref['id'], project_ref) - self.resource_api.update_project(project_ref['id'], project_ref) - self._assert_notify_sent( - project_ref['id'], UPDATED_OPERATION, 'project', public=True) - self._assert_last_audit(project_ref['id'], UPDATED_OPERATION, - 'project', cadftaxonomy.SECURITY_PROJECT) - - def test_disable_project(self): - project_ref = unit.new_project_ref(domain_id=self.domain_id) - self.resource_api.create_project(project_ref['id'], project_ref) - project_ref['enabled'] = False - self.resource_api.update_project(project_ref['id'], project_ref) - self._assert_notify_sent(project_ref['id'], 'disabled', 'project', - public=False) - - def test_disable_of_disabled_project_does_not_notify(self): - project_ref = unit.new_project_ref(domain_id=self.domain_id, - enabled=False) - self.resource_api.create_project(project_ref['id'], project_ref) - # The project_ref above is not changed during the create process. We - # can use the same ref to perform the update. 
- self.resource_api.update_project(project_ref['id'], project_ref) - self._assert_notify_not_sent(project_ref['id'], 'disabled', 'project', - public=False) - - def test_update_project_does_not_send_disable(self): - project_ref = unit.new_project_ref(domain_id=self.domain_id) - self.resource_api.create_project(project_ref['id'], project_ref) - project_ref['enabled'] = True - self.resource_api.update_project(project_ref['id'], project_ref) - self._assert_last_note( - project_ref['id'], UPDATED_OPERATION, 'project') - self._assert_notify_not_sent(project_ref['id'], 'disabled', 'project') - - def test_update_role(self): - role_ref = unit.new_role_ref() - self.role_api.create_role(role_ref['id'], role_ref) - self.role_api.update_role(role_ref['id'], role_ref) - self._assert_last_note(role_ref['id'], UPDATED_OPERATION, 'role') - self._assert_last_audit(role_ref['id'], UPDATED_OPERATION, 'role', - cadftaxonomy.SECURITY_ROLE) - - def test_update_user(self): - user_ref = unit.new_user_ref(domain_id=self.domain_id) - user_ref = self.identity_api.create_user(user_ref) - self.identity_api.update_user(user_ref['id'], user_ref) - self._assert_last_note(user_ref['id'], UPDATED_OPERATION, 'user') - self._assert_last_audit(user_ref['id'], UPDATED_OPERATION, 'user', - cadftaxonomy.SECURITY_ACCOUNT_USER) - - def test_config_option_no_events(self): - self.config_fixture.config(notification_format='basic') - role_ref = unit.new_role_ref() - self.role_api.create_role(role_ref['id'], role_ref) - # The regular notifications will still be emitted, since they are - # used for callback handling. 
- self._assert_last_note(role_ref['id'], CREATED_OPERATION, 'role') - # No audit event should have occurred - self.assertEqual(0, len(self._audits)) - - def test_add_user_to_group(self): - user_ref = unit.new_user_ref(domain_id=self.domain_id) - user_ref = self.identity_api.create_user(user_ref) - group_ref = unit.new_group_ref(domain_id=self.domain_id) - group_ref = self.identity_api.create_group(group_ref) - self.identity_api.add_user_to_group(user_ref['id'], group_ref['id']) - self._assert_last_note(group_ref['id'], UPDATED_OPERATION, 'group', - actor_id=user_ref['id'], actor_type='user', - actor_operation='added') - - def test_remove_user_from_group(self): - user_ref = unit.new_user_ref(domain_id=self.domain_id) - user_ref = self.identity_api.create_user(user_ref) - group_ref = unit.new_group_ref(domain_id=self.domain_id) - group_ref = self.identity_api.create_group(group_ref) - self.identity_api.add_user_to_group(user_ref['id'], group_ref['id']) - self.identity_api.remove_user_from_group(user_ref['id'], - group_ref['id']) - self._assert_last_note(group_ref['id'], UPDATED_OPERATION, 'group', - actor_id=user_ref['id'], actor_type='user', - actor_operation='removed') - - -class CADFNotificationsForEntities(NotificationsForEntities): - - def setUp(self): - super(CADFNotificationsForEntities, self).setUp() - self.config_fixture.config(notification_format='cadf') - - def test_initiator_data_is_set(self): - ref = unit.new_domain_ref() - resp = self.post('/domains', body={'domain': ref}) - resource_id = resp.result.get('domain').get('id') - self._assert_last_audit(resource_id, CREATED_OPERATION, 'domain', - cadftaxonomy.SECURITY_DOMAIN) - self._assert_initiator_data_is_set(CREATED_OPERATION, - 'domain', - cadftaxonomy.SECURITY_DOMAIN) - - -class V2Notifications(BaseNotificationTest): - - def setUp(self): - super(V2Notifications, self).setUp() - self.config_fixture.config(notification_format='cadf') - - def test_user(self): - token = self.get_scoped_token() - resp = 
self.admin_request( - method='POST', - path='/v2.0/users', - body={ - 'user': { - 'name': uuid.uuid4().hex, - 'password': uuid.uuid4().hex, - 'enabled': True, - }, - }, - token=token, - ) - user_id = resp.result.get('user').get('id') - self._assert_initiator_data_is_set(CREATED_OPERATION, - 'user', - cadftaxonomy.SECURITY_ACCOUNT_USER) - # test for delete user - self.admin_request( - method='DELETE', - path='/v2.0/users/%s' % user_id, - token=token, - ) - self._assert_initiator_data_is_set(DELETED_OPERATION, - 'user', - cadftaxonomy.SECURITY_ACCOUNT_USER) - - def test_role(self): - token = self.get_scoped_token() - resp = self.admin_request( - method='POST', - path='/v2.0/OS-KSADM/roles', - body={ - 'role': { - 'name': uuid.uuid4().hex, - 'description': uuid.uuid4().hex, - }, - }, - token=token, - ) - role_id = resp.result.get('role').get('id') - self._assert_initiator_data_is_set(CREATED_OPERATION, - 'role', - cadftaxonomy.SECURITY_ROLE) - # test for delete role - self.admin_request( - method='DELETE', - path='/v2.0/OS-KSADM/roles/%s' % role_id, - token=token, - ) - self._assert_initiator_data_is_set(DELETED_OPERATION, - 'role', - cadftaxonomy.SECURITY_ROLE) - - def test_service_and_endpoint(self): - token = self.get_scoped_token() - resp = self.admin_request( - method='POST', - path='/v2.0/OS-KSADM/services', - body={ - 'OS-KSADM:service': { - 'name': uuid.uuid4().hex, - 'type': uuid.uuid4().hex, - 'description': uuid.uuid4().hex, - }, - }, - token=token, - ) - service_id = resp.result.get('OS-KSADM:service').get('id') - self._assert_initiator_data_is_set(CREATED_OPERATION, - 'service', - cadftaxonomy.SECURITY_SERVICE) - resp = self.admin_request( - method='POST', - path='/v2.0/endpoints', - body={ - 'endpoint': { - 'region': uuid.uuid4().hex, - 'service_id': service_id, - 'publicurl': uuid.uuid4().hex, - 'adminurl': uuid.uuid4().hex, - 'internalurl': uuid.uuid4().hex, - }, - }, - token=token, - ) - endpoint_id = resp.result.get('endpoint').get('id') - 
self._assert_initiator_data_is_set(CREATED_OPERATION, - 'endpoint', - cadftaxonomy.SECURITY_ENDPOINT) - # test for delete endpoint - self.admin_request( - method='DELETE', - path='/v2.0/endpoints/%s' % endpoint_id, - token=token, - ) - self._assert_initiator_data_is_set(DELETED_OPERATION, - 'endpoint', - cadftaxonomy.SECURITY_ENDPOINT) - # test for delete service - self.admin_request( - method='DELETE', - path='/v2.0/OS-KSADM/services/%s' % service_id, - token=token, - ) - self._assert_initiator_data_is_set(DELETED_OPERATION, - 'service', - cadftaxonomy.SECURITY_SERVICE) - - def test_project(self): - token = self.get_scoped_token() - resp = self.admin_request( - method='POST', - path='/v2.0/tenants', - body={ - 'tenant': { - 'name': uuid.uuid4().hex, - 'description': uuid.uuid4().hex, - 'enabled': True - }, - }, - token=token, - ) - project_id = resp.result.get('tenant').get('id') - self._assert_initiator_data_is_set(CREATED_OPERATION, - 'project', - cadftaxonomy.SECURITY_PROJECT) - # test for delete project - self.admin_request( - method='DELETE', - path='/v2.0/tenants/%s' % project_id, - token=token, - ) - self._assert_initiator_data_is_set(DELETED_OPERATION, - 'project', - cadftaxonomy.SECURITY_PROJECT) - - -class TestEventCallbacks(test_v3.RestfulTestCase): - - def setUp(self): - super(TestEventCallbacks, self).setUp() - self.has_been_called = False - - def _project_deleted_callback(self, service, resource_type, operation, - payload): - self.has_been_called = True - - def _project_created_callback(self, service, resource_type, operation, - payload): - self.has_been_called = True - - def test_notification_received(self): - callback = register_callback(CREATED_OPERATION, 'project') - project_ref = unit.new_project_ref(domain_id=self.domain_id) - self.resource_api.create_project(project_ref['id'], project_ref) - self.assertTrue(callback.called) - - def test_notification_method_not_callable(self): - fake_method = None - self.assertRaises(TypeError, - 
notifications.register_event_callback, - UPDATED_OPERATION, - 'project', - [fake_method]) - - def test_notification_event_not_valid(self): - self.assertRaises(ValueError, - notifications.register_event_callback, - uuid.uuid4().hex, - 'project', - self._project_deleted_callback) - - def test_event_registration_for_unknown_resource_type(self): - # Registration for unknown resource types should succeed. If no event - # is issued for that resource type, the callback wont be triggered. - notifications.register_event_callback(DELETED_OPERATION, - uuid.uuid4().hex, - self._project_deleted_callback) - resource_type = uuid.uuid4().hex - notifications.register_event_callback(DELETED_OPERATION, - resource_type, - self._project_deleted_callback) - - def test_provider_event_callback_subscription(self): - callback_called = [] - - @notifications.listener - class Foo(object): - def __init__(self): - self.event_callbacks = { - CREATED_OPERATION: {'project': self.foo_callback}} - - def foo_callback(self, service, resource_type, operation, - payload): - # uses callback_called from the closure - callback_called.append(True) - - Foo() - project_ref = unit.new_project_ref(domain_id=self.domain_id) - self.resource_api.create_project(project_ref['id'], project_ref) - self.assertEqual([True], callback_called) - - def test_provider_event_callbacks_subscription(self): - callback_called = [] - - @notifications.listener - class Foo(object): - def __init__(self): - self.event_callbacks = { - CREATED_OPERATION: { - 'project': [self.callback_0, self.callback_1]}} - - def callback_0(self, service, resource_type, operation, payload): - # uses callback_called from the closure - callback_called.append('cb0') - - def callback_1(self, service, resource_type, operation, payload): - # uses callback_called from the closure - callback_called.append('cb1') - - Foo() - project_ref = unit.new_project_ref(domain_id=self.domain_id) - self.resource_api.create_project(project_ref['id'], project_ref) - 
self.assertItemsEqual(['cb1', 'cb0'], callback_called) - - def test_invalid_event_callbacks(self): - @notifications.listener - class Foo(object): - def __init__(self): - self.event_callbacks = 'bogus' - - self.assertRaises(AttributeError, Foo) - - def test_invalid_event_callbacks_event(self): - @notifications.listener - class Foo(object): - def __init__(self): - self.event_callbacks = {CREATED_OPERATION: 'bogus'} - - self.assertRaises(AttributeError, Foo) - - def test_using_an_unbound_method_as_a_callback_fails(self): - # NOTE(dstanek): An unbound method is when you reference a method - # from a class object. You'll get a method that isn't bound to a - # particular instance so there is no magic 'self'. You can call it, - # but you have to pass in the instance manually like: C.m(C()). - # If you reference the method from an instance then you get a method - # that effectively curries the self argument for you - # (think functools.partial). Obviously is we don't have an - # instance then we can't call the method. 
- @notifications.listener - class Foo(object): - def __init__(self): - self.event_callbacks = {CREATED_OPERATION: - {'project': Foo.callback}} - - def callback(self, *args): - pass - - # TODO(dstanek): it would probably be nice to fail early using - # something like: - # self.assertRaises(TypeError, Foo) - Foo() - project_ref = unit.new_project_ref(domain_id=self.domain_id) - self.assertRaises(TypeError, self.resource_api.create_project, - project_ref['id'], project_ref) - - -class CadfNotificationsWrapperTestCase(test_v3.RestfulTestCase): - - LOCAL_HOST = 'localhost' - ACTION = 'authenticate' - ROLE_ASSIGNMENT = 'role_assignment' - - def setUp(self): - super(CadfNotificationsWrapperTestCase, self).setUp() - self._notifications = [] - - def fake_notify(action, initiator, outcome, target, - event_type, **kwargs): - service_security = cadftaxonomy.SERVICE_SECURITY - - event = eventfactory.EventFactory().new_event( - eventType=cadftype.EVENTTYPE_ACTIVITY, - outcome=outcome, - action=action, - initiator=initiator, - target=target, - observer=cadfresource.Resource(typeURI=service_security)) - - for key, value in kwargs.items(): - setattr(event, key, value) - - note = { - 'action': action, - 'initiator': initiator, - 'event': event, - 'event_type': event_type, - 'send_notification_called': True} - self._notifications.append(note) - - self.useFixture(mockpatch.PatchObject( - notifications, '_send_audit_notification', fake_notify)) - - def _assert_last_note(self, action, user_id, event_type=None): - self.assertTrue(self._notifications) - note = self._notifications[-1] - self.assertEqual(action, note['action']) - initiator = note['initiator'] - self.assertEqual(user_id, initiator.id) - self.assertEqual(self.LOCAL_HOST, initiator.host.address) - self.assertTrue(note['send_notification_called']) - if event_type: - self.assertEqual(event_type, note['event_type']) - - def _assert_event(self, role_id, project=None, domain=None, - user=None, group=None, inherit=False): - 
"""Assert that the CADF event is valid. - - In the case of role assignments, the event will have extra data, - specifically, the role, target, actor, and if the role is inherited. - - An example event, as a dictionary is seen below: - { - 'typeURI': 'http://schemas.dmtf.org/cloud/audit/1.0/event', - 'initiator': { - 'typeURI': 'service/security/account/user', - 'host': {'address': 'localhost'}, - 'id': 'openstack:0a90d95d-582c-4efb-9cbc-e2ca7ca9c341', - 'name': u'bccc2d9bfc2a46fd9e33bcf82f0b5c21' - }, - 'target': { - 'typeURI': 'service/security/account/user', - 'id': 'openstack:d48ea485-ef70-4f65-8d2b-01aa9d7ec12d' - }, - 'observer': { - 'typeURI': 'service/security', - 'id': 'openstack:d51dd870-d929-4aba-8d75-dcd7555a0c95' - }, - 'eventType': 'activity', - 'eventTime': '2014-08-21T21:04:56.204536+0000', - 'role': u'0e6b990380154a2599ce6b6e91548a68', - 'domain': u'24bdcff1aab8474895dbaac509793de1', - 'inherited_to_projects': False, - 'group': u'c1e22dc67cbd469ea0e33bf428fe597a', - 'action': 'created.role_assignment', - 'outcome': 'success', - 'id': 'openstack:782689dd-f428-4f13-99c7-5c70f94a5ac1' - } - """ - note = self._notifications[-1] - event = note['event'] - if project: - self.assertEqual(project, event.project) - if domain: - self.assertEqual(domain, event.domain) - if group: - self.assertEqual(group, event.group) - elif user: - self.assertEqual(user, event.user) - self.assertEqual(role_id, event.role) - self.assertEqual(inherit, event.inherited_to_projects) - - def test_v3_authenticate_user_name_and_domain_id(self): - user_id = self.user_id - user_name = self.user['name'] - password = self.user['password'] - domain_id = self.domain_id - data = self.build_authentication_request(username=user_name, - user_domain_id=domain_id, - password=password) - self.post('/auth/tokens', body=data) - self._assert_last_note(self.ACTION, user_id) - - def test_v3_authenticate_user_id(self): - user_id = self.user_id - password = self.user['password'] - data = 
self.build_authentication_request(user_id=user_id, - password=password) - self.post('/auth/tokens', body=data) - self._assert_last_note(self.ACTION, user_id) - - def test_v3_authenticate_user_name_and_domain_name(self): - user_id = self.user_id - user_name = self.user['name'] - password = self.user['password'] - domain_name = self.domain['name'] - data = self.build_authentication_request(username=user_name, - user_domain_name=domain_name, - password=password) - self.post('/auth/tokens', body=data) - self._assert_last_note(self.ACTION, user_id) - - def _test_role_assignment(self, url, role, project=None, domain=None, - user=None, group=None): - self.put(url) - action = "%s.%s" % (CREATED_OPERATION, self.ROLE_ASSIGNMENT) - event_type = '%s.%s.%s' % (notifications.SERVICE, - self.ROLE_ASSIGNMENT, CREATED_OPERATION) - self._assert_last_note(action, self.user_id, event_type) - self._assert_event(role, project, domain, user, group) - self.delete(url) - action = "%s.%s" % (DELETED_OPERATION, self.ROLE_ASSIGNMENT) - event_type = '%s.%s.%s' % (notifications.SERVICE, - self.ROLE_ASSIGNMENT, DELETED_OPERATION) - self._assert_last_note(action, self.user_id, event_type) - self._assert_event(role, project, domain, user, None) - - def test_user_project_grant(self): - url = ('/projects/%s/users/%s/roles/%s' % - (self.project_id, self.user_id, self.role_id)) - self._test_role_assignment(url, self.role_id, - project=self.project_id, - user=self.user_id) - - def test_group_domain_grant(self): - group_ref = unit.new_group_ref(domain_id=self.domain_id) - group = self.identity_api.create_group(group_ref) - self.identity_api.add_user_to_group(self.user_id, group['id']) - url = ('/domains/%s/groups/%s/roles/%s' % - (self.domain_id, group['id'], self.role_id)) - self._test_role_assignment(url, self.role_id, - domain=self.domain_id, - user=self.user_id, - group=group['id']) - - def test_add_role_to_user_and_project(self): - # A notification is sent when add_role_to_user_and_project is 
called on - # the assignment manager. - - project_ref = unit.new_project_ref(self.domain_id) - project = self.resource_api.create_project( - project_ref['id'], project_ref) - tenant_id = project['id'] - - self.assignment_api.add_role_to_user_and_project( - self.user_id, tenant_id, self.role_id) - - self.assertTrue(self._notifications) - note = self._notifications[-1] - self.assertEqual('created.role_assignment', note['action']) - self.assertTrue(note['send_notification_called']) - - self._assert_event(self.role_id, project=tenant_id, user=self.user_id) - - def test_remove_role_from_user_and_project(self): - # A notification is sent when remove_role_from_user_and_project is - # called on the assignment manager. - - self.assignment_api.remove_role_from_user_and_project( - self.user_id, self.project_id, self.role_id) - - self.assertTrue(self._notifications) - note = self._notifications[-1] - self.assertEqual('deleted.role_assignment', note['action']) - self.assertTrue(note['send_notification_called']) - - self._assert_event(self.role_id, project=self.project_id, - user=self.user_id) - - -class TestCallbackRegistration(unit.BaseTestCase): - def setUp(self): - super(TestCallbackRegistration, self).setUp() - self.mock_log = mock.Mock() - # Force the callback logging to occur - self.mock_log.logger.getEffectiveLevel.return_value = logging.DEBUG - - def verify_log_message(self, data): - """Verify log message. - - Tests that use this are a little brittle because adding more - logging can break them. 
- - TODO(dstanek): remove the need for this in a future refactoring - - """ - log_fn = self.mock_log.debug - self.assertEqual(len(data), log_fn.call_count) - for datum in data: - log_fn.assert_any_call(mock.ANY, datum) - - def test_a_function_callback(self): - def callback(*args, **kwargs): - pass - - resource_type = 'thing' - with mock.patch('keystone.notifications.LOG', self.mock_log): - notifications.register_event_callback( - CREATED_OPERATION, resource_type, callback) - - callback = 'keystone.tests.unit.common.test_notifications.callback' - expected_log_data = { - 'callback': callback, - 'event': 'identity.%s.created' % resource_type - } - self.verify_log_message([expected_log_data]) - - def test_a_method_callback(self): - class C(object): - def callback(self, *args, **kwargs): - pass - - with mock.patch('keystone.notifications.LOG', self.mock_log): - notifications.register_event_callback( - CREATED_OPERATION, 'thing', C().callback) - - callback = 'keystone.tests.unit.common.test_notifications.C.callback' - expected_log_data = { - 'callback': callback, - 'event': 'identity.thing.created' - } - self.verify_log_message([expected_log_data]) - - def test_a_list_of_callbacks(self): - def callback(*args, **kwargs): - pass - - class C(object): - def callback(self, *args, **kwargs): - pass - - with mock.patch('keystone.notifications.LOG', self.mock_log): - notifications.register_event_callback( - CREATED_OPERATION, 'thing', [callback, C().callback]) - - callback_1 = 'keystone.tests.unit.common.test_notifications.callback' - callback_2 = 'keystone.tests.unit.common.test_notifications.C.callback' - expected_log_data = [ - { - 'callback': callback_1, - 'event': 'identity.thing.created' - }, - { - 'callback': callback_2, - 'event': 'identity.thing.created' - }, - ] - self.verify_log_message(expected_log_data) - - def test_an_invalid_callback(self): - self.assertRaises(TypeError, - notifications.register_event_callback, - (CREATED_OPERATION, 'thing', object())) - - def 
test_an_invalid_event(self): - def callback(*args, **kwargs): - pass - - self.assertRaises(ValueError, - notifications.register_event_callback, - uuid.uuid4().hex, - 'thing', - callback) diff --git a/keystone-moon/keystone/tests/unit/common/test_pemutils.py b/keystone-moon/keystone/tests/unit/common/test_pemutils.py deleted file mode 100644 index c2f58518..00000000 --- a/keystone-moon/keystone/tests/unit/common/test_pemutils.py +++ /dev/null @@ -1,337 +0,0 @@ -# Copyright 2013 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import base64 - -from six import moves - -from keystone.common import pemutils -from keystone.tests import unit as tests - - -# List of 2-tuples, (pem_type, pem_header) -headers = pemutils.PEM_TYPE_TO_HEADER.items() - - -def make_data(size, offset=0): - return ''.join([chr(x % 255) for x in moves.range(offset, size + offset)]) - - -def make_base64_from_data(data): - return base64.b64encode(data) - - -def wrap_base64(base64_text): - wrapped_text = '\n'.join([base64_text[x:x + 64] - for x in moves.range(0, len(base64_text), 64)]) - wrapped_text += '\n' - return wrapped_text - - -def make_pem(header, data): - base64_text = make_base64_from_data(data) - wrapped_text = wrap_base64(base64_text) - - result = '-----BEGIN %s-----\n' % header - result += wrapped_text - result += '-----END %s-----\n' % header - - return result - - -class PEM(object): - """PEM text and it's associated data broken out, used for testing. 
- - """ - def __init__(self, pem_header='CERTIFICATE', pem_type='cert', - data_size=70, data_offset=0): - self.pem_header = pem_header - self.pem_type = pem_type - self.data_size = data_size - self.data_offset = data_offset - self.data = make_data(self.data_size, self.data_offset) - self.base64_text = make_base64_from_data(self.data) - self.wrapped_base64 = wrap_base64(self.base64_text) - self.pem_text = make_pem(self.pem_header, self.data) - - -class TestPEMParseResult(tests.BaseTestCase): - - def test_pem_types(self): - for pem_type in pemutils.pem_types: - pem_header = pemutils.PEM_TYPE_TO_HEADER[pem_type] - r = pemutils.PEMParseResult(pem_type=pem_type) - self.assertEqual(pem_type, r.pem_type) - self.assertEqual(pem_header, r.pem_header) - - pem_type = 'xxx' - self.assertRaises(ValueError, - pemutils.PEMParseResult, pem_type=pem_type) - - def test_pem_headers(self): - for pem_header in pemutils.pem_headers: - pem_type = pemutils.PEM_HEADER_TO_TYPE[pem_header] - r = pemutils.PEMParseResult(pem_header=pem_header) - self.assertEqual(pem_type, r.pem_type) - self.assertEqual(pem_header, r.pem_header) - - pem_header = 'xxx' - self.assertRaises(ValueError, - pemutils.PEMParseResult, pem_header=pem_header) - - -class TestPEMParse(tests.BaseTestCase): - def test_parse_none(self): - text = '' - text += 'bla bla\n' - text += 'yada yada yada\n' - text += 'burfl blatz bingo\n' - - parse_results = pemutils.parse_pem(text) - self.assertEqual(0, len(parse_results)) - - self.assertEqual(False, pemutils.is_pem(text)) - - def test_parse_invalid(self): - p = PEM(pem_type='xxx', - pem_header='XXX') - text = p.pem_text - - self.assertRaises(ValueError, - pemutils.parse_pem, text) - - def test_parse_one(self): - data_size = 70 - count = len(headers) - pems = [] - - for i in moves.range(count): - pems.append(PEM(pem_type=headers[i][0], - pem_header=headers[i][1], - data_size=data_size + i, - data_offset=i)) - - for i in moves.range(count): - p = pems[i] - text = p.pem_text - - 
parse_results = pemutils.parse_pem(text) - self.assertEqual(1, len(parse_results)) - - r = parse_results[0] - self.assertEqual(p.pem_type, r.pem_type) - self.assertEqual(p.pem_header, r.pem_header) - self.assertEqual(p.pem_text, - text[r.pem_start:r.pem_end]) - self.assertEqual(p.wrapped_base64, - text[r.base64_start:r.base64_end]) - self.assertEqual(p.data, r.binary_data) - - def test_parse_one_embedded(self): - p = PEM(data_offset=0) - text = '' - text += 'bla bla\n' - text += 'yada yada yada\n' - text += p.pem_text - text += 'burfl blatz bingo\n' - - parse_results = pemutils.parse_pem(text) - self.assertEqual(1, len(parse_results)) - - r = parse_results[0] - self.assertEqual(p.pem_type, r.pem_type) - self.assertEqual(p.pem_header, r.pem_header) - self.assertEqual(p.pem_text, - text[r.pem_start:r.pem_end]) - self.assertEqual(p.wrapped_base64, - text[r.base64_start: r.base64_end]) - self.assertEqual(p.data, r.binary_data) - - def test_parse_multple(self): - data_size = 70 - count = len(headers) - pems = [] - text = '' - - for i in moves.range(count): - pems.append(PEM(pem_type=headers[i][0], - pem_header=headers[i][1], - data_size=data_size + i, - data_offset=i)) - - for i in moves.range(count): - text += pems[i].pem_text - - parse_results = pemutils.parse_pem(text) - self.assertEqual(count, len(parse_results)) - - for i in moves.range(count): - r = parse_results[i] - p = pems[i] - - self.assertEqual(p.pem_type, r.pem_type) - self.assertEqual(p.pem_header, r.pem_header) - self.assertEqual(p.pem_text, - text[r.pem_start:r.pem_end]) - self.assertEqual(p.wrapped_base64, - text[r.base64_start: r.base64_end]) - self.assertEqual(p.data, r.binary_data) - - def test_parse_multple_find_specific(self): - data_size = 70 - count = len(headers) - pems = [] - text = '' - - for i in moves.range(count): - pems.append(PEM(pem_type=headers[i][0], - pem_header=headers[i][1], - data_size=data_size + i, - data_offset=i)) - - for i in moves.range(count): - text += pems[i].pem_text - - 
for i in moves.range(count): - parse_results = pemutils.parse_pem(text, pem_type=headers[i][0]) - self.assertEqual(1, len(parse_results)) - - r = parse_results[0] - p = pems[i] - - self.assertEqual(p.pem_type, r.pem_type) - self.assertEqual(p.pem_header, r.pem_header) - self.assertEqual(p.pem_text, - text[r.pem_start:r.pem_end]) - self.assertEqual(p.wrapped_base64, - text[r.base64_start:r.base64_end]) - self.assertEqual(p.data, r.binary_data) - - def test_parse_multple_embedded(self): - data_size = 75 - count = len(headers) - pems = [] - text = '' - - for i in moves.range(count): - pems.append(PEM(pem_type=headers[i][0], - pem_header=headers[i][1], - data_size=data_size + i, - data_offset=i)) - - for i in moves.range(count): - text += 'bla bla\n' - text += 'yada yada yada\n' - text += pems[i].pem_text - text += 'burfl blatz bingo\n' - - parse_results = pemutils.parse_pem(text) - self.assertEqual(count, len(parse_results)) - - for i in moves.range(count): - r = parse_results[i] - p = pems[i] - - self.assertEqual(p.pem_type, r.pem_type) - self.assertEqual(p.pem_header, r.pem_header) - self.assertEqual(p.pem_text, - text[r.pem_start:r.pem_end]) - self.assertEqual(p.wrapped_base64, - text[r.base64_start:r.base64_end]) - self.assertEqual(p.data, r.binary_data) - - def test_get_pem_data_none(self): - text = '' - text += 'bla bla\n' - text += 'yada yada yada\n' - text += 'burfl blatz bingo\n' - - data = pemutils.get_pem_data(text) - self.assertIsNone(data) - - def test_get_pem_data_invalid(self): - p = PEM(pem_type='xxx', - pem_header='XXX') - text = p.pem_text - - self.assertRaises(ValueError, - pemutils.get_pem_data, text) - - def test_get_pem_data(self): - data_size = 70 - count = len(headers) - pems = [] - - for i in moves.range(count): - pems.append(PEM(pem_type=headers[i][0], - pem_header=headers[i][1], - data_size=data_size + i, - data_offset=i)) - - for i in moves.range(count): - p = pems[i] - text = p.pem_text - - data = pemutils.get_pem_data(text, p.pem_type) - 
self.assertEqual(p.data, data) - - def test_is_pem(self): - data_size = 70 - count = len(headers) - pems = [] - - for i in moves.range(count): - pems.append(PEM(pem_type=headers[i][0], - pem_header=headers[i][1], - data_size=data_size + i, - data_offset=i)) - - for i in moves.range(count): - p = pems[i] - text = p.pem_text - self.assertTrue(pemutils.is_pem(text, pem_type=p.pem_type)) - self.assertFalse(pemutils.is_pem(text, - pem_type=p.pem_type + 'xxx')) - - def test_base64_to_pem(self): - data_size = 70 - count = len(headers) - pems = [] - - for i in moves.range(count): - pems.append(PEM(pem_type=headers[i][0], - pem_header=headers[i][1], - data_size=data_size + i, - data_offset=i)) - - for i in moves.range(count): - p = pems[i] - pem = pemutils.base64_to_pem(p.base64_text, p.pem_type) - self.assertEqual(pemutils.get_pem_data(pem, p.pem_type), p.data) - - def test_binary_to_pem(self): - data_size = 70 - count = len(headers) - pems = [] - - for i in moves.range(count): - pems.append(PEM(pem_type=headers[i][0], - pem_header=headers[i][1], - data_size=data_size + i, - data_offset=i)) - - for i in moves.range(count): - p = pems[i] - pem = pemutils.binary_to_pem(p.data, p.pem_type) - self.assertEqual(pemutils.get_pem_data(pem, p.pem_type), p.data) diff --git a/keystone-moon/keystone/tests/unit/common/test_sql_core.py b/keystone-moon/keystone/tests/unit/common/test_sql_core.py deleted file mode 100644 index 7d20eb03..00000000 --- a/keystone-moon/keystone/tests/unit/common/test_sql_core.py +++ /dev/null @@ -1,52 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - - -from sqlalchemy.ext import declarative - -from keystone.common import sql -from keystone.tests import unit -from keystone.tests.unit import utils - - -ModelBase = declarative.declarative_base() - - -class TestModel(ModelBase, sql.ModelDictMixin): - __tablename__ = 'testmodel' - id = sql.Column(sql.String(64), primary_key=True) - text = sql.Column(sql.String(64), nullable=False) - - -class TestModelDictMixin(unit.BaseTestCase): - - def test_creating_a_model_instance_from_a_dict(self): - d = {'id': utils.new_uuid(), 'text': utils.new_uuid()} - m = TestModel.from_dict(d) - self.assertEqual(d['id'], m.id) - self.assertEqual(d['text'], m.text) - - def test_creating_a_dict_from_a_model_instance(self): - m = TestModel(id=utils.new_uuid(), text=utils.new_uuid()) - d = m.to_dict() - self.assertEqual(d['id'], m.id) - self.assertEqual(d['text'], m.text) - - def test_creating_a_model_instance_from_an_invalid_dict(self): - d = {'id': utils.new_uuid(), 'text': utils.new_uuid(), 'extra': None} - self.assertRaises(TypeError, TestModel.from_dict, d) - - def test_creating_a_dict_from_a_model_instance_that_has_extra_attrs(self): - expected = {'id': utils.new_uuid(), 'text': utils.new_uuid()} - m = TestModel(id=expected['id'], text=expected['text']) - m.extra = 'this should not be in the dictionary' - self.assertEqual(expected, m.to_dict()) diff --git a/keystone-moon/keystone/tests/unit/common/test_utils.py b/keystone-moon/keystone/tests/unit/common/test_utils.py deleted file mode 100644 index 3641aacd..00000000 --- a/keystone-moon/keystone/tests/unit/common/test_utils.py +++ /dev/null @@ -1,210 +0,0 @@ -# encoding: utf-8 -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import datetime -import uuid - -from oslo_config import cfg -from oslo_config import fixture as config_fixture -from oslo_serialization import jsonutils -import six - -from keystone.common import utils as common_utils -from keystone import exception -from keystone.tests import unit -from keystone.tests.unit import utils -from keystone.version import service - - -CONF = cfg.CONF - -TZ = utils.TZ - - -class UtilsTestCase(unit.BaseTestCase): - OPTIONAL = object() - - def setUp(self): - super(UtilsTestCase, self).setUp() - self.config_fixture = self.useFixture(config_fixture.Config(CONF)) - - def test_resource_uuid(self): - uuid_str = '536e28c2017e405e89b25a1ed777b952' - self.assertEqual(uuid_str, common_utils.resource_uuid(uuid_str)) - - # Exact 64 length string. - uuid_str = ('536e28c2017e405e89b25a1ed777b952' - 'f13de678ac714bb1b7d1e9a007c10db5') - resource_id_namespace = common_utils.RESOURCE_ID_NAMESPACE - transformed_id = uuid.uuid5(resource_id_namespace, uuid_str).hex - self.assertEqual(transformed_id, common_utils.resource_uuid(uuid_str)) - - # Non-ASCII character test. - non_ascii_ = 'ß' * 32 - transformed_id = uuid.uuid5(resource_id_namespace, non_ascii_).hex - self.assertEqual(transformed_id, - common_utils.resource_uuid(non_ascii_)) - - # This input is invalid because it's length is more than 64. - invalid_input = 'x' * 65 - self.assertRaises(ValueError, common_utils.resource_uuid, - invalid_input) - - # 64 length unicode string, to mimic what is returned from mapping_id - # backend. 
- uuid_str = six.text_type('536e28c2017e405e89b25a1ed777b952' - 'f13de678ac714bb1b7d1e9a007c10db5') - resource_id_namespace = common_utils.RESOURCE_ID_NAMESPACE - if six.PY2: - uuid_str = uuid_str.encode('utf-8') - transformed_id = uuid.uuid5(resource_id_namespace, uuid_str).hex - self.assertEqual(transformed_id, common_utils.resource_uuid(uuid_str)) - - def test_hash(self): - password = 'right' - wrong = 'wrongwrong' # Two wrongs don't make a right - hashed = common_utils.hash_password(password) - self.assertTrue(common_utils.check_password(password, hashed)) - self.assertFalse(common_utils.check_password(wrong, hashed)) - - def test_verify_normal_password_strict(self): - self.config_fixture.config(strict_password_check=False) - password = uuid.uuid4().hex - verified = common_utils.verify_length_and_trunc_password(password) - self.assertEqual(password, verified) - - def test_that_a_hash_can_not_be_validated_against_a_hash(self): - # NOTE(dstanek): Bug 1279849 reported a problem where passwords - # were not being hashed if they already looked like a hash. This - # would allow someone to hash their password ahead of time - # (potentially getting around password requirements, like - # length) and then they could auth with their original password. 
- password = uuid.uuid4().hex - hashed_password = common_utils.hash_password(password) - new_hashed_password = common_utils.hash_password(hashed_password) - self.assertFalse(common_utils.check_password(password, - new_hashed_password)) - - def test_verify_long_password_strict(self): - self.config_fixture.config(strict_password_check=False) - self.config_fixture.config(group='identity', max_password_length=5) - max_length = CONF.identity.max_password_length - invalid_password = 'passw0rd' - trunc = common_utils.verify_length_and_trunc_password(invalid_password) - self.assertEqual(invalid_password[:max_length], trunc) - - def test_verify_long_password_strict_raises_exception(self): - self.config_fixture.config(strict_password_check=True) - self.config_fixture.config(group='identity', max_password_length=5) - invalid_password = 'passw0rd' - self.assertRaises(exception.PasswordVerificationError, - common_utils.verify_length_and_trunc_password, - invalid_password) - - def test_hash_long_password_truncation(self): - self.config_fixture.config(strict_password_check=False) - invalid_length_password = '0' * 9999999 - hashed = common_utils.hash_password(invalid_length_password) - self.assertTrue(common_utils.check_password(invalid_length_password, - hashed)) - - def test_hash_long_password_strict(self): - self.config_fixture.config(strict_password_check=True) - invalid_length_password = '0' * 9999999 - self.assertRaises(exception.PasswordVerificationError, - common_utils.hash_password, - invalid_length_password) - - def _create_test_user(self, password=OPTIONAL): - user = {"name": "hthtest"} - if password is not self.OPTIONAL: - user['password'] = password - - return user - - def test_hash_user_password_without_password(self): - user = self._create_test_user() - hashed = common_utils.hash_user_password(user) - self.assertEqual(user, hashed) - - def test_hash_user_password_with_null_password(self): - user = self._create_test_user(password=None) - hashed = 
common_utils.hash_user_password(user) - self.assertEqual(user, hashed) - - def test_hash_user_password_with_empty_password(self): - password = '' - user = self._create_test_user(password=password) - user_hashed = common_utils.hash_user_password(user) - password_hashed = user_hashed['password'] - self.assertTrue(common_utils.check_password(password, password_hashed)) - - def test_hash_edge_cases(self): - hashed = common_utils.hash_password('secret') - self.assertFalse(common_utils.check_password('', hashed)) - self.assertFalse(common_utils.check_password(None, hashed)) - - def test_hash_unicode(self): - password = u'Comment \xe7a va' - wrong = 'Comment ?a va' - hashed = common_utils.hash_password(password) - self.assertTrue(common_utils.check_password(password, hashed)) - self.assertFalse(common_utils.check_password(wrong, hashed)) - - def test_auth_str_equal(self): - self.assertTrue(common_utils.auth_str_equal('abc123', 'abc123')) - self.assertFalse(common_utils.auth_str_equal('a', 'aaaaa')) - self.assertFalse(common_utils.auth_str_equal('aaaaa', 'a')) - self.assertFalse(common_utils.auth_str_equal('ABC123', 'abc123')) - - def test_unixtime(self): - global TZ - - @utils.timezone - def _test_unixtime(): - epoch = common_utils.unixtime(dt) - self.assertEqual(epoch, epoch_ans, "TZ=%s" % TZ) - - dt = datetime.datetime(1970, 1, 2, 3, 4, 56, 0) - epoch_ans = 56 + 4 * 60 + 3 * 3600 + 86400 - for d in ['+0', '-11', '-8', '-5', '+5', '+8', '+14']: - TZ = 'UTC' + d - _test_unixtime() - - def test_pki_encoder(self): - data = {'field': 'value'} - json = jsonutils.dumps(data, cls=common_utils.PKIEncoder) - expected_json = '{"field":"value"}' - self.assertEqual(expected_json, json) - - def test_url_safe_check(self): - base_str = 'i am safe' - self.assertFalse(common_utils.is_not_url_safe(base_str)) - for i in common_utils.URL_RESERVED_CHARS: - self.assertTrue(common_utils.is_not_url_safe(base_str + i)) - - def test_url_safe_with_unicode_check(self): - base_str = u'i am \xe7afe' 
- self.assertFalse(common_utils.is_not_url_safe(base_str)) - for i in common_utils.URL_RESERVED_CHARS: - self.assertTrue(common_utils.is_not_url_safe(base_str + i)) - - -class ServiceHelperTests(unit.BaseTestCase): - - @service.fail_gracefully - def _do_test(self): - raise Exception("Test Exc") - - def test_fail_gracefully(self): - self.assertRaises(unit.UnexpectedExit, self._do_test) diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_db2.conf b/keystone-moon/keystone/tests/unit/config_files/backend_db2.conf deleted file mode 100644 index 2bd0c1a6..00000000 --- a/keystone-moon/keystone/tests/unit/config_files/backend_db2.conf +++ /dev/null @@ -1,4 +0,0 @@ -#Used for running the Migrate tests against a live DB2 Server -#See _sql_livetest.py -[database] -connection = ibm_db_sa://keystone:keystone@/staktest?charset=utf8 diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_ldap.conf b/keystone-moon/keystone/tests/unit/config_files/backend_ldap.conf deleted file mode 100644 index 32161185..00000000 --- a/keystone-moon/keystone/tests/unit/config_files/backend_ldap.conf +++ /dev/null @@ -1,5 +0,0 @@ -[ldap] -url = fake://memory -user = cn=Admin -password = password -suffix = cn=example,cn=com diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_ldap_pool.conf b/keystone-moon/keystone/tests/unit/config_files/backend_ldap_pool.conf deleted file mode 100644 index 36fa1ac9..00000000 --- a/keystone-moon/keystone/tests/unit/config_files/backend_ldap_pool.conf +++ /dev/null @@ -1,41 +0,0 @@ -[ldap] -url = fakepool://memory -user = cn=Admin -password = password -backend_entities = ['Tenant', 'User', 'UserRoleAssociation', 'Role', 'Group', 'Domain'] -suffix = cn=example,cn=com - -# Connection pooling specific attributes - -# Enable LDAP connection pooling. (boolean value) -use_pool=true - -# Connection pool size. (integer value) -pool_size=5 - -# Maximum count of reconnect trials. 
(integer value) -pool_retry_max=2 - -# Time span in seconds to wait between two reconnect trials. -# (floating point value) -pool_retry_delay=0.2 - -# Connector timeout in seconds. Value -1 indicates indefinite -# wait for response. (integer value) -pool_connection_timeout=-1 - -# Connection lifetime in seconds. -# (integer value) -pool_connection_lifetime=600 - -# Enable LDAP connection pooling for end user authentication. -# If use_pool is disabled, then this setting is meaningless -# and is not used at all. (boolean value) -use_auth_pool=true - -# End user auth connection pool size. (integer value) -auth_pool_size=50 - -# End user auth connection lifetime in seconds. (integer -# value) -auth_pool_connection_lifetime=60 \ No newline at end of file diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_ldap_sql.conf b/keystone-moon/keystone/tests/unit/config_files/backend_ldap_sql.conf deleted file mode 100644 index 96a0ffa9..00000000 --- a/keystone-moon/keystone/tests/unit/config_files/backend_ldap_sql.conf +++ /dev/null @@ -1,14 +0,0 @@ -[database] -#For a specific location file based SQLite use: -#connection = sqlite:////tmp/keystone.db -#To Test MySQL: -#connection = mysql+pymysql://keystone:keystone@localhost/keystone?charset=utf8 -#To Test PostgreSQL: -#connection = postgresql://keystone:keystone@localhost/keystone?client_encoding=utf8 -idle_timeout = 200 - -[ldap] -url = fake://memory -user = cn=Admin -password = password -suffix = cn=example,cn=com diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_liveldap.conf b/keystone-moon/keystone/tests/unit/config_files/backend_liveldap.conf deleted file mode 100644 index bb9ee08f..00000000 --- a/keystone-moon/keystone/tests/unit/config_files/backend_liveldap.conf +++ /dev/null @@ -1,10 +0,0 @@ -[ldap] -url = ldap://localhost -user = cn=Manager,dc=openstack,dc=org -password = test -suffix = dc=openstack,dc=org -group_tree_dn = ou=UserGroups,dc=openstack,dc=org -user_tree_dn = 
ou=Users,dc=openstack,dc=org -user_enabled_emulation = True -user_mail_attribute = mail -use_dumb_member = True diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_multi_ldap_sql.conf b/keystone-moon/keystone/tests/unit/config_files/backend_multi_ldap_sql.conf deleted file mode 100644 index 5185770b..00000000 --- a/keystone-moon/keystone/tests/unit/config_files/backend_multi_ldap_sql.conf +++ /dev/null @@ -1,9 +0,0 @@ -[database] -connection = sqlite:// -#For a file based sqlite use -#connection = sqlite:////tmp/keystone.db -#To Test MySQL: -#connection = mysql+pymysql://keystone:keystone@localhost/keystone?charset=utf8 -#To Test PostgreSQL: -#connection = postgresql://keystone:keystone@localhost/keystone?client_encoding=utf8 -idle_timeout = 200 diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_mysql.conf b/keystone-moon/keystone/tests/unit/config_files/backend_mysql.conf deleted file mode 100644 index 2495f036..00000000 --- a/keystone-moon/keystone/tests/unit/config_files/backend_mysql.conf +++ /dev/null @@ -1,4 +0,0 @@ -#Used for running the Migrate tests against a live MySQL Server -#See _sql_livetest.py -[database] -connection = mysql+pymysql://keystone:keystone@localhost/keystone_test?charset=utf8 diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_pool_liveldap.conf b/keystone-moon/keystone/tests/unit/config_files/backend_pool_liveldap.conf deleted file mode 100644 index c36e05f9..00000000 --- a/keystone-moon/keystone/tests/unit/config_files/backend_pool_liveldap.conf +++ /dev/null @@ -1,32 +0,0 @@ -[ldap] -url = ldap://localhost -user = cn=Manager,dc=openstack,dc=org -password = test -suffix = dc=openstack,dc=org -group_tree_dn = ou=UserGroups,dc=openstack,dc=org -user_tree_dn = ou=Users,dc=openstack,dc=org -user_enabled_emulation = True -user_mail_attribute = mail -use_dumb_member = True - -# Connection pooling specific attributes - -# Enable LDAP connection pooling. 
(boolean value) -use_pool=true -# Connection pool size. (integer value) -pool_size=5 -# Connection lifetime in seconds. -# (integer value) -pool_connection_lifetime=60 - -# Enable LDAP connection pooling for end user authentication. -# If use_pool is disabled, then this setting is meaningless -# and is not used at all. (boolean value) -use_auth_pool=true - -# End user auth connection pool size. (integer value) -auth_pool_size=50 - -# End user auth connection lifetime in seconds. (integer -# value) -auth_pool_connection_lifetime=300 \ No newline at end of file diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_postgresql.conf b/keystone-moon/keystone/tests/unit/config_files/backend_postgresql.conf deleted file mode 100644 index 001805df..00000000 --- a/keystone-moon/keystone/tests/unit/config_files/backend_postgresql.conf +++ /dev/null @@ -1,4 +0,0 @@ -#Used for running the Migrate tests against a live Postgresql Server -#See _sql_livetest.py -[database] -connection = postgresql://keystone:keystone@localhost/keystone_test?client_encoding=utf8 diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_sql.conf b/keystone-moon/keystone/tests/unit/config_files/backend_sql.conf deleted file mode 100644 index f2828e2e..00000000 --- a/keystone-moon/keystone/tests/unit/config_files/backend_sql.conf +++ /dev/null @@ -1,8 +0,0 @@ -[database] -#For a specific location file based SQLite use: -#connection = sqlite:////tmp/keystone.db -#To Test MySQL: -#connection = mysql+pymysql://keystone:keystone@localhost/keystone?charset=utf8 -#To Test PostgreSQL: -#connection = postgresql://keystone:keystone@localhost/keystone?client_encoding=utf8 -idle_timeout = 200 diff --git a/keystone-moon/keystone/tests/unit/config_files/backend_tls_liveldap.conf b/keystone-moon/keystone/tests/unit/config_files/backend_tls_liveldap.conf deleted file mode 100644 index b66044b7..00000000 --- a/keystone-moon/keystone/tests/unit/config_files/backend_tls_liveldap.conf +++ 
/dev/null @@ -1,14 +0,0 @@ -[ldap] -url = ldap:// -user = dc=Manager,dc=openstack,dc=org -password = test -suffix = dc=openstack,dc=org -group_tree_dn = ou=UserGroups,dc=openstack,dc=org -user_tree_dn = ou=Users,dc=openstack,dc=org -user_enabled_emulation = True -user_mail_attribute = mail -use_dumb_member = True -use_tls = True -tls_cacertfile = /etc/keystone/ssl/certs/cacert.pem -tls_cacertdir = /etc/keystone/ssl/certs/ -tls_req_cert = demand diff --git a/keystone-moon/keystone/tests/unit/config_files/deprecated.conf b/keystone-moon/keystone/tests/unit/config_files/deprecated.conf deleted file mode 100644 index 515e663a..00000000 --- a/keystone-moon/keystone/tests/unit/config_files/deprecated.conf +++ /dev/null @@ -1,8 +0,0 @@ -# Options in this file are deprecated. See test_config. - -[sql] -# These options were deprecated in Icehouse with the switch to oslo's -# db.sqlalchemy. - -connection = sqlite://deprecated -idle_timeout = 54321 diff --git a/keystone-moon/keystone/tests/unit/config_files/deprecated_override.conf b/keystone-moon/keystone/tests/unit/config_files/deprecated_override.conf deleted file mode 100644 index 1d1c926f..00000000 --- a/keystone-moon/keystone/tests/unit/config_files/deprecated_override.conf +++ /dev/null @@ -1,15 +0,0 @@ -# Options in this file are deprecated. See test_config. - -[sql] -# These options were deprecated in Icehouse with the switch to oslo's -# db.sqlalchemy. - -connection = sqlite://deprecated -idle_timeout = 54321 - - -[database] -# These are the new options from the [sql] section. 
- -connection = sqlite://new -idle_timeout = 65432 diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_default_ldap_one_sql/keystone.domain1.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_default_ldap_one_sql/keystone.domain1.conf deleted file mode 100644 index fecc7bea..00000000 --- a/keystone-moon/keystone/tests/unit/config_files/domain_configs_default_ldap_one_sql/keystone.domain1.conf +++ /dev/null @@ -1,5 +0,0 @@ -# The domain-specific configuration file for the test domain -# 'domain1' for use with unit tests. - -[identity] -driver = sql \ No newline at end of file diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.Default.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.Default.conf deleted file mode 100644 index 64d01d48..00000000 --- a/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.Default.conf +++ /dev/null @@ -1,14 +0,0 @@ -# The domain-specific configuration file for the default domain for -# use with unit tests. -# -# The domain_name of the default domain is 'Default', hence the -# strange mix of upper/lower case in the file name. - -[ldap] -url = fake://memory -user = cn=Admin -password = password -suffix = cn=example,cn=com - -[identity] -driver = ldap diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain1.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain1.conf deleted file mode 100644 index af540537..00000000 --- a/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain1.conf +++ /dev/null @@ -1,12 +0,0 @@ -# The domain-specific configuration file for the test domain -# 'domain1' for use with unit tests. 
- -[ldap] -url = fake://memory1 -user = cn=Admin -password = password -suffix = cn=example,cn=com - -[identity] -driver = ldap -list_limit = 101 diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain2.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain2.conf deleted file mode 100644 index a14179e3..00000000 --- a/keystone-moon/keystone/tests/unit/config_files/domain_configs_multi_ldap/keystone.domain2.conf +++ /dev/null @@ -1,13 +0,0 @@ -# The domain-specific configuration file for the test domain -# 'domain2' for use with unit tests. - -[ldap] -url = fake://memory -user = cn=Admin -password = password -suffix = cn=myroot,cn=com -group_tree_dn = ou=UserGroups,dc=myroot,dc=org -user_tree_dn = ou=Users,dc=myroot,dc=org - -[identity] -driver = ldap \ No newline at end of file diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_extra_sql/keystone.domain2.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_extra_sql/keystone.domain2.conf deleted file mode 100644 index 925b26f2..00000000 --- a/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_extra_sql/keystone.domain2.conf +++ /dev/null @@ -1,5 +0,0 @@ -# The domain-specific configuration file for the test domain -# 'domain2' for use with unit tests. - -[identity] -driver = sql \ No newline at end of file diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.Default.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.Default.conf deleted file mode 100644 index 2dd86c25..00000000 --- a/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.Default.conf +++ /dev/null @@ -1,14 +0,0 @@ -# The domain-specific configuration file for the default domain for -# use with unit tests. 
-# -# The domain_name of the default domain is 'Default', hence the -# strange mix of upper/lower case in the file name. - -[ldap] -url = fake://memory -user = cn=Admin -password = password -suffix = cn=example,cn=com - -[identity] -driver = ldap \ No newline at end of file diff --git a/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.domain1.conf b/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.domain1.conf deleted file mode 100644 index fecc7bea..00000000 --- a/keystone-moon/keystone/tests/unit/config_files/domain_configs_one_sql_one_ldap/keystone.domain1.conf +++ /dev/null @@ -1,5 +0,0 @@ -# The domain-specific configuration file for the test domain -# 'domain1' for use with unit tests. - -[identity] -driver = sql \ No newline at end of file diff --git a/keystone-moon/keystone/tests/unit/config_files/test_auth_plugin.conf b/keystone-moon/keystone/tests/unit/config_files/test_auth_plugin.conf deleted file mode 100644 index 4a9e87d5..00000000 --- a/keystone-moon/keystone/tests/unit/config_files/test_auth_plugin.conf +++ /dev/null @@ -1,4 +0,0 @@ -[auth] -methods = external,password,token,simple_challenge_response,saml2,openid,x509 -simple_challenge_response = keystone.tests.unit.test_auth_plugin.SimpleChallengeResponse - diff --git a/keystone-moon/keystone/tests/unit/contrib/__init__.py b/keystone-moon/keystone/tests/unit/contrib/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/contrib/federation/__init__.py b/keystone-moon/keystone/tests/unit/contrib/federation/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/contrib/federation/test_utils.py b/keystone-moon/keystone/tests/unit/contrib/federation/test_utils.py deleted file mode 100644 index 52a6095b..00000000 --- a/keystone-moon/keystone/tests/unit/contrib/federation/test_utils.py +++ /dev/null @@ -1,725 +0,0 @@ -# 
Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import uuid - -from oslo_config import cfg -from oslo_config import fixture as config_fixture -from oslo_serialization import jsonutils - -from keystone.auth.plugins import mapped -from keystone import exception -from keystone.federation import utils as mapping_utils -from keystone.tests import unit -from keystone.tests.unit import mapping_fixtures - - -FAKE_MAPPING_ID = uuid.uuid4().hex - - -class MappingRuleEngineTests(unit.BaseTestCase): - """A class for testing the mapping rule engine.""" - - def assertValidMappedUserObject(self, mapped_properties, - user_type='ephemeral', - domain_id=None): - """Check whether mapped properties object has 'user' within. - - According to today's rules, RuleProcessor does not have to issue user's - id or name. What's actually required is user's type and for ephemeral - users that would be service domain named 'Federated'. - """ - self.assertIn('user', mapped_properties, - message='Missing user object in mapped properties') - user = mapped_properties['user'] - self.assertIn('type', user) - self.assertEqual(user_type, user['type']) - self.assertIn('domain', user) - domain = user['domain'] - domain_name_or_id = domain.get('id') or domain.get('name') - domain_ref = domain_id or 'Federated' - self.assertEqual(domain_ref, domain_name_or_id) - - def test_rule_engine_any_one_of_and_direct_mapping(self): - """Should return user's name and group id EMPLOYEE_GROUP_ID. 
- - The ADMIN_ASSERTION should successfully have a match in MAPPING_LARGE. - They will test the case where `any_one_of` is valid, and there is - a direct mapping for the users name. - - """ - mapping = mapping_fixtures.MAPPING_LARGE - assertion = mapping_fixtures.ADMIN_ASSERTION - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - values = rp.process(assertion) - - fn = assertion.get('FirstName') - ln = assertion.get('LastName') - full_name = '%s %s' % (fn, ln) - group_ids = values.get('group_ids') - user_name = values.get('user', {}).get('name') - - self.assertIn(mapping_fixtures.EMPLOYEE_GROUP_ID, group_ids) - self.assertEqual(full_name, user_name) - - def test_rule_engine_no_regex_match(self): - """Should deny authorization, the email of the tester won't match. - - This will not match since the email in the assertion will fail - the regex test. It is set to match any @example.com address. - But the incoming value is set to eviltester@example.org. - RuleProcessor should raise ValidationError. - - """ - mapping = mapping_fixtures.MAPPING_LARGE - assertion = mapping_fixtures.BAD_TESTER_ASSERTION - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - self.assertRaises(exception.ValidationError, - rp.process, - assertion) - - def test_rule_engine_regex_many_groups(self): - """Should return group CONTRACTOR_GROUP_ID. - - The TESTER_ASSERTION should successfully have a match in - MAPPING_TESTER_REGEX. This will test the case where many groups - are in the assertion, and a regex value is used to try and find - a match. 
- - """ - mapping = mapping_fixtures.MAPPING_TESTER_REGEX - assertion = mapping_fixtures.TESTER_ASSERTION - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - values = rp.process(assertion) - - self.assertValidMappedUserObject(values) - user_name = assertion.get('UserName') - group_ids = values.get('group_ids') - name = values.get('user', {}).get('name') - - self.assertEqual(user_name, name) - self.assertIn(mapping_fixtures.TESTER_GROUP_ID, group_ids) - - def test_rule_engine_any_one_of_many_rules(self): - """Should return group CONTRACTOR_GROUP_ID. - - The CONTRACTOR_ASSERTION should successfully have a match in - MAPPING_SMALL. This will test the case where many rules - must be matched, including an `any_one_of`, and a direct - mapping. - - """ - mapping = mapping_fixtures.MAPPING_SMALL - assertion = mapping_fixtures.CONTRACTOR_ASSERTION - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - values = rp.process(assertion) - - self.assertValidMappedUserObject(values) - user_name = assertion.get('UserName') - group_ids = values.get('group_ids') - name = values.get('user', {}).get('name') - - self.assertEqual(user_name, name) - self.assertIn(mapping_fixtures.CONTRACTOR_GROUP_ID, group_ids) - - def test_rule_engine_not_any_of_and_direct_mapping(self): - """Should return user's name and email. - - The CUSTOMER_ASSERTION should successfully have a match in - MAPPING_LARGE. This will test the case where a requirement - has `not_any_of`, and direct mapping to a username, no group. 
- - """ - mapping = mapping_fixtures.MAPPING_LARGE - assertion = mapping_fixtures.CUSTOMER_ASSERTION - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - values = rp.process(assertion) - - self.assertValidMappedUserObject(values) - user_name = assertion.get('UserName') - group_ids = values.get('group_ids') - name = values.get('user', {}).get('name') - - self.assertEqual(user_name, name) - self.assertEqual([], group_ids,) - - def test_rule_engine_not_any_of_many_rules(self): - """Should return group EMPLOYEE_GROUP_ID. - - The EMPLOYEE_ASSERTION should successfully have a match in - MAPPING_SMALL. This will test the case where many remote - rules must be matched, including a `not_any_of`. - - """ - mapping = mapping_fixtures.MAPPING_SMALL - assertion = mapping_fixtures.EMPLOYEE_ASSERTION - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - values = rp.process(assertion) - - self.assertValidMappedUserObject(values) - user_name = assertion.get('UserName') - group_ids = values.get('group_ids') - name = values.get('user', {}).get('name') - - self.assertEqual(user_name, name) - self.assertIn(mapping_fixtures.EMPLOYEE_GROUP_ID, group_ids) - - def test_rule_engine_not_any_of_regex_verify_pass(self): - """Should return group DEVELOPER_GROUP_ID. - - The DEVELOPER_ASSERTION should successfully have a match in - MAPPING_DEVELOPER_REGEX. This will test the case where many - remote rules must be matched, including a `not_any_of`, with - regex set to True. 
- - """ - mapping = mapping_fixtures.MAPPING_DEVELOPER_REGEX - assertion = mapping_fixtures.DEVELOPER_ASSERTION - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - values = rp.process(assertion) - - self.assertValidMappedUserObject(values) - user_name = assertion.get('UserName') - group_ids = values.get('group_ids') - name = values.get('user', {}).get('name') - - self.assertEqual(user_name, name) - self.assertIn(mapping_fixtures.DEVELOPER_GROUP_ID, group_ids) - - def test_rule_engine_not_any_of_regex_verify_fail(self): - """Should deny authorization. - - The email in the assertion will fail the regex test. - It is set to reject any @example.org address, but the - incoming value is set to evildeveloper@example.org. - RuleProcessor should yield ValidationError. - - """ - mapping = mapping_fixtures.MAPPING_DEVELOPER_REGEX - assertion = mapping_fixtures.BAD_DEVELOPER_ASSERTION - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - self.assertRaises(exception.ValidationError, - rp.process, - assertion) - - def _rule_engine_regex_match_and_many_groups(self, assertion): - """Should return group DEVELOPER_GROUP_ID and TESTER_GROUP_ID. - - A helper function injecting assertion passed as an argument. - Expect DEVELOPER_GROUP_ID and TESTER_GROUP_ID in the results. - - """ - mapping = mapping_fixtures.MAPPING_LARGE - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - values = rp.process(assertion) - - user_name = assertion.get('UserName') - group_ids = values.get('group_ids') - name = values.get('user', {}).get('name') - - self.assertValidMappedUserObject(values) - self.assertEqual(user_name, name) - self.assertIn(mapping_fixtures.DEVELOPER_GROUP_ID, group_ids) - self.assertIn(mapping_fixtures.TESTER_GROUP_ID, group_ids) - - def test_rule_engine_regex_match_and_many_groups(self): - """Should return group DEVELOPER_GROUP_ID and TESTER_GROUP_ID. - - The TESTER_ASSERTION should successfully have a match in - MAPPING_LARGE. 
This will test a successful regex match - for an `any_one_of` evaluation type, and will have many - groups returned. - - """ - self._rule_engine_regex_match_and_many_groups( - mapping_fixtures.TESTER_ASSERTION) - - def test_rule_engine_discards_nonstring_objects(self): - """Check whether RuleProcessor discards non string objects. - - Despite the fact that assertion is malformed and contains - non string objects, RuleProcessor should correctly discard them and - successfully have a match in MAPPING_LARGE. - - """ - self._rule_engine_regex_match_and_many_groups( - mapping_fixtures.MALFORMED_TESTER_ASSERTION) - - def test_rule_engine_fails_after_discarding_nonstring(self): - """Check whether RuleProcessor discards non string objects. - - Expect RuleProcessor to discard non string object, which - is required for a correct rule match. RuleProcessor will result with - ValidationError. - - """ - mapping = mapping_fixtures.MAPPING_SMALL - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - assertion = mapping_fixtures.CONTRACTOR_MALFORMED_ASSERTION - self.assertRaises(exception.ValidationError, - rp.process, - assertion) - - def test_using_remote_direct_mapping_that_doesnt_exist_fails(self): - """Test for the correct error when referring to a bad remote match. - - The remote match must exist in a rule when a local section refers to - a remote matching using the format (e.g. {0} in a local section). - """ - mapping = mapping_fixtures.MAPPING_DIRECT_MAPPING_THROUGH_KEYWORD - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - assertion = mapping_fixtures.CUSTOMER_ASSERTION - - self.assertRaises(exception.DirectMappingError, - rp.process, - assertion) - - def test_rule_engine_returns_group_names(self): - """Check whether RuleProcessor returns group names with their domains. - - RuleProcessor should return 'group_names' entry with a list of - dictionaries with two entries 'name' and 'domain' identifying group by - its name and domain. 
- - """ - mapping = mapping_fixtures.MAPPING_GROUP_NAMES - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - assertion = mapping_fixtures.EMPLOYEE_ASSERTION - mapped_properties = rp.process(assertion) - self.assertIsNotNone(mapped_properties) - self.assertValidMappedUserObject(mapped_properties) - reference = { - mapping_fixtures.DEVELOPER_GROUP_NAME: - { - "name": mapping_fixtures.DEVELOPER_GROUP_NAME, - "domain": { - "name": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_NAME - } - }, - mapping_fixtures.TESTER_GROUP_NAME: - { - "name": mapping_fixtures.TESTER_GROUP_NAME, - "domain": { - "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID - } - } - } - for rule in mapped_properties['group_names']: - self.assertDictEqual(reference.get(rule.get('name')), rule) - - def test_rule_engine_whitelist_and_direct_groups_mapping(self): - """Should return user's groups Developer and Contractor. - - The EMPLOYEE_ASSERTION_MULTIPLE_GROUPS should successfully have a match - in MAPPING_GROUPS_WHITELIST. It will test the case where 'whitelist' - correctly filters out Manager and only allows Developer and Contractor. 
- - """ - mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST - assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - mapped_properties = rp.process(assertion) - self.assertIsNotNone(mapped_properties) - - reference = { - mapping_fixtures.DEVELOPER_GROUP_NAME: - { - "name": mapping_fixtures.DEVELOPER_GROUP_NAME, - "domain": { - "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID - } - }, - mapping_fixtures.CONTRACTOR_GROUP_NAME: - { - "name": mapping_fixtures.CONTRACTOR_GROUP_NAME, - "domain": { - "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID - } - } - } - for rule in mapped_properties['group_names']: - self.assertDictEqual(reference.get(rule.get('name')), rule) - - self.assertEqual('tbo', mapped_properties['user']['name']) - self.assertEqual([], mapped_properties['group_ids']) - - def test_rule_engine_blacklist_and_direct_groups_mapping(self): - """Should return user's group Developer. - - The EMPLOYEE_ASSERTION_MULTIPLE_GROUPS should successfully have a match - in MAPPING_GROUPS_BLACKLIST. It will test the case where 'blacklist' - correctly filters out Manager and Developer and only allows Contractor. 
- - """ - mapping = mapping_fixtures.MAPPING_GROUPS_BLACKLIST - assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - mapped_properties = rp.process(assertion) - self.assertIsNotNone(mapped_properties) - - reference = { - mapping_fixtures.CONTRACTOR_GROUP_NAME: - { - "name": mapping_fixtures.CONTRACTOR_GROUP_NAME, - "domain": { - "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID - } - } - } - for rule in mapped_properties['group_names']: - self.assertDictEqual(reference.get(rule.get('name')), rule) - self.assertEqual('tbo', mapped_properties['user']['name']) - self.assertEqual([], mapped_properties['group_ids']) - - def test_rule_engine_blacklist_and_direct_groups_mapping_multiples(self): - """Tests matching multiple values before the blacklist. - - Verifies that the local indexes are correct when matching multiple - remote values for a field when the field occurs before the blacklist - entry in the remote rules. - - """ - mapping = mapping_fixtures.MAPPING_GROUPS_BLACKLIST_MULTIPLES - assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - mapped_properties = rp.process(assertion) - self.assertIsNotNone(mapped_properties) - - reference = { - mapping_fixtures.CONTRACTOR_GROUP_NAME: - { - "name": mapping_fixtures.CONTRACTOR_GROUP_NAME, - "domain": { - "id": mapping_fixtures.DEVELOPER_GROUP_DOMAIN_ID - } - } - } - for rule in mapped_properties['group_names']: - self.assertDictEqual(reference.get(rule.get('name')), rule) - self.assertEqual('tbo', mapped_properties['user']['name']) - self.assertEqual([], mapped_properties['group_ids']) - - def test_rule_engine_whitelist_direct_group_mapping_missing_domain(self): - """Test if the local rule is rejected upon missing domain value - - This is a variation with a ``whitelist`` filter. 
- - """ - mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST_MISSING_DOMAIN - assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - self.assertRaises(exception.ValidationError, rp.process, assertion) - - def test_rule_engine_blacklist_direct_group_mapping_missing_domain(self): - """Test if the local rule is rejected upon missing domain value - - This is a variation with a ``blacklist`` filter. - - """ - mapping = mapping_fixtures.MAPPING_GROUPS_BLACKLIST_MISSING_DOMAIN - assertion = mapping_fixtures.EMPLOYEE_ASSERTION_MULTIPLE_GROUPS - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - self.assertRaises(exception.ValidationError, rp.process, assertion) - - def test_rule_engine_no_groups_allowed(self): - """Should return user mapped to no groups. - - The EMPLOYEE_ASSERTION should successfully have a match - in MAPPING_GROUPS_WHITELIST, but 'whitelist' should filter out - the group values from the assertion and thus map to no groups. - - """ - mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST - assertion = mapping_fixtures.EMPLOYEE_ASSERTION - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - mapped_properties = rp.process(assertion) - self.assertIsNotNone(mapped_properties) - self.assertListEqual(mapped_properties['group_names'], []) - self.assertListEqual(mapped_properties['group_ids'], []) - self.assertEqual('tbo', mapped_properties['user']['name']) - - def test_mapping_federated_domain_specified(self): - """Test mapping engine when domain 'ephemeral' is explicitly set. 
- - For that, we use mapping rule MAPPING_EPHEMERAL_USER and assertion - EMPLOYEE_ASSERTION - - """ - mapping = mapping_fixtures.MAPPING_EPHEMERAL_USER - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - assertion = mapping_fixtures.EMPLOYEE_ASSERTION - mapped_properties = rp.process(assertion) - self.assertIsNotNone(mapped_properties) - self.assertValidMappedUserObject(mapped_properties) - - def test_set_ephemeral_domain_to_ephemeral_users(self): - """Test auto assigning service domain to ephemeral users. - - Test that ephemeral users will always become members of federated - service domain. The check depends on ``type`` value which must be set - to ``ephemeral`` in case of ephemeral user. - - """ - mapping = mapping_fixtures.MAPPING_EPHEMERAL_USER_LOCAL_DOMAIN - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - assertion = mapping_fixtures.CONTRACTOR_ASSERTION - mapped_properties = rp.process(assertion) - self.assertIsNotNone(mapped_properties) - self.assertValidMappedUserObject(mapped_properties) - - def test_local_user_local_domain(self): - """Test that local users can have non-service domains assigned.""" - mapping = mapping_fixtures.MAPPING_LOCAL_USER_LOCAL_DOMAIN - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - assertion = mapping_fixtures.CONTRACTOR_ASSERTION - mapped_properties = rp.process(assertion) - self.assertIsNotNone(mapped_properties) - self.assertValidMappedUserObject( - mapped_properties, user_type='local', - domain_id=mapping_fixtures.LOCAL_DOMAIN) - - def test_user_identifications_name(self): - """Test varius mapping options and how users are identified. - - This test calls mapped.setup_username() for propagating user object. 
- - Test plan: - - Check if the user has proper domain ('federated') set - - Check if the user has property type set ('ephemeral') - - Check if user's name is properly mapped from the assertion - - Check if unique_id is properly set and equal to display_name, - as it was not explicitly specified in the mapping. - - """ - mapping = mapping_fixtures.MAPPING_USER_IDS - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - assertion = mapping_fixtures.CONTRACTOR_ASSERTION - mapped_properties = rp.process(assertion) - self.assertIsNotNone(mapped_properties) - self.assertValidMappedUserObject(mapped_properties) - self.assertEqual('jsmith', mapped_properties['user']['name']) - unique_id, display_name = mapped.get_user_unique_id_and_display_name( - {}, mapped_properties) - self.assertEqual('jsmith', unique_id) - self.assertEqual('jsmith', display_name) - - def test_user_identifications_name_and_federated_domain(self): - """Test varius mapping options and how users are identified. - - This test calls mapped.setup_username() for propagating user object. - - Test plan: - - Check if the user has proper domain ('federated') set - - Check if the user has propert type set ('ephemeral') - - Check if user's name is properly mapped from the assertion - - Check if the unique_id and display_name are properly set - - """ - mapping = mapping_fixtures.MAPPING_USER_IDS - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - assertion = mapping_fixtures.EMPLOYEE_ASSERTION - mapped_properties = rp.process(assertion) - self.assertIsNotNone(mapped_properties) - self.assertValidMappedUserObject(mapped_properties) - unique_id, display_name = mapped.get_user_unique_id_and_display_name( - {}, mapped_properties) - self.assertEqual('tbo', display_name) - self.assertEqual('abc123%40example.com', unique_id) - - def test_user_identification_id(self): - """Test varius mapping options and how users are identified. 
- - This test calls mapped.setup_username() for propagating user object. - - Test plan: - - Check if the user has proper domain ('federated') set - - Check if the user has propert type set ('ephemeral') - - Check if user's display_name is properly set and equal to unique_id, - as it was not explicitly specified in the mapping. - - """ - mapping = mapping_fixtures.MAPPING_USER_IDS - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - assertion = mapping_fixtures.ADMIN_ASSERTION - mapped_properties = rp.process(assertion) - context = {'environment': {}} - self.assertIsNotNone(mapped_properties) - self.assertValidMappedUserObject(mapped_properties) - unique_id, display_name = mapped.get_user_unique_id_and_display_name( - context, mapped_properties) - self.assertEqual('bob', unique_id) - self.assertEqual('bob', display_name) - - def test_user_identification_id_and_name(self): - """Test varius mapping options and how users are identified. - - This test calls mapped.setup_username() for propagating user object. - - Test plan: - - Check if the user has proper domain ('federated') set - - Check if the user has proper type set ('ephemeral') - - Check if display_name is properly set from the assertion - - Check if unique_id is properly set and and equal to value hardcoded - in the mapping - - This test does two iterations with different assertions used as input - for the Mapping Engine. Different assertions will be matched with - different rules in the ruleset, effectively issuing different user_id - (hardcoded values). In the first iteration, the hardcoded user_id is - not url-safe and we expect Keystone to make it url safe. In the latter - iteration, provided user_id is already url-safe and we expect server - not to change it. 
- - """ - testcases = [(mapping_fixtures.CUSTOMER_ASSERTION, 'bwilliams'), - (mapping_fixtures.EMPLOYEE_ASSERTION, 'tbo')] - for assertion, exp_user_name in testcases: - mapping = mapping_fixtures.MAPPING_USER_IDS - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - mapped_properties = rp.process(assertion) - context = {'environment': {}} - self.assertIsNotNone(mapped_properties) - self.assertValidMappedUserObject(mapped_properties) - unique_id, display_name = ( - mapped.get_user_unique_id_and_display_name(context, - mapped_properties) - ) - self.assertEqual(exp_user_name, display_name) - self.assertEqual('abc123%40example.com', unique_id) - - def test_whitelist_pass_through(self): - mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST_PASS_THROUGH - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - assertion = mapping_fixtures.DEVELOPER_ASSERTION - mapped_properties = rp.process(assertion) - self.assertValidMappedUserObject(mapped_properties) - - self.assertEqual('developacct', mapped_properties['user']['name']) - self.assertEqual('Developer', - mapped_properties['group_names'][0]['name']) - - def test_mapping_with_incorrect_local_keys(self): - mapping = mapping_fixtures.MAPPING_BAD_LOCAL_SETUP - self.assertRaises(exception.ValidationError, - mapping_utils.validate_mapping_structure, - mapping) - - def test_mapping_with_group_name_and_domain(self): - mapping = mapping_fixtures.MAPPING_GROUP_NAMES - mapping_utils.validate_mapping_structure(mapping) - - def test_type_not_in_assertion(self): - """Test that if the remote "type" is not in the assertion it fails.""" - mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST_PASS_THROUGH - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - assertion = {uuid.uuid4().hex: uuid.uuid4().hex} - self.assertRaises(exception.ValidationError, - rp.process, - assertion) - - def test_rule_engine_group_ids_mapping_whitelist(self): - """Test mapping engine when group_ids is 
explicitly set - - Also test whitelists on group ids - - """ - mapping = mapping_fixtures.MAPPING_GROUPS_IDS_WHITELIST - assertion = mapping_fixtures.GROUP_IDS_ASSERTION - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - mapped_properties = rp.process(assertion) - self.assertIsNotNone(mapped_properties) - self.assertEqual('opilotte', mapped_properties['user']['name']) - self.assertListEqual([], mapped_properties['group_names']) - self.assertItemsEqual(['abc123', 'ghi789', 'klm012'], - mapped_properties['group_ids']) - - def test_rule_engine_group_ids_mapping_blacklist(self): - """Test mapping engine when group_ids is explicitly set. - - Also test blacklists on group ids - - """ - mapping = mapping_fixtures.MAPPING_GROUPS_IDS_BLACKLIST - assertion = mapping_fixtures.GROUP_IDS_ASSERTION - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - mapped_properties = rp.process(assertion) - self.assertIsNotNone(mapped_properties) - self.assertEqual('opilotte', mapped_properties['user']['name']) - self.assertListEqual([], mapped_properties['group_names']) - self.assertItemsEqual(['abc123', 'ghi789', 'klm012'], - mapped_properties['group_ids']) - - def test_rule_engine_group_ids_mapping_only_one_group(self): - """Test mapping engine when group_ids is explicitly set. 
- - If the group ids list has only one group, - test if the transformation is done correctly - - """ - mapping = mapping_fixtures.MAPPING_GROUPS_IDS_WHITELIST - assertion = mapping_fixtures.GROUP_IDS_ASSERTION_ONLY_ONE_GROUP - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - mapped_properties = rp.process(assertion) - self.assertIsNotNone(mapped_properties) - self.assertEqual('opilotte', mapped_properties['user']['name']) - self.assertListEqual([], mapped_properties['group_names']) - self.assertItemsEqual(['210mlk', '321cba'], - mapped_properties['group_ids']) - - -class TestUnicodeAssertionData(unit.BaseTestCase): - """Ensure that unicode data in the assertion headers works. - - Bug #1525250 reported that something was not getting correctly encoded - and/or decoded when assertion data contained non-ASCII characters. - - This test class mimics what happens in a real HTTP request. - """ - - def setUp(self): - super(TestUnicodeAssertionData, self).setUp() - self.config_fixture = self.useFixture(config_fixture.Config(cfg.CONF)) - self.config_fixture.config(group='federation', - assertion_prefix='PFX') - - def _pull_mapping_rules_from_the_database(self): - # NOTE(dstanek): In a live system. The rules are dumped into JSON bytes - # before being # stored in the database. Upon retrieval the bytes are - # loaded and the resulting dictionary is full of unicode text strings. - # Most of tests in this file incorrectly assume the mapping fixture - # dictionary is the same as what it would look like coming out of the - # database. The string, when coming out of the database, are all text. - return jsonutils.loads(jsonutils.dumps( - mapping_fixtures.MAPPING_UNICODE)) - - def _pull_assertion_from_the_request_headers(self): - # NOTE(dstanek): In a live system the bytes for the assertion are - # pulled from the HTTP headers. These bytes may be decodable as - # ISO-8859-1 according to Section 3.2.4 of RFC 7230. 
Let's assume - # that our web server plugins are correctly encoding the data. - context = dict(environment=mapping_fixtures.UNICODE_NAME_ASSERTION) - data = mapping_utils.get_assertion_params_from_env(context) - # NOTE(dstanek): keystone.auth.plugins.mapped - return dict(data) - - def test_unicode(self): - mapping = self._pull_mapping_rules_from_the_database() - assertion = self._pull_assertion_from_the_request_headers() - - rp = mapping_utils.RuleProcessor(FAKE_MAPPING_ID, mapping['rules']) - values = rp.process(assertion) - - fn = assertion.get('PFX_FirstName') - ln = assertion.get('PFX_LastName') - full_name = '%s %s' % (fn, ln) - user_name = values.get('user', {}).get('name') - self.assertEqual(full_name, user_name) diff --git a/keystone-moon/keystone/tests/unit/core.py b/keystone-moon/keystone/tests/unit/core.py deleted file mode 100644 index 1054e131..00000000 --- a/keystone-moon/keystone/tests/unit/core.py +++ /dev/null @@ -1,907 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from __future__ import absolute_import -import atexit -import base64 -import datetime -import functools -import hashlib -import json -import logging -import os -import re -import shutil -import socket -import sys -import uuid -import warnings - -import fixtures -from oslo_config import cfg -from oslo_config import fixture as config_fixture -from oslo_context import context as oslo_context -from oslo_context import fixture as oslo_ctx_fixture -from oslo_log import fixture as log_fixture -from oslo_log import log -from oslo_utils import timeutils -from oslotest import mockpatch -from paste.deploy import loadwsgi -import six -from sqlalchemy import exc -import testtools -from testtools import testcase - -# NOTE(ayoung) -# environment.use_eventlet must run before any of the code that will -# call the eventlet monkeypatching. -from keystone.common import environment # noqa -environment.use_eventlet() - -from keystone import auth -from keystone.common import config -from keystone.common import dependency -from keystone.common.kvs import core as kvs_core -from keystone.common import sql -from keystone import exception -from keystone import notifications -from keystone.server import common -from keystone.tests.unit import ksfixtures -from keystone.version import controllers -from keystone.version import service - - -config.configure() - -PID = six.text_type(os.getpid()) -TESTSDIR = os.path.dirname(os.path.abspath(__file__)) -TESTCONF = os.path.join(TESTSDIR, 'config_files') -ROOTDIR = os.path.normpath(os.path.join(TESTSDIR, '..', '..', '..')) -VENDOR = os.path.join(ROOTDIR, 'vendor') -ETCDIR = os.path.join(ROOTDIR, 'etc') - - -def _calc_tmpdir(): - env_val = os.environ.get('KEYSTONE_TEST_TEMP_DIR') - if not env_val: - return os.path.join(TESTSDIR, 'tmp', PID) - return os.path.join(env_val, PID) - - -TMPDIR = _calc_tmpdir() - -CONF = cfg.CONF -log.register_options(CONF) - -IN_MEM_DB_CONN_STRING = 'sqlite://' - -TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ' - 
-exception._FATAL_EXCEPTION_FORMAT_ERRORS = True -os.makedirs(TMPDIR) -atexit.register(shutil.rmtree, TMPDIR) - - -class dirs(object): - @staticmethod - def root(*p): - return os.path.join(ROOTDIR, *p) - - @staticmethod - def etc(*p): - return os.path.join(ETCDIR, *p) - - @staticmethod - def tests(*p): - return os.path.join(TESTSDIR, *p) - - @staticmethod - def tmp(*p): - return os.path.join(TMPDIR, *p) - - @staticmethod - def tests_conf(*p): - return os.path.join(TESTCONF, *p) - - -# keystone.common.sql.initialize() for testing. -DEFAULT_TEST_DB_FILE = dirs.tmp('test.db') - - -class EggLoader(loadwsgi.EggLoader): - _basket = {} - - def find_egg_entry_point(self, object_type, name=None): - egg_key = '%s:%s' % (object_type, name) - egg_ep = self._basket.get(egg_key) - if not egg_ep: - egg_ep = super(EggLoader, self).find_egg_entry_point( - object_type, name=name) - self._basket[egg_key] = egg_ep - return egg_ep - - -# NOTE(dstanek): class paths were remove from the keystone-paste.ini in -# favor of using entry points. This caused tests to slow to a crawl -# since we reload the application object for each RESTful test. This -# monkey-patching adds caching to paste deploy's egg lookup. 
-loadwsgi.EggLoader = EggLoader - - -@atexit.register -def remove_test_databases(): - db = dirs.tmp('test.db') - if os.path.exists(db): - os.unlink(db) - pristine = dirs.tmp('test.db.pristine') - if os.path.exists(pristine): - os.unlink(pristine) - - -def generate_paste_config(extension_name): - # Generate a file, based on keystone-paste.ini, that is named: - # extension_name.ini, and includes extension_name in the pipeline - with open(dirs.etc('keystone-paste.ini'), 'r') as f: - contents = f.read() - - new_contents = contents.replace(' service_v3', - ' %s service_v3' % (extension_name)) - - new_paste_file = dirs.tmp(extension_name + '.ini') - with open(new_paste_file, 'w') as f: - f.write(new_contents) - - return new_paste_file - - -def remove_generated_paste_config(extension_name): - # Remove the generated paste config file, named extension_name.ini - paste_file_to_remove = dirs.tmp(extension_name + '.ini') - os.remove(paste_file_to_remove) - - -def skip_if_cache_disabled(*sections): - """This decorator is used to skip a test if caching is disabled. - - Caching can be disabled either globally or for a specific section. - - In the code fragment:: - - @skip_if_cache_is_disabled('assignment', 'token') - def test_method(*args): - ... - - The method test_method would be skipped if caching is disabled globally via - the `enabled` option in the `cache` section of the configuration or if - the `caching` option is set to false in either `assignment` or `token` - sections of the configuration. This decorator can be used with no - arguments to only check global caching. - - If a specified configuration section does not define the `caching` option, - this decorator makes the same assumption as the `should_cache_fn` in - keystone.common.cache that caching should be enabled. 
- - """ - def wrapper(f): - @functools.wraps(f) - def inner(*args, **kwargs): - if not CONF.cache.enabled: - raise testcase.TestSkipped('Cache globally disabled.') - for s in sections: - conf_sec = getattr(CONF, s, None) - if conf_sec is not None: - if not getattr(conf_sec, 'caching', True): - raise testcase.TestSkipped('%s caching disabled.' % s) - return f(*args, **kwargs) - return inner - return wrapper - - -def skip_if_cache_is_enabled(*sections): - def wrapper(f): - @functools.wraps(f) - def inner(*args, **kwargs): - if CONF.cache.enabled: - for s in sections: - conf_sec = getattr(CONF, s, None) - if conf_sec is not None: - if getattr(conf_sec, 'caching', True): - raise testcase.TestSkipped('%s caching enabled.' % - s) - return f(*args, **kwargs) - return inner - return wrapper - - -def skip_if_no_multiple_domains_support(f): - """Decorator to skip tests for identity drivers limited to one domain.""" - @functools.wraps(f) - def wrapper(*args, **kwargs): - test_obj = args[0] - if not test_obj.identity_api.multiple_domains_supported: - raise testcase.TestSkipped('No multiple domains support') - return f(*args, **kwargs) - return wrapper - - -class UnexpectedExit(Exception): - pass - - -def new_region_ref(parent_region_id=None, **kwargs): - ref = { - 'id': uuid.uuid4().hex, - 'description': uuid.uuid4().hex, - 'parent_region_id': parent_region_id} - - ref.update(kwargs) - return ref - - -def new_service_ref(**kwargs): - ref = { - 'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'description': uuid.uuid4().hex, - 'enabled': True, - 'type': uuid.uuid4().hex, - } - ref.update(kwargs) - return ref - - -NEEDS_REGION_ID = object() - - -def new_endpoint_ref(service_id, interface='public', - region_id=NEEDS_REGION_ID, **kwargs): - - ref = { - 'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'description': uuid.uuid4().hex, - 'interface': interface, - 'service_id': service_id, - 'url': 'https://' + uuid.uuid4().hex + '.com', - } - - if region_id is 
NEEDS_REGION_ID: - ref['region_id'] = uuid.uuid4().hex - elif region_id is None and kwargs.get('region') is not None: - # pre-3.2 form endpoints are not supported by this function - raise NotImplementedError("use new_endpoint_ref_with_region") - else: - ref['region_id'] = region_id - ref.update(kwargs) - return ref - - -def new_endpoint_ref_with_region(service_id, region, interface='public', - **kwargs): - """Define an endpoint_ref having a pre-3.2 form. - - Contains the deprecated 'region' instead of 'region_id'. - """ - ref = new_endpoint_ref(service_id, interface, region=region, - region_id='invalid', **kwargs) - del ref['region_id'] - return ref - - -def new_domain_ref(**kwargs): - ref = { - 'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'description': uuid.uuid4().hex, - 'enabled': True - } - ref.update(kwargs) - return ref - - -def new_project_ref(domain_id=None, is_domain=False, **kwargs): - ref = { - 'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'description': uuid.uuid4().hex, - 'enabled': True, - 'domain_id': domain_id, - 'is_domain': is_domain, - } - # NOTE(henry-nash): We don't include parent_id in the initial list above - # since specifying it is optional depending on where the project sits in - # the hierarchy (and a parent_id of None has meaning - i.e. it's a top - # level project). 
- ref.update(kwargs) - return ref - - -def new_user_ref(domain_id, project_id=None, **kwargs): - ref = { - 'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'enabled': True, - 'domain_id': domain_id, - 'email': uuid.uuid4().hex, - 'password': uuid.uuid4().hex, - } - if project_id: - ref['default_project_id'] = project_id - ref.update(kwargs) - return ref - - -def new_federated_user_ref(idp_id=None, protocol_id=None, **kwargs): - ref = { - 'idp_id': idp_id or 'ORG_IDP', - 'protocol_id': protocol_id or 'saml2', - 'unique_id': uuid.uuid4().hex, - 'display_name': uuid.uuid4().hex, - } - ref.update(kwargs) - return ref - - -def new_group_ref(domain_id, **kwargs): - ref = { - 'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'description': uuid.uuid4().hex, - 'domain_id': domain_id - } - ref.update(kwargs) - return ref - - -def new_credential_ref(user_id, project_id=None, type='cert', **kwargs): - ref = { - 'id': uuid.uuid4().hex, - 'user_id': user_id, - 'type': type, - } - - if project_id: - ref['project_id'] = project_id - if 'blob' not in kwargs: - ref['blob'] = uuid.uuid4().hex - - ref.update(kwargs) - return ref - - -def new_cert_credential(user_id, project_id=None, blob=None, **kwargs): - if blob is None: - blob = {'access': uuid.uuid4().hex, 'secret': uuid.uuid4().hex} - - credential = new_credential_ref(user_id=user_id, - project_id=project_id, - blob=json.dumps(blob), - type='cert', - **kwargs) - return blob, credential - - -def new_ec2_credential(user_id, project_id=None, blob=None, **kwargs): - if blob is None: - blob = { - 'access': uuid.uuid4().hex, - 'secret': uuid.uuid4().hex, - 'trust_id': None - } - - if 'id' not in kwargs: - access = blob['access'].encode('utf-8') - kwargs['id'] = hashlib.sha256(access).hexdigest() - - credential = new_credential_ref(user_id=user_id, - project_id=project_id, - blob=json.dumps(blob), - type='ec2', - **kwargs) - return blob, credential - - -def new_totp_credential(user_id, project_id=None, blob=None): - if not 
blob: - blob = base64.b32encode(uuid.uuid4().hex).rstrip('=') - credential = new_credential_ref(user_id=user_id, - project_id=project_id, - blob=blob, - type='totp') - return credential - - -def new_role_ref(**kwargs): - ref = { - 'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'domain_id': None - } - ref.update(kwargs) - return ref - - -def new_policy_ref(**kwargs): - ref = { - 'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'description': uuid.uuid4().hex, - 'enabled': True, - # Store serialized JSON data as the blob to mimic real world usage. - 'blob': json.dumps({'data': uuid.uuid4().hex, }), - 'type': uuid.uuid4().hex, - } - - ref.update(kwargs) - return ref - - -def new_trust_ref(trustor_user_id, trustee_user_id, project_id=None, - impersonation=None, expires=None, role_ids=None, - role_names=None, remaining_uses=None, - allow_redelegation=False, redelegation_count=None, **kwargs): - ref = { - 'id': uuid.uuid4().hex, - 'trustor_user_id': trustor_user_id, - 'trustee_user_id': trustee_user_id, - 'impersonation': impersonation or False, - 'project_id': project_id, - 'remaining_uses': remaining_uses, - 'allow_redelegation': allow_redelegation, - } - - if isinstance(redelegation_count, int): - ref.update(redelegation_count=redelegation_count) - - if isinstance(expires, six.string_types): - ref['expires_at'] = expires - elif isinstance(expires, dict): - ref['expires_at'] = ( - timeutils.utcnow() + datetime.timedelta(**expires) - ).strftime(TIME_FORMAT) - elif expires is None: - pass - else: - raise NotImplementedError('Unexpected value for "expires"') - - role_ids = role_ids or [] - role_names = role_names or [] - if role_ids or role_names: - ref['roles'] = [] - for role_id in role_ids: - ref['roles'].append({'id': role_id}) - for role_name in role_names: - ref['roles'].append({'name': role_name}) - - ref.update(kwargs) - return ref - - -def create_user(api, domain_id, **kwargs): - """Create a user via the API. Keep the created password. 
- - The password is saved and restored when api.create_user() is called. - Only use this routine if there is a requirement for the user object to - have a valid password after api.create_user() is called. - """ - user = new_user_ref(domain_id=domain_id, **kwargs) - password = user['password'] - user = api.create_user(user) - user['password'] = password - return user - - -class BaseTestCase(testtools.TestCase): - """Light weight base test class. - - This is a placeholder that will eventually go away once the - setup/teardown in TestCase is properly trimmed down to the bare - essentials. This is really just a play to speed up the tests by - eliminating unnecessary work. - """ - - def setUp(self): - super(BaseTestCase, self).setUp() - - self.useFixture(fixtures.NestedTempfile()) - self.useFixture(fixtures.TempHomeDir()) - - self.useFixture(mockpatch.PatchObject(sys, 'exit', - side_effect=UnexpectedExit)) - self.useFixture(log_fixture.get_logging_handle_error_fixture()) - - warnings.filterwarnings('error', category=DeprecationWarning, - module='^keystone\\.') - warnings.simplefilter('error', exc.SAWarning) - self.addCleanup(warnings.resetwarnings) - # Ensure we have an empty threadlocal context at the start of each - # test. - self.assertIsNone(oslo_context.get_current()) - self.useFixture(oslo_ctx_fixture.ClearRequestContext()) - - def cleanup_instance(self, *names): - """Create a function suitable for use with self.addCleanup. 
- - :returns: a callable that uses a closure to delete instance attributes - - """ - def cleanup(): - for name in names: - # TODO(dstanek): remove this 'if' statement once - # load_backend in test_backend_ldap is only called once - # per test - if hasattr(self, name): - delattr(self, name) - return cleanup - - -class TestCase(BaseTestCase): - - def config_files(self): - return [] - - def _policy_fixture(self): - return ksfixtures.Policy(dirs.etc('policy.json'), self.config_fixture) - - def config_overrides(self): - # NOTE(morganfainberg): enforce config_overrides can only ever be - # called a single time. - assert self.__config_overrides_called is False - self.__config_overrides_called = True - - signing_certfile = 'examples/pki/certs/signing_cert.pem' - signing_keyfile = 'examples/pki/private/signing_key.pem' - - self.useFixture(self._policy_fixture()) - - self.config_fixture.config( - # TODO(morganfainberg): Make Cache Testing a separate test case - # in tempest, and move it out of the base unit tests. - group='cache', - backend='dogpile.cache.memory', - enabled=True, - proxies=['oslo_cache.testing.CacheIsolatingProxy']) - self.config_fixture.config( - group='catalog', - driver='sql', - template_file=dirs.tests('default_catalog.templates')) - self.config_fixture.config( - group='kvs', - backends=[ - ('keystone.tests.unit.test_kvs.' 
- 'KVSBackendForcedKeyMangleFixture'), - 'keystone.tests.unit.test_kvs.KVSBackendFixture']) - self.config_fixture.config( - group='signing', certfile=signing_certfile, - keyfile=signing_keyfile, - ca_certs='examples/pki/certs/cacert.pem') - self.config_fixture.config(group='token', driver='kvs') - self.config_fixture.config( - group='saml', certfile=signing_certfile, keyfile=signing_keyfile) - self.config_fixture.config( - default_log_levels=[ - 'amqp=WARN', - 'amqplib=WARN', - 'boto=WARN', - 'qpid=WARN', - 'sqlalchemy=WARN', - 'suds=INFO', - 'oslo.messaging=INFO', - 'iso8601=WARN', - 'requests.packages.urllib3.connectionpool=WARN', - 'routes.middleware=INFO', - 'stevedore.extension=INFO', - 'keystone.notifications=INFO', - 'keystone.common.ldap=INFO', - ]) - self.auth_plugin_config_override() - - def auth_plugin_config_override(self, methods=None, **method_classes): - self.useFixture( - ksfixtures.ConfigAuthPlugins(self.config_fixture, - methods, - **method_classes)) - - def _assert_config_overrides_called(self): - assert self.__config_overrides_called is True - - def setUp(self): - super(TestCase, self).setUp() - self.__config_overrides_called = False - self.__load_backends_called = False - self.addCleanup(CONF.reset) - self.config_fixture = self.useFixture(config_fixture.Config(CONF)) - self.addCleanup(delattr, self, 'config_fixture') - self.config(self.config_files()) - - # NOTE(morganfainberg): mock the auth plugin setup to use the config - # fixture which automatically unregisters options when performing - # cleanup. - def mocked_register_auth_plugin_opt(conf, opt): - self.config_fixture.register_opt(opt, group='auth') - self.useFixture(mockpatch.PatchObject( - config, '_register_auth_plugin_opt', - new=mocked_register_auth_plugin_opt)) - - self.sql_driver_version_overrides = {} - self.config_overrides() - # NOTE(morganfainberg): ensure config_overrides has been called. 
- self.addCleanup(self._assert_config_overrides_called) - - self.useFixture(fixtures.FakeLogger(level=logging.DEBUG)) - - # NOTE(morganfainberg): This code is a copy from the oslo-incubator - # log module. This is not in a function or otherwise available to use - # without having a CONF object to setup logging. This should help to - # reduce the log size by limiting what we log (similar to how Keystone - # would run under mod_wsgi or eventlet). - for pair in CONF.default_log_levels: - mod, _sep, level_name = pair.partition('=') - logger = logging.getLogger(mod) - logger.setLevel(level_name) - - self.useFixture(ksfixtures.Cache()) - - # Clear the registry of providers so that providers from previous - # tests aren't used. - self.addCleanup(dependency.reset) - - # Ensure Notification subscriptions and resource types are empty - self.addCleanup(notifications.clear_subscribers) - self.addCleanup(notifications.reset_notifier) - - # Reset the auth-plugin registry - self.addCleanup(self.clear_auth_plugin_registry) - - self.addCleanup(setattr, controllers, '_VERSIONS', []) - - def config(self, config_files): - sql.initialize() - CONF(args=[], project='keystone', default_config_files=config_files) - - def load_backends(self): - """Initializes each manager and assigns them to an attribute.""" - # TODO(blk-u): Shouldn't need to clear the registry here, but some - # tests call load_backends multiple times. These should be fixed to - # only call load_backends once. - dependency.reset() - - # TODO(morganfainberg): Shouldn't need to clear the registry here, but - # some tests call load_backends multiple times. Since it is not - # possible to re-configure a backend, we need to clear the list. This - # should eventually be removed once testing has been cleaned up. 
- kvs_core.KEY_VALUE_STORE_REGISTRY.clear() - - self.clear_auth_plugin_registry() - drivers, _unused = common.setup_backends( - load_extra_backends_fn=self.load_extra_backends) - - for manager_name, manager in drivers.items(): - setattr(self, manager_name, manager) - self.addCleanup(self.cleanup_instance(*list(drivers.keys()))) - - def load_extra_backends(self): - """Override to load managers that aren't loaded by default. - - This is useful to load managers initialized by extensions. No extra - backends are loaded by default. - - :returns: dict of name -> manager - """ - return {} - - def load_fixtures(self, fixtures): - """Hacky basic and naive fixture loading based on a python module. - - Expects that the various APIs into the various services are already - defined on `self`. - - """ - # NOTE(dstanek): create a list of attribute names to be removed - # from this instance during cleanup - fixtures_to_cleanup = [] - - # TODO(termie): doing something from json, probably based on Django's - # loaddata will be much preferred. 
- if (hasattr(self, 'identity_api') and - hasattr(self, 'assignment_api') and - hasattr(self, 'resource_api')): - for domain in fixtures.DOMAINS: - try: - rv = self.resource_api.create_domain(domain['id'], domain) - except exception.Conflict: - rv = self.resource_api.get_domain(domain['id']) - except exception.NotImplemented: - rv = domain - attrname = 'domain_%s' % domain['id'] - setattr(self, attrname, rv) - fixtures_to_cleanup.append(attrname) - - for tenant in fixtures.TENANTS: - tenant_attr_name = 'tenant_%s' % tenant['name'].lower() - if hasattr(self, tenant_attr_name): - try: - # This will clear out any roles on the project as well - self.resource_api.delete_project(tenant['id']) - except exception.ProjectNotFound: - pass - rv = self.resource_api.create_project( - tenant['id'], tenant) - - setattr(self, tenant_attr_name, rv) - fixtures_to_cleanup.append(tenant_attr_name) - - for role in fixtures.ROLES: - try: - rv = self.role_api.create_role(role['id'], role) - except exception.Conflict: - rv = self.role_api.get_role(role['id']) - attrname = 'role_%s' % role['id'] - setattr(self, attrname, rv) - fixtures_to_cleanup.append(attrname) - - for user in fixtures.USERS: - user_copy = user.copy() - tenants = user_copy.pop('tenants') - try: - existing_user = getattr(self, 'user_%s' % user['id'], None) - if existing_user is not None: - self.identity_api.delete_user(existing_user['id']) - except exception.UserNotFound: - pass - - # For users, the manager layer will generate the ID - user_copy = self.identity_api.create_user(user_copy) - # Our tests expect that the password is still in the user - # record so that they can reference it, so put it back into - # the dict returned. 
- user_copy['password'] = user['password'] - - for tenant_id in tenants: - try: - self.assignment_api.add_user_to_project( - tenant_id, user_copy['id']) - except exception.Conflict: - pass - # Use the ID from the fixture as the attribute name, so - # that our tests can easily reference each user dict, while - # the ID in the dict will be the real public ID. - attrname = 'user_%s' % user['id'] - setattr(self, attrname, user_copy) - fixtures_to_cleanup.append(attrname) - - for role_assignment in fixtures.ROLE_ASSIGNMENTS: - role_id = role_assignment['role_id'] - user = role_assignment['user'] - tenant_id = role_assignment['tenant_id'] - user_id = getattr(self, 'user_%s' % user)['id'] - try: - self.assignment_api.add_role_to_user_and_project( - user_id, tenant_id, role_id) - except exception.Conflict: - pass - - self.addCleanup(self.cleanup_instance(*fixtures_to_cleanup)) - - def _paste_config(self, config): - if not config.startswith('config:'): - test_path = os.path.join(TESTSDIR, config) - etc_path = os.path.join(ROOTDIR, 'etc', config) - for path in [test_path, etc_path]: - if os.path.exists('%s-paste.ini' % path): - return 'config:%s-paste.ini' % path - return config - - def loadapp(self, config, name='main'): - return service.loadapp(self._paste_config(config), name=name) - - def clear_auth_plugin_registry(self): - auth.controllers.AUTH_METHODS.clear() - auth.controllers.AUTH_PLUGINS_LOADED = False - - def assertCloseEnoughForGovernmentWork(self, a, b, delta=3): - """Asserts that two datetimes are nearly equal within a small delta. - - :param delta: Maximum allowable time delta, defined in seconds. - """ - if a == b: - # Short-circuit if the values are the same. 
- return - - msg = '%s != %s within %s delta' % (a, b, delta) - - self.assertTrue(abs(a - b).seconds <= delta, msg) - - def assertNotEmpty(self, l): - self.assertTrue(len(l)) - - def assertRaisesRegexp(self, expected_exception, expected_regexp, - callable_obj, *args, **kwargs): - """Asserts that the message in a raised exception matches a regexp.""" - try: - callable_obj(*args, **kwargs) - except expected_exception as exc_value: - if isinstance(expected_regexp, six.string_types): - expected_regexp = re.compile(expected_regexp) - - if isinstance(exc_value.args[0], six.text_type): - if not expected_regexp.search(six.text_type(exc_value)): - raise self.failureException( - '"%s" does not match "%s"' % - (expected_regexp.pattern, six.text_type(exc_value))) - else: - if not expected_regexp.search(str(exc_value)): - raise self.failureException( - '"%s" does not match "%s"' % - (expected_regexp.pattern, str(exc_value))) - else: - if hasattr(expected_exception, '__name__'): - excName = expected_exception.__name__ - else: - excName = str(expected_exception) - raise self.failureException("%s not raised" % excName) - - @property - def ipv6_enabled(self): - if socket.has_ipv6: - sock = None - try: - sock = socket.socket(socket.AF_INET6) - # NOTE(Mouad): Try to bind to IPv6 loopback ip address. - sock.bind(("::1", 0)) - return True - except socket.error: - pass - finally: - if sock: - sock.close() - return False - - def skip_if_no_ipv6(self): - if not self.ipv6_enabled: - raise self.skipTest("IPv6 is not enabled in the system") - - def skip_if_env_not_set(self, env_var): - if not os.environ.get(env_var): - self.skipTest('Env variable %s is not set.' 
% env_var) - - -class SQLDriverOverrides(object): - """A mixin for consolidating sql-specific test overrides.""" - - def config_overrides(self): - super(SQLDriverOverrides, self).config_overrides() - # SQL specific driver overrides - self.config_fixture.config(group='catalog', driver='sql') - self.config_fixture.config(group='identity', driver='sql') - self.config_fixture.config(group='policy', driver='sql') - self.config_fixture.config(group='token', driver='sql') - self.config_fixture.config(group='trust', driver='sql') - - def use_specific_sql_driver_version(self, driver_path, - versionless_backend, version_suffix): - """Add this versioned driver to the list that will be loaded. - - :param driver_path: The path to the drivers, e.g. 'keystone.assignment' - :param versionless_backend: The name of the versionless drivers, e.g. - 'backends' - :param version_suffix: The suffix for the version , e.g. ``V8_`` - - This method assumes that versioned drivers are named: - , e.g. 'V8_backends'. - - """ - self.sql_driver_version_overrides[driver_path] = { - 'versionless_backend': versionless_backend, - 'versioned_backend': version_suffix + versionless_backend} diff --git a/keystone-moon/keystone/tests/unit/default_catalog.templates b/keystone-moon/keystone/tests/unit/default_catalog.templates deleted file mode 100644 index faf87eb5..00000000 --- a/keystone-moon/keystone/tests/unit/default_catalog.templates +++ /dev/null @@ -1,14 +0,0 @@ -# config for templated.Catalog, using camelCase because I don't want to do -# translations for keystone compat -catalog.RegionOne.identity.publicURL = http://localhost:$(public_port)s/v2.0 -catalog.RegionOne.identity.adminURL = http://localhost:$(admin_port)s/v2.0 -catalog.RegionOne.identity.internalURL = http://localhost:$(admin_port)s/v2.0 -catalog.RegionOne.identity.name = 'Identity Service' -catalog.RegionOne.identity.id = 1 - -# fake compute service for now to help novaclient tests work -catalog.RegionOne.compute.publicURL = 
http://localhost:8774/v1.1/$(tenant_id)s -catalog.RegionOne.compute.adminURL = http://localhost:8774/v1.1/$(tenant_id)s -catalog.RegionOne.compute.internalURL = http://localhost:8774/v1.1/$(tenant_id)s -catalog.RegionOne.compute.name = 'Compute Service' -catalog.RegionOne.compute.id = 2 diff --git a/keystone-moon/keystone/tests/unit/default_fixtures.py b/keystone-moon/keystone/tests/unit/default_fixtures.py deleted file mode 100644 index 7f661986..00000000 --- a/keystone-moon/keystone/tests/unit/default_fixtures.py +++ /dev/null @@ -1,154 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# NOTE(dolph): please try to avoid additional fixtures if possible; test suite -# performance may be negatively affected. 
-import uuid - -BAR_TENANT_ID = uuid.uuid4().hex -BAZ_TENANT_ID = uuid.uuid4().hex -MTU_TENANT_ID = uuid.uuid4().hex -SERVICE_TENANT_ID = uuid.uuid4().hex -DEFAULT_DOMAIN_ID = 'default' - -TENANTS = [ - { - 'id': BAR_TENANT_ID, - 'name': 'BAR', - 'domain_id': DEFAULT_DOMAIN_ID, - 'description': 'description', - 'enabled': True, - 'parent_id': DEFAULT_DOMAIN_ID, - 'is_domain': False, - }, { - 'id': BAZ_TENANT_ID, - 'name': 'BAZ', - 'domain_id': DEFAULT_DOMAIN_ID, - 'description': 'description', - 'enabled': True, - 'parent_id': DEFAULT_DOMAIN_ID, - 'is_domain': False, - }, { - 'id': MTU_TENANT_ID, - 'name': 'MTU', - 'description': 'description', - 'enabled': True, - 'domain_id': DEFAULT_DOMAIN_ID, - 'parent_id': DEFAULT_DOMAIN_ID, - 'is_domain': False, - }, { - 'id': SERVICE_TENANT_ID, - 'name': 'service', - 'description': 'description', - 'enabled': True, - 'domain_id': DEFAULT_DOMAIN_ID, - 'parent_id': DEFAULT_DOMAIN_ID, - 'is_domain': False, - } -] - -# NOTE(ja): a role of keystone_admin is done in setUp -USERS = [ - # NOTE(morganfainberg): Admin user for replacing admin_token_auth - { - 'id': 'reqadmin', - 'name': 'REQ_ADMIN', - 'domain_id': DEFAULT_DOMAIN_ID, - 'password': 'password', - 'tenants': [], - 'enabled': True - }, - { - 'id': 'foo', - 'name': 'FOO', - 'domain_id': DEFAULT_DOMAIN_ID, - 'password': 'foo2', - 'tenants': [BAR_TENANT_ID], - 'enabled': True, - 'email': 'foo@bar.com', - }, { - 'id': 'two', - 'name': 'TWO', - 'domain_id': DEFAULT_DOMAIN_ID, - 'password': 'two2', - 'enabled': True, - 'default_project_id': BAZ_TENANT_ID, - 'tenants': [BAZ_TENANT_ID], - 'email': 'two@three.com', - }, { - 'id': 'badguy', - 'name': 'BadGuy', - 'domain_id': DEFAULT_DOMAIN_ID, - 'password': 'bad', - 'enabled': False, - 'default_project_id': BAZ_TENANT_ID, - 'tenants': [BAZ_TENANT_ID], - 'email': 'bad@guy.com', - }, { - 'id': 'sna', - 'name': 'SNA', - 'domain_id': DEFAULT_DOMAIN_ID, - 'password': 'snafu', - 'enabled': True, - 'tenants': [BAR_TENANT_ID], - 'email': 
'sna@snl.coom', - } -] - -ROLES = [ - { - 'id': 'admin', - 'name': 'admin', - 'domain_id': None, - }, { - 'id': 'member', - 'name': 'Member', - 'domain_id': None, - }, { - 'id': '9fe2ff9ee4384b1894a90878d3e92bab', - 'name': '_member_', - 'domain_id': None, - }, { - 'id': 'other', - 'name': 'Other', - 'domain_id': None, - }, { - 'id': 'browser', - 'name': 'Browser', - 'domain_id': None, - }, { - 'id': 'writer', - 'name': 'Writer', - 'domain_id': None, - }, { - 'id': 'service', - 'name': 'Service', - 'domain_id': None, - } -] - -# NOTE(morganfainberg): Admin assignment for replacing admin_token_auth -ROLE_ASSIGNMENTS = [ - { - 'user': 'reqadmin', - 'tenant_id': SERVICE_TENANT_ID, - 'role_id': 'admin' - }, -] - -DOMAINS = [{'description': - (u'The default domain'), - 'enabled': True, - 'id': DEFAULT_DOMAIN_ID, - 'name': u'Default'}] diff --git a/keystone-moon/keystone/tests/unit/external/README.rst b/keystone-moon/keystone/tests/unit/external/README.rst deleted file mode 100644 index e8f9fa65..00000000 --- a/keystone-moon/keystone/tests/unit/external/README.rst +++ /dev/null @@ -1,9 +0,0 @@ -This directory contains interface tests for external libraries. The goal -is not to test every possible path through a library's code and get 100% -coverage. It's to give us a level of confidence that their general interface -remains the same through version upgrades. - -This gives us a place to put these tests without having to litter our -own tests with assertions that are not directly related to the code -under test. The expectations for the external library are all in one -place so it makes it easier for us to find out what they are. 
diff --git a/keystone-moon/keystone/tests/unit/external/__init__.py b/keystone-moon/keystone/tests/unit/external/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/external/test_timeutils.py b/keystone-moon/keystone/tests/unit/external/test_timeutils.py deleted file mode 100644 index 7fc72d58..00000000 --- a/keystone-moon/keystone/tests/unit/external/test_timeutils.py +++ /dev/null @@ -1,33 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import datetime - -from oslo_utils import timeutils - -import keystone.tests.unit as tests - - -class TestTimeUtils(tests.BaseTestCase): - - def test_parsing_date_strings_returns_a_datetime(self): - example_date_str = '2015-09-23T04:45:37.196621Z' - dt = timeutils.parse_strtime(example_date_str, fmt=tests.TIME_FORMAT) - self.assertIsInstance(dt, datetime.datetime) - - def test_parsing_invalid_date_strings_raises_a_ValueError(self): - example_date_str = '' - simple_format = '%Y' - self.assertRaises(ValueError, - timeutils.parse_strtime, - example_date_str, - fmt=simple_format) diff --git a/keystone-moon/keystone/tests/unit/fakeldap.py b/keystone-moon/keystone/tests/unit/fakeldap.py deleted file mode 100644 index 9ad1f218..00000000 --- a/keystone-moon/keystone/tests/unit/fakeldap.py +++ /dev/null @@ -1,664 +0,0 @@ -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Fake LDAP server for test harness. - -This class does very little error checking, and knows nothing about ldap -class definitions. It implements the minimum emulation of the python ldap -library to work with keystone. - -""" - -import random -import re -import shelve - -import ldap -from oslo_config import cfg -from oslo_log import log -import six -from six import moves - -from keystone.common.ldap import core -from keystone import exception - - -SCOPE_NAMES = { - ldap.SCOPE_BASE: 'SCOPE_BASE', - ldap.SCOPE_ONELEVEL: 'SCOPE_ONELEVEL', - ldap.SCOPE_SUBTREE: 'SCOPE_SUBTREE', -} - -# http://msdn.microsoft.com/en-us/library/windows/desktop/aa366991(v=vs.85).aspx # noqa -CONTROL_TREEDELETE = '1.2.840.113556.1.4.805' - -LOG = log.getLogger(__name__) -CONF = cfg.CONF - - -def _internal_attr(attr_name, value_or_values): - def normalize_value(value): - return core.utf8_decode(value) - - def normalize_dn(dn): - # Capitalize the attribute names as an LDAP server might. - - # NOTE(blk-u): Special case for this tested value, used with - # test_user_id_comma. The call to str2dn here isn't always correct - # here, because `dn` is escaped for an LDAP filter. str2dn() normally - # works only because there's no special characters in `dn`. - if dn == 'cn=Doe\\5c, John,ou=Users,cn=example,cn=com': - return 'CN=Doe\\, John,OU=Users,CN=example,CN=com' - - # NOTE(blk-u): Another special case for this tested value. 
When a - # roleOccupant has an escaped comma, it gets converted to \2C. - if dn == 'cn=Doe\\, John,ou=Users,cn=example,cn=com': - return 'CN=Doe\\2C John,OU=Users,CN=example,CN=com' - - try: - dn = ldap.dn.str2dn(core.utf8_encode(dn)) - except ldap.DECODING_ERROR: - # NOTE(amakarov): In case of IDs instead of DNs in group members - # they must be handled as regular values. - return normalize_value(dn) - - norm = [] - for part in dn: - name, val, i = part[0] - name = core.utf8_decode(name) - name = name.upper() - name = core.utf8_encode(name) - norm.append([(name, val, i)]) - return core.utf8_decode(ldap.dn.dn2str(norm)) - - if attr_name in ('member', 'roleOccupant'): - attr_fn = normalize_dn - else: - attr_fn = normalize_value - - if isinstance(value_or_values, list): - return [attr_fn(x) for x in value_or_values] - return [attr_fn(value_or_values)] - - -def _match_query(query, attrs, attrs_checked): - """Match an ldap query to an attribute dictionary. - - The characters &, |, and ! are supported in the query. No syntax checking - is performed, so malformed queries will not work correctly. - """ - # cut off the parentheses - inner = query[1:-1] - if inner.startswith(('&', '|')): - if inner[0] == '&': - matchfn = all - else: - matchfn = any - # cut off the & or | - groups = _paren_groups(inner[1:]) - return matchfn(_match_query(group, attrs, attrs_checked) - for group in groups) - if inner.startswith('!'): - # cut off the ! 
and the nested parentheses - return not _match_query(query[2:-1], attrs, attrs_checked) - - (k, _sep, v) = inner.partition('=') - attrs_checked.add(k.lower()) - return _match(k, v, attrs) - - -def _paren_groups(source): - """Split a string into parenthesized groups.""" - count = 0 - start = 0 - result = [] - for pos in moves.range(len(source)): - if source[pos] == '(': - if count == 0: - start = pos - count += 1 - if source[pos] == ')': - count -= 1 - if count == 0: - result.append(source[start:pos + 1]) - return result - - -def _match(key, value, attrs): - """Match a given key and value against an attribute list.""" - def match_with_wildcards(norm_val, val_list): - # Case insensitive checking with wildcards - if norm_val.startswith('*'): - if norm_val.endswith('*'): - # Is the string anywhere in the target? - for x in val_list: - if norm_val[1:-1] in x: - return True - else: - # Is the string at the end of the target? - for x in val_list: - if (norm_val[1:] == - x[len(x) - len(norm_val) + 1:]): - return True - elif norm_val.endswith('*'): - # Is the string at the start of the target? - for x in val_list: - if norm_val[:-1] == x[:len(norm_val) - 1]: - return True - else: - # Is the string an exact match? - for x in val_list: - if check_value == x: - return True - return False - - if key not in attrs: - return False - # This is a pure wild card search, so the answer must be yes! 
- if value == '*': - return True - if key == 'serviceId': - # for serviceId, the backend is returning a list of numbers - # make sure we convert them to strings first before comparing - # them - str_sids = [six.text_type(x) for x in attrs[key]] - return six.text_type(value) in str_sids - if key != 'objectclass': - check_value = _internal_attr(key, value)[0].lower() - norm_values = list( - _internal_attr(key, x)[0].lower() for x in attrs[key]) - return match_with_wildcards(check_value, norm_values) - # it is an objectclass check, so check subclasses - values = _subs(value) - for v in values: - if v in attrs[key]: - return True - return False - - -def _subs(value): - """Returns a list of subclass strings. - - The strings represent the ldap objectclass plus any subclasses that - inherit from it. Fakeldap doesn't know about the ldap object structure, - so subclasses need to be defined manually in the dictionary below. - - """ - subs = {'groupOfNames': ['keystoneTenant', - 'keystoneRole', - 'keystoneTenantRole']} - if value in subs: - return [value] + subs[value] - return [value] - - -server_fail = False - - -class FakeShelve(dict): - - def sync(self): - pass - - -FakeShelves = {} -PendingRequests = {} - - -class FakeLdap(core.LDAPHandler): - """Emulate the python-ldap API. - - The python-ldap API requires all strings to be UTF-8 encoded. This - is assured by the caller of this interface - (i.e. KeystoneLDAPHandler). - - However, internally this emulation MUST process and store strings - in a canonical form which permits operations on - characters. Encoded strings do not provide the ability to operate - on characters. Therefore this emulation accepts UTF-8 encoded - strings, decodes them to unicode for operations internal to this - emulation, and encodes them back to UTF-8 when returning values - from the emulation. 
- - """ - - __prefix = 'ldap:' - - def __init__(self, conn=None): - super(FakeLdap, self).__init__(conn=conn) - self._ldap_options = {ldap.OPT_DEREF: ldap.DEREF_NEVER} - - def connect(self, url, page_size=0, alias_dereferencing=None, - use_tls=False, tls_cacertfile=None, tls_cacertdir=None, - tls_req_cert='demand', chase_referrals=None, debug_level=None, - use_pool=None, pool_size=None, pool_retry_max=None, - pool_retry_delay=None, pool_conn_timeout=None, - pool_conn_lifetime=None): - if url.startswith('fake://memory'): - if url not in FakeShelves: - FakeShelves[url] = FakeShelve() - self.db = FakeShelves[url] - else: - self.db = shelve.open(url[7:]) - - using_ldaps = url.lower().startswith("ldaps") - - if use_tls and using_ldaps: - raise AssertionError('Invalid TLS / LDAPS combination') - - if use_tls: - if tls_cacertfile: - ldap.set_option(ldap.OPT_X_TLS_CACERTFILE, tls_cacertfile) - elif tls_cacertdir: - ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, tls_cacertdir) - if tls_req_cert in list(core.LDAP_TLS_CERTS.values()): - ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, tls_req_cert) - else: - raise ValueError("invalid TLS_REQUIRE_CERT tls_req_cert=%s", - tls_req_cert) - - if alias_dereferencing is not None: - self.set_option(ldap.OPT_DEREF, alias_dereferencing) - self.page_size = page_size - - self.use_pool = use_pool - self.pool_size = pool_size - self.pool_retry_max = pool_retry_max - self.pool_retry_delay = pool_retry_delay - self.pool_conn_timeout = pool_conn_timeout - self.pool_conn_lifetime = pool_conn_lifetime - - def dn(self, dn): - return core.utf8_decode(dn) - - def _dn_to_id_attr(self, dn): - return core.utf8_decode(ldap.dn.str2dn(core.utf8_encode(dn))[0][0][0]) - - def _dn_to_id_value(self, dn): - return core.utf8_decode(ldap.dn.str2dn(core.utf8_encode(dn))[0][0][1]) - - def key(self, dn): - return '%s%s' % (self.__prefix, self.dn(dn)) - - def simple_bind_s(self, who='', cred='', - serverctrls=None, clientctrls=None): - """This method is ignored, but 
provided for compatibility.""" - if server_fail: - raise ldap.SERVER_DOWN - whos = ['cn=Admin', CONF.ldap.user] - if who in whos and cred in ['password', CONF.ldap.password]: - return - - try: - attrs = self.db[self.key(who)] - except KeyError: - LOG.debug('bind fail: who=%s not found', core.utf8_decode(who)) - raise ldap.NO_SUCH_OBJECT - - db_password = None - try: - db_password = attrs['userPassword'][0] - except (KeyError, IndexError): - LOG.debug('bind fail: password for who=%s not found', - core.utf8_decode(who)) - raise ldap.INAPPROPRIATE_AUTH - - if cred != db_password: - LOG.debug('bind fail: password for who=%s does not match', - core.utf8_decode(who)) - raise ldap.INVALID_CREDENTIALS - - def unbind_s(self): - """This method is ignored, but provided for compatibility.""" - if server_fail: - raise ldap.SERVER_DOWN - - def add_s(self, dn, modlist): - """Add an object with the specified attributes at dn.""" - if server_fail: - raise ldap.SERVER_DOWN - - id_attr_in_modlist = False - id_attr = self._dn_to_id_attr(dn) - id_value = self._dn_to_id_value(dn) - - # The LDAP API raises a TypeError if attr name is None. - for k, dummy_v in modlist: - if k is None: - raise TypeError('must be string, not None. 
modlist=%s' % - modlist) - - if k == id_attr: - for val in dummy_v: - if core.utf8_decode(val) == id_value: - id_attr_in_modlist = True - - if not id_attr_in_modlist: - LOG.debug('id_attribute=%(attr)s missing, attributes=%(attrs)s' % - {'attr': id_attr, 'attrs': modlist}) - raise ldap.NAMING_VIOLATION - key = self.key(dn) - LOG.debug('add item: dn=%(dn)s, attrs=%(attrs)s', { - 'dn': core.utf8_decode(dn), 'attrs': modlist}) - if key in self.db: - LOG.debug('add item failed: dn=%s is already in store.', - core.utf8_decode(dn)) - raise ldap.ALREADY_EXISTS(dn) - - self.db[key] = {k: _internal_attr(k, v) for k, v in modlist} - self.db.sync() - - def delete_s(self, dn): - """Remove the ldap object at specified dn.""" - return self.delete_ext_s(dn, serverctrls=[]) - - def _getChildren(self, dn): - return [k for k, v in self.db.items() - if re.match('%s.*,%s' % ( - re.escape(self.__prefix), - re.escape(self.dn(dn))), k)] - - def delete_ext_s(self, dn, serverctrls, clientctrls=None): - """Remove the ldap object at specified dn.""" - if server_fail: - raise ldap.SERVER_DOWN - - try: - if CONTROL_TREEDELETE in [c.controlType for c in serverctrls]: - LOG.debug('FakeLdap subtree_delete item: dn=%s', - core.utf8_decode(dn)) - children = self._getChildren(dn) - for c in children: - del self.db[c] - - key = self.key(dn) - LOG.debug('FakeLdap delete item: dn=%s', core.utf8_decode(dn)) - del self.db[key] - except KeyError: - LOG.debug('delete item failed: dn=%s not found.', - core.utf8_decode(dn)) - raise ldap.NO_SUCH_OBJECT - self.db.sync() - - def modify_s(self, dn, modlist): - """Modify the object at dn using the attribute list. 
- - :param dn: an LDAP DN - :param modlist: a list of tuples in the following form: - ([MOD_ADD | MOD_DELETE | MOD_REPACE], attribute, value) - """ - if server_fail: - raise ldap.SERVER_DOWN - - key = self.key(dn) - LOG.debug('modify item: dn=%(dn)s attrs=%(attrs)s', { - 'dn': core.utf8_decode(dn), 'attrs': modlist}) - try: - entry = self.db[key] - except KeyError: - LOG.debug('modify item failed: dn=%s not found.', - core.utf8_decode(dn)) - raise ldap.NO_SUCH_OBJECT - - for cmd, k, v in modlist: - values = entry.setdefault(k, []) - if cmd == ldap.MOD_ADD: - v = _internal_attr(k, v) - for x in v: - if x in values: - raise ldap.TYPE_OR_VALUE_EXISTS - values += v - elif cmd == ldap.MOD_REPLACE: - values[:] = _internal_attr(k, v) - elif cmd == ldap.MOD_DELETE: - if v is None: - if not values: - LOG.debug('modify item failed: ' - 'item has no attribute "%s" to delete', k) - raise ldap.NO_SUCH_ATTRIBUTE - values[:] = [] - else: - for val in _internal_attr(k, v): - try: - values.remove(val) - except ValueError: - LOG.debug('modify item failed: ' - 'item has no attribute "%(k)s" with ' - 'value "%(v)s" to delete', { - 'k': k, 'v': val}) - raise ldap.NO_SUCH_ATTRIBUTE - else: - LOG.debug('modify item failed: unknown command %s', cmd) - raise NotImplementedError('modify_s action %s not' - ' implemented' % cmd) - self.db[key] = entry - self.db.sync() - - def search_s(self, base, scope, - filterstr='(objectClass=*)', attrlist=None, attrsonly=0): - """Search for all matching objects under base using the query. - - Args: - base -- dn to search under - scope -- search scope (base, subtree, onelevel) - filterstr -- filter objects by - attrlist -- attrs to return. 
Returns all attrs if not specified - - """ - if server_fail: - raise ldap.SERVER_DOWN - - if (not filterstr) and (scope != ldap.SCOPE_BASE): - raise AssertionError('Search without filter on onelevel or ' - 'subtree scope') - - if scope == ldap.SCOPE_BASE: - try: - item_dict = self.db[self.key(base)] - except KeyError: - LOG.debug('search fail: dn not found for SCOPE_BASE') - raise ldap.NO_SUCH_OBJECT - results = [(base, item_dict)] - elif scope == ldap.SCOPE_SUBTREE: - # FIXME - LDAP search with SUBTREE scope must return the base - # entry, but the code below does _not_. Unfortunately, there are - # several tests that depend on this broken behavior, and fail - # when the base entry is returned in the search results. The - # fix is easy here, just initialize results as above for - # the SCOPE_BASE case. - # https://bugs.launchpad.net/keystone/+bug/1368772 - try: - item_dict = self.db[self.key(base)] - except KeyError: - LOG.debug('search fail: dn not found for SCOPE_SUBTREE') - raise ldap.NO_SUCH_OBJECT - results = [(base, item_dict)] - extraresults = [(k[len(self.__prefix):], v) - for k, v in self.db.items() - if re.match('%s.*,%s' % - (re.escape(self.__prefix), - re.escape(self.dn(base))), k)] - results.extend(extraresults) - elif scope == ldap.SCOPE_ONELEVEL: - - def get_entries(): - base_dn = ldap.dn.str2dn(core.utf8_encode(base)) - base_len = len(base_dn) - - for k, v in self.db.items(): - if not k.startswith(self.__prefix): - continue - k_dn_str = k[len(self.__prefix):] - k_dn = ldap.dn.str2dn(core.utf8_encode(k_dn_str)) - if len(k_dn) != base_len + 1: - continue - if k_dn[-base_len:] != base_dn: - continue - yield (k_dn_str, v) - - results = list(get_entries()) - - else: - # openldap client/server raises PROTOCOL_ERROR for unexpected scope - raise ldap.PROTOCOL_ERROR - - objects = [] - for dn, attrs in results: - # filter the objects by filterstr - id_attr, id_val, _ = ldap.dn.str2dn(core.utf8_encode(dn))[0][0] - id_attr = core.utf8_decode(id_attr) - id_val = 
core.utf8_decode(id_val) - match_attrs = attrs.copy() - match_attrs[id_attr] = [id_val] - attrs_checked = set() - if not filterstr or _match_query(filterstr, match_attrs, - attrs_checked): - if (filterstr and - (scope != ldap.SCOPE_BASE) and - ('objectclass' not in attrs_checked)): - raise AssertionError('No objectClass in search filter') - # filter the attributes by attrlist - attrs = {k: v for k, v in attrs.items() - if not attrlist or k in attrlist} - objects.append((dn, attrs)) - - return objects - - def set_option(self, option, invalue): - self._ldap_options[option] = invalue - - def get_option(self, option): - value = self._ldap_options.get(option) - return value - - def search_ext(self, base, scope, - filterstr='(objectClass=*)', attrlist=None, attrsonly=0, - serverctrls=None, clientctrls=None, - timeout=-1, sizelimit=0): - if clientctrls is not None or timeout != -1 or sizelimit != 0: - raise exception.NotImplemented() - - # only passing a single server control is supported by this fake ldap - if len(serverctrls) > 1: - raise exception.NotImplemented() - - # search_ext is async and returns an identifier used for - # retrieving the results via result3(). This will be emulated by - # storing the request in a variable with random integer key and - # performing the real lookup in result3() - msgid = random.randint(0, 1000) - PendingRequests[msgid] = (base, scope, filterstr, attrlist, attrsonly, - serverctrls) - return msgid - - def result3(self, msgid=ldap.RES_ANY, all=1, timeout=None, - resp_ctrl_classes=None): - """Execute async request - - Only msgid param is supported. Request info is fetched from global - variable `PendingRequests` by msgid, executed using search_s and - limited if requested. - """ - if all != 1 or timeout is not None or resp_ctrl_classes is not None: - raise exception.NotImplemented() - - params = PendingRequests[msgid] - # search_s accepts a subset of parameters of search_ext, - # that's why we use only the first 5. 
- results = self.search_s(*params[:5]) - - # extract limit from serverctrl - serverctrls = params[5] - ctrl = serverctrls[0] - - if ctrl.size: - rdata = results[:ctrl.size] - else: - rdata = results - - # real result3 returns various service info -- rtype, rmsgid, - # serverctrls. Now this info is not used, so all this info is None - rtype = None - rmsgid = None - serverctrls = None - return (rtype, rdata, rmsgid, serverctrls) - - -class FakeLdapPool(FakeLdap): - """Emulate the python-ldap API with pooled connections. - - This class is used as connector class in PooledLDAPHandler. - - """ - - def __init__(self, uri, retry_max=None, retry_delay=None, conn=None): - super(FakeLdapPool, self).__init__(conn=conn) - self.url = uri - self.connected = None - self.conn = self - self._connection_time = 5 # any number greater than 0 - - def get_lifetime(self): - return self._connection_time - - def simple_bind_s(self, who=None, cred=None, - serverctrls=None, clientctrls=None): - if self.url.startswith('fakepool://memory'): - if self.url not in FakeShelves: - FakeShelves[self.url] = FakeShelve() - self.db = FakeShelves[self.url] - else: - self.db = shelve.open(self.url[11:]) - - if not who: - who = 'cn=Admin' - if not cred: - cred = 'password' - - super(FakeLdapPool, self).simple_bind_s(who=who, cred=cred, - serverctrls=serverctrls, - clientctrls=clientctrls) - - def unbind_ext_s(self): - """Added to extend FakeLdap as connector class.""" - pass - - -class FakeLdapNoSubtreeDelete(FakeLdap): - """FakeLdap subclass that does not support subtree delete - - Same as FakeLdap except delete will throw the LDAP error - ldap.NOT_ALLOWED_ON_NONLEAF if there is an attempt to delete - an entry that has children. 
- """ - - def delete_ext_s(self, dn, serverctrls, clientctrls=None): - """Remove the ldap object at specified dn.""" - if server_fail: - raise ldap.SERVER_DOWN - - try: - children = self._getChildren(dn) - if children: - raise ldap.NOT_ALLOWED_ON_NONLEAF - - except KeyError: - LOG.debug('delete item failed: dn=%s not found.', - core.utf8_decode(dn)) - raise ldap.NO_SUCH_OBJECT - super(FakeLdapNoSubtreeDelete, self).delete_ext_s(dn, - serverctrls, - clientctrls) diff --git a/keystone-moon/keystone/tests/unit/federation_fixtures.py b/keystone-moon/keystone/tests/unit/federation_fixtures.py deleted file mode 100644 index d4527d9c..00000000 --- a/keystone-moon/keystone/tests/unit/federation_fixtures.py +++ /dev/null @@ -1,28 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- - -IDP_ENTITY_ID = 'https://localhost/v3/OS-FEDERATION/saml2/idp' -IDP_SSO_ENDPOINT = 'https://localhost/v3/OS-FEDERATION/saml2/SSO' - -# Organization info -IDP_ORGANIZATION_NAME = 'ACME INC' -IDP_ORGANIZATION_DISPLAY_NAME = 'ACME' -IDP_ORGANIZATION_URL = 'https://acme.example.com' - -# Contact info -IDP_CONTACT_COMPANY = 'ACME Sub' -IDP_CONTACT_GIVEN_NAME = 'Joe' -IDP_CONTACT_SURNAME = 'Hacker' -IDP_CONTACT_EMAIL = 'joe@acme.example.com' -IDP_CONTACT_TELEPHONE_NUMBER = '1234567890' -IDP_CONTACT_TYPE = 'technical' diff --git a/keystone-moon/keystone/tests/unit/filtering.py b/keystone-moon/keystone/tests/unit/filtering.py deleted file mode 100644 index 59301299..00000000 --- a/keystone-moon/keystone/tests/unit/filtering.py +++ /dev/null @@ -1,124 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import uuid - -from oslo_config import cfg -from six.moves import range - - -CONF = cfg.CONF - - -class FilterTests(object): - - # Provide support for checking if a batch of list items all - # exist within a contiguous range in a total list - def _match_with_list(self, this_batch, total_list, - batch_size=None, - list_start=None, list_end=None): - if batch_size is None: - batch_size = len(this_batch) - if list_start is None: - list_start = 0 - if list_end is None: - list_end = len(total_list) - for batch_item in range(0, batch_size): - found = False - for list_item in range(list_start, list_end): - if this_batch[batch_item]['id'] == total_list[list_item]['id']: - found = True - self.assertTrue(found) - - def _create_entity(self, entity_type): - """Find the create_ method. - - Searches through the [identity_api, resource_api, assignment_api] - managers for a method called create_ and returns the first - one. - - """ - f = getattr(self.identity_api, 'create_%s' % entity_type, None) - if f is None: - f = getattr(self.resource_api, 'create_%s' % entity_type, None) - if f is None: - f = getattr(self.assignment_api, 'create_%s' % entity_type) - return f - - def _delete_entity(self, entity_type): - """Find the delete_ method. - - Searches through the [identity_api, resource_api, assignment_api] - managers for a method called delete_ and returns the first - one. - - """ - f = getattr(self.identity_api, 'delete_%s' % entity_type, None) - if f is None: - f = getattr(self.resource_api, 'delete_%s' % entity_type, None) - if f is None: - f = getattr(self.assignment_api, 'delete_%s' % entity_type) - return f - - def _list_entities(self, entity_type): - """Find the list_ method. - - Searches through the [identity_api, resource_api, assignment_api] - managers for a method called list_ and returns the first - one. 
- - """ - f = getattr(self.identity_api, 'list_%ss' % entity_type, None) - if f is None: - f = getattr(self.resource_api, 'list_%ss' % entity_type, None) - if f is None: - f = getattr(self.assignment_api, 'list_%ss' % entity_type) - return f - - def _create_one_entity(self, entity_type, domain_id, name): - new_entity = {'name': name, - 'domain_id': domain_id} - if entity_type in ['user', 'group']: - # The manager layer creates the ID for users and groups - new_entity = self._create_entity(entity_type)(new_entity) - else: - new_entity['id'] = '0000' + uuid.uuid4().hex - self._create_entity(entity_type)(new_entity['id'], new_entity) - return new_entity - - def _create_test_data(self, entity_type, number, domain_id=None, - name_dict=None): - """Create entity test data - - :param entity_type: type of entity to create, e.g. 'user', group' etc. - :param number: number of entities to create, - :param domain_id: if not defined, all users will be created in the - default domain. - :param name_dict: optional dict containing entity number and name pairs - - """ - entity_list = [] - if domain_id is None: - domain_id = CONF.identity.default_domain_id - name_dict = name_dict or {} - for x in range(number): - # If this index has a name defined in the name_dict, then use it - name = name_dict.get(x, uuid.uuid4().hex) - new_entity = self._create_one_entity(entity_type, domain_id, name) - entity_list.append(new_entity) - return entity_list - - def _delete_test_data(self, entity_type, entity_list): - for entity in entity_list: - self._delete_entity(entity_type)(entity['id']) diff --git a/keystone-moon/keystone/tests/unit/identity/__init__.py b/keystone-moon/keystone/tests/unit/identity/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/identity/test_backends.py b/keystone-moon/keystone/tests/unit/identity/test_backends.py deleted file mode 100644 index 8b5c0def..00000000 --- 
a/keystone-moon/keystone/tests/unit/identity/test_backends.py +++ /dev/null @@ -1,1297 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import uuid - -import mock -from oslo_config import cfg -from six.moves import range -from testtools import matchers - -from keystone.common import driver_hints -from keystone import exception -from keystone.tests import unit -from keystone.tests.unit import default_fixtures -from keystone.tests.unit import filtering - - -CONF = cfg.CONF - - -class IdentityTests(object): - - def _get_domain_fixture(self): - domain = unit.new_domain_ref() - self.resource_api.create_domain(domain['id'], domain) - return domain - - def _set_domain_scope(self, domain_id): - # We only provide a domain scope if we have multiple drivers - if CONF.identity.domain_specific_drivers_enabled: - return domain_id - - def test_authenticate_bad_user(self): - self.assertRaises(AssertionError, - self.identity_api.authenticate, - context={}, - user_id=uuid.uuid4().hex, - password=self.user_foo['password']) - - def test_authenticate_bad_password(self): - self.assertRaises(AssertionError, - self.identity_api.authenticate, - context={}, - user_id=self.user_foo['id'], - password=uuid.uuid4().hex) - - def test_authenticate(self): - user_ref = self.identity_api.authenticate( - context={}, - user_id=self.user_sna['id'], - password=self.user_sna['password']) - # NOTE(termie): the password field is left in user_sna to make - # it 
easier to authenticate in tests, but should - # not be returned by the api - self.user_sna.pop('password') - self.user_sna['enabled'] = True - self.assertDictEqual(self.user_sna, user_ref) - - def test_authenticate_and_get_roles_no_metadata(self): - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - - # Remove user id. It is ignored by create_user() and will break the - # subset test below. - del user['id'] - - new_user = self.identity_api.create_user(user) - self.assignment_api.add_user_to_project(self.tenant_baz['id'], - new_user['id']) - user_ref = self.identity_api.authenticate( - context={}, - user_id=new_user['id'], - password=user['password']) - self.assertNotIn('password', user_ref) - # NOTE(termie): the password field is left in user_sna to make - # it easier to authenticate in tests, but should - # not be returned by the api - user.pop('password') - self.assertDictContainsSubset(user, user_ref) - role_list = self.assignment_api.get_roles_for_user_and_project( - new_user['id'], self.tenant_baz['id']) - self.assertEqual(1, len(role_list)) - self.assertIn(CONF.member_role_id, role_list) - - def test_authenticate_if_no_password_set(self): - id_ = uuid.uuid4().hex - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - self.identity_api.create_user(user) - - self.assertRaises(AssertionError, - self.identity_api.authenticate, - context={}, - user_id=id_, - password='password') - - def test_create_unicode_user_name(self): - unicode_name = u'name \u540d\u5b57' - user = unit.new_user_ref(name=unicode_name, - domain_id=CONF.identity.default_domain_id) - ref = self.identity_api.create_user(user) - self.assertEqual(unicode_name, ref['name']) - - def test_get_user(self): - user_ref = self.identity_api.get_user(self.user_foo['id']) - # NOTE(termie): the password field is left in user_foo to make - # it easier to authenticate in tests, but should - # not be returned by the api - self.user_foo.pop('password') - 
self.assertDictEqual(self.user_foo, user_ref) - - @unit.skip_if_cache_disabled('identity') - def test_cache_layer_get_user(self): - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - self.identity_api.create_user(user) - ref = self.identity_api.get_user_by_name(user['name'], - user['domain_id']) - # cache the result. - self.identity_api.get_user(ref['id']) - # delete bypassing identity api - domain_id, driver, entity_id = ( - self.identity_api._get_domain_driver_and_entity_id(ref['id'])) - driver.delete_user(entity_id) - - self.assertDictEqual(ref, self.identity_api.get_user(ref['id'])) - self.identity_api.get_user.invalidate(self.identity_api, ref['id']) - self.assertRaises(exception.UserNotFound, - self.identity_api.get_user, ref['id']) - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - user = self.identity_api.create_user(user) - ref = self.identity_api.get_user_by_name(user['name'], - user['domain_id']) - user['description'] = uuid.uuid4().hex - # cache the result. - self.identity_api.get_user(ref['id']) - # update using identity api and get back updated user. 
- user_updated = self.identity_api.update_user(ref['id'], user) - self.assertDictContainsSubset(self.identity_api.get_user(ref['id']), - user_updated) - self.assertDictContainsSubset( - self.identity_api.get_user_by_name(ref['name'], ref['domain_id']), - user_updated) - - def test_get_user_returns_not_found(self): - self.assertRaises(exception.UserNotFound, - self.identity_api.get_user, - uuid.uuid4().hex) - - def test_get_user_by_name(self): - user_ref = self.identity_api.get_user_by_name( - self.user_foo['name'], CONF.identity.default_domain_id) - # NOTE(termie): the password field is left in user_foo to make - # it easier to authenticate in tests, but should - # not be returned by the api - self.user_foo.pop('password') - self.assertDictEqual(self.user_foo, user_ref) - - @unit.skip_if_cache_disabled('identity') - def test_cache_layer_get_user_by_name(self): - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - self.identity_api.create_user(user) - ref = self.identity_api.get_user_by_name(user['name'], - user['domain_id']) - # delete bypassing the identity api. 
- domain_id, driver, entity_id = ( - self.identity_api._get_domain_driver_and_entity_id(ref['id'])) - driver.delete_user(entity_id) - - self.assertDictEqual(ref, self.identity_api.get_user_by_name( - user['name'], CONF.identity.default_domain_id)) - self.identity_api.get_user_by_name.invalidate( - self.identity_api, user['name'], CONF.identity.default_domain_id) - self.assertRaises(exception.UserNotFound, - self.identity_api.get_user_by_name, - user['name'], CONF.identity.default_domain_id) - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - user = self.identity_api.create_user(user) - ref = self.identity_api.get_user_by_name(user['name'], - user['domain_id']) - user['description'] = uuid.uuid4().hex - user_updated = self.identity_api.update_user(ref['id'], user) - self.assertDictContainsSubset(self.identity_api.get_user(ref['id']), - user_updated) - self.assertDictContainsSubset( - self.identity_api.get_user_by_name(ref['name'], ref['domain_id']), - user_updated) - - def test_get_user_by_name_returns_not_found(self): - self.assertRaises(exception.UserNotFound, - self.identity_api.get_user_by_name, - uuid.uuid4().hex, - CONF.identity.default_domain_id) - - def test_create_duplicate_user_name_fails(self): - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - user = self.identity_api.create_user(user) - self.assertRaises(exception.Conflict, - self.identity_api.create_user, - user) - - def test_create_duplicate_user_name_in_different_domains(self): - new_domain = unit.new_domain_ref() - self.resource_api.create_domain(new_domain['id'], new_domain) - user1 = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - - user2 = unit.new_user_ref(name=user1['name'], - domain_id=new_domain['id']) - - self.identity_api.create_user(user1) - self.identity_api.create_user(user2) - - def test_move_user_between_domains(self): - domain1 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = 
unit.new_domain_ref() - self.resource_api.create_domain(domain2['id'], domain2) - user = unit.new_user_ref(domain_id=domain1['id']) - user = self.identity_api.create_user(user) - user['domain_id'] = domain2['id'] - # Update the user asserting that a deprecation warning is emitted - with mock.patch( - 'oslo_log.versionutils.report_deprecated_feature') as mock_dep: - self.identity_api.update_user(user['id'], user) - self.assertTrue(mock_dep.called) - - updated_user_ref = self.identity_api.get_user(user['id']) - self.assertEqual(domain2['id'], updated_user_ref['domain_id']) - - def test_move_user_between_domains_with_clashing_names_fails(self): - domain1 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = unit.new_domain_ref() - self.resource_api.create_domain(domain2['id'], domain2) - # First, create a user in domain1 - user1 = unit.new_user_ref(domain_id=domain1['id']) - user1 = self.identity_api.create_user(user1) - # Now create a user in domain2 with a potentially clashing - # name - which should work since we have domain separation - user2 = unit.new_user_ref(name=user1['name'], - domain_id=domain2['id']) - user2 = self.identity_api.create_user(user2) - # Now try and move user1 into the 2nd domain - which should - # fail since the names clash - user1['domain_id'] = domain2['id'] - self.assertRaises(exception.Conflict, - self.identity_api.update_user, - user1['id'], - user1) - - def test_rename_duplicate_user_name_fails(self): - user1 = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - user2 = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - self.identity_api.create_user(user1) - user2 = self.identity_api.create_user(user2) - user2['name'] = user1['name'] - self.assertRaises(exception.Conflict, - self.identity_api.update_user, - user2['id'], - user2) - - def test_update_user_id_fails(self): - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - user = 
self.identity_api.create_user(user) - original_id = user['id'] - user['id'] = 'fake2' - self.assertRaises(exception.ValidationError, - self.identity_api.update_user, - original_id, - user) - user_ref = self.identity_api.get_user(original_id) - self.assertEqual(original_id, user_ref['id']) - self.assertRaises(exception.UserNotFound, - self.identity_api.get_user, - 'fake2') - - def test_delete_user_with_group_project_domain_links(self): - role1 = unit.new_role_ref() - self.role_api.create_role(role1['id'], role1) - domain1 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - project1 = unit.new_project_ref(domain_id=domain1['id']) - self.resource_api.create_project(project1['id'], project1) - user1 = unit.new_user_ref(domain_id=domain1['id']) - user1 = self.identity_api.create_user(user1) - group1 = unit.new_group_ref(domain_id=domain1['id']) - group1 = self.identity_api.create_group(group1) - self.assignment_api.create_grant(user_id=user1['id'], - project_id=project1['id'], - role_id=role1['id']) - self.assignment_api.create_grant(user_id=user1['id'], - domain_id=domain1['id'], - role_id=role1['id']) - self.identity_api.add_user_to_group(user_id=user1['id'], - group_id=group1['id']) - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) - self.assertEqual(1, len(roles_ref)) - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - domain_id=domain1['id']) - self.assertEqual(1, len(roles_ref)) - self.identity_api.check_user_in_group( - user_id=user1['id'], - group_id=group1['id']) - self.identity_api.delete_user(user1['id']) - self.assertRaises(exception.NotFound, - self.identity_api.check_user_in_group, - user1['id'], - group1['id']) - - def test_delete_group_with_user_project_domain_links(self): - role1 = unit.new_role_ref() - self.role_api.create_role(role1['id'], role1) - domain1 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - project1 = 
unit.new_project_ref(domain_id=domain1['id']) - self.resource_api.create_project(project1['id'], project1) - user1 = unit.new_user_ref(domain_id=domain1['id']) - user1 = self.identity_api.create_user(user1) - group1 = unit.new_group_ref(domain_id=domain1['id']) - group1 = self.identity_api.create_group(group1) - - self.assignment_api.create_grant(group_id=group1['id'], - project_id=project1['id'], - role_id=role1['id']) - self.assignment_api.create_grant(group_id=group1['id'], - domain_id=domain1['id'], - role_id=role1['id']) - self.identity_api.add_user_to_group(user_id=user1['id'], - group_id=group1['id']) - roles_ref = self.assignment_api.list_grants( - group_id=group1['id'], - project_id=project1['id']) - self.assertEqual(1, len(roles_ref)) - roles_ref = self.assignment_api.list_grants( - group_id=group1['id'], - domain_id=domain1['id']) - self.assertEqual(1, len(roles_ref)) - self.identity_api.check_user_in_group( - user_id=user1['id'], - group_id=group1['id']) - self.identity_api.delete_group(group1['id']) - self.identity_api.get_user(user1['id']) - - def test_update_user_returns_not_found(self): - user_id = uuid.uuid4().hex - self.assertRaises(exception.UserNotFound, - self.identity_api.update_user, - user_id, - {'id': user_id, - 'domain_id': CONF.identity.default_domain_id}) - - def test_delete_user_returns_not_found(self): - self.assertRaises(exception.UserNotFound, - self.identity_api.delete_user, - uuid.uuid4().hex) - - def test_create_user_long_name_fails(self): - user = unit.new_user_ref(name='a' * 256, - domain_id=CONF.identity.default_domain_id) - self.assertRaises(exception.ValidationError, - self.identity_api.create_user, - user) - - def test_create_user_blank_name_fails(self): - user = unit.new_user_ref(name='', - domain_id=CONF.identity.default_domain_id) - self.assertRaises(exception.ValidationError, - self.identity_api.create_user, - user) - - def test_create_user_missed_password(self): - user = 
unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - user = self.identity_api.create_user(user) - self.identity_api.get_user(user['id']) - # Make sure the user is not allowed to login - # with a password that is empty string or None - self.assertRaises(AssertionError, - self.identity_api.authenticate, - context={}, - user_id=user['id'], - password='') - self.assertRaises(AssertionError, - self.identity_api.authenticate, - context={}, - user_id=user['id'], - password=None) - - def test_create_user_none_password(self): - user = unit.new_user_ref(password=None, - domain_id=CONF.identity.default_domain_id) - user = self.identity_api.create_user(user) - self.identity_api.get_user(user['id']) - # Make sure the user is not allowed to login - # with a password that is empty string or None - self.assertRaises(AssertionError, - self.identity_api.authenticate, - context={}, - user_id=user['id'], - password='') - self.assertRaises(AssertionError, - self.identity_api.authenticate, - context={}, - user_id=user['id'], - password=None) - - def test_create_user_invalid_name_fails(self): - user = unit.new_user_ref(name=None, - domain_id=CONF.identity.default_domain_id) - self.assertRaises(exception.ValidationError, - self.identity_api.create_user, - user) - - user = unit.new_user_ref(name=123, - domain_id=CONF.identity.default_domain_id) - self.assertRaises(exception.ValidationError, - self.identity_api.create_user, - user) - - def test_create_user_invalid_enabled_type_string(self): - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id, - # invalid string value - enabled='true') - self.assertRaises(exception.ValidationError, - self.identity_api.create_user, - user) - - def test_update_user_long_name_fails(self): - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - user = self.identity_api.create_user(user) - user['name'] = 'a' * 256 - self.assertRaises(exception.ValidationError, - self.identity_api.update_user, - user['id'], - user) - - 
def test_update_user_blank_name_fails(self): - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - user = self.identity_api.create_user(user) - user['name'] = '' - self.assertRaises(exception.ValidationError, - self.identity_api.update_user, - user['id'], - user) - - def test_update_user_invalid_name_fails(self): - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - user = self.identity_api.create_user(user) - - user['name'] = None - self.assertRaises(exception.ValidationError, - self.identity_api.update_user, - user['id'], - user) - - user['name'] = 123 - self.assertRaises(exception.ValidationError, - self.identity_api.update_user, - user['id'], - user) - - def test_list_users(self): - users = self.identity_api.list_users( - domain_scope=self._set_domain_scope( - CONF.identity.default_domain_id)) - self.assertEqual(len(default_fixtures.USERS), len(users)) - user_ids = set(user['id'] for user in users) - expected_user_ids = set(getattr(self, 'user_%s' % user['id'])['id'] - for user in default_fixtures.USERS) - for user_ref in users: - self.assertNotIn('password', user_ref) - self.assertEqual(expected_user_ids, user_ids) - - def test_list_groups(self): - group1 = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - group2 = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - group1 = self.identity_api.create_group(group1) - group2 = self.identity_api.create_group(group2) - groups = self.identity_api.list_groups( - domain_scope=self._set_domain_scope( - CONF.identity.default_domain_id)) - self.assertEqual(2, len(groups)) - group_ids = [] - for group in groups: - group_ids.append(group.get('id')) - self.assertIn(group1['id'], group_ids) - self.assertIn(group2['id'], group_ids) - - def test_create_user_doesnt_modify_passed_in_dict(self): - new_user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - original_user = new_user.copy() - self.identity_api.create_user(new_user) - 
self.assertDictEqual(original_user, new_user) - - def test_update_user_enable(self): - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - user = self.identity_api.create_user(user) - user_ref = self.identity_api.get_user(user['id']) - self.assertTrue(user_ref['enabled']) - - user['enabled'] = False - self.identity_api.update_user(user['id'], user) - user_ref = self.identity_api.get_user(user['id']) - self.assertEqual(user['enabled'], user_ref['enabled']) - - # If not present, enabled field should not be updated - del user['enabled'] - self.identity_api.update_user(user['id'], user) - user_ref = self.identity_api.get_user(user['id']) - self.assertFalse(user_ref['enabled']) - - user['enabled'] = True - self.identity_api.update_user(user['id'], user) - user_ref = self.identity_api.get_user(user['id']) - self.assertEqual(user['enabled'], user_ref['enabled']) - - del user['enabled'] - self.identity_api.update_user(user['id'], user) - user_ref = self.identity_api.get_user(user['id']) - self.assertTrue(user_ref['enabled']) - - # Integers are valid Python's booleans. Explicitly test it. - user['enabled'] = 0 - self.identity_api.update_user(user['id'], user) - user_ref = self.identity_api.get_user(user['id']) - self.assertFalse(user_ref['enabled']) - - # Any integers other than 0 are interpreted as True - user['enabled'] = -42 - self.identity_api.update_user(user['id'], user) - user_ref = self.identity_api.get_user(user['id']) - # NOTE(breton): below, attribute `enabled` is explicitly tested to be - # equal True. assertTrue should not be used, because it converts - # the passed value to bool(). 
- self.assertIs(user_ref['enabled'], True) - - def test_update_user_name(self): - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - user = self.identity_api.create_user(user) - user_ref = self.identity_api.get_user(user['id']) - self.assertEqual(user['name'], user_ref['name']) - - changed_name = user_ref['name'] + '_changed' - user_ref['name'] = changed_name - updated_user = self.identity_api.update_user(user_ref['id'], user_ref) - - # NOTE(dstanek): the SQL backend adds an 'extra' field containing a - # dictionary of the extra fields in addition to the - # fields in the object. For the details see: - # SqlIdentity.test_update_project_returns_extra - updated_user.pop('extra', None) - - self.assertDictEqual(user_ref, updated_user) - - user_ref = self.identity_api.get_user(user_ref['id']) - self.assertEqual(changed_name, user_ref['name']) - - def test_update_user_enable_fails(self): - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - user = self.identity_api.create_user(user) - user_ref = self.identity_api.get_user(user['id']) - self.assertTrue(user_ref['enabled']) - - # Strings are not valid boolean values - user['enabled'] = 'false' - self.assertRaises(exception.ValidationError, - self.identity_api.update_user, - user['id'], - user) - - def test_add_user_to_group(self): - domain = self._get_domain_fixture() - new_group = unit.new_group_ref(domain_id=domain['id']) - new_group = self.identity_api.create_group(new_group) - new_user = unit.new_user_ref(domain_id=domain['id']) - new_user = self.identity_api.create_user(new_user) - self.identity_api.add_user_to_group(new_user['id'], - new_group['id']) - groups = self.identity_api.list_groups_for_user(new_user['id']) - - found = False - for x in groups: - if (x['id'] == new_group['id']): - found = True - self.assertTrue(found) - - def test_add_user_to_group_returns_not_found(self): - domain = self._get_domain_fixture() - new_user = unit.new_user_ref(domain_id=domain['id']) - 
new_user = self.identity_api.create_user(new_user) - self.assertRaises(exception.GroupNotFound, - self.identity_api.add_user_to_group, - new_user['id'], - uuid.uuid4().hex) - - new_group = unit.new_group_ref(domain_id=domain['id']) - new_group = self.identity_api.create_group(new_group) - self.assertRaises(exception.UserNotFound, - self.identity_api.add_user_to_group, - uuid.uuid4().hex, - new_group['id']) - - self.assertRaises(exception.NotFound, - self.identity_api.add_user_to_group, - uuid.uuid4().hex, - uuid.uuid4().hex) - - def test_check_user_in_group(self): - domain = self._get_domain_fixture() - new_group = unit.new_group_ref(domain_id=domain['id']) - new_group = self.identity_api.create_group(new_group) - new_user = unit.new_user_ref(domain_id=domain['id']) - new_user = self.identity_api.create_user(new_user) - self.identity_api.add_user_to_group(new_user['id'], - new_group['id']) - self.identity_api.check_user_in_group(new_user['id'], new_group['id']) - - def test_check_user_not_in_group(self): - new_group = unit.new_group_ref( - domain_id=CONF.identity.default_domain_id) - new_group = self.identity_api.create_group(new_group) - - new_user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - new_user = self.identity_api.create_user(new_user) - - self.assertRaises(exception.NotFound, - self.identity_api.check_user_in_group, - new_user['id'], - new_group['id']) - - def test_check_user_in_group_returns_not_found(self): - new_user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - new_user = self.identity_api.create_user(new_user) - - new_group = unit.new_group_ref( - domain_id=CONF.identity.default_domain_id) - new_group = self.identity_api.create_group(new_group) - - self.assertRaises(exception.UserNotFound, - self.identity_api.check_user_in_group, - uuid.uuid4().hex, - new_group['id']) - - self.assertRaises(exception.GroupNotFound, - self.identity_api.check_user_in_group, - new_user['id'], - uuid.uuid4().hex) - - 
self.assertRaises(exception.NotFound, - self.identity_api.check_user_in_group, - uuid.uuid4().hex, - uuid.uuid4().hex) - - def test_list_users_in_group(self): - domain = self._get_domain_fixture() - new_group = unit.new_group_ref(domain_id=domain['id']) - new_group = self.identity_api.create_group(new_group) - # Make sure we get an empty list back on a new group, not an error. - user_refs = self.identity_api.list_users_in_group(new_group['id']) - self.assertEqual([], user_refs) - # Make sure we get the correct users back once they have been added - # to the group. - new_user = unit.new_user_ref(domain_id=domain['id']) - new_user = self.identity_api.create_user(new_user) - self.identity_api.add_user_to_group(new_user['id'], - new_group['id']) - user_refs = self.identity_api.list_users_in_group(new_group['id']) - found = False - for x in user_refs: - if (x['id'] == new_user['id']): - found = True - self.assertNotIn('password', x) - self.assertTrue(found) - - def test_list_users_in_group_returns_not_found(self): - self.assertRaises(exception.GroupNotFound, - self.identity_api.list_users_in_group, - uuid.uuid4().hex) - - def test_list_groups_for_user(self): - domain = self._get_domain_fixture() - test_groups = [] - test_users = [] - GROUP_COUNT = 3 - USER_COUNT = 2 - - for x in range(0, USER_COUNT): - new_user = unit.new_user_ref(domain_id=domain['id']) - new_user = self.identity_api.create_user(new_user) - test_users.append(new_user) - positive_user = test_users[0] - negative_user = test_users[1] - - for x in range(0, USER_COUNT): - group_refs = self.identity_api.list_groups_for_user( - test_users[x]['id']) - self.assertEqual(0, len(group_refs)) - - for x in range(0, GROUP_COUNT): - before_count = x - after_count = x + 1 - new_group = unit.new_group_ref(domain_id=domain['id']) - new_group = self.identity_api.create_group(new_group) - test_groups.append(new_group) - - # add the user to the group and ensure that the - # group count increases by one for each - group_refs 
= self.identity_api.list_groups_for_user( - positive_user['id']) - self.assertEqual(before_count, len(group_refs)) - self.identity_api.add_user_to_group( - positive_user['id'], - new_group['id']) - group_refs = self.identity_api.list_groups_for_user( - positive_user['id']) - self.assertEqual(after_count, len(group_refs)) - - # Make sure the group count for the unrelated user did not change - group_refs = self.identity_api.list_groups_for_user( - negative_user['id']) - self.assertEqual(0, len(group_refs)) - - # remove the user from each group and ensure that - # the group count reduces by one for each - for x in range(0, 3): - before_count = GROUP_COUNT - x - after_count = GROUP_COUNT - x - 1 - group_refs = self.identity_api.list_groups_for_user( - positive_user['id']) - self.assertEqual(before_count, len(group_refs)) - self.identity_api.remove_user_from_group( - positive_user['id'], - test_groups[x]['id']) - group_refs = self.identity_api.list_groups_for_user( - positive_user['id']) - self.assertEqual(after_count, len(group_refs)) - # Make sure the group count for the unrelated user - # did not change - group_refs = self.identity_api.list_groups_for_user( - negative_user['id']) - self.assertEqual(0, len(group_refs)) - - def test_remove_user_from_group(self): - domain = self._get_domain_fixture() - new_group = unit.new_group_ref(domain_id=domain['id']) - new_group = self.identity_api.create_group(new_group) - new_user = unit.new_user_ref(domain_id=domain['id']) - new_user = self.identity_api.create_user(new_user) - self.identity_api.add_user_to_group(new_user['id'], - new_group['id']) - groups = self.identity_api.list_groups_for_user(new_user['id']) - self.assertIn(new_group['id'], [x['id'] for x in groups]) - self.identity_api.remove_user_from_group(new_user['id'], - new_group['id']) - groups = self.identity_api.list_groups_for_user(new_user['id']) - self.assertNotIn(new_group['id'], [x['id'] for x in groups]) - - def 
test_remove_user_from_group_returns_not_found(self): - domain = self._get_domain_fixture() - new_user = unit.new_user_ref(domain_id=domain['id']) - new_user = self.identity_api.create_user(new_user) - new_group = unit.new_group_ref(domain_id=domain['id']) - new_group = self.identity_api.create_group(new_group) - self.assertRaises(exception.GroupNotFound, - self.identity_api.remove_user_from_group, - new_user['id'], - uuid.uuid4().hex) - - self.assertRaises(exception.UserNotFound, - self.identity_api.remove_user_from_group, - uuid.uuid4().hex, - new_group['id']) - - self.assertRaises(exception.NotFound, - self.identity_api.remove_user_from_group, - uuid.uuid4().hex, - uuid.uuid4().hex) - - def test_group_crud(self): - domain = unit.new_domain_ref() - self.resource_api.create_domain(domain['id'], domain) - group = unit.new_group_ref(domain_id=domain['id']) - group = self.identity_api.create_group(group) - group_ref = self.identity_api.get_group(group['id']) - self.assertDictContainsSubset(group, group_ref) - - group['name'] = uuid.uuid4().hex - self.identity_api.update_group(group['id'], group) - group_ref = self.identity_api.get_group(group['id']) - self.assertDictContainsSubset(group, group_ref) - - self.identity_api.delete_group(group['id']) - self.assertRaises(exception.GroupNotFound, - self.identity_api.get_group, - group['id']) - - def test_get_group_by_name(self): - group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - group_name = group['name'] - group = self.identity_api.create_group(group) - spoiler = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - self.identity_api.create_group(spoiler) - - group_ref = self.identity_api.get_group_by_name( - group_name, CONF.identity.default_domain_id) - self.assertDictEqual(group, group_ref) - - def test_get_group_by_name_returns_not_found(self): - self.assertRaises(exception.GroupNotFound, - self.identity_api.get_group_by_name, - uuid.uuid4().hex, - CONF.identity.default_domain_id) - - 
@unit.skip_if_cache_disabled('identity') - def test_cache_layer_group_crud(self): - group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - group = self.identity_api.create_group(group) - # cache the result - group_ref = self.identity_api.get_group(group['id']) - # delete the group bypassing identity api. - domain_id, driver, entity_id = ( - self.identity_api._get_domain_driver_and_entity_id(group['id'])) - driver.delete_group(entity_id) - - self.assertEqual(group_ref, self.identity_api.get_group(group['id'])) - self.identity_api.get_group.invalidate(self.identity_api, group['id']) - self.assertRaises(exception.GroupNotFound, - self.identity_api.get_group, group['id']) - - group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - group = self.identity_api.create_group(group) - # cache the result - self.identity_api.get_group(group['id']) - group['name'] = uuid.uuid4().hex - group_ref = self.identity_api.update_group(group['id'], group) - # after updating through identity api, get updated group - self.assertDictContainsSubset(self.identity_api.get_group(group['id']), - group_ref) - - def test_create_duplicate_group_name_fails(self): - group1 = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - group2 = unit.new_group_ref(domain_id=CONF.identity.default_domain_id, - name=group1['name']) - group1 = self.identity_api.create_group(group1) - self.assertRaises(exception.Conflict, - self.identity_api.create_group, - group2) - - def test_create_duplicate_group_name_in_different_domains(self): - new_domain = unit.new_domain_ref() - self.resource_api.create_domain(new_domain['id'], new_domain) - group1 = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - group2 = unit.new_group_ref(domain_id=new_domain['id'], - name=group1['name']) - group1 = self.identity_api.create_group(group1) - group2 = self.identity_api.create_group(group2) - - def test_move_group_between_domains(self): - domain1 = unit.new_domain_ref() - 
self.resource_api.create_domain(domain1['id'], domain1) - domain2 = unit.new_domain_ref() - self.resource_api.create_domain(domain2['id'], domain2) - group = unit.new_group_ref(domain_id=domain1['id']) - group = self.identity_api.create_group(group) - group['domain_id'] = domain2['id'] - # Update the group asserting that a deprecation warning is emitted - with mock.patch( - 'oslo_log.versionutils.report_deprecated_feature') as mock_dep: - self.identity_api.update_group(group['id'], group) - self.assertTrue(mock_dep.called) - - updated_group_ref = self.identity_api.get_group(group['id']) - self.assertEqual(domain2['id'], updated_group_ref['domain_id']) - - def test_move_group_between_domains_with_clashing_names_fails(self): - domain1 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = unit.new_domain_ref() - self.resource_api.create_domain(domain2['id'], domain2) - # First, create a group in domain1 - group1 = unit.new_group_ref(domain_id=domain1['id']) - group1 = self.identity_api.create_group(group1) - # Now create a group in domain2 with a potentially clashing - # name - which should work since we have domain separation - group2 = unit.new_group_ref(name=group1['name'], - domain_id=domain2['id']) - group2 = self.identity_api.create_group(group2) - # Now try and move group1 into the 2nd domain - which should - # fail since the names clash - group1['domain_id'] = domain2['id'] - self.assertRaises(exception.Conflict, - self.identity_api.update_group, - group1['id'], - group1) - - def test_user_crud(self): - user_dict = unit.new_user_ref( - domain_id=CONF.identity.default_domain_id) - del user_dict['id'] - user = self.identity_api.create_user(user_dict) - user_ref = self.identity_api.get_user(user['id']) - del user_dict['password'] - user_ref_dict = {x: user_ref[x] for x in user_ref} - self.assertDictContainsSubset(user_dict, user_ref_dict) - - user_dict['password'] = uuid.uuid4().hex - 
self.identity_api.update_user(user['id'], user_dict) - user_ref = self.identity_api.get_user(user['id']) - del user_dict['password'] - user_ref_dict = {x: user_ref[x] for x in user_ref} - self.assertDictContainsSubset(user_dict, user_ref_dict) - - self.identity_api.delete_user(user['id']) - self.assertRaises(exception.UserNotFound, - self.identity_api.get_user, - user['id']) - - def test_arbitrary_attributes_are_returned_from_create_user(self): - attr_value = uuid.uuid4().hex - user_data = unit.new_user_ref( - domain_id=CONF.identity.default_domain_id, - arbitrary_attr=attr_value) - - user = self.identity_api.create_user(user_data) - - self.assertEqual(attr_value, user['arbitrary_attr']) - - def test_arbitrary_attributes_are_returned_from_get_user(self): - attr_value = uuid.uuid4().hex - user_data = unit.new_user_ref( - domain_id=CONF.identity.default_domain_id, - arbitrary_attr=attr_value) - - user_data = self.identity_api.create_user(user_data) - - user = self.identity_api.get_user(user_data['id']) - self.assertEqual(attr_value, user['arbitrary_attr']) - - def test_new_arbitrary_attributes_are_returned_from_update_user(self): - user_data = unit.new_user_ref( - domain_id=CONF.identity.default_domain_id) - - user = self.identity_api.create_user(user_data) - attr_value = uuid.uuid4().hex - user['arbitrary_attr'] = attr_value - updated_user = self.identity_api.update_user(user['id'], user) - - self.assertEqual(attr_value, updated_user['arbitrary_attr']) - - def test_updated_arbitrary_attributes_are_returned_from_update_user(self): - attr_value = uuid.uuid4().hex - user_data = unit.new_user_ref( - domain_id=CONF.identity.default_domain_id, - arbitrary_attr=attr_value) - - new_attr_value = uuid.uuid4().hex - user = self.identity_api.create_user(user_data) - user['arbitrary_attr'] = new_attr_value - updated_user = self.identity_api.update_user(user['id'], user) - - self.assertEqual(new_attr_value, updated_user['arbitrary_attr']) - - def 
test_user_update_and_user_get_return_same_response(self): - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - - user = self.identity_api.create_user(user) - - updated_user = {'enabled': False} - updated_user_ref = self.identity_api.update_user( - user['id'], updated_user) - - # SQL backend adds 'extra' field - updated_user_ref.pop('extra', None) - - self.assertIs(False, updated_user_ref['enabled']) - - user_ref = self.identity_api.get_user(user['id']) - self.assertDictEqual(updated_user_ref, user_ref) - - -class FilterTests(filtering.FilterTests): - def test_list_entities_filtered(self): - for entity in ['user', 'group', 'project']: - # Create 20 entities - entity_list = self._create_test_data(entity, 20) - - # Try filtering to get one an exact item out of the list - hints = driver_hints.Hints() - hints.add_filter('name', entity_list[10]['name']) - entities = self._list_entities(entity)(hints=hints) - self.assertEqual(1, len(entities)) - self.assertEqual(entity_list[10]['id'], entities[0]['id']) - # Check the driver has removed the filter from the list hints - self.assertFalse(hints.get_exact_filter_by_name('name')) - self._delete_test_data(entity, entity_list) - - def test_list_users_inexact_filtered(self): - # Create 20 users, some with specific names. We set the names at create - # time (rather than updating them), since the LDAP driver does not - # support name updates. 
- user_name_data = { - # user index: name for user - 5: 'The', - 6: 'The Ministry', - 7: 'The Ministry of', - 8: 'The Ministry of Silly', - 9: 'The Ministry of Silly Walks', - # ...and one for useful case insensitivity testing - 10: 'The ministry of silly walks OF' - } - user_list = self._create_test_data( - 'user', 20, domain_id=CONF.identity.default_domain_id, - name_dict=user_name_data) - - hints = driver_hints.Hints() - hints.add_filter('name', 'ministry', comparator='contains') - users = self.identity_api.list_users(hints=hints) - self.assertEqual(5, len(users)) - self._match_with_list(users, user_list, - list_start=6, list_end=11) - # TODO(henry-nash) Check inexact filter has been removed. - - hints = driver_hints.Hints() - hints.add_filter('name', 'The', comparator='startswith') - users = self.identity_api.list_users(hints=hints) - self.assertEqual(6, len(users)) - self._match_with_list(users, user_list, - list_start=5, list_end=11) - # TODO(henry-nash) Check inexact filter has been removed. - - hints = driver_hints.Hints() - hints.add_filter('name', 'of', comparator='endswith') - users = self.identity_api.list_users(hints=hints) - self.assertEqual(2, len(users)) - # We can't assume we will get back the users in any particular order - self.assertIn(user_list[7]['id'], [users[0]['id'], users[1]['id']]) - self.assertIn(user_list[10]['id'], [users[0]['id'], users[1]['id']]) - # TODO(henry-nash) Check inexact filter has been removed. - - # TODO(henry-nash): Add some case sensitive tests. However, - # these would be hard to validate currently, since: - # - # For SQL, the issue is that MySQL 0.7, by default, is installed in - # case insensitive mode (which is what is run by default for our - # SQL backend tests). For production deployments. OpenStack - # assumes a case sensitive database. For these tests, therefore, we - # need to be able to check the sensitivity of the database so as to - # know whether to run case sensitive tests here. 
- # - # For LDAP/AD, although dependent on the schema being used, attributes - # are typically configured to be case aware, but not case sensitive. - - self._delete_test_data('user', user_list) - - def _groups_for_user_data(self): - number_of_groups = 10 - group_name_data = { - # entity index: name for entity - 5: 'The', - 6: 'The Ministry', - 9: 'The Ministry of Silly Walks', - } - group_list = self._create_test_data( - 'group', number_of_groups, - domain_id=CONF.identity.default_domain_id, - name_dict=group_name_data) - user_list = self._create_test_data('user', 2) - - for group in range(7): - # Create membership, including with two out of the three groups - # with well know names - self.identity_api.add_user_to_group(user_list[0]['id'], - group_list[group]['id']) - # ...and some spoiler memberships - for group in range(7, number_of_groups): - self.identity_api.add_user_to_group(user_list[1]['id'], - group_list[group]['id']) - - return group_list, user_list - - def test_groups_for_user_inexact_filtered(self): - """Test use of filtering doesn't break groups_for_user listing. - - Some backends may use filtering to achieve the list of groups for a - user, so test that it can combine a second filter. - - Test Plan: - - - Create 10 groups, some with names we can filter on - - Create 2 users - - Assign 1 of those users to most of the groups, including some of the - well known named ones - - Assign the other user to other groups as spoilers - - Ensure that when we list groups for users with a filter on the group - name, both restrictions have been enforced on what is returned. - - """ - group_list, user_list = self._groups_for_user_data() - - hints = driver_hints.Hints() - hints.add_filter('name', 'Ministry', comparator='contains') - groups = self.identity_api.list_groups_for_user( - user_list[0]['id'], hints=hints) - # We should only get back one group, since of the two that contain - # 'Ministry' the user only belongs to one. 
- self.assertThat(len(groups), matchers.Equals(1)) - self.assertEqual(group_list[6]['id'], groups[0]['id']) - - hints = driver_hints.Hints() - hints.add_filter('name', 'The', comparator='startswith') - groups = self.identity_api.list_groups_for_user( - user_list[0]['id'], hints=hints) - # We should only get back 2 out of the 3 groups that start with 'The' - # hence showing that both "filters" have been applied - self.assertThat(len(groups), matchers.Equals(2)) - self.assertIn(group_list[5]['id'], [groups[0]['id'], groups[1]['id']]) - self.assertIn(group_list[6]['id'], [groups[0]['id'], groups[1]['id']]) - - hints.add_filter('name', 'The', comparator='endswith') - groups = self.identity_api.list_groups_for_user( - user_list[0]['id'], hints=hints) - # We should only get back one group since it is the only one that - # ends with 'The' - self.assertThat(len(groups), matchers.Equals(1)) - self.assertEqual(group_list[5]['id'], groups[0]['id']) - - self._delete_test_data('user', user_list) - self._delete_test_data('group', group_list) - - def test_groups_for_user_exact_filtered(self): - """Test exact filters doesn't break groups_for_user listing.""" - group_list, user_list = self._groups_for_user_data() - hints = driver_hints.Hints() - hints.add_filter('name', 'The Ministry', comparator='equals') - groups = self.identity_api.list_groups_for_user( - user_list[0]['id'], hints=hints) - # We should only get back 1 out of the 3 groups with name 'The - # Ministry' hence showing that both "filters" have been applied. - self.assertEqual(1, len(groups)) - self.assertEqual(group_list[6]['id'], groups[0]['id']) - self._delete_test_data('user', user_list) - self._delete_test_data('group', group_list) - - def _get_user_name_field_size(self): - """Return the size of the user name field for the backend. - - Subclasses can override this method to indicate that the user name - field is limited in length. 
The user name is the field used in the test - that validates that a filter value works even if it's longer than a - field. - - If the backend doesn't limit the value length then return None. - - """ - return None - - def test_filter_value_wider_than_field(self): - # If a filter value is given that's larger than the field in the - # backend then no values are returned. - - user_name_field_size = self._get_user_name_field_size() - - if user_name_field_size is None: - # The backend doesn't limit the size of the user name, so pass this - # test. - return - - # Create some users just to make sure would return something if the - # filter was ignored. - self._create_test_data('user', 2) - - hints = driver_hints.Hints() - value = 'A' * (user_name_field_size + 1) - hints.add_filter('name', value) - users = self.identity_api.list_users(hints=hints) - self.assertEqual([], users) - - def _list_users_in_group_data(self): - number_of_users = 10 - user_name_data = { - 1: 'Arthur Conan Doyle', - 3: 'Arthur Rimbaud', - 9: 'Arthur Schopenhauer', - } - user_list = self._create_test_data( - 'user', number_of_users, - domain_id=CONF.identity.default_domain_id, - name_dict=user_name_data) - group = self._create_one_entity( - 'group', CONF.identity.default_domain_id, 'Great Writers') - for i in range(7): - self.identity_api.add_user_to_group(user_list[i]['id'], - group['id']) - - return user_list, group - - def test_list_users_in_group_inexact_filtered(self): - user_list, group = self._list_users_in_group_data() - - hints = driver_hints.Hints() - hints.add_filter('name', 'Arthur', comparator='contains') - users = self.identity_api.list_users_in_group(group['id'], hints=hints) - self.assertThat(len(users), matchers.Equals(2)) - self.assertIn(user_list[1]['id'], [users[0]['id'], users[1]['id']]) - self.assertIn(user_list[3]['id'], [users[0]['id'], users[1]['id']]) - - hints = driver_hints.Hints() - hints.add_filter('name', 'Arthur', comparator='startswith') - users = 
self.identity_api.list_users_in_group(group['id'], hints=hints) - self.assertThat(len(users), matchers.Equals(2)) - self.assertIn(user_list[1]['id'], [users[0]['id'], users[1]['id']]) - self.assertIn(user_list[3]['id'], [users[0]['id'], users[1]['id']]) - - hints = driver_hints.Hints() - hints.add_filter('name', 'Doyle', comparator='endswith') - users = self.identity_api.list_users_in_group(group['id'], hints=hints) - self.assertThat(len(users), matchers.Equals(1)) - self.assertEqual(user_list[1]['id'], users[0]['id']) - - self._delete_test_data('user', user_list) - self._delete_entity('group')(group['id']) - - def test_list_users_in_group_exact_filtered(self): - hints = driver_hints.Hints() - user_list, group = self._list_users_in_group_data() - hints.add_filter('name', 'Arthur Rimbaud', comparator='equals') - users = self.identity_api.list_users_in_group(group['id'], hints=hints) - self.assertEqual(1, len(users)) - self.assertEqual(user_list[3]['id'], users[0]['id']) - self._delete_test_data('user', user_list) - self._delete_entity('group')(group['id']) - - -class LimitTests(filtering.FilterTests): - ENTITIES = ['user', 'group', 'project'] - - def setUp(self): - """Setup for Limit Test Cases.""" - self.entity_lists = {} - - for entity in self.ENTITIES: - # Create 20 entities - self.entity_lists[entity] = self._create_test_data(entity, 20) - self.addCleanup(self.clean_up_entities) - - def clean_up_entities(self): - """Clean up entity test data from Limit Test Cases.""" - for entity in self.ENTITIES: - self._delete_test_data(entity, self.entity_lists[entity]) - del self.entity_lists - - def _test_list_entity_filtered_and_limited(self, entity): - self.config_fixture.config(list_limit=10) - # Should get back just 10 entities - hints = driver_hints.Hints() - entities = self._list_entities(entity)(hints=hints) - self.assertEqual(hints.limit['limit'], len(entities)) - self.assertTrue(hints.limit['truncated']) - - # Override with driver specific limit - if entity == 
'project': - self.config_fixture.config(group='resource', list_limit=5) - else: - self.config_fixture.config(group='identity', list_limit=5) - - # Should get back just 5 users - hints = driver_hints.Hints() - entities = self._list_entities(entity)(hints=hints) - self.assertEqual(hints.limit['limit'], len(entities)) - - # Finally, let's pretend we want to get the full list of entities, - # even with the limits set, as part of some internal calculation. - # Calling the API without a hints list should achieve this, and - # return at least the 20 entries we created (there may be other - # entities lying around created by other tests/setup). - entities = self._list_entities(entity)() - self.assertTrue(len(entities) >= 20) - self._match_with_list(self.entity_lists[entity], entities) - - def test_list_users_filtered_and_limited(self): - self._test_list_entity_filtered_and_limited('user') - - def test_list_groups_filtered_and_limited(self): - self._test_list_entity_filtered_and_limited('group') - - def test_list_projects_filtered_and_limited(self): - self._test_list_entity_filtered_and_limited('project') diff --git a/keystone-moon/keystone/tests/unit/identity/test_controllers.py b/keystone-moon/keystone/tests/unit/identity/test_controllers.py deleted file mode 100644 index ed2fe3ff..00000000 --- a/keystone-moon/keystone/tests/unit/identity/test_controllers.py +++ /dev/null @@ -1,65 +0,0 @@ -# Copyright 2016 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import uuid - -from oslo_config import cfg - -from keystone import exception -from keystone.identity import controllers -from keystone.tests import unit -from keystone.tests.unit.ksfixtures import database - - -CONF = cfg.CONF - -_ADMIN_CONTEXT = {'is_admin': True, 'query_string': {}} - - -class UserTestCaseNoDefaultDomain(unit.TestCase): - - def setUp(self): - super(UserTestCaseNoDefaultDomain, self).setUp() - self.useFixture(database.Database()) - self.load_backends() - self.user_controller = controllers.User() - - def test_setup(self): - # Other tests in this class assume there's no default domain, so make - # sure the setUp worked as expected. - self.assertRaises( - exception.DomainNotFound, - self.resource_api.get_domain, CONF.identity.default_domain_id) - - def test_get_users(self): - # When list_users is done and there's no default domain, the result is - # an empty list. - res = self.user_controller.get_users(_ADMIN_CONTEXT) - self.assertEqual([], res['users']) - - def test_get_user_by_name(self): - # When get_user_by_name is done and there's no default domain, the - # result is 404 Not Found - user_name = uuid.uuid4().hex - self.assertRaises( - exception.UserNotFound, - self.user_controller.get_user_by_name, _ADMIN_CONTEXT, user_name) - - def test_create_user(self): - # When a user is created using the v2 controller and there's no default - # domain, it doesn't fail with can't find domain (a default domain is - # created) - user = {'name': uuid.uuid4().hex} - self.user_controller.create_user(_ADMIN_CONTEXT, user) - # If the above doesn't fail then this is successful. 
diff --git a/keystone-moon/keystone/tests/unit/identity/test_core.py b/keystone-moon/keystone/tests/unit/identity/test_core.py deleted file mode 100644 index 39f3c701..00000000 --- a/keystone-moon/keystone/tests/unit/identity/test_core.py +++ /dev/null @@ -1,176 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Unit tests for core identity behavior.""" - -import itertools -import os -import uuid - -import mock -from oslo_config import cfg -from oslo_config import fixture as config_fixture - -from keystone import exception -from keystone import identity -from keystone.tests import unit -from keystone.tests.unit.ksfixtures import database - - -CONF = cfg.CONF - - -class TestDomainConfigs(unit.BaseTestCase): - - def setUp(self): - super(TestDomainConfigs, self).setUp() - self.addCleanup(CONF.reset) - - self.tmp_dir = unit.dirs.tmp() - - self.config_fixture = self.useFixture(config_fixture.Config(CONF)) - self.config_fixture.config(domain_config_dir=self.tmp_dir, - group='identity') - - def test_config_for_nonexistent_domain(self): - """Having a config for a non-existent domain will be ignored. - - There are no assertions in this test because there are no side - effects. If there is a config file for a domain that does not - exist it should be ignored. 
- - """ - domain_id = uuid.uuid4().hex - domain_config_filename = os.path.join(self.tmp_dir, - 'keystone.%s.conf' % domain_id) - self.addCleanup(lambda: os.remove(domain_config_filename)) - with open(domain_config_filename, 'w'): - """Write an empty config file.""" - - e = exception.DomainNotFound(domain_id=domain_id) - mock_assignment_api = mock.Mock() - mock_assignment_api.get_domain_by_name.side_effect = e - - domain_config = identity.DomainConfigs() - fake_standard_driver = None - domain_config.setup_domain_drivers(fake_standard_driver, - mock_assignment_api) - - def test_config_for_dot_name_domain(self): - # Ensure we can get the right domain name which has dots within it - # from filename. - domain_config_filename = os.path.join(self.tmp_dir, - 'keystone.abc.def.com.conf') - with open(domain_config_filename, 'w'): - """Write an empty config file.""" - self.addCleanup(os.remove, domain_config_filename) - - with mock.patch.object(identity.DomainConfigs, - '_load_config_from_file') as mock_load_config: - domain_config = identity.DomainConfigs() - fake_assignment_api = None - fake_standard_driver = None - domain_config.setup_domain_drivers(fake_standard_driver, - fake_assignment_api) - mock_load_config.assert_called_once_with(fake_assignment_api, - [domain_config_filename], - 'abc.def.com') - - def test_config_for_multiple_sql_backend(self): - domains_config = identity.DomainConfigs() - - # Create the right sequence of is_sql in the drivers being - # requested to expose the bug, which is that a False setting - # means it forgets previous True settings. 
- drivers = [] - files = [] - for idx, is_sql in enumerate((True, False, True)): - drv = mock.Mock(is_sql=is_sql) - drivers.append(drv) - name = 'dummy.{0}'.format(idx) - files.append(''.join(( - identity.DOMAIN_CONF_FHEAD, - name, - identity.DOMAIN_CONF_FTAIL))) - - walk_fake = lambda *a, **kwa: ( - ('/fake/keystone/domains/config', [], files), ) - - generic_driver = mock.Mock(is_sql=False) - - assignment_api = mock.Mock() - id_factory = itertools.count() - assignment_api.get_domain_by_name.side_effect = ( - lambda name: {'id': next(id_factory), '_': 'fake_domain'}) - load_driver_mock = mock.Mock(side_effect=drivers) - - with mock.patch.object(os, 'walk', walk_fake): - with mock.patch.object(identity.cfg, 'ConfigOpts'): - with mock.patch.object(domains_config, '_load_driver', - load_driver_mock): - self.assertRaises( - exception.MultipleSQLDriversInConfig, - domains_config.setup_domain_drivers, - generic_driver, assignment_api) - - self.assertEqual(3, load_driver_mock.call_count) - - -class TestDatabaseDomainConfigs(unit.TestCase): - - def setUp(self): - super(TestDatabaseDomainConfigs, self).setUp() - self.useFixture(database.Database()) - self.load_backends() - - def test_domain_config_in_database_disabled_by_default(self): - self.assertFalse(CONF.identity.domain_configurations_from_database) - - def test_loading_config_from_database(self): - self.config_fixture.config(domain_configurations_from_database=True, - group='identity') - domain = unit.new_domain_ref() - self.resource_api.create_domain(domain['id'], domain) - # Override two config options for our domain - conf = {'ldap': {'url': uuid.uuid4().hex, - 'suffix': uuid.uuid4().hex, - 'use_tls': 'True'}, - 'identity': { - 'driver': 'ldap'}} - self.domain_config_api.create_config(domain['id'], conf) - fake_standard_driver = None - domain_config = identity.DomainConfigs() - domain_config.setup_domain_drivers(fake_standard_driver, - self.resource_api) - # Make sure our two overrides are in place, and others are 
not affected - res = domain_config.get_domain_conf(domain['id']) - self.assertEqual(conf['ldap']['url'], res.ldap.url) - self.assertEqual(conf['ldap']['suffix'], res.ldap.suffix) - self.assertEqual(CONF.ldap.query_scope, res.ldap.query_scope) - - # Make sure the override is not changing the type of the config value - use_tls_type = type(CONF.ldap.use_tls) - self.assertEqual(use_tls_type(conf['ldap']['use_tls']), - res.ldap.use_tls) - - # Now turn off using database domain configuration and check that the - # default config file values are now seen instead of the overrides. - CONF.set_override('domain_configurations_from_database', False, - 'identity', enforce_type=True) - domain_config = identity.DomainConfigs() - domain_config.setup_domain_drivers(fake_standard_driver, - self.resource_api) - res = domain_config.get_domain_conf(domain['id']) - self.assertEqual(CONF.ldap.url, res.ldap.url) - self.assertEqual(CONF.ldap.suffix, res.ldap.suffix) - self.assertEqual(CONF.ldap.use_tls, res.ldap.use_tls) - self.assertEqual(CONF.ldap.query_scope, res.ldap.query_scope) diff --git a/keystone-moon/keystone/tests/unit/identity_mapping.py b/keystone-moon/keystone/tests/unit/identity_mapping.py deleted file mode 100644 index 4ba4f0c2..00000000 --- a/keystone-moon/keystone/tests/unit/identity_mapping.py +++ /dev/null @@ -1,22 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from keystone.common import sql -from keystone.identity.mapping_backends import sql as mapping_sql - - -def list_id_mappings(): - """List all id_mappings for testing purposes.""" - with sql.session_for_read() as session: - refs = session.query(mapping_sql.IDMapping).all() - return [x.to_dict() for x in refs] diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/__init__.py b/keystone-moon/keystone/tests/unit/ksfixtures/__init__.py deleted file mode 100644 index 4b914752..00000000 --- a/keystone-moon/keystone/tests/unit/ksfixtures/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from keystone.tests.unit.ksfixtures.auth_plugins import ConfigAuthPlugins # noqa -from keystone.tests.unit.ksfixtures.cache import Cache # noqa -from keystone.tests.unit.ksfixtures.key_repository import KeyRepository # noqa -from keystone.tests.unit.ksfixtures.policy import Policy # noqa diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/appserver.py b/keystone-moon/keystone/tests/unit/ksfixtures/appserver.py deleted file mode 100644 index a23b804f..00000000 --- a/keystone-moon/keystone/tests/unit/ksfixtures/appserver.py +++ /dev/null @@ -1,79 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import - -import fixtures -from oslo_config import cfg -from paste import deploy - -from keystone.common import environment - - -CONF = cfg.CONF - -MAIN = 'main' -ADMIN = 'admin' - - -class AppServer(fixtures.Fixture): - """A fixture for managing an application server instance.""" - - def __init__(self, config, name, cert=None, key=None, ca=None, - cert_required=False, host='127.0.0.1', port=0): - super(AppServer, self).__init__() - self.config = config - self.name = name - self.cert = cert - self.key = key - self.ca = ca - self.cert_required = cert_required - self.host = host - self.port = port - - def setUp(self): - super(AppServer, self).setUp() - - app = deploy.loadapp(self.config, name=self.name) - self.server = environment.Server(app, self.host, self.port) - self._setup_SSL_if_requested() - self.server.start(key='socket') - - # some tests need to know the port we ran on. 
- self.port = self.server.socket_info['socket'][1] - self._update_config_opt() - - self.addCleanup(self.server.stop) - - def _setup_SSL_if_requested(self): - # TODO(dstanek): fix environment.Server to take a SSLOpts instance - # so that the params are either always set or not - if (self.cert is not None and - self.ca is not None and - self.key is not None): - self.server.set_ssl(certfile=self.cert, - keyfile=self.key, - ca_certs=self.ca, - cert_required=self.cert_required) - - def _update_config_opt(self): - """Updates the config with the actual port used.""" - opt_name = self._get_config_option_for_section_name() - CONF.set_override(opt_name, self.port, group='eventlet_server', - enforce_type=True) - - def _get_config_option_for_section_name(self): - """Maps Paster config section names to port option names.""" - return {'admin': 'admin_port', 'main': 'public_port'}[self.name] diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/auth_plugins.py b/keystone-moon/keystone/tests/unit/ksfixtures/auth_plugins.py deleted file mode 100644 index 68ba6f3a..00000000 --- a/keystone-moon/keystone/tests/unit/ksfixtures/auth_plugins.py +++ /dev/null @@ -1,34 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import fixtures - -from keystone.common import config as common_cfg - - -class ConfigAuthPlugins(fixtures.Fixture): - """A fixture for setting up and tearing down a auth plugins.""" - - def __init__(self, config_fixture, methods, **method_classes): - super(ConfigAuthPlugins, self).__init__() - self.methods = methods - self.config_fixture = config_fixture - self.method_classes = method_classes - - def setUp(self): - super(ConfigAuthPlugins, self).setUp() - if self.methods: - self.config_fixture.config(group='auth', methods=self.methods) - common_cfg.setup_authentication() - if self.method_classes: - self.config_fixture.config(group='auth', **self.method_classes) diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/cache.py b/keystone-moon/keystone/tests/unit/ksfixtures/cache.py deleted file mode 100644 index e0833ae2..00000000 --- a/keystone-moon/keystone/tests/unit/ksfixtures/cache.py +++ /dev/null @@ -1,43 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import fixtures - -from keystone import catalog -from keystone.common import cache - - -CACHE_REGIONS = (cache.CACHE_REGION, catalog.COMPUTED_CATALOG_REGION) - - -class Cache(fixtures.Fixture): - """A fixture for setting up the cache between test cases. - - This will also tear down an existing cache if one is already configured. 
- """ - - def setUp(self): - super(Cache, self).setUp() - - # NOTE(dstanek): We must remove the existing cache backend in the - # setUp instead of the tearDown because it defaults to a no-op cache - # and we want the configure call below to create the correct backend. - - # NOTE(morganfainberg): The only way to reconfigure the CacheRegion - # object on each setUp() call is to remove the .backend property. - for region in CACHE_REGIONS: - if region.is_configured: - del region.backend - - # ensure the cache region instance is setup - cache.configure_cache(region=region) diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/database.py b/keystone-moon/keystone/tests/unit/ksfixtures/database.py deleted file mode 100644 index 52c35cee..00000000 --- a/keystone-moon/keystone/tests/unit/ksfixtures/database.py +++ /dev/null @@ -1,158 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import functools -import os - -import fixtures -from oslo_config import cfg -from oslo_db import options as db_options - -from keystone.common import sql -from keystone.tests import unit - - -CONF = cfg.CONF - - -def run_once(f): - """A decorator to ensure the decorated function is only executed once. - - The decorated function is assumed to have a one parameter. 
- - """ - @functools.wraps(f) - def wrapper(one): - if not wrapper.already_ran: - f(one) - wrapper.already_ran = True - wrapper.already_ran = False - return wrapper - - -# NOTE(I159): Every execution all the options will be cleared. The method must -# be called at the every fixture initialization. -def initialize_sql_session(): - # Make sure the DB is located in the correct location, in this case set - # the default value, as this should be able to be overridden in some - # test cases. - db_options.set_defaults( - CONF, - connection=unit.IN_MEM_DB_CONN_STRING) - - -@run_once -def _load_sqlalchemy_models(version_specifiers): - """Find all modules containing SQLAlchemy models and import them. - - This creates more consistent, deterministic test runs because tables - for all core and extension models are always created in the test - database. We ensure this by importing all modules that contain model - definitions. - - The database schema during test runs is created using reflection. - Reflection is simply SQLAlchemy taking the model definitions for - all models currently imported and making tables for each of them. - The database schema created during test runs may vary between tests - as more models are imported. Importing all models at the start of - the test run avoids this problem. - - version_specifiers is a dict that contains any specific driver versions - that have been requested. The dict is of the form: - - { : {'versioned_backend' : , - 'versionless_backend' : } - } - - For example: - - {'keystone.assignment': {'versioned_backend' : 'V8_backends', - 'versionless_backend' : 'backends'}, - 'keystone.identity': {'versioned_backend' : 'V9_backends', - 'versionless_backend' : 'backends'} - } - - The version_specifiers will be used to load the correct driver. The - algorithm for this assumes that versioned drivers begin in 'V'. 
- - """ - keystone_root = os.path.normpath(os.path.join( - os.path.dirname(__file__), '..', '..', '..')) - for root, dirs, files in os.walk(keystone_root): - # NOTE(morganfainberg): Slice the keystone_root off the root to ensure - # we do not end up with a module name like: - # Users.home.openstack.keystone.assignment.backends.sql - root = root[len(keystone_root):] - if root.endswith('backends') and 'sql.py' in files: - # The root will be prefixed with an instance of os.sep, which will - # make the root after replacement '.', the 'keystone' part - # of the module path is always added to the front - module_root = ('keystone.%s' % - root.replace(os.sep, '.').lstrip('.')) - module_components = module_root.split('.') - module_without_backends = '' - for x in range(0, len(module_components) - 1): - module_without_backends += module_components[x] + '.' - module_without_backends = module_without_backends.rstrip('.') - this_backend = module_components[len(module_components) - 1] - - # At this point module_without_backends might be something like - # 'keystone.assignment', while this_backend might be something - # 'V8_backends'. - - if module_without_backends.startswith('keystone.contrib'): - # All the sql modules have now been moved into the core tree - # so no point in loading these again here (and, in fact, doing - # so might break trying to load a versioned driver. - continue - - if module_without_backends in version_specifiers: - # OK, so there is a request for a specific version of this one. - # We therefore should skip any other versioned backend as well - # as the non-versioned one. 
- version = version_specifiers[module_without_backends] - if ((this_backend != version['versioned_backend'] and - this_backend.startswith('V')) or - this_backend == version['versionless_backend']): - continue - else: - # No versioned driver requested, so ignore any that are - # versioned - if this_backend.startswith('V'): - continue - - module_name = module_root + '.sql' - __import__(module_name) - - -class Database(fixtures.Fixture): - """A fixture for setting up and tearing down a database.""" - - def __init__(self, version_specifiers=None): - super(Database, self).__init__() - initialize_sql_session() - if version_specifiers is None: - version_specifiers = {} - _load_sqlalchemy_models(version_specifiers) - - def setUp(self): - super(Database, self).setUp() - - with sql.session_for_write() as session: - self.engine = session.get_bind() - self.addCleanup(sql.cleanup) - sql.ModelBase.metadata.create_all(bind=self.engine) - self.addCleanup(sql.ModelBase.metadata.drop_all, bind=self.engine) - - def recreate(self): - sql.ModelBase.metadata.create_all(bind=self.engine) diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/hacking.py b/keystone-moon/keystone/tests/unit/ksfixtures/hacking.py deleted file mode 100644 index 9977b206..00000000 --- a/keystone-moon/keystone/tests/unit/ksfixtures/hacking.py +++ /dev/null @@ -1,417 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -# NOTE(morganfainberg) This file shouldn't have flake8 run on it as it has -# code examples that will fail normal CI pep8/flake8 tests. This is expected. -# The code has been moved here to ensure that proper tests occur on the -# test_hacking_checks test cases. -# flake8: noqa - -import fixtures - - -class HackingCode(fixtures.Fixture): - """A fixture to house the various code examples for the keystone hacking - style checks. - """ - - mutable_default_args = { - 'code': """ - def f(): - pass - - def f(a, b='', c=None): - pass - - def f(bad=[]): - pass - - def f(foo, bad=[], more_bad=[x for x in range(3)]): - pass - - def f(foo, bad={}): - pass - - def f(foo, bad={}, another_bad=[], fine=None): - pass - - def f(bad=[]): # noqa - pass - - def funcs(bad=dict(), more_bad=list(), even_more_bad=set()): - "creating mutables through builtins" - - def funcs(bad=something(), more_bad=some_object.something()): - "defaults from any functions" - - def f(bad=set(), more_bad={x for x in range(3)}, - even_more_bad={1, 2, 3}): - "set and set comprehession" - - def f(bad={x: x for x in range(3)}): - "dict comprehension" - """, - 'expected_errors': [ - (7, 10, 'K001'), - (10, 15, 'K001'), - (10, 29, 'K001'), - (13, 15, 'K001'), - (16, 15, 'K001'), - (16, 31, 'K001'), - (22, 14, 'K001'), - (22, 31, 'K001'), - (22, 53, 'K001'), - (25, 14, 'K001'), - (25, 36, 'K001'), - (28, 10, 'K001'), - (28, 27, 'K001'), - (29, 21, 'K001'), - (32, 11, 'K001'), - ]} - - comments_begin_with_space = { - 'code': """ - # This is a good comment - - #This is a bad one - - # This is alright and can - # be continued with extra indentation - # if that's what the developer wants. 
- """, - 'expected_errors': [ - (3, 0, 'K002'), - ]} - - asserting_none_equality = { - 'code': """ - class Test(object): - - def test(self): - self.assertEqual('', '') - self.assertEqual('', None) - self.assertEqual(None, '') - self.assertNotEqual('', None) - self.assertNotEqual(None, '') - self.assertNotEqual('', None) # noqa - self.assertNotEqual(None, '') # noqa - """, - 'expected_errors': [ - (5, 8, 'K003'), - (6, 8, 'K003'), - (7, 8, 'K004'), - (8, 8, 'K004'), - ]} - - dict_constructor = { - 'code': """ - lower_res = {k.lower(): v for k, v in six.iteritems(res[1])} - fool = dict(a='a', b='b') - lower_res = dict((k.lower(), v) for k, v in six.iteritems(res[1])) - attrs = dict([(k, _from_json(v))]) - dict([[i,i] for i in range(3)]) - dict(({1:2})) - """, - 'expected_errors': [ - (3, 0, 'K008'), - (4, 0, 'K008'), - (5, 0, 'K008'), - ]} - - -class HackingLogging(fixtures.Fixture): - - shared_imports = """ - import logging - import logging as stlib_logging - from keystone.i18n import _ - from keystone.i18n import _ as oslo_i18n - from keystone.i18n import _LC - from keystone.i18n import _LE - from keystone.i18n import _LE as error_hint - from keystone.i18n import _LI - from keystone.i18n import _LW - from oslo_log import log - from oslo_log import log as oslo_logging - """ - - examples = [ - { - 'code': """ - # stdlib logging - LOG = logging.getLogger() - LOG.info(_('text')) - class C: - def __init__(self): - LOG.warning(oslo_i18n('text', {})) - LOG.warning(_LW('text', {})) - """, - 'expected_errors': [ - (3, 9, 'K006'), - (6, 20, 'K006'), - ], - }, - { - 'code': """ - # stdlib logging w/ alias and specifying a logger - class C: - def __init__(self): - self.L = logging.getLogger(__name__) - def m(self): - self.L.warning( - _('text'), {} - ) - self.L.warning( - _LW('text'), {} - ) - """, - 'expected_errors': [ - (7, 12, 'K006'), - ], - }, - { - 'code': """ - # oslo logging and specifying a logger - L = log.getLogger(__name__) - L.error(oslo_i18n('text')) - 
L.error(error_hint('text')) - """, - 'expected_errors': [ - (3, 8, 'K006'), - ], - }, - { - 'code': """ - # oslo logging w/ alias - class C: - def __init__(self): - self.LOG = oslo_logging.getLogger() - self.LOG.critical(_('text')) - self.LOG.critical(_LC('text')) - """, - 'expected_errors': [ - (5, 26, 'K006'), - ], - }, - { - 'code': """ - LOG = log.getLogger(__name__) - # translation on a separate line - msg = _('text') - LOG.exception(msg) - msg = _LE('text') - LOG.exception(msg) - """, - 'expected_errors': [ - (4, 14, 'K006'), - ], - }, - { - 'code': """ - LOG = logging.getLogger() - - # ensure the correct helper is being used - LOG.warning(_LI('this should cause an error')) - - # debug should not allow any helpers either - LOG.debug(_LI('this should cause an error')) - """, - 'expected_errors': [ - (4, 12, 'K006'), - (7, 10, 'K005'), - ], - }, - { - 'code': """ - # this should not be an error - L = log.getLogger(__name__) - msg = _('text') - L.warning(msg) - raise Exception(msg) - """, - 'expected_errors': [], - }, - { - 'code': """ - L = log.getLogger(__name__) - def f(): - msg = _('text') - L2.warning(msg) - something = True # add an extra statement here - raise Exception(msg) - """, - 'expected_errors': [], - }, - { - 'code': """ - LOG = log.getLogger(__name__) - def func(): - msg = _('text') - LOG.warning(msg) - raise Exception('some other message') - """, - 'expected_errors': [ - (4, 16, 'K006'), - ], - }, - { - 'code': """ - LOG = log.getLogger(__name__) - if True: - msg = _('text') - else: - msg = _('text') - LOG.warning(msg) - raise Exception(msg) - """, - 'expected_errors': [ - ], - }, - { - 'code': """ - LOG = log.getLogger(__name__) - if True: - msg = _('text') - else: - msg = _('text') - LOG.warning(msg) - """, - 'expected_errors': [ - (6, 12, 'K006'), - ], - }, - { - 'code': """ - LOG = log.getLogger(__name__) - msg = _LW('text') - LOG.warning(msg) - raise Exception(msg) - """, - 'expected_errors': [ - (3, 12, 'K007'), - ], - }, - { - 'code': """ 
- LOG = log.getLogger(__name__) - msg = _LW('text') - LOG.warning(msg) - msg = _('something else') - raise Exception(msg) - """, - 'expected_errors': [], - }, - { - 'code': """ - LOG = log.getLogger(__name__) - msg = _LW('hello %s') % 'world' - LOG.warning(msg) - raise Exception(msg) - """, - 'expected_errors': [ - (3, 12, 'K007'), - ], - }, - { - 'code': """ - LOG = log.getLogger(__name__) - msg = _LW('hello %s') % 'world' - LOG.warning(msg) - """, - 'expected_errors': [], - }, - { - 'code': """ - # this should not be an error - LOG = log.getLogger(__name__) - try: - something = True - except AssertionError as e: - LOG.warning(six.text_type(e)) - raise exception.Unauthorized(e) - """, - 'expected_errors': [], - }, - ] - - assert_not_using_deprecated_warn = { - 'code': """ - # Logger.warn has been deprecated in Python3 in favor of - # Logger.warning - LOG = log.getLogger(__name__) - LOG.warn(_LW('text')) - """, - 'expected_errors': [ - (4, 9, 'K009'), - ], - } - - assert_no_translations_for_debug_logging = { - 'code': """ - # stdlib logging - L0 = logging.getLogger() - L0.debug(_('text')) - class C: - def __init__(self): - L0.debug(oslo_i18n('text', {})) - - # stdlib logging w/ alias and specifying a logger - class C: - def __init__(self): - self.L1 = logging.getLogger(__name__) - def m(self): - self.L1.debug( - _('text'), {} - ) - - # oslo logging and specifying a logger - L2 = logging.getLogger(__name__) - L2.debug(oslo_i18n('text')) - - # oslo logging w/ alias - class C: - def __init__(self): - self.L3 = oslo_logging.getLogger() - self.L3.debug(_('text')) - - # translation on a separate line - msg = _('text') - L2.debug(msg) - - # this should not fail - if True: - msg = _('message %s') % X - L2.error(msg) - raise TypeError(msg) - if True: - msg = 'message' - L2.debug(msg) - - # this should not fail - if True: - if True: - msg = _('message') - else: - msg = _('message') - L2.debug(msg) - raise Exception(msg) - """, - 'expected_errors': [ - (3, 9, 'K005'), - (6, 
17, 'K005'), - (14, 12, 'K005'), - (19, 9, 'K005'), - (25, 22, 'K005'), - (29, 9, 'K005'), - ] - } - diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/key_repository.py b/keystone-moon/keystone/tests/unit/ksfixtures/key_repository.py deleted file mode 100644 index 7784bddc..00000000 --- a/keystone-moon/keystone/tests/unit/ksfixtures/key_repository.py +++ /dev/null @@ -1,30 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import fixtures - -from keystone.token.providers.fernet import utils - - -class KeyRepository(fixtures.Fixture): - def __init__(self, config_fixture): - super(KeyRepository, self).__init__() - self.config_fixture = config_fixture - - def setUp(self): - super(KeyRepository, self).setUp() - directory = self.useFixture(fixtures.TempDir()).path - self.config_fixture.config(group='fernet_tokens', - key_repository=directory) - - utils.create_key_directory() - utils.initialize_key_repository() diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/ldapdb.py b/keystone-moon/keystone/tests/unit/ksfixtures/ldapdb.py deleted file mode 100644 index 6cd8cc0b..00000000 --- a/keystone-moon/keystone/tests/unit/ksfixtures/ldapdb.py +++ /dev/null @@ -1,35 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import fixtures - -from keystone.common import ldap as common_ldap -from keystone.common.ldap import core as common_ldap_core -from keystone.tests.unit import fakeldap - - -class LDAPDatabase(fixtures.Fixture): - """A fixture for setting up and tearing down an LDAP database.""" - - def setUp(self): - super(LDAPDatabase, self).setUp() - self.clear() - common_ldap_core._HANDLERS.clear() - common_ldap.register_handler('fake://', fakeldap.FakeLdap) - # TODO(dstanek): switch the flow here - self.addCleanup(self.clear) - self.addCleanup(common_ldap_core._HANDLERS.clear) - - def clear(self): - for shelf in fakeldap.FakeShelves: - fakeldap.FakeShelves[shelf].clear() diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/policy.py b/keystone-moon/keystone/tests/unit/ksfixtures/policy.py deleted file mode 100644 index b883f980..00000000 --- a/keystone-moon/keystone/tests/unit/ksfixtures/policy.py +++ /dev/null @@ -1,33 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import fixtures -from oslo_policy import opts - -from keystone.policy.backends import rules - - -class Policy(fixtures.Fixture): - """A fixture for working with policy configuration.""" - - def __init__(self, policy_file, config_fixture): - self._policy_file = policy_file - self._config_fixture = config_fixture - - def setUp(self): - super(Policy, self).setUp() - opts.set_defaults(self._config_fixture.conf) - self._config_fixture.config(group='oslo_policy', - policy_file=self._policy_file) - rules.init() - self.addCleanup(rules.reset) diff --git a/keystone-moon/keystone/tests/unit/ksfixtures/temporaryfile.py b/keystone-moon/keystone/tests/unit/ksfixtures/temporaryfile.py deleted file mode 100644 index a4be06f8..00000000 --- a/keystone-moon/keystone/tests/unit/ksfixtures/temporaryfile.py +++ /dev/null @@ -1,29 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import tempfile - -import fixtures - - -class SecureTempFile(fixtures.Fixture): - """A fixture for creating a secure temp file.""" - - def setUp(self): - super(SecureTempFile, self).setUp() - - _fd, self.file_name = tempfile.mkstemp() - # Make sure no file descriptors are leaked, close the unused FD. 
- os.close(_fd) - self.addCleanup(os.remove, self.file_name) diff --git a/keystone-moon/keystone/tests/unit/mapping_fixtures.py b/keystone-moon/keystone/tests/unit/mapping_fixtures.py deleted file mode 100644 index 9dc980aa..00000000 --- a/keystone-moon/keystone/tests/unit/mapping_fixtures.py +++ /dev/null @@ -1,1486 +0,0 @@ -# -*- coding: utf-8 -*- - -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Fixtures for Federation Mapping.""" - -from six.moves import range, zip - - -EMPLOYEE_GROUP_ID = "0cd5e9" -CONTRACTOR_GROUP_ID = "85a868" -TESTER_GROUP_ID = "123" -TESTER_GROUP_NAME = "tester" -DEVELOPER_GROUP_ID = "xyz" -DEVELOPER_GROUP_NAME = "Developer" -CONTRACTOR_GROUP_NAME = "Contractor" -DEVELOPER_GROUP_DOMAIN_NAME = "outsourcing" -DEVELOPER_GROUP_DOMAIN_ID = "5abc43" -FEDERATED_DOMAIN = "Federated" -LOCAL_DOMAIN = "Local" - -# Mapping summary: -# LastName Smith & Not Contractor or SubContractor -> group 0cd5e9 -# FirstName Jill & Contractor or SubContractor -> to group 85a868 -MAPPING_SMALL = { - "rules": [ - { - "local": [ - { - "group": { - "id": EMPLOYEE_GROUP_ID - } - }, - { - "user": { - "name": "{0}" - } - } - ], - "remote": [ - { - "type": "UserName" - }, - { - "type": "orgPersonType", - "not_any_of": [ - "Contractor", - "SubContractor" - ] - }, - { - "type": "LastName", - "any_one_of": [ - "Bo" - ] - } - ] - }, - { - "local": [ - { - "group": { - "id": CONTRACTOR_GROUP_ID - } - }, - { - "user": { - "name": "{0}" - } - } - ], - "remote": [ - { - "type": 
"UserName" - }, - { - "type": "orgPersonType", - "any_one_of": [ - "Contractor", - "SubContractor" - ] - }, - { - "type": "FirstName", - "any_one_of": [ - "Jill" - ] - } - ] - } - ] -} - -# Mapping summary: -# orgPersonType Admin or Big Cheese -> name {0} {1} email {2} and group 0cd5e9 -# orgPersonType Customer -> user name {0} email {1} -# orgPersonType Test and email ^@example.com$ -> group 123 and xyz -MAPPING_LARGE = { - "rules": [ - { - "local": [ - { - "user": { - "name": "{0} {1}", - "email": "{2}" - }, - "group": { - "id": EMPLOYEE_GROUP_ID - } - } - ], - "remote": [ - { - "type": "FirstName" - }, - { - "type": "LastName" - }, - { - "type": "Email" - }, - { - "type": "orgPersonType", - "any_one_of": [ - "Admin", - "Big Cheese" - ] - } - ] - }, - { - "local": [ - { - "user": { - "name": "{0}", - "email": "{1}" - } - } - ], - "remote": [ - { - "type": "UserName" - }, - { - "type": "Email" - }, - { - "type": "orgPersonType", - "not_any_of": [ - "Admin", - "Employee", - "Contractor", - "Tester" - ] - } - ] - }, - { - "local": [ - { - "group": { - "id": TESTER_GROUP_ID - } - }, - { - "group": { - "id": DEVELOPER_GROUP_ID - } - }, - { - "user": { - "name": "{0}" - } - } - ], - "remote": [ - { - "type": "UserName" - }, - { - "type": "orgPersonType", - "any_one_of": [ - "Tester" - ] - }, - { - "type": "Email", - "any_one_of": [ - ".*@example.com$" - ], - "regex": True - } - ] - } - ] -} - -MAPPING_BAD_REQ = { - "rules": [ - { - "local": [ - { - "user": "name" - } - ], - "remote": [ - { - "type": "UserName", - "bad_requirement": [ - "Young" - ] - } - ] - } - ] -} - -MAPPING_BAD_VALUE = { - "rules": [ - { - "local": [ - { - "user": "name" - } - ], - "remote": [ - { - "type": "UserName", - "any_one_of": "should_be_list" - } - ] - } - ] -} - -MAPPING_NO_RULES = { - 'rules': [] -} - -MAPPING_NO_REMOTE = { - "rules": [ - { - "local": [ - { - "user": "name" - } - ], - "remote": [] - } - ] -} - -MAPPING_MISSING_LOCAL = { - "rules": [ - { - "remote": [ - { - "type": 
"UserName", - "any_one_of": "should_be_list" - } - ] - } - ] -} - -MAPPING_WRONG_TYPE = { - "rules": [ - { - "local": [ - { - "user": "{1}" - } - ], - "remote": [ - { - "not_type": "UserName" - } - ] - } - ] -} - -MAPPING_MISSING_TYPE = { - "rules": [ - { - "local": [ - { - "user": "{1}" - } - ], - "remote": [ - {} - ] - } - ] -} - -MAPPING_EXTRA_REMOTE_PROPS_NOT_ANY_OF = { - "rules": [ - { - "local": [ - { - "group": { - "id": "0cd5e9" - } - }, - { - "user": { - "name": "{0}" - } - } - ], - "remote": [ - { - "type": "UserName" - }, - { - "type": "orgPersonType", - "not_any_of": [ - "SubContractor" - ], - "invalid_type": "xyz" - } - ] - } - ] -} - -MAPPING_EXTRA_REMOTE_PROPS_ANY_ONE_OF = { - "rules": [ - { - "local": [ - { - "group": { - "id": "0cd5e9" - } - }, - { - "user": { - "name": "{0}" - } - } - ], - "remote": [ - { - "type": "UserName" - }, - { - "type": "orgPersonType", - "any_one_of": [ - "SubContractor" - ], - "invalid_type": "xyz" - } - ] - } - ] -} - -MAPPING_EXTRA_REMOTE_PROPS_JUST_TYPE = { - "rules": [ - { - "local": [ - { - "group": { - "id": "0cd5e9" - } - }, - { - "user": { - "name": "{0}" - } - } - ], - "remote": [ - { - "type": "UserName" - }, - { - "type": "orgPersonType", - "invalid_type": "xyz" - } - ] - } - ] -} - -MAPPING_EXTRA_RULES_PROPS = { - "rules": [ - { - "local": [ - { - "group": { - "id": "0cd5e9" - } - }, - { - "user": { - "name": "{0}" - } - } - ], - "invalid_type": { - "id": "xyz", - }, - "remote": [ - { - "type": "UserName" - }, - { - "type": "orgPersonType", - "not_any_of": [ - "SubContractor" - ] - } - ] - } - ] -} - -MAPPING_TESTER_REGEX = { - "rules": [ - { - "local": [ - { - "user": { - "name": "{0}", - } - } - ], - "remote": [ - { - "type": "UserName" - } - ] - }, - { - "local": [ - { - "group": { - "id": TESTER_GROUP_ID - } - } - ], - "remote": [ - { - "type": "orgPersonType", - "any_one_of": [ - ".*Tester*" - ], - "regex": True - } - ] - } - ] -} - - -MAPPING_DIRECT_MAPPING_THROUGH_KEYWORD = { - "rules": [ - { - 
"local": [ - { - "user": "{0}" - }, - { - "group": TESTER_GROUP_ID - } - ], - "remote": [ - { - "type": "UserName", - "any_one_of": [ - "bwilliams" - ] - } - ] - } - ] -} - -MAPPING_DEVELOPER_REGEX = { - "rules": [ - { - "local": [ - { - "user": { - "name": "{0}", - }, - "group": { - "id": DEVELOPER_GROUP_ID - } - } - ], - "remote": [ - { - "type": "UserName" - }, - { - "type": "orgPersonType", - "any_one_of": [ - "Developer" - ], - }, - { - "type": "Email", - "not_any_of": [ - ".*@example.org$" - ], - "regex": True - } - ] - } - ] -} - -MAPPING_GROUP_NAMES = { - - "rules": [ - { - "local": [ - { - "user": { - "name": "{0}", - } - } - ], - "remote": [ - { - "type": "UserName" - } - ] - }, - { - "local": [ - { - "group": { - "name": DEVELOPER_GROUP_NAME, - "domain": { - "name": DEVELOPER_GROUP_DOMAIN_NAME - } - } - } - ], - "remote": [ - { - "type": "orgPersonType", - "any_one_of": [ - "Employee" - ], - } - ] - }, - { - "local": [ - { - "group": { - "name": TESTER_GROUP_NAME, - "domain": { - "id": DEVELOPER_GROUP_DOMAIN_ID - } - } - } - ], - "remote": [ - { - "type": "orgPersonType", - "any_one_of": [ - "BuildingX" - ] - } - ] - }, - ] -} - -MAPPING_EPHEMERAL_USER = { - "rules": [ - { - "local": [ - { - "user": { - "name": "{0}", - "domain": { - "id": FEDERATED_DOMAIN - }, - "type": "ephemeral" - } - } - ], - "remote": [ - { - "type": "UserName" - }, - { - "type": "UserName", - "any_one_of": [ - "tbo" - ] - } - ] - } - ] -} - -MAPPING_GROUPS_WHITELIST = { - "rules": [ - { - "remote": [ - { - "type": "orgPersonType", - "whitelist": [ - "Developer", "Contractor" - ] - }, - { - "type": "UserName" - } - ], - "local": [ - { - "groups": "{0}", - "domain": { - "id": DEVELOPER_GROUP_DOMAIN_ID - } - }, - { - "user": { - "name": "{1}" - } - } - ] - } - ] -} - -MAPPING_EPHEMERAL_USER_LOCAL_DOMAIN = { - "rules": [ - { - "local": [ - { - "user": { - "name": "{0}", - "domain": { - "id": LOCAL_DOMAIN - }, - "type": "ephemeral" - } - } - ], - "remote": [ - { - "type": "UserName" - 
}, - { - "type": "UserName", - "any_one_of": [ - "jsmith" - ] - } - ] - } - ] -} - -MAPPING_GROUPS_WHITELIST_MISSING_DOMAIN = { - "rules": [ - { - "remote": [ - { - "type": "orgPersonType", - "whitelist": [ - "Developer", "Contractor" - ] - }, - ], - "local": [ - { - "groups": "{0}", - } - ] - } - ] -} - -MAPPING_LOCAL_USER_LOCAL_DOMAIN = { - "rules": [ - { - "local": [ - { - "user": { - "name": "{0}", - "domain": { - "id": LOCAL_DOMAIN - }, - "type": "local" - } - } - ], - "remote": [ - { - "type": "UserName" - }, - { - "type": "UserName", - "any_one_of": [ - "jsmith" - ] - } - ] - } - ] -} - -MAPPING_GROUPS_BLACKLIST_MULTIPLES = { - "rules": [ - { - "remote": [ - { - "type": "orgPersonType", - "blacklist": [ - "Developer", "Manager" - ] - }, - { - "type": "Thing" # this could be variable length! - }, - { - "type": "UserName" - }, - ], - "local": [ - { - "groups": "{0}", - "domain": { - "id": DEVELOPER_GROUP_DOMAIN_ID - } - }, - { - "user": { - "name": "{2}", - } - } - ] - } - ] -} -MAPPING_GROUPS_BLACKLIST = { - "rules": [ - { - "remote": [ - { - "type": "orgPersonType", - "blacklist": [ - "Developer", "Manager" - ] - }, - { - "type": "UserName" - } - ], - "local": [ - { - "groups": "{0}", - "domain": { - "id": DEVELOPER_GROUP_DOMAIN_ID - } - }, - { - "user": { - "name": "{1}" - } - } - ] - } - ] -} - -# Exercise all possibilities of user identification. Values are hardcoded on -# purpose. 
-MAPPING_USER_IDS = { - "rules": [ - { - "local": [ - { - "user": { - "name": "{0}" - } - } - ], - "remote": [ - { - "type": "UserName" - }, - { - "type": "UserName", - "any_one_of": [ - "jsmith" - ] - } - ] - }, - { - "local": [ - { - "user": { - "name": "{0}", - "id": "abc123@example.com", - "domain": { - "id": "federated" - } - } - } - ], - "remote": [ - { - "type": "UserName" - }, - { - "type": "UserName", - "any_one_of": [ - "tbo" - ] - } - ] - }, - { - "local": [ - { - "user": { - "id": "{0}" - } - } - ], - "remote": [ - { - "type": "UserName" - }, - { - "type": "UserName", - "any_one_of": [ - "bob" - ] - } - ] - }, - { - "local": [ - { - "user": { - "id": "abc123@example.com", - "name": "{0}", - "domain": { - "id": "federated" - } - } - } - ], - "remote": [ - { - "type": "UserName" - }, - { - "type": "UserName", - "any_one_of": [ - "bwilliams" - ] - } - ] - } - ] -} - -MAPPING_GROUPS_BLACKLIST_MISSING_DOMAIN = { - "rules": [ - { - "remote": [ - { - "type": "orgPersonType", - "blacklist": [ - "Developer", "Manager" - ] - }, - ], - "local": [ - { - "groups": "{0}", - }, - ] - } - ] -} - -MAPPING_GROUPS_WHITELIST_AND_BLACKLIST = { - "rules": [ - { - "remote": [ - { - "type": "orgPersonType", - "blacklist": [ - "Employee" - ], - "whitelist": [ - "Contractor" - ] - }, - ], - "local": [ - { - "groups": "{0}", - "domain": { - "id": DEVELOPER_GROUP_DOMAIN_ID - } - }, - ] - } - ] -} - -# Mapping used by tokenless test cases, it maps the user_name -# and domain_name. -MAPPING_WITH_USERNAME_AND_DOMAINNAME = { - 'rules': [ - { - 'local': [ - { - 'user': { - 'name': '{0}', - 'domain': { - 'name': '{1}' - }, - 'type': 'local' - } - } - ], - 'remote': [ - { - 'type': 'SSL_CLIENT_USER_NAME' - }, - { - 'type': 'SSL_CLIENT_DOMAIN_NAME' - } - ] - } - ] -} - -# Mapping used by tokenless test cases, it maps the user_id -# and domain_name. 
-MAPPING_WITH_USERID_AND_DOMAINNAME = { - 'rules': [ - { - 'local': [ - { - 'user': { - 'id': '{0}', - 'domain': { - 'name': '{1}' - }, - 'type': 'local' - } - } - ], - 'remote': [ - { - 'type': 'SSL_CLIENT_USER_ID' - }, - { - 'type': 'SSL_CLIENT_DOMAIN_NAME' - } - ] - } - ] -} - -# Mapping used by tokenless test cases, it maps the user_name -# and domain_id. -MAPPING_WITH_USERNAME_AND_DOMAINID = { - 'rules': [ - { - 'local': [ - { - 'user': { - 'name': '{0}', - 'domain': { - 'id': '{1}' - }, - 'type': 'local' - } - } - ], - 'remote': [ - { - 'type': 'SSL_CLIENT_USER_NAME' - }, - { - 'type': 'SSL_CLIENT_DOMAIN_ID' - } - ] - } - ] -} - -# Mapping used by tokenless test cases, it maps the user_id -# and domain_id. -MAPPING_WITH_USERID_AND_DOMAINID = { - 'rules': [ - { - 'local': [ - { - 'user': { - 'id': '{0}', - 'domain': { - 'id': '{1}' - }, - 'type': 'local' - } - } - ], - 'remote': [ - { - 'type': 'SSL_CLIENT_USER_ID' - }, - { - 'type': 'SSL_CLIENT_DOMAIN_ID' - } - ] - } - ] -} - -# Mapping used by tokenless test cases, it maps the domain_id only. 
-MAPPING_WITH_DOMAINID_ONLY = { - 'rules': [ - { - 'local': [ - { - 'user': { - 'domain': { - 'id': '{0}' - }, - 'type': 'local' - } - } - ], - 'remote': [ - { - 'type': 'SSL_CLIENT_DOMAIN_ID' - } - ] - } - ] -} - -MAPPING_GROUPS_IDS_WHITELIST = { - "rules": [ - { - "local": [ - { - "user": { - "name": "{0}" - } - }, - { - "group_ids": "{1}" - }, - { - "group": { - "id": "{2}" - } - } - ], - "remote": [ - { - "type": "name" - }, - { - "type": "group_ids", - "whitelist": [ - "abc123", "ghi789", "321cba" - ] - }, - { - "type": "group" - } - ] - } - ] -} - -MAPPING_GROUPS_IDS_BLACKLIST = { - "rules": [ - { - "local": [ - { - "user": { - "name": "{0}" - } - }, - { - "group_ids": "{1}" - }, - { - "group": { - "id": "{2}" - } - } - ], - "remote": [ - { - "type": "name" - }, - { - "type": "group_ids", - "blacklist": [ - "def456" - ] - }, - { - "type": "group" - } - ] - } - ] -} - -# Mapping used by tokenless test cases, it maps the domain_name only. -MAPPING_WITH_DOMAINNAME_ONLY = { - 'rules': [ - { - 'local': [ - { - 'user': { - 'domain': { - 'name': '{0}' - }, - 'type': 'local' - } - } - ], - 'remote': [ - { - 'type': 'SSL_CLIENT_DOMAIN_NAME' - } - ] - } - ] -} - -# Mapping used by tokenless test cases, it maps the user_name only. -MAPPING_WITH_USERNAME_ONLY = { - 'rules': [ - { - 'local': [ - { - 'user': { - 'name': '{0}', - 'type': 'local' - } - } - ], - 'remote': [ - { - 'type': 'SSL_CLIENT_USER_NAME' - } - ] - } - ] -} - -# Mapping used by tokenless test cases, it maps the user_id only. 
-MAPPING_WITH_USERID_ONLY = { - 'rules': [ - { - 'local': [ - { - 'user': { - 'id': '{0}', - 'type': 'local' - } - } - ], - 'remote': [ - { - 'type': 'SSL_CLIENT_USER_ID' - } - ] - } - ] -} - -MAPPING_FOR_EPHEMERAL_USER = { - 'rules': [ - { - 'local': [ - { - 'user': { - 'name': '{0}', - 'type': 'ephemeral' - }, - 'group': { - 'id': 'dummy' - } - } - ], - 'remote': [ - { - 'type': 'SSL_CLIENT_USER_NAME' - } - ] - } - ] -} - -MAPPING_FOR_DEFAULT_EPHEMERAL_USER = { - 'rules': [ - { - 'local': [ - { - 'user': { - 'name': '{0}' - }, - 'group': { - 'id': 'dummy' - } - } - ], - 'remote': [ - { - 'type': 'SSL_CLIENT_USER_NAME' - } - ] - } - ] -} - -MAPPING_GROUPS_WHITELIST_PASS_THROUGH = { - "rules": [ - { - "remote": [ - { - "type": "UserName" - } - ], - "local": [ - { - "user": { - "name": "{0}", - "domain": { - "id": DEVELOPER_GROUP_DOMAIN_ID - } - } - } - ] - }, - { - "remote": [ - { - "type": "orgPersonType", - "whitelist": ['Developer'] - } - ], - "local": [ - { - "groups": "{0}", - "domain": { - "id": DEVELOPER_GROUP_DOMAIN_ID - } - } - ] - } - ] -} - -MAPPING_BAD_LOCAL_SETUP = { - "rules": [ - { - "local": [ - { - "user": { - "name": "{0}", - "domain": {"id": "default"} - }, - "whatisthis": "local" - } - ], - "remote": [ - { - "type": "UserName" - } - ] - } - ] -} - -EMPLOYEE_ASSERTION = { - 'Email': 'tim@example.com', - 'UserName': 'tbo', - 'FirstName': 'Tim', - 'LastName': 'Bo', - 'orgPersonType': 'Employee;BuildingX' -} - -EMPLOYEE_ASSERTION_MULTIPLE_GROUPS = { - 'Email': 'tim@example.com', - 'UserName': 'tbo', - 'FirstName': 'Tim', - 'LastName': 'Bo', - 'orgPersonType': 'Developer;Manager;Contractor', - 'Thing': 'yes!;maybe!;no!!' 
-} - -EMPLOYEE_ASSERTION_PREFIXED = { - 'PREFIX_Email': 'tim@example.com', - 'PREFIX_UserName': 'tbo', - 'PREFIX_FirstName': 'Tim', - 'PREFIX_LastName': 'Bo', - 'PREFIX_orgPersonType': 'SuperEmployee;BuildingX' -} - -CONTRACTOR_ASSERTION = { - 'Email': 'jill@example.com', - 'UserName': 'jsmith', - 'FirstName': 'Jill', - 'LastName': 'Smith', - 'orgPersonType': 'Contractor;Non-Dev' -} - -ADMIN_ASSERTION = { - 'Email': 'bob@example.com', - 'UserName': 'bob', - 'FirstName': 'Bob', - 'LastName': 'Thompson', - 'orgPersonType': 'Admin;Chief' -} - -CUSTOMER_ASSERTION = { - 'Email': 'beth@example.com', - 'UserName': 'bwilliams', - 'FirstName': 'Beth', - 'LastName': 'Williams', - 'orgPersonType': 'Customer' -} - -ANOTHER_CUSTOMER_ASSERTION = { - 'Email': 'mark@example.com', - 'UserName': 'markcol', - 'FirstName': 'Mark', - 'LastName': 'Collins', - 'orgPersonType': 'Managers;CEO;CTO' -} - -TESTER_ASSERTION = { - 'Email': 'testacct@example.com', - 'UserName': 'testacct', - 'FirstName': 'Test', - 'LastName': 'Account', - 'orgPersonType': 'MadeupGroup;Tester;GroupX' -} - -ANOTHER_TESTER_ASSERTION = { - 'Email': 'testacct@example.com', - 'UserName': 'IamTester' -} - -BAD_TESTER_ASSERTION = { - 'Email': 'eviltester@example.org', - 'UserName': 'Evil', - 'FirstName': 'Test', - 'LastName': 'Account', - 'orgPersonType': 'Tester' -} - -BAD_DEVELOPER_ASSERTION = { - 'Email': 'evildeveloper@example.org', - 'UserName': 'Evil', - 'FirstName': 'Develop', - 'LastName': 'Account', - 'orgPersonType': 'Developer' -} - -MALFORMED_TESTER_ASSERTION = { - 'Email': 'testacct@example.com', - 'UserName': 'testacct', - 'FirstName': 'Test', - 'LastName': 'Account', - 'orgPersonType': 'Tester', - 'object': object(), - 'dictionary': dict(zip('teststring', range(10))), - 'tuple': tuple(range(5)) -} - -DEVELOPER_ASSERTION = { - 'Email': 'developacct@example.com', - 'UserName': 'developacct', - 'FirstName': 'Develop', - 'LastName': 'Account', - 'orgPersonType': 'Developer' -} - 
-CONTRACTOR_MALFORMED_ASSERTION = { - 'UserName': 'user', - 'FirstName': object(), - 'orgPersonType': 'Contractor' -} - -LOCAL_USER_ASSERTION = { - 'UserName': 'marek', - 'UserType': 'random' -} - -ANOTHER_LOCAL_USER_ASSERTION = { - 'UserName': 'marek', - 'Position': 'DirectorGeneral' -} - -UNMATCHED_GROUP_ASSERTION = { - 'REMOTE_USER': 'Any Momoose', - 'REMOTE_USER_GROUPS': 'EXISTS;NO_EXISTS' -} - -GROUP_IDS_ASSERTION = { - 'name': 'opilotte', - 'group_ids': 'abc123;def456;ghi789', - 'group': 'klm012' -} - -GROUP_IDS_ASSERTION_ONLY_ONE_GROUP = { - 'name': 'opilotte', - 'group_ids': '321cba', - 'group': '210mlk' -} - -UNICODE_NAME_ASSERTION = { - 'PFX_Email': 'jon@example.com', - 'PFX_UserName': 'jonkare', - 'PFX_FirstName': 'Jon Kåre', - 'PFX_LastName': 'Hellån', - 'PFX_orgPersonType': 'Admin;Chief' -} - -MAPPING_UNICODE = { - "rules": [ - { - "local": [ - { - "user": { - "name": "{0} {1}", - "email": "{2}" - }, - "group": { - "id": EMPLOYEE_GROUP_ID - } - } - ], - "remote": [ - { - "type": "PFX_FirstName" - }, - { - "type": "PFX_LastName" - }, - { - "type": "PFX_Email" - }, - { - "type": "PFX_orgPersonType", - "any_one_of": [ - "Admin", - "Big Cheese" - ] - } - ] - }, - ], -} diff --git a/keystone-moon/keystone/tests/unit/policy/__init__.py b/keystone-moon/keystone/tests/unit/policy/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/policy/test_backends.py b/keystone-moon/keystone/tests/unit/policy/test_backends.py deleted file mode 100644 index 7b672420..00000000 --- a/keystone-moon/keystone/tests/unit/policy/test_backends.py +++ /dev/null @@ -1,86 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import uuid - -from keystone import exception -from keystone.tests import unit - - -class PolicyTests(object): - def test_create(self): - ref = unit.new_policy_ref() - res = self.policy_api.create_policy(ref['id'], ref) - self.assertDictEqual(ref, res) - - def test_get(self): - ref = unit.new_policy_ref() - res = self.policy_api.create_policy(ref['id'], ref) - - res = self.policy_api.get_policy(ref['id']) - self.assertDictEqual(ref, res) - - def test_list(self): - ref = unit.new_policy_ref() - self.policy_api.create_policy(ref['id'], ref) - - res = self.policy_api.list_policies() - res = [x for x in res if x['id'] == ref['id']][0] - self.assertDictEqual(ref, res) - - def test_update(self): - ref = unit.new_policy_ref() - self.policy_api.create_policy(ref['id'], ref) - orig = ref - - ref = unit.new_policy_ref() - - # (cannot change policy ID) - self.assertRaises(exception.ValidationError, - self.policy_api.update_policy, - orig['id'], - ref) - - ref['id'] = orig['id'] - res = self.policy_api.update_policy(orig['id'], ref) - self.assertDictEqual(ref, res) - - def test_delete(self): - ref = unit.new_policy_ref() - self.policy_api.create_policy(ref['id'], ref) - - self.policy_api.delete_policy(ref['id']) - self.assertRaises(exception.PolicyNotFound, - self.policy_api.delete_policy, - ref['id']) - self.assertRaises(exception.PolicyNotFound, - self.policy_api.get_policy, - ref['id']) - res = self.policy_api.list_policies() - self.assertFalse(len([x for x in res if x['id'] == ref['id']])) - - def test_get_policy_returns_not_found(self): - 
self.assertRaises(exception.PolicyNotFound, - self.policy_api.get_policy, - uuid.uuid4().hex) - - def test_update_policy_returns_not_found(self): - ref = unit.new_policy_ref() - self.assertRaises(exception.PolicyNotFound, - self.policy_api.update_policy, - ref['id'], - ref) - - def test_delete_policy_returns_not_found(self): - self.assertRaises(exception.PolicyNotFound, - self.policy_api.delete_policy, - uuid.uuid4().hex) diff --git a/keystone-moon/keystone/tests/unit/resource/__init__.py b/keystone-moon/keystone/tests/unit/resource/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/resource/backends/__init__.py b/keystone-moon/keystone/tests/unit/resource/backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/resource/backends/test_sql.py b/keystone-moon/keystone/tests/unit/resource/backends/test_sql.py deleted file mode 100644 index 79ad3df2..00000000 --- a/keystone-moon/keystone/tests/unit/resource/backends/test_sql.py +++ /dev/null @@ -1,24 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone.resource.backends import sql -from keystone.tests import unit -from keystone.tests.unit.ksfixtures import database -from keystone.tests.unit.resource import test_backends - - -class TestSqlResourceDriver(unit.BaseTestCase, - test_backends.ResourceDriverTests): - def setUp(self): - super(TestSqlResourceDriver, self).setUp() - self.useFixture(database.Database()) - self.driver = sql.Resource() diff --git a/keystone-moon/keystone/tests/unit/resource/config_backends/__init__.py b/keystone-moon/keystone/tests/unit/resource/config_backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/resource/config_backends/test_sql.py b/keystone-moon/keystone/tests/unit/resource/config_backends/test_sql.py deleted file mode 100644 index b4c5f262..00000000 --- a/keystone-moon/keystone/tests/unit/resource/config_backends/test_sql.py +++ /dev/null @@ -1,53 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- - -from keystone.common import sql -from keystone.resource.config_backends import sql as config_sql -from keystone.tests import unit -from keystone.tests.unit.backend import core_sql -from keystone.tests.unit.ksfixtures import database -from keystone.tests.unit.resource import test_core - - -class SqlDomainConfigModels(core_sql.BaseBackendSqlModels): - - def test_whitelisted_model(self): - cols = (('domain_id', sql.String, 64), - ('group', sql.String, 255), - ('option', sql.String, 255), - ('value', sql.JsonBlob, None)) - self.assertExpectedSchema('whitelisted_config', cols) - - def test_sensitive_model(self): - cols = (('domain_id', sql.String, 64), - ('group', sql.String, 255), - ('option', sql.String, 255), - ('value', sql.JsonBlob, None)) - self.assertExpectedSchema('sensitive_config', cols) - - -class SqlDomainConfigDriver(unit.BaseTestCase, - test_core.DomainConfigDriverTests): - def setUp(self): - super(SqlDomainConfigDriver, self).setUp() - self.useFixture(database.Database()) - self.driver = config_sql.DomainConfig() - - -class SqlDomainConfig(core_sql.BaseBackendSqlTests, - test_core.DomainConfigTests): - def setUp(self): - super(SqlDomainConfig, self).setUp() - # test_core.DomainConfigTests is effectively a mixin class, so make - # sure we call its setup - test_core.DomainConfigTests.setUp(self) diff --git a/keystone-moon/keystone/tests/unit/resource/test_backends.py b/keystone-moon/keystone/tests/unit/resource/test_backends.py deleted file mode 100644 index eed4c6ba..00000000 --- a/keystone-moon/keystone/tests/unit/resource/test_backends.py +++ /dev/null @@ -1,1669 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import copy -import uuid - -import mock -from oslo_config import cfg -from six.moves import range -from testtools import matchers - -from keystone.common import driver_hints -from keystone import exception -from keystone.tests import unit -from keystone.tests.unit import default_fixtures -from keystone.tests.unit import utils as test_utils - - -CONF = cfg.CONF - - -class ResourceTests(object): - - domain_count = len(default_fixtures.DOMAINS) - - def test_get_project(self): - tenant_ref = self.resource_api.get_project(self.tenant_bar['id']) - self.assertDictEqual(self.tenant_bar, tenant_ref) - - def test_get_project_returns_not_found(self): - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - uuid.uuid4().hex) - - def test_get_project_by_name(self): - tenant_ref = self.resource_api.get_project_by_name( - self.tenant_bar['name'], - CONF.identity.default_domain_id) - self.assertDictEqual(self.tenant_bar, tenant_ref) - - @unit.skip_if_no_multiple_domains_support - def test_get_project_by_name_for_project_acting_as_a_domain(self): - """Tests get_project_by_name works when the domain_id is None.""" - project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id, is_domain=False) - project = self.resource_api.create_project(project['id'], project) - - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project_by_name, - project['name'], - None) - - # Test that querying with domain_id as None will find the project - # acting as a domain, even if it's name is the same as the regular - # project above. 
- project2 = unit.new_project_ref(is_domain=True, - name=project['name']) - project2 = self.resource_api.create_project(project2['id'], project2) - - project_ref = self.resource_api.get_project_by_name( - project2['name'], None) - - self.assertEqual(project2, project_ref) - - def test_get_project_by_name_returns_not_found(self): - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project_by_name, - uuid.uuid4().hex, - CONF.identity.default_domain_id) - - def test_create_duplicate_project_id_fails(self): - project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - project_id = project['id'] - self.resource_api.create_project(project_id, project) - project['name'] = 'fake2' - self.assertRaises(exception.Conflict, - self.resource_api.create_project, - project_id, - project) - - def test_create_duplicate_project_name_fails(self): - project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - project_id = project['id'] - self.resource_api.create_project(project_id, project) - project['id'] = 'fake2' - self.assertRaises(exception.Conflict, - self.resource_api.create_project, - project['id'], - project) - - def test_create_duplicate_project_name_in_different_domains(self): - new_domain = unit.new_domain_ref() - self.resource_api.create_domain(new_domain['id'], new_domain) - project1 = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - project2 = unit.new_project_ref(name=project1['name'], - domain_id=new_domain['id']) - self.resource_api.create_project(project1['id'], project1) - self.resource_api.create_project(project2['id'], project2) - - def test_move_project_between_domains(self): - domain1 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = unit.new_domain_ref() - self.resource_api.create_domain(domain2['id'], domain2) - project = unit.new_project_ref(domain_id=domain1['id']) - self.resource_api.create_project(project['id'], project) - 
project['domain_id'] = domain2['id'] - # Update the project asserting that a deprecation warning is emitted - with mock.patch( - 'oslo_log.versionutils.report_deprecated_feature') as mock_dep: - self.resource_api.update_project(project['id'], project) - self.assertTrue(mock_dep.called) - - updated_project_ref = self.resource_api.get_project(project['id']) - self.assertEqual(domain2['id'], updated_project_ref['domain_id']) - - def test_move_project_between_domains_with_clashing_names_fails(self): - domain1 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = unit.new_domain_ref() - self.resource_api.create_domain(domain2['id'], domain2) - # First, create a project in domain1 - project1 = unit.new_project_ref(domain_id=domain1['id']) - self.resource_api.create_project(project1['id'], project1) - # Now create a project in domain2 with a potentially clashing - # name - which should work since we have domain separation - project2 = unit.new_project_ref(name=project1['name'], - domain_id=domain2['id']) - self.resource_api.create_project(project2['id'], project2) - # Now try and move project1 into the 2nd domain - which should - # fail since the names clash - project1['domain_id'] = domain2['id'] - self.assertRaises(exception.Conflict, - self.resource_api.update_project, - project1['id'], - project1) - - @unit.skip_if_no_multiple_domains_support - def test_move_project_with_children_between_domains_fails(self): - domain1 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = unit.new_domain_ref() - self.resource_api.create_domain(domain2['id'], domain2) - project = unit.new_project_ref(domain_id=domain1['id']) - self.resource_api.create_project(project['id'], project) - child_project = unit.new_project_ref(domain_id=domain1['id'], - parent_id=project['id']) - self.resource_api.create_project(child_project['id'], child_project) - project['domain_id'] = domain2['id'] - - # Update is not allowed, 
since updating the whole subtree would be - # necessary - self.assertRaises(exception.ValidationError, - self.resource_api.update_project, - project['id'], - project) - - @unit.skip_if_no_multiple_domains_support - def test_move_project_not_root_between_domains_fails(self): - domain1 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = unit.new_domain_ref() - self.resource_api.create_domain(domain2['id'], domain2) - project = unit.new_project_ref(domain_id=domain1['id']) - self.resource_api.create_project(project['id'], project) - child_project = unit.new_project_ref(domain_id=domain1['id'], - parent_id=project['id']) - self.resource_api.create_project(child_project['id'], child_project) - child_project['domain_id'] = domain2['id'] - - self.assertRaises(exception.ValidationError, - self.resource_api.update_project, - child_project['id'], - child_project) - - @unit.skip_if_no_multiple_domains_support - def test_move_root_project_between_domains_succeeds(self): - domain1 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = unit.new_domain_ref() - self.resource_api.create_domain(domain2['id'], domain2) - root_project = unit.new_project_ref(domain_id=domain1['id']) - root_project = self.resource_api.create_project(root_project['id'], - root_project) - - root_project['domain_id'] = domain2['id'] - self.resource_api.update_project(root_project['id'], root_project) - project_from_db = self.resource_api.get_project(root_project['id']) - - self.assertEqual(domain2['id'], project_from_db['domain_id']) - - @unit.skip_if_no_multiple_domains_support - def test_update_domain_id_project_is_domain_fails(self): - other_domain = unit.new_domain_ref() - self.resource_api.create_domain(other_domain['id'], other_domain) - project = unit.new_project_ref(is_domain=True) - self.resource_api.create_project(project['id'], project) - project['domain_id'] = other_domain['id'] - - # Update of domain_id of 
projects acting as domains is not allowed - self.assertRaises(exception.ValidationError, - self.resource_api.update_project, - project['id'], - project) - - def test_rename_duplicate_project_name_fails(self): - project1 = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - project2 = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - self.resource_api.create_project(project1['id'], project1) - self.resource_api.create_project(project2['id'], project2) - project2['name'] = project1['name'] - self.assertRaises(exception.Error, - self.resource_api.update_project, - project2['id'], - project2) - - def test_update_project_id_does_nothing(self): - project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - project_id = project['id'] - self.resource_api.create_project(project['id'], project) - project['id'] = 'fake2' - self.resource_api.update_project(project_id, project) - project_ref = self.resource_api.get_project(project_id) - self.assertEqual(project_id, project_ref['id']) - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - 'fake2') - - def test_delete_domain_with_user_group_project_links(self): - # TODO(chungg):add test case once expected behaviour defined - pass - - def test_update_project_returns_not_found(self): - self.assertRaises(exception.ProjectNotFound, - self.resource_api.update_project, - uuid.uuid4().hex, - dict()) - - def test_delete_project_returns_not_found(self): - self.assertRaises(exception.ProjectNotFound, - self.resource_api.delete_project, - uuid.uuid4().hex) - - def test_create_update_delete_unicode_project(self): - unicode_project_name = u'name \u540d\u5b57' - project = unit.new_project_ref( - name=unicode_project_name, - domain_id=CONF.identity.default_domain_id) - project = self.resource_api.create_project(project['id'], project) - self.resource_api.update_project(project['id'], project) - self.resource_api.delete_project(project['id']) - - def 
test_create_project_with_no_enabled_field(self): - ref = unit.new_project_ref(domain_id=CONF.identity.default_domain_id) - del ref['enabled'] - self.resource_api.create_project(ref['id'], ref) - - project = self.resource_api.get_project(ref['id']) - self.assertIs(project['enabled'], True) - - def test_create_project_long_name_fails(self): - project = unit.new_project_ref( - name='a' * 65, domain_id=CONF.identity.default_domain_id) - self.assertRaises(exception.ValidationError, - self.resource_api.create_project, - project['id'], - project) - - def test_create_project_blank_name_fails(self): - project = unit.new_project_ref( - name='', domain_id=CONF.identity.default_domain_id) - self.assertRaises(exception.ValidationError, - self.resource_api.create_project, - project['id'], - project) - - def test_create_project_invalid_name_fails(self): - project = unit.new_project_ref( - name=None, domain_id=CONF.identity.default_domain_id) - self.assertRaises(exception.ValidationError, - self.resource_api.create_project, - project['id'], - project) - project = unit.new_project_ref( - name=123, domain_id=CONF.identity.default_domain_id) - self.assertRaises(exception.ValidationError, - self.resource_api.create_project, - project['id'], - project) - - def test_update_project_blank_name_fails(self): - project = unit.new_project_ref( - name='fake1', domain_id=CONF.identity.default_domain_id) - self.resource_api.create_project(project['id'], project) - project['name'] = '' - self.assertRaises(exception.ValidationError, - self.resource_api.update_project, - project['id'], - project) - - def test_update_project_long_name_fails(self): - project = unit.new_project_ref( - name='fake1', domain_id=CONF.identity.default_domain_id) - self.resource_api.create_project(project['id'], project) - project['name'] = 'a' * 65 - self.assertRaises(exception.ValidationError, - self.resource_api.update_project, - project['id'], - project) - - def test_update_project_invalid_name_fails(self): - project = 
unit.new_project_ref( - name='fake1', domain_id=CONF.identity.default_domain_id) - self.resource_api.create_project(project['id'], project) - project['name'] = None - self.assertRaises(exception.ValidationError, - self.resource_api.update_project, - project['id'], - project) - - project['name'] = 123 - self.assertRaises(exception.ValidationError, - self.resource_api.update_project, - project['id'], - project) - - def test_update_project_invalid_enabled_type_string(self): - project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - self.resource_api.create_project(project['id'], project) - project_ref = self.resource_api.get_project(project['id']) - self.assertTrue(project_ref['enabled']) - - # Strings are not valid boolean values - project['enabled'] = "false" - self.assertRaises(exception.ValidationError, - self.resource_api.update_project, - project['id'], - project) - - def test_create_project_invalid_enabled_type_string(self): - project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id, - # invalid string value - enabled="true") - self.assertRaises(exception.ValidationError, - self.resource_api.create_project, - project['id'], - project) - - def test_create_project_invalid_domain_id(self): - project = unit.new_project_ref(domain_id=uuid.uuid4().hex) - self.assertRaises(exception.DomainNotFound, - self.resource_api.create_project, - project['id'], - project) - - def test_list_domains(self): - domain1 = unit.new_domain_ref() - domain2 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - self.resource_api.create_domain(domain2['id'], domain2) - domains = self.resource_api.list_domains() - self.assertEqual(3, len(domains)) - domain_ids = [] - for domain in domains: - domain_ids.append(domain.get('id')) - self.assertIn(CONF.identity.default_domain_id, domain_ids) - self.assertIn(domain1['id'], domain_ids) - self.assertIn(domain2['id'], domain_ids) - - def test_list_projects(self): - project_refs = 
self.resource_api.list_projects() - project_count = len(default_fixtures.TENANTS) + self.domain_count - self.assertEqual(project_count, len(project_refs)) - for project in default_fixtures.TENANTS: - self.assertIn(project, project_refs) - - def test_list_projects_with_multiple_filters(self): - # Create a project - project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - project = self.resource_api.create_project(project['id'], project) - - # Build driver hints with the project's name and inexistent description - hints = driver_hints.Hints() - hints.add_filter('name', project['name']) - hints.add_filter('description', uuid.uuid4().hex) - - # Retrieve projects based on hints and check an empty list is returned - projects = self.resource_api.list_projects(hints) - self.assertEqual([], projects) - - # Build correct driver hints - hints = driver_hints.Hints() - hints.add_filter('name', project['name']) - hints.add_filter('description', project['description']) - - # Retrieve projects based on hints - projects = self.resource_api.list_projects(hints) - - # Check that the returned list contains only the first project - self.assertEqual(1, len(projects)) - self.assertEqual(project, projects[0]) - - def test_list_projects_for_domain(self): - project_ids = ([x['id'] for x in - self.resource_api.list_projects_in_domain( - CONF.identity.default_domain_id)]) - # Only the projects from the default fixtures are expected, since - # filtering by domain does not include any project that acts as a - # domain. 
- self.assertThat( - project_ids, matchers.HasLength(len(default_fixtures.TENANTS))) - self.assertIn(self.tenant_bar['id'], project_ids) - self.assertIn(self.tenant_baz['id'], project_ids) - self.assertIn(self.tenant_mtu['id'], project_ids) - self.assertIn(self.tenant_service['id'], project_ids) - - @unit.skip_if_no_multiple_domains_support - def test_list_projects_acting_as_domain(self): - initial_domains = self.resource_api.list_domains() - - # Creating 5 projects that act as domains - new_projects_acting_as_domains = [] - for i in range(5): - project = unit.new_project_ref(is_domain=True) - project = self.resource_api.create_project(project['id'], project) - new_projects_acting_as_domains.append(project) - - # Creating a few regular project to ensure it doesn't mess with the - # ones that act as domains - self._create_projects_hierarchy(hierarchy_size=2) - - projects = self.resource_api.list_projects_acting_as_domain() - expected_number_projects = ( - len(initial_domains) + len(new_projects_acting_as_domains)) - self.assertEqual(expected_number_projects, len(projects)) - for project in new_projects_acting_as_domains: - self.assertIn(project, projects) - for domain in initial_domains: - self.assertIn(domain['id'], [p['id'] for p in projects]) - - @unit.skip_if_no_multiple_domains_support - def test_list_projects_for_alternate_domain(self): - domain1 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - project1 = unit.new_project_ref(domain_id=domain1['id']) - self.resource_api.create_project(project1['id'], project1) - project2 = unit.new_project_ref(domain_id=domain1['id']) - self.resource_api.create_project(project2['id'], project2) - project_ids = ([x['id'] for x in - self.resource_api.list_projects_in_domain( - domain1['id'])]) - self.assertEqual(2, len(project_ids)) - self.assertIn(project1['id'], project_ids) - self.assertIn(project2['id'], project_ids) - - def _create_projects_hierarchy(self, hierarchy_size=2, - 
domain_id=None, - is_domain=False, - parent_project_id=None): - """Creates a project hierarchy with specified size. - - :param hierarchy_size: the desired hierarchy size, default is 2 - - a project with one child. - :param domain_id: domain where the projects hierarchy will be created. - :param is_domain: if the hierarchy will have the is_domain flag active - or not. - :param parent_project_id: if the intention is to create a - sub-hierarchy, sets the sub-hierarchy root. Defaults to creating - a new hierarchy, i.e. a new root project. - - :returns projects: a list of the projects in the created hierarchy. - - """ - if domain_id is None: - domain_id = CONF.identity.default_domain_id - if parent_project_id: - project = unit.new_project_ref(parent_id=parent_project_id, - domain_id=domain_id, - is_domain=is_domain) - else: - project = unit.new_project_ref(domain_id=domain_id, - is_domain=is_domain) - project_id = project['id'] - project = self.resource_api.create_project(project_id, project) - - projects = [project] - for i in range(1, hierarchy_size): - new_project = unit.new_project_ref(parent_id=project_id, - domain_id=domain_id) - - self.resource_api.create_project(new_project['id'], new_project) - projects.append(new_project) - project_id = new_project['id'] - - return projects - - @unit.skip_if_no_multiple_domains_support - def test_create_domain_with_project_api(self): - project = unit.new_project_ref(is_domain=True) - ref = self.resource_api.create_project(project['id'], project) - self.assertTrue(ref['is_domain']) - self.resource_api.get_domain(ref['id']) - - @unit.skip_if_no_multiple_domains_support - def test_project_as_a_domain_uniqueness_constraints(self): - """Tests project uniqueness for those acting as domains. - - If it is a project acting as a domain, we can't have two or more with - the same name. 
- - """ - # Create two projects acting as a domain - project = unit.new_project_ref(is_domain=True) - project = self.resource_api.create_project(project['id'], project) - project2 = unit.new_project_ref(is_domain=True) - project2 = self.resource_api.create_project(project2['id'], project2) - - # All projects acting as domains have a null domain_id, so should not - # be able to create another with the same name but a different - # project ID. - new_project = project.copy() - new_project['id'] = uuid.uuid4().hex - - self.assertRaises(exception.Conflict, - self.resource_api.create_project, - new_project['id'], - new_project) - - # We also should not be able to update one to have a name clash - project2['name'] = project['name'] - self.assertRaises(exception.Conflict, - self.resource_api.update_project, - project2['id'], - project2) - - # But updating it to a unique name is OK - project2['name'] = uuid.uuid4().hex - self.resource_api.update_project(project2['id'], project2) - - # Finally, it should be OK to create a project with same name as one of - # these acting as a domain, as long as it is a regular project - project3 = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id, name=project2['name']) - self.resource_api.create_project(project3['id'], project3) - # In fact, it should be OK to create such a project in the domain which - # has the matching name. 
- # TODO(henry-nash): Once we fully support projects acting as a domain, - # add a test here to create a sub-project with a name that matches its - # project acting as a domain - - @unit.skip_if_no_multiple_domains_support - @test_utils.wip('waiting for sub projects acting as domains support') - def test_is_domain_sub_project_has_parent_domain_id(self): - project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id, is_domain=True) - self.resource_api.create_project(project['id'], project) - - sub_project = unit.new_project_ref(domain_id=project['id'], - parent_id=project['id'], - is_domain=True) - - ref = self.resource_api.create_project(sub_project['id'], sub_project) - self.assertTrue(ref['is_domain']) - self.assertEqual(project['id'], ref['parent_id']) - self.assertEqual(project['id'], ref['domain_id']) - - @unit.skip_if_no_multiple_domains_support - def test_delete_domain_with_project_api(self): - project = unit.new_project_ref(domain_id=None, - is_domain=True) - self.resource_api.create_project(project['id'], project) - - # Check that a corresponding domain was created - self.resource_api.get_domain(project['id']) - - # Try to delete the enabled project that acts as a domain - self.assertRaises(exception.ForbiddenNotSecurity, - self.resource_api.delete_project, - project['id']) - - # Disable the project - project['enabled'] = False - self.resource_api.update_project(project['id'], project) - - # Successfully delete the project - self.resource_api.delete_project(project['id']) - - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - project['id']) - - self.assertRaises(exception.DomainNotFound, - self.resource_api.get_domain, - project['id']) - - @unit.skip_if_no_multiple_domains_support - def test_create_subproject_acting_as_domain_fails(self): - root_project = unit.new_project_ref(is_domain=True) - self.resource_api.create_project(root_project['id'], root_project) - - sub_project = 
unit.new_project_ref(is_domain=True, - parent_id=root_project['id']) - - # Creation of sub projects acting as domains is not allowed yet - self.assertRaises(exception.ValidationError, - self.resource_api.create_project, - sub_project['id'], sub_project) - - @unit.skip_if_no_multiple_domains_support - def test_create_domain_under_regular_project_hierarchy_fails(self): - # Projects acting as domains can't have a regular project as parent - projects_hierarchy = self._create_projects_hierarchy() - parent = projects_hierarchy[1] - project = unit.new_project_ref(domain_id=parent['id'], - parent_id=parent['id'], - is_domain=True) - - self.assertRaises(exception.ValidationError, - self.resource_api.create_project, - project['id'], project) - - @unit.skip_if_no_multiple_domains_support - @test_utils.wip('waiting for sub projects acting as domains support') - def test_create_project_under_domain_hierarchy(self): - projects_hierarchy = self._create_projects_hierarchy(is_domain=True) - parent = projects_hierarchy[1] - project = unit.new_project_ref(domain_id=parent['id'], - parent_id=parent['id'], - is_domain=False) - - ref = self.resource_api.create_project(project['id'], project) - self.assertFalse(ref['is_domain']) - self.assertEqual(parent['id'], ref['parent_id']) - self.assertEqual(parent['id'], ref['domain_id']) - - def test_create_project_without_is_domain_flag(self): - project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - del project['is_domain'] - ref = self.resource_api.create_project(project['id'], project) - # The is_domain flag should be False by default - self.assertFalse(ref['is_domain']) - - @unit.skip_if_no_multiple_domains_support - def test_create_project_passing_is_domain_flag_true(self): - project = unit.new_project_ref(is_domain=True) - - ref = self.resource_api.create_project(project['id'], project) - self.assertTrue(ref['is_domain']) - - def test_create_project_passing_is_domain_flag_false(self): - project = 
unit.new_project_ref( - domain_id=CONF.identity.default_domain_id, is_domain=False) - - ref = self.resource_api.create_project(project['id'], project) - self.assertIs(False, ref['is_domain']) - - @test_utils.wip('waiting for support for parent_id to imply domain_id') - def test_create_project_with_parent_id_and_without_domain_id(self): - # First create a domain - project = unit.new_project_ref(is_domain=True) - self.resource_api.create_project(project['id'], project) - # Now create a child by just naming the parent_id - sub_project = unit.new_project_ref(parent_id=project['id']) - ref = self.resource_api.create_project(sub_project['id'], sub_project) - - # The domain_id should be set to the parent domain_id - self.assertEqual(project['domain_id'], ref['domain_id']) - - def test_create_project_with_domain_id_and_without_parent_id(self): - # First create a domain - project = unit.new_project_ref(is_domain=True) - self.resource_api.create_project(project['id'], project) - # Now create a child by just naming the domain_id - sub_project = unit.new_project_ref(domain_id=project['id']) - ref = self.resource_api.create_project(sub_project['id'], sub_project) - - # The parent_id and domain_id should be set to the id of the project - # acting as a domain - self.assertEqual(project['id'], ref['parent_id']) - self.assertEqual(project['id'], ref['domain_id']) - - def test_create_project_with_domain_id_mismatch_to_parent_domain(self): - # First create a domain - project = unit.new_project_ref(is_domain=True) - self.resource_api.create_project(project['id'], project) - # Now try to create a child with the above as its parent, but - # specifying a different domain. 
- sub_project = unit.new_project_ref( - parent_id=project['id'], domain_id=CONF.identity.default_domain_id) - self.assertRaises(exception.ValidationError, - self.resource_api.create_project, - sub_project['id'], sub_project) - - def test_check_leaf_projects(self): - projects_hierarchy = self._create_projects_hierarchy() - root_project = projects_hierarchy[0] - leaf_project = projects_hierarchy[1] - - self.assertFalse(self.resource_api.is_leaf_project( - root_project['id'])) - self.assertTrue(self.resource_api.is_leaf_project( - leaf_project['id'])) - - # Delete leaf_project - self.resource_api.delete_project(leaf_project['id']) - - # Now, root_project should be leaf - self.assertTrue(self.resource_api.is_leaf_project( - root_project['id'])) - - def test_list_projects_in_subtree(self): - projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3) - project1 = projects_hierarchy[0] - project2 = projects_hierarchy[1] - project3 = projects_hierarchy[2] - project4 = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id, - parent_id=project2['id']) - self.resource_api.create_project(project4['id'], project4) - - subtree = self.resource_api.list_projects_in_subtree(project1['id']) - self.assertEqual(3, len(subtree)) - self.assertIn(project2, subtree) - self.assertIn(project3, subtree) - self.assertIn(project4, subtree) - - subtree = self.resource_api.list_projects_in_subtree(project2['id']) - self.assertEqual(2, len(subtree)) - self.assertIn(project3, subtree) - self.assertIn(project4, subtree) - - subtree = self.resource_api.list_projects_in_subtree(project3['id']) - self.assertEqual(0, len(subtree)) - - def test_list_projects_in_subtree_with_circular_reference(self): - project1 = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - project1 = self.resource_api.create_project(project1['id'], project1) - - project2 = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id, - parent_id=project1['id']) - 
self.resource_api.create_project(project2['id'], project2) - - project1['parent_id'] = project2['id'] # Adds cyclic reference - - # NOTE(dstanek): The manager does not allow parent_id to be updated. - # Instead will directly use the driver to create the cyclic - # reference. - self.resource_api.driver.update_project(project1['id'], project1) - - subtree = self.resource_api.list_projects_in_subtree(project1['id']) - - # NOTE(dstanek): If a cyclic reference is detected the code bails - # and returns None instead of falling into the infinite - # recursion trap. - self.assertIsNone(subtree) - - def test_list_projects_in_subtree_invalid_project_id(self): - self.assertRaises(exception.ValidationError, - self.resource_api.list_projects_in_subtree, - None) - - self.assertRaises(exception.ProjectNotFound, - self.resource_api.list_projects_in_subtree, - uuid.uuid4().hex) - - def test_list_project_parents(self): - projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3) - project1 = projects_hierarchy[0] - project2 = projects_hierarchy[1] - project3 = projects_hierarchy[2] - project4 = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id, - parent_id=project2['id']) - self.resource_api.create_project(project4['id'], project4) - - parents1 = self.resource_api.list_project_parents(project3['id']) - self.assertEqual(3, len(parents1)) - self.assertIn(project1, parents1) - self.assertIn(project2, parents1) - - parents2 = self.resource_api.list_project_parents(project4['id']) - self.assertEqual(parents1, parents2) - - parents = self.resource_api.list_project_parents(project1['id']) - # It has the default domain as parent - self.assertEqual(1, len(parents)) - - def test_update_project_enabled_cascade(self): - """Test update_project_cascade - - Ensures the enabled attribute is correctly updated across - a simple 3-level projects hierarchy. 
- """ - projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3) - parent = projects_hierarchy[0] - - # Disable in parent project disables the whole subtree - parent['enabled'] = False - # Store the ref from backend in another variable so we don't bother - # to remove other attributes that were not originally provided and - # were set in the manager, like parent_id and domain_id. - parent_ref = self.resource_api.update_project(parent['id'], - parent, - cascade=True) - - subtree = self.resource_api.list_projects_in_subtree(parent['id']) - self.assertEqual(2, len(subtree)) - self.assertFalse(parent_ref['enabled']) - self.assertFalse(subtree[0]['enabled']) - self.assertFalse(subtree[1]['enabled']) - - # Enable parent project enables the whole subtree - parent['enabled'] = True - parent_ref = self.resource_api.update_project(parent['id'], - parent, - cascade=True) - - subtree = self.resource_api.list_projects_in_subtree(parent['id']) - self.assertEqual(2, len(subtree)) - self.assertTrue(parent_ref['enabled']) - self.assertTrue(subtree[0]['enabled']) - self.assertTrue(subtree[1]['enabled']) - - def test_cannot_enable_cascade_with_parent_disabled(self): - projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3) - grandparent = projects_hierarchy[0] - parent = projects_hierarchy[1] - - grandparent['enabled'] = False - self.resource_api.update_project(grandparent['id'], - grandparent, - cascade=True) - subtree = self.resource_api.list_projects_in_subtree(parent['id']) - self.assertFalse(subtree[0]['enabled']) - - parent['enabled'] = True - self.assertRaises(exception.ForbiddenNotSecurity, - self.resource_api.update_project, - parent['id'], - parent, - cascade=True) - - def test_update_cascade_only_accepts_enabled(self): - # Update cascade does not accept any other attribute but 'enabled' - new_project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - self.resource_api.create_project(new_project['id'], new_project) - - 
new_project['name'] = 'project1' - self.assertRaises(exception.ValidationError, - self.resource_api.update_project, - new_project['id'], - new_project, - cascade=True) - - def test_list_project_parents_invalid_project_id(self): - self.assertRaises(exception.ValidationError, - self.resource_api.list_project_parents, - None) - - self.assertRaises(exception.ProjectNotFound, - self.resource_api.list_project_parents, - uuid.uuid4().hex) - - def test_create_project_doesnt_modify_passed_in_dict(self): - new_project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - original_project = new_project.copy() - self.resource_api.create_project(new_project['id'], new_project) - self.assertDictEqual(original_project, new_project) - - def test_update_project_enable(self): - project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - self.resource_api.create_project(project['id'], project) - project_ref = self.resource_api.get_project(project['id']) - self.assertTrue(project_ref['enabled']) - - project['enabled'] = False - self.resource_api.update_project(project['id'], project) - project_ref = self.resource_api.get_project(project['id']) - self.assertEqual(project['enabled'], project_ref['enabled']) - - # If not present, enabled field should not be updated - del project['enabled'] - self.resource_api.update_project(project['id'], project) - project_ref = self.resource_api.get_project(project['id']) - self.assertFalse(project_ref['enabled']) - - project['enabled'] = True - self.resource_api.update_project(project['id'], project) - project_ref = self.resource_api.get_project(project['id']) - self.assertEqual(project['enabled'], project_ref['enabled']) - - del project['enabled'] - self.resource_api.update_project(project['id'], project) - project_ref = self.resource_api.get_project(project['id']) - self.assertTrue(project_ref['enabled']) - - def test_create_invalid_domain_fails(self): - new_group = unit.new_group_ref(domain_id="doesnotexist") - 
self.assertRaises(exception.DomainNotFound, - self.identity_api.create_group, - new_group) - new_user = unit.new_user_ref(domain_id="doesnotexist") - self.assertRaises(exception.DomainNotFound, - self.identity_api.create_user, - new_user) - - @unit.skip_if_no_multiple_domains_support - def test_project_crud(self): - domain = unit.new_domain_ref() - self.resource_api.create_domain(domain['id'], domain) - project = unit.new_project_ref(domain_id=domain['id']) - self.resource_api.create_project(project['id'], project) - project_ref = self.resource_api.get_project(project['id']) - self.assertDictContainsSubset(project, project_ref) - - project['name'] = uuid.uuid4().hex - self.resource_api.update_project(project['id'], project) - project_ref = self.resource_api.get_project(project['id']) - self.assertDictContainsSubset(project, project_ref) - - self.resource_api.delete_project(project['id']) - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - project['id']) - - def test_domain_delete_hierarchy(self): - domain = unit.new_domain_ref() - self.resource_api.create_domain(domain['id'], domain) - - # Creating a root and a leaf project inside the domain - projects_hierarchy = self._create_projects_hierarchy( - domain_id=domain['id']) - root_project = projects_hierarchy[0] - leaf_project = projects_hierarchy[0] - - # Disable the domain - domain['enabled'] = False - self.resource_api.update_domain(domain['id'], domain) - - # Delete the domain - self.resource_api.delete_domain(domain['id']) - - # Make sure the domain no longer exists - self.assertRaises(exception.DomainNotFound, - self.resource_api.get_domain, - domain['id']) - - # Make sure the root project no longer exists - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - root_project['id']) - - # Make sure the leaf project no longer exists - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - leaf_project['id']) - - def 
test_delete_projects_from_ids(self): - """Tests the resource backend call delete_projects_from_ids. - - Tests the normal flow of the delete_projects_from_ids backend call, - that ensures no project on the list exists after it is succesfully - called. - """ - project1_ref = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - project2_ref = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - projects = (project1_ref, project2_ref) - for project in projects: - self.resource_api.create_project(project['id'], project) - - # Setting up the ID's list - projects_ids = [p['id'] for p in projects] - self.resource_api.driver.delete_projects_from_ids(projects_ids) - - # Ensuring projects no longer exist at backend level - for project_id in projects_ids: - self.assertRaises(exception.ProjectNotFound, - self.resource_api.driver.get_project, - project_id) - - # Passing an empty list is silently ignored - self.resource_api.driver.delete_projects_from_ids([]) - - def test_delete_projects_from_ids_with_no_existing_project_id(self): - """Tests delete_projects_from_ids issues warning if not found. - - Tests the resource backend call delete_projects_from_ids passing a - non existing ID in project_ids, which is logged and ignored by - the backend. - """ - project_ref = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - self.resource_api.create_project(project_ref['id'], project_ref) - - # Setting up the ID's list - projects_ids = (project_ref['id'], uuid.uuid4().hex) - with mock.patch('keystone.resource.backends.sql.LOG') as mock_log: - self.resource_api.delete_projects_from_ids(projects_ids) - self.assertTrue(mock_log.warning.called) - # The existing project was deleted. - self.assertRaises(exception.ProjectNotFound, - self.resource_api.driver.get_project, - project_ref['id']) - - # Even if we only have one project, and it does not exist, it returns - # no error. 
- self.resource_api.driver.delete_projects_from_ids([uuid.uuid4().hex]) - - def test_delete_project_cascade(self): - # create a hierarchy with 3 levels - projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3) - root_project = projects_hierarchy[0] - project1 = projects_hierarchy[1] - project2 = projects_hierarchy[2] - - # Disabling all projects before attempting to delete - for project in (project2, project1, root_project): - project['enabled'] = False - self.resource_api.update_project(project['id'], project) - - self.resource_api.delete_project(root_project['id'], cascade=True) - - for project in projects_hierarchy: - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - project['id']) - - def test_delete_large_project_cascade(self): - """Try delete a large project with cascade true. - - Tree we will create:: - - +-p1-+ - | | - p5 p2 - | | - p6 +-p3-+ - | | - p7 p4 - """ - # create a hierarchy with 4 levels - projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=4) - p1 = projects_hierarchy[0] - # Add the left branch to the hierarchy (p5, p6) - self._create_projects_hierarchy(hierarchy_size=2, - parent_project_id=p1['id']) - # Add p7 to the hierarchy - p3_id = projects_hierarchy[2]['id'] - self._create_projects_hierarchy(hierarchy_size=1, - parent_project_id=p3_id) - # Reverse the hierarchy to disable the leaf first - prjs_hierarchy = ([p1] + self.resource_api.list_projects_in_subtree( - p1['id']))[::-1] - - # Disabling all projects before attempting to delete - for project in prjs_hierarchy: - project['enabled'] = False - self.resource_api.update_project(project['id'], project) - - self.resource_api.delete_project(p1['id'], cascade=True) - for project in prjs_hierarchy: - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - project['id']) - - def test_cannot_delete_project_cascade_with_enabled_child(self): - # create a hierarchy with 3 levels - projects_hierarchy = 
self._create_projects_hierarchy(hierarchy_size=3) - root_project = projects_hierarchy[0] - project1 = projects_hierarchy[1] - project2 = projects_hierarchy[2] - - project2['enabled'] = False - self.resource_api.update_project(project2['id'], project2) - - # Cannot cascade delete root_project, since project1 is enabled - self.assertRaises(exception.ForbiddenNotSecurity, - self.resource_api.delete_project, - root_project['id'], - cascade=True) - - # Ensuring no project was deleted, not even project2 - self.resource_api.get_project(root_project['id']) - self.resource_api.get_project(project1['id']) - self.resource_api.get_project(project2['id']) - - def test_hierarchical_projects_crud(self): - # create a hierarchy with just a root project (which is a leaf as well) - projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=1) - root_project1 = projects_hierarchy[0] - - # create a hierarchy with one root project and one leaf project - projects_hierarchy = self._create_projects_hierarchy() - root_project2 = projects_hierarchy[0] - leaf_project = projects_hierarchy[1] - - # update description from leaf_project - leaf_project['description'] = 'new description' - self.resource_api.update_project(leaf_project['id'], leaf_project) - proj_ref = self.resource_api.get_project(leaf_project['id']) - self.assertDictEqual(leaf_project, proj_ref) - - # update the parent_id is not allowed - leaf_project['parent_id'] = root_project1['id'] - self.assertRaises(exception.ForbiddenNotSecurity, - self.resource_api.update_project, - leaf_project['id'], - leaf_project) - - # delete root_project1 - self.resource_api.delete_project(root_project1['id']) - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - root_project1['id']) - - # delete root_project2 is not allowed since it is not a leaf project - self.assertRaises(exception.ForbiddenNotSecurity, - self.resource_api.delete_project, - root_project2['id']) - - def 
test_create_project_with_invalid_parent(self): - project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id, parent_id='fake') - self.assertRaises(exception.ProjectNotFound, - self.resource_api.create_project, - project['id'], - project) - - @unit.skip_if_no_multiple_domains_support - def test_create_leaf_project_with_different_domain(self): - root_project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - self.resource_api.create_project(root_project['id'], root_project) - - domain = unit.new_domain_ref() - self.resource_api.create_domain(domain['id'], domain) - leaf_project = unit.new_project_ref(domain_id=domain['id'], - parent_id=root_project['id']) - - self.assertRaises(exception.ValidationError, - self.resource_api.create_project, - leaf_project['id'], - leaf_project) - - def test_delete_hierarchical_leaf_project(self): - projects_hierarchy = self._create_projects_hierarchy() - root_project = projects_hierarchy[0] - leaf_project = projects_hierarchy[1] - - self.resource_api.delete_project(leaf_project['id']) - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - leaf_project['id']) - - self.resource_api.delete_project(root_project['id']) - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - root_project['id']) - - def test_delete_hierarchical_not_leaf_project(self): - projects_hierarchy = self._create_projects_hierarchy() - root_project = projects_hierarchy[0] - - self.assertRaises(exception.ForbiddenNotSecurity, - self.resource_api.delete_project, - root_project['id']) - - def test_update_project_parent(self): - projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3) - project1 = projects_hierarchy[0] - project2 = projects_hierarchy[1] - project3 = projects_hierarchy[2] - - # project2 is the parent from project3 - self.assertEqual(project3.get('parent_id'), project2['id']) - - # try to update project3 parent to parent1 - project3['parent_id'] = 
project1['id'] - self.assertRaises(exception.ForbiddenNotSecurity, - self.resource_api.update_project, - project3['id'], - project3) - - def test_create_project_under_disabled_one(self): - project1 = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id, enabled=False) - self.resource_api.create_project(project1['id'], project1) - - project2 = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id, - parent_id=project1['id']) - - # It's not possible to create a project under a disabled one in the - # hierarchy - self.assertRaises(exception.ValidationError, - self.resource_api.create_project, - project2['id'], - project2) - - def test_disable_hierarchical_leaf_project(self): - projects_hierarchy = self._create_projects_hierarchy() - leaf_project = projects_hierarchy[1] - - leaf_project['enabled'] = False - self.resource_api.update_project(leaf_project['id'], leaf_project) - - project_ref = self.resource_api.get_project(leaf_project['id']) - self.assertEqual(leaf_project['enabled'], project_ref['enabled']) - - def test_disable_hierarchical_not_leaf_project(self): - projects_hierarchy = self._create_projects_hierarchy() - root_project = projects_hierarchy[0] - - root_project['enabled'] = False - self.assertRaises(exception.ForbiddenNotSecurity, - self.resource_api.update_project, - root_project['id'], - root_project) - - def test_enable_project_with_disabled_parent(self): - projects_hierarchy = self._create_projects_hierarchy() - root_project = projects_hierarchy[0] - leaf_project = projects_hierarchy[1] - - # Disable leaf and root - leaf_project['enabled'] = False - self.resource_api.update_project(leaf_project['id'], leaf_project) - root_project['enabled'] = False - self.resource_api.update_project(root_project['id'], root_project) - - # Try to enable the leaf project, it's not possible since it has - # a disabled parent - leaf_project['enabled'] = True - self.assertRaises(exception.ForbiddenNotSecurity, - self.resource_api.update_project, - 
leaf_project['id'], - leaf_project) - - def _get_hierarchy_depth(self, project_id): - return len(self.resource_api.list_project_parents(project_id)) + 1 - - def test_check_hierarchy_depth(self): - # Should be allowed to have a hierarchy of the max depth specified - # in the config option plus one (to allow for the additional project - # acting as a domain after an upgrade) - projects_hierarchy = self._create_projects_hierarchy( - CONF.max_project_tree_depth) - leaf_project = projects_hierarchy[CONF.max_project_tree_depth - 1] - - depth = self._get_hierarchy_depth(leaf_project['id']) - self.assertEqual(CONF.max_project_tree_depth + 1, depth) - - # Creating another project in the hierarchy shouldn't be allowed - project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id, - parent_id=leaf_project['id']) - self.assertRaises(exception.ForbiddenNotSecurity, - self.resource_api.create_project, - project['id'], - project) - - def test_project_update_missing_attrs_with_a_value(self): - # Creating a project with no description attribute. - project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - del project['description'] - project = self.resource_api.create_project(project['id'], project) - - # Add a description attribute. - project['description'] = uuid.uuid4().hex - self.resource_api.update_project(project['id'], project) - - project_ref = self.resource_api.get_project(project['id']) - self.assertDictEqual(project, project_ref) - - def test_project_update_missing_attrs_with_a_falsey_value(self): - # Creating a project with no description attribute. - project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - del project['description'] - project = self.resource_api.create_project(project['id'], project) - - # Add a description attribute. 
- project['description'] = '' - self.resource_api.update_project(project['id'], project) - - project_ref = self.resource_api.get_project(project['id']) - self.assertDictEqual(project, project_ref) - - def test_domain_crud(self): - domain = unit.new_domain_ref() - domain_ref = self.resource_api.create_domain(domain['id'], domain) - self.assertDictEqual(domain, domain_ref) - domain_ref = self.resource_api.get_domain(domain['id']) - self.assertDictEqual(domain, domain_ref) - - domain['name'] = uuid.uuid4().hex - domain_ref = self.resource_api.update_domain(domain['id'], domain) - self.assertDictEqual(domain, domain_ref) - domain_ref = self.resource_api.get_domain(domain['id']) - self.assertDictEqual(domain, domain_ref) - - # Ensure an 'enabled' domain cannot be deleted - self.assertRaises(exception.ForbiddenNotSecurity, - self.resource_api.delete_domain, - domain_id=domain['id']) - - # Disable the domain - domain['enabled'] = False - self.resource_api.update_domain(domain['id'], domain) - - # Delete the domain - self.resource_api.delete_domain(domain['id']) - - # Make sure the domain no longer exists - self.assertRaises(exception.DomainNotFound, - self.resource_api.get_domain, - domain['id']) - - @unit.skip_if_no_multiple_domains_support - def test_domain_name_case_sensitivity(self): - # create a ref with a lowercase name - domain_name = 'test_domain' - ref = unit.new_domain_ref(name=domain_name) - - lower_case_domain = self.resource_api.create_domain(ref['id'], ref) - - # assign a new ID to the ref with the same name, but in uppercase - ref['id'] = uuid.uuid4().hex - ref['name'] = domain_name.upper() - upper_case_domain = self.resource_api.create_domain(ref['id'], ref) - - # We can get each domain by name - lower_case_domain_ref = self.resource_api.get_domain_by_name( - domain_name) - self.assertDictEqual(lower_case_domain, lower_case_domain_ref) - - upper_case_domain_ref = self.resource_api.get_domain_by_name( - domain_name.upper()) - 
self.assertDictEqual(upper_case_domain, upper_case_domain_ref) - - def test_project_attribute_update(self): - project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - self.resource_api.create_project(project['id'], project) - - # pick a key known to be non-existent - key = 'description' - - def assert_key_equals(value): - project_ref = self.resource_api.update_project( - project['id'], project) - self.assertEqual(value, project_ref[key]) - project_ref = self.resource_api.get_project(project['id']) - self.assertEqual(value, project_ref[key]) - - def assert_get_key_is(value): - project_ref = self.resource_api.update_project( - project['id'], project) - self.assertIs(project_ref.get(key), value) - project_ref = self.resource_api.get_project(project['id']) - self.assertIs(project_ref.get(key), value) - - # add an attribute that doesn't exist, set it to a falsey value - value = '' - project[key] = value - assert_key_equals(value) - - # set an attribute with a falsey value to null - value = None - project[key] = value - assert_get_key_is(value) - - # do it again, in case updating from this situation is handled oddly - value = None - project[key] = value - assert_get_key_is(value) - - # set a possibly-null value to a falsey value - value = '' - project[key] = value - assert_key_equals(value) - - # set a falsey value to a truthy value - value = uuid.uuid4().hex - project[key] = value - assert_key_equals(value) - - @unit.skip_if_cache_disabled('resource') - @unit.skip_if_no_multiple_domains_support - def test_domain_rename_invalidates_get_domain_by_name_cache(self): - domain = unit.new_domain_ref() - domain_id = domain['id'] - domain_name = domain['name'] - self.resource_api.create_domain(domain_id, domain) - domain_ref = self.resource_api.get_domain_by_name(domain_name) - domain_ref['name'] = uuid.uuid4().hex - self.resource_api.update_domain(domain_id, domain_ref) - self.assertRaises(exception.DomainNotFound, - self.resource_api.get_domain_by_name, - 
domain_name) - - @unit.skip_if_cache_disabled('resource') - def test_cache_layer_domain_crud(self): - domain = unit.new_domain_ref() - domain_id = domain['id'] - # Create Domain - self.resource_api.create_domain(domain_id, domain) - project_domain_ref = self.resource_api.get_project(domain_id) - domain_ref = self.resource_api.get_domain(domain_id) - updated_project_domain_ref = copy.deepcopy(project_domain_ref) - updated_project_domain_ref['name'] = uuid.uuid4().hex - updated_domain_ref = copy.deepcopy(domain_ref) - updated_domain_ref['name'] = updated_project_domain_ref['name'] - # Update domain, bypassing resource api manager - self.resource_api.driver.update_project(domain_id, - updated_project_domain_ref) - # Verify get_domain still returns the domain - self.assertDictContainsSubset( - domain_ref, self.resource_api.get_domain(domain_id)) - # Invalidate cache - self.resource_api.get_domain.invalidate(self.resource_api, - domain_id) - # Verify get_domain returns the updated domain - self.assertDictContainsSubset( - updated_domain_ref, self.resource_api.get_domain(domain_id)) - # Update the domain back to original ref, using the assignment api - # manager - self.resource_api.update_domain(domain_id, domain_ref) - self.assertDictContainsSubset( - domain_ref, self.resource_api.get_domain(domain_id)) - # Make sure domain is 'disabled', bypass resource api manager - project_domain_ref_disabled = project_domain_ref.copy() - project_domain_ref_disabled['enabled'] = False - self.resource_api.driver.update_project(domain_id, - project_domain_ref_disabled) - self.resource_api.driver.update_project(domain_id, {'enabled': False}) - # Delete domain, bypassing resource api manager - self.resource_api.driver.delete_project(domain_id) - # Verify get_domain still returns the domain - self.assertDictContainsSubset( - domain_ref, self.resource_api.get_domain(domain_id)) - # Invalidate cache - self.resource_api.get_domain.invalidate(self.resource_api, - domain_id) - # Verify 
get_domain now raises DomainNotFound - self.assertRaises(exception.DomainNotFound, - self.resource_api.get_domain, domain_id) - # Recreate Domain - self.resource_api.create_domain(domain_id, domain) - self.resource_api.get_domain(domain_id) - # Make sure domain is 'disabled', bypass resource api manager - domain['enabled'] = False - self.resource_api.driver.update_project(domain_id, domain) - self.resource_api.driver.update_project(domain_id, {'enabled': False}) - # Delete domain - self.resource_api.delete_domain(domain_id) - # verify DomainNotFound raised - self.assertRaises(exception.DomainNotFound, - self.resource_api.get_domain, - domain_id) - - @unit.skip_if_cache_disabled('resource') - @unit.skip_if_no_multiple_domains_support - def test_project_rename_invalidates_get_project_by_name_cache(self): - domain = unit.new_domain_ref() - project = unit.new_project_ref(domain_id=domain['id']) - project_id = project['id'] - project_name = project['name'] - self.resource_api.create_domain(domain['id'], domain) - # Create a project - self.resource_api.create_project(project_id, project) - self.resource_api.get_project_by_name(project_name, domain['id']) - project['name'] = uuid.uuid4().hex - self.resource_api.update_project(project_id, project) - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project_by_name, - project_name, - domain['id']) - - @unit.skip_if_cache_disabled('resource') - @unit.skip_if_no_multiple_domains_support - def test_cache_layer_project_crud(self): - domain = unit.new_domain_ref() - project = unit.new_project_ref(domain_id=domain['id']) - project_id = project['id'] - self.resource_api.create_domain(domain['id'], domain) - # Create a project - self.resource_api.create_project(project_id, project) - self.resource_api.get_project(project_id) - updated_project = copy.deepcopy(project) - updated_project['name'] = uuid.uuid4().hex - # Update project, bypassing resource manager - self.resource_api.driver.update_project(project_id, - 
updated_project) - # Verify get_project still returns the original project_ref - self.assertDictContainsSubset( - project, self.resource_api.get_project(project_id)) - # Invalidate cache - self.resource_api.get_project.invalidate(self.resource_api, - project_id) - # Verify get_project now returns the new project - self.assertDictContainsSubset( - updated_project, - self.resource_api.get_project(project_id)) - # Update project using the resource_api manager back to original - self.resource_api.update_project(project['id'], project) - # Verify get_project returns the original project_ref - self.assertDictContainsSubset( - project, self.resource_api.get_project(project_id)) - # Delete project bypassing resource - self.resource_api.driver.delete_project(project_id) - # Verify get_project still returns the project_ref - self.assertDictContainsSubset( - project, self.resource_api.get_project(project_id)) - # Invalidate cache - self.resource_api.get_project.invalidate(self.resource_api, - project_id) - # Verify ProjectNotFound now raised - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - project_id) - # recreate project - self.resource_api.create_project(project_id, project) - self.resource_api.get_project(project_id) - # delete project - self.resource_api.delete_project(project_id) - # Verify ProjectNotFound is raised - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - project_id) - - @unit.skip_if_no_multiple_domains_support - def test_get_default_domain_by_name(self): - domain_name = 'default' - - domain = unit.new_domain_ref(name=domain_name) - self.resource_api.create_domain(domain['id'], domain) - - domain_ref = self.resource_api.get_domain_by_name(domain_name) - self.assertEqual(domain, domain_ref) - - def test_get_not_default_domain_by_name(self): - domain_name = 'foo' - self.assertRaises(exception.DomainNotFound, - self.resource_api.get_domain_by_name, - domain_name) - - def 
test_project_update_and_project_get_return_same_response(self): - project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - - self.resource_api.create_project(project['id'], project) - - updated_project = {'enabled': False} - updated_project_ref = self.resource_api.update_project( - project['id'], updated_project) - - # SQL backend adds 'extra' field - updated_project_ref.pop('extra', None) - - self.assertIs(False, updated_project_ref['enabled']) - - project_ref = self.resource_api.get_project(project['id']) - self.assertDictEqual(updated_project_ref, project_ref) - - -class ResourceDriverTests(object): - """Tests for the resource driver. - - Subclasses must set self.driver to the driver instance. - - """ - - def test_create_project(self): - project_id = uuid.uuid4().hex - project = { - 'name': uuid.uuid4().hex, - 'id': project_id, - 'domain_id': uuid.uuid4().hex, - } - self.driver.create_project(project_id, project) - - def test_create_project_all_defined_properties(self): - project_id = uuid.uuid4().hex - project = { - 'name': uuid.uuid4().hex, - 'id': project_id, - 'domain_id': uuid.uuid4().hex, - 'description': uuid.uuid4().hex, - 'enabled': True, - 'parent_id': uuid.uuid4().hex, - 'is_domain': True, - } - self.driver.create_project(project_id, project) - - def test_create_project_null_domain(self): - project_id = uuid.uuid4().hex - project = { - 'name': uuid.uuid4().hex, - 'id': project_id, - 'domain_id': None, - } - self.driver.create_project(project_id, project) - - def test_create_project_same_name_same_domain_conflict(self): - name = uuid.uuid4().hex - domain_id = uuid.uuid4().hex - - project_id = uuid.uuid4().hex - project = { - 'name': name, - 'id': project_id, - 'domain_id': domain_id, - } - self.driver.create_project(project_id, project) - - project_id = uuid.uuid4().hex - project = { - 'name': name, - 'id': project_id, - 'domain_id': domain_id, - } - self.assertRaises(exception.Conflict, self.driver.create_project, - project_id, 
project) - - def test_create_project_same_id_conflict(self): - project_id = uuid.uuid4().hex - - project = { - 'name': uuid.uuid4().hex, - 'id': project_id, - 'domain_id': uuid.uuid4().hex, - } - self.driver.create_project(project_id, project) - - project = { - 'name': uuid.uuid4().hex, - 'id': project_id, - 'domain_id': uuid.uuid4().hex, - } - self.assertRaises(exception.Conflict, self.driver.create_project, - project_id, project) diff --git a/keystone-moon/keystone/tests/unit/resource/test_controllers.py b/keystone-moon/keystone/tests/unit/resource/test_controllers.py deleted file mode 100644 index b8f247c8..00000000 --- a/keystone-moon/keystone/tests/unit/resource/test_controllers.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright 2016 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import uuid - -from oslo_config import cfg - -from keystone import exception -from keystone.resource import controllers -from keystone.tests import unit -from keystone.tests.unit.ksfixtures import database - - -CONF = cfg.CONF - -_ADMIN_CONTEXT = {'is_admin': True, 'query_string': {}} - - -class TenantTestCaseNoDefaultDomain(unit.TestCase): - - def setUp(self): - super(TenantTestCaseNoDefaultDomain, self).setUp() - self.useFixture(database.Database()) - self.load_backends() - self.tenant_controller = controllers.Tenant() - - def test_setup(self): - # Other tests in this class assume there's no default domain, so make - # sure the setUp worked as expected. 
- self.assertRaises( - exception.DomainNotFound, - self.resource_api.get_domain, CONF.identity.default_domain_id) - - def test_get_all_projects(self): - # When get_all_projects is done and there's no default domain, the - # result is an empty list. - res = self.tenant_controller.get_all_projects(_ADMIN_CONTEXT) - self.assertEqual([], res['tenants']) - - def test_create_project(self): - # When a project is created using the v2 controller and there's no - # default domain, it doesn't fail with can't find domain (a default - # domain is created) - tenant = {'name': uuid.uuid4().hex} - self.tenant_controller.create_project(_ADMIN_CONTEXT, tenant) - # If the above doesn't fail then this is successful. diff --git a/keystone-moon/keystone/tests/unit/resource/test_core.py b/keystone-moon/keystone/tests/unit/resource/test_core.py deleted file mode 100644 index 2eb87e4c..00000000 --- a/keystone-moon/keystone/tests/unit/resource/test_core.py +++ /dev/null @@ -1,692 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import copy -import uuid - -import mock -from testtools import matchers - -from oslo_config import cfg -from oslotest import mockpatch - -from keystone import exception -from keystone.tests import unit -from keystone.tests.unit.ksfixtures import database - - -CONF = cfg.CONF - - -class TestResourceManagerNoFixtures(unit.SQLDriverOverrides, unit.TestCase): - - def setUp(self): - super(TestResourceManagerNoFixtures, self).setUp() - self.useFixture(database.Database(self.sql_driver_version_overrides)) - self.load_backends() - - def test_ensure_default_domain_exists(self): - # When there's no default domain, ensure_default_domain_exists creates - # it. - - # First make sure there's no default domain. - self.assertRaises( - exception.DomainNotFound, - self.resource_api.get_domain, CONF.identity.default_domain_id) - - self.resource_api.ensure_default_domain_exists() - default_domain = self.resource_api.get_domain( - CONF.identity.default_domain_id) - - expected_domain = { - 'id': CONF.identity.default_domain_id, - 'name': 'Default', - 'enabled': True, - 'description': 'Domain created automatically to support V2.0 ' - 'operations.', - } - self.assertEqual(expected_domain, default_domain) - - def test_ensure_default_domain_exists_already_exists(self): - # When there's already a default domain, ensure_default_domain_exists - # doesn't do anything. 
- - name = uuid.uuid4().hex - description = uuid.uuid4().hex - domain_attrs = { - 'id': CONF.identity.default_domain_id, - 'name': name, - 'description': description, - } - self.resource_api.create_domain(CONF.identity.default_domain_id, - domain_attrs) - - self.resource_api.ensure_default_domain_exists() - - default_domain = self.resource_api.get_domain( - CONF.identity.default_domain_id) - - expected_domain = { - 'id': CONF.identity.default_domain_id, - 'name': name, - 'enabled': True, - 'description': description, - } - - self.assertEqual(expected_domain, default_domain) - - def test_ensure_default_domain_exists_fails(self): - # When there's an unexpected exception creating domain it's passed on. - - self.useFixture(mockpatch.PatchObject( - self.resource_api, 'create_domain', - side_effect=exception.UnexpectedError)) - - self.assertRaises(exception.UnexpectedError, - self.resource_api.ensure_default_domain_exists) - - def test_update_project_name_conflict(self): - name = uuid.uuid4().hex - description = uuid.uuid4().hex - domain_attrs = { - 'id': CONF.identity.default_domain_id, - 'name': name, - 'description': description, - } - domain = self.resource_api.create_domain( - CONF.identity.default_domain_id, domain_attrs) - project1 = unit.new_project_ref(domain_id=domain['id'], - name=uuid.uuid4().hex) - self.resource_api.create_project(project1['id'], project1) - project2 = unit.new_project_ref(domain_id=domain['id'], - name=uuid.uuid4().hex) - project = self.resource_api.create_project(project2['id'], project2) - - self.assertRaises(exception.Conflict, - self.resource_api.update_project, - project['id'], {'name': project1['name']}) - - -class DomainConfigDriverTests(object): - - def _domain_config_crud(self, sensitive): - domain = uuid.uuid4().hex - group = uuid.uuid4().hex - option = uuid.uuid4().hex - value = uuid.uuid4().hex - self.driver.create_config_option( - domain, group, option, value, sensitive) - res = self.driver.get_config_option( - domain, group, 
option, sensitive) - config = {'group': group, 'option': option, 'value': value} - self.assertEqual(config, res) - - value = uuid.uuid4().hex - self.driver.update_config_option( - domain, group, option, value, sensitive) - res = self.driver.get_config_option( - domain, group, option, sensitive) - config = {'group': group, 'option': option, 'value': value} - self.assertEqual(config, res) - - self.driver.delete_config_options( - domain, group, option, sensitive) - self.assertRaises(exception.DomainConfigNotFound, - self.driver.get_config_option, - domain, group, option, sensitive) - # ...and silent if we try to delete it again - self.driver.delete_config_options( - domain, group, option, sensitive) - - def test_whitelisted_domain_config_crud(self): - self._domain_config_crud(sensitive=False) - - def test_sensitive_domain_config_crud(self): - self._domain_config_crud(sensitive=True) - - def _list_domain_config(self, sensitive): - """Test listing by combination of domain, group & option.""" - config1 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex, - 'value': uuid.uuid4().hex} - # Put config2 in the same group as config1 - config2 = {'group': config1['group'], 'option': uuid.uuid4().hex, - 'value': uuid.uuid4().hex} - config3 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex, - 'value': 100} - domain = uuid.uuid4().hex - - for config in [config1, config2, config3]: - self.driver.create_config_option( - domain, config['group'], config['option'], - config['value'], sensitive) - - # Try listing all items from a domain - res = self.driver.list_config_options( - domain, sensitive=sensitive) - self.assertThat(res, matchers.HasLength(3)) - for res_entry in res: - self.assertIn(res_entry, [config1, config2, config3]) - - # Try listing by domain and group - res = self.driver.list_config_options( - domain, group=config1['group'], sensitive=sensitive) - self.assertThat(res, matchers.HasLength(2)) - for res_entry in res: - self.assertIn(res_entry, [config1, 
config2]) - - # Try listing by domain, group and option - res = self.driver.list_config_options( - domain, group=config2['group'], - option=config2['option'], sensitive=sensitive) - self.assertThat(res, matchers.HasLength(1)) - self.assertEqual(config2, res[0]) - - def test_list_whitelisted_domain_config_crud(self): - self._list_domain_config(False) - - def test_list_sensitive_domain_config_crud(self): - self._list_domain_config(True) - - def _delete_domain_configs(self, sensitive): - """Test deleting by combination of domain, group & option.""" - config1 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex, - 'value': uuid.uuid4().hex} - # Put config2 and config3 in the same group as config1 - config2 = {'group': config1['group'], 'option': uuid.uuid4().hex, - 'value': uuid.uuid4().hex} - config3 = {'group': config1['group'], 'option': uuid.uuid4().hex, - 'value': uuid.uuid4().hex} - config4 = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex, - 'value': uuid.uuid4().hex} - domain = uuid.uuid4().hex - - for config in [config1, config2, config3, config4]: - self.driver.create_config_option( - domain, config['group'], config['option'], - config['value'], sensitive) - - # Try deleting by domain, group and option - res = self.driver.delete_config_options( - domain, group=config2['group'], - option=config2['option'], sensitive=sensitive) - res = self.driver.list_config_options( - domain, sensitive=sensitive) - self.assertThat(res, matchers.HasLength(3)) - for res_entry in res: - self.assertIn(res_entry, [config1, config3, config4]) - - # Try deleting by domain and group - res = self.driver.delete_config_options( - domain, group=config4['group'], sensitive=sensitive) - res = self.driver.list_config_options( - domain, sensitive=sensitive) - self.assertThat(res, matchers.HasLength(2)) - for res_entry in res: - self.assertIn(res_entry, [config1, config3]) - - # Try deleting all items from a domain - res = self.driver.delete_config_options( - domain, 
sensitive=sensitive) - res = self.driver.list_config_options( - domain, sensitive=sensitive) - self.assertThat(res, matchers.HasLength(0)) - - def test_delete_whitelisted_domain_configs(self): - self._delete_domain_configs(False) - - def test_delete_sensitive_domain_configs(self): - self._delete_domain_configs(True) - - def _create_domain_config_twice(self, sensitive): - """Test conflict error thrown if create the same option twice.""" - config = {'group': uuid.uuid4().hex, 'option': uuid.uuid4().hex, - 'value': uuid.uuid4().hex} - domain = uuid.uuid4().hex - - self.driver.create_config_option( - domain, config['group'], config['option'], - config['value'], sensitive=sensitive) - self.assertRaises(exception.Conflict, - self.driver.create_config_option, - domain, config['group'], config['option'], - config['value'], sensitive=sensitive) - - def test_create_whitelisted_domain_config_twice(self): - self._create_domain_config_twice(False) - - def test_create_sensitive_domain_config_twice(self): - self._create_domain_config_twice(True) - - -class DomainConfigTests(object): - - def setUp(self): - self.domain = unit.new_domain_ref() - self.resource_api.create_domain(self.domain['id'], self.domain) - self.addCleanup(self.clean_up_domain) - - def clean_up_domain(self): - # NOTE(henry-nash): Deleting the domain will also delete any domain - # configs for this domain. - self.domain['enabled'] = False - self.resource_api.update_domain(self.domain['id'], self.domain) - self.resource_api.delete_domain(self.domain['id']) - del self.domain - - def test_create_domain_config_including_sensitive_option(self): - config = {'ldap': {'url': uuid.uuid4().hex, - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}} - self.domain_config_api.create_config(self.domain['id'], config) - - # password is sensitive, so check that the whitelisted portion and - # the sensitive piece have been stored in the appropriate locations. 
- res = self.domain_config_api.get_config(self.domain['id']) - config_whitelisted = copy.deepcopy(config) - config_whitelisted['ldap'].pop('password') - self.assertEqual(config_whitelisted, res) - res = self.domain_config_api.driver.get_config_option( - self.domain['id'], 'ldap', 'password', sensitive=True) - self.assertEqual(config['ldap']['password'], res['value']) - - # Finally, use the non-public API to get back the whole config - res = self.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) - self.assertEqual(config, res) - - def test_get_partial_domain_config(self): - config = {'ldap': {'url': uuid.uuid4().hex, - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} - self.domain_config_api.create_config(self.domain['id'], config) - - res = self.domain_config_api.get_config(self.domain['id'], - group='identity') - config_partial = copy.deepcopy(config) - config_partial.pop('ldap') - self.assertEqual(config_partial, res) - res = self.domain_config_api.get_config( - self.domain['id'], group='ldap', option='user_tree_dn') - self.assertEqual({'user_tree_dn': config['ldap']['user_tree_dn']}, res) - # ...but we should fail to get a sensitive option - self.assertRaises(exception.DomainConfigNotFound, - self.domain_config_api.get_config, self.domain['id'], - group='ldap', option='password') - - def test_delete_partial_domain_config(self): - config = {'ldap': {'url': uuid.uuid4().hex, - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} - self.domain_config_api.create_config(self.domain['id'], config) - - self.domain_config_api.delete_config( - self.domain['id'], group='identity') - config_partial = copy.deepcopy(config) - config_partial.pop('identity') - config_partial['ldap'].pop('password') - res = self.domain_config_api.get_config(self.domain['id']) - self.assertEqual(config_partial, res) - - self.domain_config_api.delete_config( 
- self.domain['id'], group='ldap', option='url') - config_partial = copy.deepcopy(config_partial) - config_partial['ldap'].pop('url') - res = self.domain_config_api.get_config(self.domain['id']) - self.assertEqual(config_partial, res) - - def test_get_options_not_in_domain_config(self): - self.assertRaises(exception.DomainConfigNotFound, - self.domain_config_api.get_config, self.domain['id']) - config = {'ldap': {'url': uuid.uuid4().hex}} - - self.domain_config_api.create_config(self.domain['id'], config) - - self.assertRaises(exception.DomainConfigNotFound, - self.domain_config_api.get_config, self.domain['id'], - group='identity') - self.assertRaises(exception.DomainConfigNotFound, - self.domain_config_api.get_config, self.domain['id'], - group='ldap', option='user_tree_dn') - - def test_get_sensitive_config(self): - config = {'ldap': {'url': uuid.uuid4().hex, - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} - res = self.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) - self.assertEqual({}, res) - self.domain_config_api.create_config(self.domain['id'], config) - res = self.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) - self.assertEqual(config, res) - - def test_update_partial_domain_config(self): - config = {'ldap': {'url': uuid.uuid4().hex, - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} - self.domain_config_api.create_config(self.domain['id'], config) - - # Try updating a group - new_config = {'ldap': {'url': uuid.uuid4().hex, - 'user_filter': uuid.uuid4().hex}} - res = self.domain_config_api.update_config( - self.domain['id'], new_config, group='ldap') - expected_config = copy.deepcopy(config) - expected_config['ldap']['url'] = new_config['ldap']['url'] - expected_config['ldap']['user_filter'] = ( - new_config['ldap']['user_filter']) - expected_full_config = 
copy.deepcopy(expected_config) - expected_config['ldap'].pop('password') - res = self.domain_config_api.get_config(self.domain['id']) - self.assertEqual(expected_config, res) - # The sensitive option should still exist - res = self.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) - self.assertEqual(expected_full_config, res) - - # Try updating a single whitelisted option - self.domain_config_api.delete_config(self.domain['id']) - self.domain_config_api.create_config(self.domain['id'], config) - new_config = {'url': uuid.uuid4().hex} - res = self.domain_config_api.update_config( - self.domain['id'], new_config, group='ldap', option='url') - - # Make sure whitelisted and full config is updated - expected_whitelisted_config = copy.deepcopy(config) - expected_whitelisted_config['ldap']['url'] = new_config['url'] - expected_full_config = copy.deepcopy(expected_whitelisted_config) - expected_whitelisted_config['ldap'].pop('password') - self.assertEqual(expected_whitelisted_config, res) - res = self.domain_config_api.get_config(self.domain['id']) - self.assertEqual(expected_whitelisted_config, res) - res = self.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) - self.assertEqual(expected_full_config, res) - - # Try updating a single sensitive option - self.domain_config_api.delete_config(self.domain['id']) - self.domain_config_api.create_config(self.domain['id'], config) - new_config = {'password': uuid.uuid4().hex} - res = self.domain_config_api.update_config( - self.domain['id'], new_config, group='ldap', option='password') - # The whitelisted config should not have changed... 
- expected_whitelisted_config = copy.deepcopy(config) - expected_full_config = copy.deepcopy(config) - expected_whitelisted_config['ldap'].pop('password') - self.assertEqual(expected_whitelisted_config, res) - res = self.domain_config_api.get_config(self.domain['id']) - self.assertEqual(expected_whitelisted_config, res) - expected_full_config['ldap']['password'] = new_config['password'] - res = self.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) - # ...but the sensitive piece should have. - self.assertEqual(expected_full_config, res) - - def test_update_invalid_partial_domain_config(self): - config = {'ldap': {'url': uuid.uuid4().hex, - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} - # An extra group, when specifying one group should fail - self.assertRaises(exception.InvalidDomainConfig, - self.domain_config_api.update_config, - self.domain['id'], config, group='ldap') - # An extra option, when specifying one option should fail - self.assertRaises(exception.InvalidDomainConfig, - self.domain_config_api.update_config, - self.domain['id'], config['ldap'], - group='ldap', option='url') - - # Now try the right number of groups/options, but just not - # ones that are in the config provided - config = {'ldap': {'user_tree_dn': uuid.uuid4().hex}} - self.assertRaises(exception.InvalidDomainConfig, - self.domain_config_api.update_config, - self.domain['id'], config, group='identity') - self.assertRaises(exception.InvalidDomainConfig, - self.domain_config_api.update_config, - self.domain['id'], config['ldap'], group='ldap', - option='url') - - # Now some valid groups/options, but just not ones that are in the - # existing config - config = {'ldap': {'user_tree_dn': uuid.uuid4().hex}} - self.domain_config_api.create_config(self.domain['id'], config) - config_wrong_group = {'identity': {'driver': uuid.uuid4().hex}} - self.assertRaises(exception.DomainConfigNotFound, - 
self.domain_config_api.update_config, - self.domain['id'], config_wrong_group, - group='identity') - config_wrong_option = {'url': uuid.uuid4().hex} - self.assertRaises(exception.DomainConfigNotFound, - self.domain_config_api.update_config, - self.domain['id'], config_wrong_option, - group='ldap', option='url') - - # And finally just some bad groups/options - bad_group = uuid.uuid4().hex - config = {bad_group: {'user': uuid.uuid4().hex}} - self.assertRaises(exception.InvalidDomainConfig, - self.domain_config_api.update_config, - self.domain['id'], config, group=bad_group, - option='user') - bad_option = uuid.uuid4().hex - config = {'ldap': {bad_option: uuid.uuid4().hex}} - self.assertRaises(exception.InvalidDomainConfig, - self.domain_config_api.update_config, - self.domain['id'], config, group='ldap', - option=bad_option) - - def test_create_invalid_domain_config(self): - self.assertRaises(exception.InvalidDomainConfig, - self.domain_config_api.create_config, - self.domain['id'], {}) - config = {uuid.uuid4().hex: uuid.uuid4().hex} - self.assertRaises(exception.InvalidDomainConfig, - self.domain_config_api.create_config, - self.domain['id'], config) - config = {uuid.uuid4().hex: {uuid.uuid4().hex: uuid.uuid4().hex}} - self.assertRaises(exception.InvalidDomainConfig, - self.domain_config_api.create_config, - self.domain['id'], config) - config = {'ldap': {uuid.uuid4().hex: uuid.uuid4().hex}} - self.assertRaises(exception.InvalidDomainConfig, - self.domain_config_api.create_config, - self.domain['id'], config) - # Try an option that IS in the standard conf, but neither whitelisted - # or marked as sensitive - config = {'identity': {'user_tree_dn': uuid.uuid4().hex}} - self.assertRaises(exception.InvalidDomainConfig, - self.domain_config_api.create_config, - self.domain['id'], config) - - def test_delete_invalid_partial_domain_config(self): - config = {'ldap': {'url': uuid.uuid4().hex}} - self.domain_config_api.create_config(self.domain['id'], config) - # Try deleting 
a group not in the config - self.assertRaises(exception.DomainConfigNotFound, - self.domain_config_api.delete_config, - self.domain['id'], group='identity') - # Try deleting an option not in the config - self.assertRaises(exception.DomainConfigNotFound, - self.domain_config_api.delete_config, - self.domain['id'], - group='ldap', option='user_tree_dn') - - def test_sensitive_substitution_in_domain_config(self): - # Create a config that contains a whitelisted option that requires - # substitution of a sensitive option. - config = {'ldap': {'url': 'my_url/%(password)s', - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} - self.domain_config_api.create_config(self.domain['id'], config) - - # Read back the config with the internal method and ensure that the - # substitution has taken place. - res = self.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) - expected_url = ( - config['ldap']['url'] % {'password': config['ldap']['password']}) - self.assertEqual(expected_url, res['ldap']['url']) - - def test_invalid_sensitive_substitution_in_domain_config(self): - """Check that invalid substitutions raise warnings.""" - mock_log = mock.Mock() - - invalid_option_config = { - 'ldap': {'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} - - for invalid_option in ['my_url/%(passssword)s', - 'my_url/%(password', - 'my_url/%(password)', - 'my_url/%(password)d']: - invalid_option_config['ldap']['url'] = invalid_option - self.domain_config_api.create_config( - self.domain['id'], invalid_option_config) - - with mock.patch('keystone.resource.core.LOG', mock_log): - res = self.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) - mock_log.warning.assert_any_call(mock.ANY) - self.assertEqual( - invalid_option_config['ldap']['url'], res['ldap']['url']) - - def test_escaped_sequence_in_domain_config(self): - """Check that escaped 
'%(' doesn't get interpreted.""" - mock_log = mock.Mock() - - escaped_option_config = { - 'ldap': {'url': 'my_url/%%(password)s', - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} - - self.domain_config_api.create_config( - self.domain['id'], escaped_option_config) - - with mock.patch('keystone.resource.core.LOG', mock_log): - res = self.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) - self.assertFalse(mock_log.warn.called) - # The escaping '%' should have been removed - self.assertEqual('my_url/%(password)s', res['ldap']['url']) - - @unit.skip_if_cache_disabled('domain_config') - def test_cache_layer_get_sensitive_config(self): - config = {'ldap': {'url': uuid.uuid4().hex, - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} - self.domain_config_api.create_config(self.domain['id'], config) - # cache the result - res = self.domain_config_api.get_config_with_sensitive_info( - self.domain['id']) - self.assertEqual(config, res) - - # delete, bypassing domain config manager api - self.domain_config_api.delete_config_options(self.domain['id']) - self.domain_config_api.delete_config_options(self.domain['id'], - sensitive=True) - - self.assertDictEqual( - res, self.domain_config_api.get_config_with_sensitive_info( - self.domain['id'])) - self.domain_config_api.get_config_with_sensitive_info.invalidate( - self.domain_config_api, self.domain['id']) - self.assertDictEqual( - {}, - self.domain_config_api.get_config_with_sensitive_info( - self.domain['id'])) - - def test_delete_domain_deletes_configs(self): - """Test domain deletion clears the domain configs.""" - domain = unit.new_domain_ref() - self.resource_api.create_domain(domain['id'], domain) - config = {'ldap': {'url': uuid.uuid4().hex, - 'user_tree_dn': uuid.uuid4().hex, - 'password': uuid.uuid4().hex}} - self.domain_config_api.create_config(domain['id'], config) - - # Now 
delete the domain - domain['enabled'] = False - self.resource_api.update_domain(domain['id'], domain) - self.resource_api.delete_domain(domain['id']) - - # Check domain configs have also been deleted - self.assertRaises( - exception.DomainConfigNotFound, - self.domain_config_api.get_config, - domain['id']) - - # The get_config_with_sensitive_info does not throw an exception if - # the config is empty, it just returns an empty dict - self.assertDictEqual( - {}, - self.domain_config_api.get_config_with_sensitive_info( - domain['id'])) - - def test_config_registration(self): - type = uuid.uuid4().hex - self.domain_config_api.obtain_registration( - self.domain['id'], type) - self.domain_config_api.release_registration( - self.domain['id'], type=type) - - # Make sure that once someone has it, nobody else can get it. - # This includes the domain who already has it. - self.domain_config_api.obtain_registration( - self.domain['id'], type) - self.assertFalse( - self.domain_config_api.obtain_registration( - self.domain['id'], type)) - - # Make sure we can read who does have it - self.assertEqual( - self.domain['id'], - self.domain_config_api.read_registration(type)) - - # Make sure releasing it is silent if the domain specified doesn't - # have the registration - domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain2['id'], domain2) - self.domain_config_api.release_registration( - domain2['id'], type=type) - - # If nobody has the type registered, then trying to read it should - # raise ConfigRegistrationNotFound - self.domain_config_api.release_registration( - self.domain['id'], type=type) - self.assertRaises(exception.ConfigRegistrationNotFound, - self.domain_config_api.read_registration, - type) - - # Finally check multiple registrations are cleared if you free the - # registration without specifying the type - type2 = uuid.uuid4().hex - self.domain_config_api.obtain_registration( - self.domain['id'], type) - 
self.domain_config_api.obtain_registration( - self.domain['id'], type2) - self.domain_config_api.release_registration(self.domain['id']) - self.assertRaises(exception.ConfigRegistrationNotFound, - self.domain_config_api.read_registration, - type) - self.assertRaises(exception.ConfigRegistrationNotFound, - self.domain_config_api.read_registration, - type2) diff --git a/keystone-moon/keystone/tests/unit/rest.py b/keystone-moon/keystone/tests/unit/rest.py deleted file mode 100644 index 512c301d..00000000 --- a/keystone-moon/keystone/tests/unit/rest.py +++ /dev/null @@ -1,261 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_serialization import jsonutils -from six.moves import http_client -import webtest - -from keystone.auth import controllers as auth_controllers -from keystone.tests import unit -from keystone.tests.unit import default_fixtures -from keystone.tests.unit.ksfixtures import database - - -class RestfulTestCase(unit.TestCase): - """Performs restful tests against the WSGI app over HTTP. - - This class launches public & admin WSGI servers for every test, which can - be accessed by calling ``public_request()`` or ``admin_request()``, - respectfully. - - ``restful_request()`` and ``request()`` methods are also exposed if you - need to bypass restful conventions or access HTTP details in your test - implementation. 
- - Three new asserts are provided: - - * ``assertResponseSuccessful``: called automatically for every request - unless an ``expected_status`` is provided - * ``assertResponseStatus``: called instead of ``assertResponseSuccessful``, - if an ``expected_status`` is provided - * ``assertValidResponseHeaders``: validates that the response headers - appear as expected - - Requests are automatically serialized according to the defined - ``content_type``. Responses are automatically deserialized as well, and - available in the ``response.body`` attribute. The original body content is - available in the ``response.raw`` attribute. - - """ - - # default content type to test - content_type = 'json' - - def get_extensions(self): - return None - - def setUp(self, app_conf='keystone'): - super(RestfulTestCase, self).setUp() - - # Will need to reset the plug-ins - self.addCleanup(setattr, auth_controllers, 'AUTH_METHODS', {}) - - self.useFixture(database.Database(self.sql_driver_version_overrides)) - self.load_backends() - self.load_fixtures(default_fixtures) - - self.public_app = webtest.TestApp( - self.loadapp(app_conf, name='main')) - self.addCleanup(delattr, self, 'public_app') - self.admin_app = webtest.TestApp( - self.loadapp(app_conf, name='admin')) - self.addCleanup(delattr, self, 'admin_app') - - def request(self, app, path, body=None, headers=None, token=None, - expected_status=None, **kwargs): - if headers: - headers = {str(k): str(v) for k, v in headers.items()} - else: - headers = {} - - if token: - headers['X-Auth-Token'] = str(token) - - # sets environ['REMOTE_ADDR'] - kwargs.setdefault('remote_addr', 'localhost') - - response = app.request(path, headers=headers, - status=expected_status, body=body, - **kwargs) - - return response - - def assertResponseSuccessful(self, response): - """Asserts that a status code lies inside the 2xx range. - - :param response: :py:class:`httplib.HTTPResponse` to be - verified to have a status code between 200 and 299. 
- - example:: - - self.assertResponseSuccessful(response) - """ - self.assertTrue( - response.status_code >= 200 and response.status_code <= 299, - 'Status code %d is outside of the expected range (2xx)\n\n%s' % - (response.status, response.body)) - - def assertResponseStatus(self, response, expected_status): - """Asserts a specific status code on the response. - - :param response: :py:class:`httplib.HTTPResponse` - :param expected_status: The specific ``status`` result expected - - example:: - - self.assertResponseStatus(response, http_client.NO_CONTENT) - """ - self.assertEqual( - expected_status, response.status_code, - 'Status code %s is not %s, as expected\n\n%s' % - (response.status_code, expected_status, response.body)) - - def assertValidResponseHeaders(self, response): - """Ensures that response headers appear as expected.""" - self.assertIn('X-Auth-Token', response.headers.get('Vary')) - - def assertValidErrorResponse(self, response, - expected_status=http_client.BAD_REQUEST): - """Verify that the error response is valid. - - Subclasses can override this function based on the expected response. - - """ - self.assertEqual(expected_status, response.status_code) - error = response.result['error'] - self.assertEqual(response.status_code, error['code']) - self.assertIsNotNone(error.get('title')) - - def _to_content_type(self, body, headers, content_type=None): - """Attempt to encode JSON and XML automatically.""" - content_type = content_type or self.content_type - - if content_type == 'json': - headers['Accept'] = 'application/json' - if body: - headers['Content-Type'] = 'application/json' - # NOTE(davechen):dump the body to bytes since WSGI requires - # the body of the response to be `Bytestrings`. 
- # see pep-3333: - # https://www.python.org/dev/peps/pep-3333/#a-note-on-string-types - return jsonutils.dump_as_bytes(body) - - def _from_content_type(self, response, content_type=None): - """Attempt to decode JSON and XML automatically, if detected.""" - content_type = content_type or self.content_type - - if response.body is not None and response.body.strip(): - # if a body is provided, a Content-Type is also expected - header = response.headers.get('Content-Type') - self.assertIn(content_type, header) - - if content_type == 'json': - response.result = jsonutils.loads(response.body) - else: - response.result = response.body - - def restful_request(self, method='GET', headers=None, body=None, - content_type=None, response_content_type=None, - **kwargs): - """Serializes/deserializes json as request/response body. - - .. WARNING:: - - * Existing Accept header will be overwritten. - * Existing Content-Type header will be overwritten. - - """ - # Initialize headers dictionary - headers = {} if not headers else headers - - body = self._to_content_type(body, headers, content_type) - - # Perform the HTTP request/response - response = self.request(method=method, headers=headers, body=body, - **kwargs) - - response_content_type = response_content_type or content_type - self._from_content_type(response, content_type=response_content_type) - - # we can save some code & improve coverage by always doing this - if (method != 'HEAD' and - response.status_code >= http_client.BAD_REQUEST): - self.assertValidErrorResponse(response) - - # Contains the decoded response.body - return response - - def _request(self, convert=True, **kwargs): - if convert: - response = self.restful_request(**kwargs) - else: - response = self.request(**kwargs) - - self.assertValidResponseHeaders(response) - return response - - def public_request(self, **kwargs): - return self._request(app=self.public_app, **kwargs) - - def admin_request(self, **kwargs): - return self._request(app=self.admin_app, 
**kwargs) - - def _get_token(self, body): - """Convenience method so that we can test authenticated requests.""" - r = self.public_request(method='POST', path='/v2.0/tokens', body=body) - return self._get_token_id(r) - - def get_admin_token(self): - return self._get_token({ - 'auth': { - 'passwordCredentials': { - 'username': self.user_reqadmin['name'], - 'password': self.user_reqadmin['password'] - }, - 'tenantId': default_fixtures.SERVICE_TENANT_ID - } - }) - - def get_unscoped_token(self): - """Convenience method so that we can test authenticated requests.""" - return self._get_token({ - 'auth': { - 'passwordCredentials': { - 'username': self.user_foo['name'], - 'password': self.user_foo['password'], - }, - }, - }) - - def get_scoped_token(self, tenant_id=None): - """Convenience method so that we can test authenticated requests.""" - if not tenant_id: - tenant_id = self.tenant_bar['id'] - return self._get_token({ - 'auth': { - 'passwordCredentials': { - 'username': self.user_foo['name'], - 'password': self.user_foo['password'], - }, - 'tenantId': tenant_id, - }, - }) - - def _get_token_id(self, r): - """Helper method to return a token ID from a response. - - This needs to be overridden by child classes for on their content type. 
- - """ - raise NotImplementedError() diff --git a/keystone-moon/keystone/tests/unit/saml2/idp_saml2_metadata.xml b/keystone-moon/keystone/tests/unit/saml2/idp_saml2_metadata.xml deleted file mode 100644 index db235f7c..00000000 --- a/keystone-moon/keystone/tests/unit/saml2/idp_saml2_metadata.xml +++ /dev/null @@ -1,25 +0,0 @@ - - - - - - - MIIDpTCCAo0CAREwDQYJKoZIhvcNAQEFBQAwgZ4xCjAIBgNVBAUTATUxCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJDQTESMBAGA1UEBxMJU3Vubnl2YWxlMRIwEAYDVQQKEwlPcGVuU3RhY2sxETAPBgNVBAsTCEtleXN0b25lMSUwIwYJKoZIhvcNAQkBFhZrZXlzdG9uZUBvcGVuc3RhY2sub3JnMRQwEgYDVQQDEwtTZWxmIFNpZ25lZDAgFw0xMzA3MDkxNjI1MDBaGA8yMDcyMDEwMTE2MjUwMFowgY8xCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJDQTESMBAGA1UEBxMJU3Vubnl2YWxlMRIwEAYDVQQKEwlPcGVuU3RhY2sxETAPBgNVBAsTCEtleXN0b25lMSUwIwYJKoZIhvcNAQkBFhZrZXlzdG9uZUBvcGVuc3RhY2sub3JnMREwDwYDVQQDEwhLZXlzdG9uZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMTC6IdNd9Cg1DshcrT5gRVRF36nEmjSA9QWdik7B925PK70U4F6j4pz/5JL7plIo/8rJ4jJz9ccE7m0iA+IuABtEhEwXkG9rj47Oy0J4ZyDGSh2K1Bl78PA9zxXSzysUTSjBKdAh29dPYbJY7cgZJ0uC3AtfVceYiAOIi14SdFeZ0LZLDXBuLaqUmSMrmKwJ9wAMOCb/jbBP9/3Ycd0GYjlvrSBU4Bqb8/NHasyO4DpPN68OAoyD5r5jUtV8QZN03UjIsoux8e0lrL6+MVtJo0OfWvlSrlzS5HKSryY+uqqQEuxtZKpJM2MV85ujvjc8eDSChh2shhDjBem3FIlHKUCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAed9fHgdJrk+gZcO5gsqq6uURfDOuYD66GsSdZw4BqHjYAcnyWq2da+iw7Uxkqu7iLf2k4+Hu3xjDFrce479OwZkSnbXmqB7XspTGOuM8MgT7jB/ypKTOZ6qaZKSWK1Hta995hMrVVlhUNBLh0MPGqoVWYA4d7mblujgH9vp+4mpCciJagHks8K5FBmI+pobB+uFdSYDoRzX9LTpStspK4e3IoY8baILuGcdKimRNBv6ItG4hMrntAe1/nWMJyUu5rDTGf2V/vAaS0S/faJBwQSz1o38QHMTWHNspfwIdX3yMqI9u7/vYlz3rLy5WdBdUgZrZ3/VLmJTiJVZu5Owq4Q== - - - - - - - openstack - openstack - openstack - - - openstack - first - lastname - admin@example.com - 555-555-5555 - - diff --git a/keystone-moon/keystone/tests/unit/saml2/signed_saml2_assertion.xml b/keystone-moon/keystone/tests/unit/saml2/signed_saml2_assertion.xml deleted file mode 100644 index 414ff9cf..00000000 --- a/keystone-moon/keystone/tests/unit/saml2/signed_saml2_assertion.xml +++ 
/dev/null @@ -1,69 +0,0 @@ - - https://acme.com/FIM/sps/openstack/saml20 - - - - - - - - - - - Lem2TKyYt+/tJy2iSos1t0KxcJE= - - - b//GXtGeCIJPFsMAHrx4+3yjrL4smSpRLXG9PB3TLMJvU4fx8n2PzK7+VbtWNbZG -vSgbvbQR52jq77iyaRfQ2iELuFEY+YietLRi7hsitkJCEayPmU+BDlNIGuCXZjAy -7tmtGFkLlZZJaom1jAzHfZ5JPjZdM5hvQwrhCI2Kzyk= - - - MIICtjCCAh+gAwIBAgIJAJTeBUN2i9ZNMA0GCSqGSIb3DQEBBQUAME4xCzAJBgNV -BAYTAkhSMQ8wDQYDVQQIEwZaYWdyZWIxITAfBgNVBAoTGE5la2Egb3JnYW5pemFj -aWphIGQuby5vLjELMAkGA1UEAxMCQ0EwHhcNMTIxMjI4MTYwODA1WhcNMTQxMjI4 -MTYwODA1WjBvMQswCQYDVQQGEwJIUjEPMA0GA1UECBMGWmFncmViMQ8wDQYDVQQH -EwZaYWdyZWIxITAfBgNVBAoTGE5la2Egb3JnYW5pemFjaWphIGQuby5vLjEbMBkG -A1UEAxMSUHJvZ3JhbWVyc2thIGZpcm1hMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCB -iQKBgQCgWApHV5cma0GY/v/vmwgciDQBgITcitx2rG0F+ghXtGiEJeK75VY7jQwE -UFCbgV+AaOY2NQChK2FKec7Hss/5y+jbWfX2yVwX6TYcCwnOGXenz+cgx2Fwqpu3 -ncL6dYJMfdbKvojBaJQLJTaNjRJsZACButDsDtXDSH9QaRy+hQIDAQABo3sweTAJ -BgNVHRMEAjAAMCwGCWCGSAGG+EIBDQQfFh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0 -aWZpY2F0ZTAdBgNVHQ4EFgQUSo9ThP/MOg8QIRWxoPo8qKR8O2wwHwYDVR0jBBgw -FoAUAelckr4bx8MwZ7y+VlHE46Mbo+cwDQYJKoZIhvcNAQEFBQADgYEAy19Z7Z5/ -/MlWkogu41s0RxL9ffG60QQ0Y8hhDTmgHNx1itj0wT8pB7M4KVMbZ4hjjSFsfRq4 -Vj7jm6LwU0WtZ3HGl8TygTh8AAJvbLROnTjLL5MqI9d9pKvIIfZ2Qs3xmJ7JEv4H -UHeBXxQq/GmfBv3l+V5ObQ+EHKnyDodLHCk= - - - - - test_user - - - - - - - urn:oasis:names:tc:SAML:2.0:ac:classes:Password - https://acme.com/FIM/sps/openstack/saml20 - - - - - test_user - - - user_domain - - - admin - member - - - development - - - project_domain - - - diff --git a/keystone-moon/keystone/tests/unit/schema/__init__.py b/keystone-moon/keystone/tests/unit/schema/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/schema/v2.py b/keystone-moon/keystone/tests/unit/schema/v2.py deleted file mode 100644 index ed260a00..00000000 --- a/keystone-moon/keystone/tests/unit/schema/v2.py +++ /dev/null @@ -1,161 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# 
not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -import copy - -from keystone.common import validation -from keystone.common.validation import parameter_types -from keystone.common.validation import validators - - -_project_properties = { - 'id': parameter_types.id_string, - 'name': parameter_types.name, - 'enabled': parameter_types.boolean, - 'description': validation.nullable(parameter_types.description), -} - -_token_properties = { - 'audit_ids': { - 'type': 'array', - 'items': { - 'type': 'string', - }, - 'minItems': 1, - 'maxItems': 2, - }, - 'id': {'type': 'string'}, - 'expires': {'type': 'string'}, - 'issued_at': {'type': 'string'}, - 'tenant': { - 'type': 'object', - 'properties': _project_properties, - 'required': ['id', 'name', 'enabled'], - 'additionalProperties': False, - }, -} - -_role_properties = { - 'name': parameter_types.name, -} - -_user_properties = { - 'id': parameter_types.id_string, - 'name': parameter_types.name, - 'username': parameter_types.name, - 'roles': { - 'type': 'array', - 'items': { - 'type': 'object', - 'properties': _role_properties, - 'required': ['name'], - 'additionalProperties': False, - }, - }, - 'roles_links': { - 'type': 'array', - 'maxItems': 0, - }, -} - -_metadata_properties = { - 'is_admin': {'type': 'integer'}, - 'roles': { - 'type': 'array', - 'items': {'type': 'string'}, - }, -} - -_endpoint_properties = { - 'id': {'type': 'string'}, - 'adminURL': parameter_types.url, - 'internalURL': parameter_types.url, - 'publicURL': parameter_types.url, - 'region': {'type': 'string'}, -} - 
-_service_properties = { - 'type': {'type': 'string'}, - 'name': parameter_types.name, - 'endpoints_links': { - 'type': 'array', - 'maxItems': 0, - }, - 'endpoints': { - 'type': 'array', - 'minItems': 1, - 'items': { - 'type': 'object', - 'properties': _endpoint_properties, - 'required': ['id', 'publicURL'], - 'additionalProperties': False, - }, - }, -} - -_base_access_properties = { - 'metadata': { - 'type': 'object', - 'properties': _metadata_properties, - 'required': ['is_admin', 'roles'], - 'additionalProperties': False, - }, - 'serviceCatalog': { - 'type': 'array', - 'items': { - 'type': 'object', - 'properties': _service_properties, - 'required': ['name', 'type', 'endpoints_links', 'endpoints'], - 'additionalProperties': False, - }, - }, - 'token': { - 'type': 'object', - 'properties': _token_properties, - 'required': ['audit_ids', 'id', 'expires', 'issued_at'], - 'additionalProperties': False, - }, - 'user': { - 'type': 'object', - 'properties': _user_properties, - 'required': ['id', 'name', 'username', 'roles', 'roles_links'], - 'additionalProperties': False, - }, -} - -_unscoped_access_properties = copy.deepcopy(_base_access_properties) -unscoped_metadata = _unscoped_access_properties['metadata'] -unscoped_metadata['properties']['roles']['maxItems'] = 0 -_unscoped_access_properties['user']['properties']['roles']['maxItems'] = 0 -_unscoped_access_properties['serviceCatalog']['maxItems'] = 0 - -_scoped_access_properties = copy.deepcopy(_base_access_properties) -_scoped_access_properties['metadata']['properties']['roles']['minItems'] = 1 -_scoped_access_properties['serviceCatalog']['minItems'] = 1 -_scoped_access_properties['user']['properties']['roles']['minItems'] = 1 - -base_token_schema = { - 'type': 'object', - 'required': ['metadata', 'user', 'serviceCatalog', 'token'], - 'additionalProperties': False, -} - -unscoped_token_schema = copy.deepcopy(base_token_schema) -unscoped_token_schema['properties'] = _unscoped_access_properties - -scoped_token_schema 
= copy.deepcopy(base_token_schema) -scoped_token_schema['properties'] = _scoped_access_properties - -# Validator objects -unscoped_validator = validators.SchemaValidator(unscoped_token_schema) -scoped_validator = validators.SchemaValidator(scoped_token_schema) diff --git a/keystone-moon/keystone/tests/unit/test_associate_project_endpoint_extension.py b/keystone-moon/keystone/tests/unit/test_associate_project_endpoint_extension.py deleted file mode 100644 index 79065863..00000000 --- a/keystone-moon/keystone/tests/unit/test_associate_project_endpoint_extension.py +++ /dev/null @@ -1,1391 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import copy -import uuid - -import mock -from oslo_log import versionutils -from six.moves import http_client -from testtools import matchers - -from keystone.contrib.endpoint_filter import routers -from keystone.tests import unit -from keystone.tests.unit import test_v3 - - -class EndpointFilterTestCase(test_v3.RestfulTestCase): - - def config_overrides(self): - super(EndpointFilterTestCase, self).config_overrides() - self.config_fixture.config( - group='catalog', driver='endpoint_filter.sql') - - def setUp(self): - super(EndpointFilterTestCase, self).setUp() - self.default_request_url = ( - '/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.default_domain_project_id, - 'endpoint_id': self.endpoint_id}) - - -class EndpointFilterDeprecateTestCase(test_v3.RestfulTestCase): - - @mock.patch.object(versionutils, 'report_deprecated_feature') - def test_exception_happens(self, mock_deprecator): - routers.EndpointFilterExtension(mock.ANY) - mock_deprecator.assert_called_once_with(mock.ANY, mock.ANY) - args, _kwargs = mock_deprecator.call_args - self.assertIn("Remove endpoint_filter_extension from", args[1]) - - -class EndpointFilterCRUDTestCase(EndpointFilterTestCase): - - def test_create_endpoint_project_association(self): - """PUT /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id} - - Valid endpoint and project id test case. - - """ - self.put(self.default_request_url) - - def test_create_endpoint_project_association_with_invalid_project(self): - """PUT OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id} - - Invalid project id test case. 
- - """ - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': uuid.uuid4().hex, - 'endpoint_id': self.endpoint_id}, - expected_status=http_client.NOT_FOUND) - - def test_create_endpoint_project_association_with_invalid_endpoint(self): - """PUT /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id} - - Invalid endpoint id test case. - - """ - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.default_domain_project_id, - 'endpoint_id': uuid.uuid4().hex}, - expected_status=http_client.NOT_FOUND) - - def test_create_endpoint_project_association_with_unexpected_body(self): - """PUT /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id} - - Unexpected body in request. The body should be ignored. - - """ - self.put(self.default_request_url, - body={'project_id': self.default_domain_project_id}) - - def test_check_endpoint_project_association(self): - """HEAD /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id} - - Valid project and endpoint id test case. - - """ - self.put(self.default_request_url) - self.head('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.default_domain_project_id, - 'endpoint_id': self.endpoint_id}) - - def test_check_endpoint_project_association_with_invalid_project(self): - """HEAD /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id} - - Invalid project id test case. - - """ - self.put(self.default_request_url) - self.head('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': uuid.uuid4().hex, - 'endpoint_id': self.endpoint_id}, - expected_status=http_client.NOT_FOUND) - - def test_check_endpoint_project_association_with_invalid_endpoint(self): - """HEAD /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id} - - Invalid endpoint id test case. 
- - """ - self.put(self.default_request_url) - self.head('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.default_domain_project_id, - 'endpoint_id': uuid.uuid4().hex}, - expected_status=http_client.NOT_FOUND) - - def test_list_endpoints_associated_with_valid_project(self): - """GET /OS-EP-FILTER/projects/{project_id}/endpoints - - Valid project and endpoint id test case. - - """ - self.put(self.default_request_url) - resource_url = '/OS-EP-FILTER/projects/%(project_id)s/endpoints' % { - 'project_id': self.default_domain_project_id} - r = self.get(resource_url) - self.assertValidEndpointListResponse(r, self.endpoint, - resource_url=resource_url) - - def test_list_endpoints_associated_with_invalid_project(self): - """GET /OS-EP-FILTER/projects/{project_id}/endpoints - - Invalid project id test case. - - """ - self.put(self.default_request_url) - self.get('/OS-EP-FILTER/projects/%(project_id)s/endpoints' % { - 'project_id': uuid.uuid4().hex}, - expected_status=http_client.NOT_FOUND) - - def test_list_projects_associated_with_endpoint(self): - """GET /OS-EP-FILTER/endpoints/{endpoint_id}/projects - - Valid endpoint-project association test case. - - """ - self.put(self.default_request_url) - resource_url = '/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' % { - 'endpoint_id': self.endpoint_id} - r = self.get(resource_url) - self.assertValidProjectListResponse(r, self.default_domain_project, - resource_url=resource_url) - - def test_list_projects_with_no_endpoint_project_association(self): - """GET /OS-EP-FILTER/endpoints/{endpoint_id}/projects - - Valid endpoint id but no endpoint-project associations test case. 
- - """ - r = self.get('/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' % - {'endpoint_id': self.endpoint_id}) - self.assertValidProjectListResponse(r, expected_length=0) - - def test_list_projects_associated_with_invalid_endpoint(self): - """GET /OS-EP-FILTER/endpoints/{endpoint_id}/projects - - Invalid endpoint id test case. - - """ - self.get('/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' % - {'endpoint_id': uuid.uuid4().hex}, - expected_status=http_client.NOT_FOUND) - - def test_remove_endpoint_project_association(self): - """DELETE /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id} - - Valid project id and endpoint id test case. - - """ - self.put(self.default_request_url) - self.delete('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.default_domain_project_id, - 'endpoint_id': self.endpoint_id}) - - def test_remove_endpoint_project_association_with_invalid_project(self): - """DELETE /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id} - - Invalid project id test case. - - """ - self.put(self.default_request_url) - self.delete('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': uuid.uuid4().hex, - 'endpoint_id': self.endpoint_id}, - expected_status=http_client.NOT_FOUND) - - def test_remove_endpoint_project_association_with_invalid_endpoint(self): - """DELETE /OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id} - - Invalid endpoint id test case. 
- - """ - self.put(self.default_request_url) - self.delete('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.default_domain_project_id, - 'endpoint_id': uuid.uuid4().hex}, - expected_status=http_client.NOT_FOUND) - - def test_endpoint_project_association_cleanup_when_project_deleted(self): - self.put(self.default_request_url) - association_url = ('/OS-EP-FILTER/endpoints/%(endpoint_id)s/projects' % - {'endpoint_id': self.endpoint_id}) - r = self.get(association_url) - self.assertValidProjectListResponse(r, expected_length=1) - - self.delete('/projects/%(project_id)s' % { - 'project_id': self.default_domain_project_id}) - - r = self.get(association_url) - self.assertValidProjectListResponse(r, expected_length=0) - - def test_endpoint_project_association_cleanup_when_endpoint_deleted(self): - self.put(self.default_request_url) - association_url = '/OS-EP-FILTER/projects/%(project_id)s/endpoints' % { - 'project_id': self.default_domain_project_id} - r = self.get(association_url) - self.assertValidEndpointListResponse(r, expected_length=1) - - self.delete('/endpoints/%(endpoint_id)s' % { - 'endpoint_id': self.endpoint_id}) - - r = self.get(association_url) - self.assertValidEndpointListResponse(r, expected_length=0) - - @unit.skip_if_cache_disabled('catalog') - def test_create_endpoint_project_association_invalidates_cache(self): - # NOTE(davechen): create another endpoint which will be added to - # default project, this should be done at first since - # `create_endpoint` will also invalidate cache. - endpoint_id2 = uuid.uuid4().hex - endpoint2 = unit.new_endpoint_ref(service_id=self.service_id, - region_id=self.region_id, - interface='public', - id=endpoint_id2) - self.catalog_api.create_endpoint(endpoint_id2, endpoint2.copy()) - - # create endpoint project association. - self.put(self.default_request_url) - - # should get back only one endpoint that was just created. 
- user_id = uuid.uuid4().hex - catalog = self.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) - - # there is only one endpoints associated with the default project. - self.assertEqual(1, len(catalog[0]['endpoints'])) - self.assertEqual(self.endpoint_id, catalog[0]['endpoints'][0]['id']) - - # add the second endpoint to default project, bypassing - # catalog_api API manager. - self.catalog_api.driver.add_endpoint_to_project( - endpoint_id2, - self.default_domain_project_id) - - # but, we can just get back one endpoint from the cache, since the - # catalog is pulled out from cache and its haven't been invalidated. - catalog = self.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) - - self.assertEqual(1, len(catalog[0]['endpoints'])) - - # remove the endpoint2 from the default project, and add it again via - # catalog_api API manager. - self.catalog_api.driver.remove_endpoint_from_project( - endpoint_id2, - self.default_domain_project_id) - - # add second endpoint to default project, this can be done by calling - # the catalog_api API manager directly but call the REST API - # instead for consistency. - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.default_domain_project_id, - 'endpoint_id': endpoint_id2}) - - # should get back two endpoints since the cache has been - # invalidated when the second endpoint was added to default project. 
- catalog = self.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) - - self.assertEqual(2, len(catalog[0]['endpoints'])) - - ep_id_list = [catalog[0]['endpoints'][0]['id'], - catalog[0]['endpoints'][1]['id']] - self.assertItemsEqual([self.endpoint_id, endpoint_id2], ep_id_list) - - @unit.skip_if_cache_disabled('catalog') - def test_remove_endpoint_from_project_invalidates_cache(self): - endpoint_id2 = uuid.uuid4().hex - endpoint2 = unit.new_endpoint_ref(service_id=self.service_id, - region_id=self.region_id, - interface='public', - id=endpoint_id2) - self.catalog_api.create_endpoint(endpoint_id2, endpoint2.copy()) - # create endpoint project association. - self.put(self.default_request_url) - - # add second endpoint to default project. - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.default_domain_project_id, - 'endpoint_id': endpoint_id2}) - - # should get back only one endpoint that was just created. - user_id = uuid.uuid4().hex - catalog = self.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) - - # there are two endpoints associated with the default project. - ep_id_list = [catalog[0]['endpoints'][0]['id'], - catalog[0]['endpoints'][1]['id']] - self.assertEqual(2, len(catalog[0]['endpoints'])) - self.assertItemsEqual([self.endpoint_id, endpoint_id2], ep_id_list) - - # remove the endpoint2 from the default project, bypassing - # catalog_api API manager. - self.catalog_api.driver.remove_endpoint_from_project( - endpoint_id2, - self.default_domain_project_id) - - # but, we can just still get back two endpoints from the cache, - # since the catalog is pulled out from cache and its haven't - # been invalidated. - catalog = self.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) - - self.assertEqual(2, len(catalog[0]['endpoints'])) - - # add back the endpoint2 to the default project, and remove it by - # catalog_api API manage. 
- self.catalog_api.driver.add_endpoint_to_project( - endpoint_id2, - self.default_domain_project_id) - - # remove the endpoint2 from the default project, this can be done - # by calling the catalog_api API manager directly but call - # the REST API instead for consistency. - self.delete('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.default_domain_project_id, - 'endpoint_id': endpoint_id2}) - - # should only get back one endpoint since the cache has been - # invalidated after the endpoint project association was removed. - catalog = self.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) - - self.assertEqual(1, len(catalog[0]['endpoints'])) - self.assertEqual(self.endpoint_id, catalog[0]['endpoints'][0]['id']) - - -class EndpointFilterTokenRequestTestCase(EndpointFilterTestCase): - - def test_project_scoped_token_using_endpoint_filter(self): - """Verify endpoints from project scoped token filtered.""" - # create a project to work with - ref = unit.new_project_ref(domain_id=self.domain_id) - r = self.post('/projects', body={'project': ref}) - project = self.assertValidProjectResponse(r, ref) - - # grant the user a role on the project - self.put( - '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % { - 'user_id': self.user['id'], - 'project_id': project['id'], - 'role_id': self.role['id']}) - - # set the user's preferred project - body = {'user': {'default_project_id': project['id']}} - r = self.patch('/users/%(user_id)s' % { - 'user_id': self.user['id']}, - body=body) - self.assertValidUserResponse(r) - - # add one endpoint to the project - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': project['id'], - 'endpoint_id': self.endpoint_id}) - - # attempt to authenticate without requesting a project - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password']) - r = 
self.post('/auth/tokens', body=auth_data) - self.assertValidProjectScopedTokenResponse( - r, - require_catalog=True, - endpoint_filter=True, - ep_filter_assoc=1) - self.assertEqual(project['id'], r.result['token']['project']['id']) - - def test_default_scoped_token_using_endpoint_filter(self): - """Verify endpoints from default scoped token filtered.""" - # add one endpoint to default project - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.project['id'], - 'endpoint_id': self.endpoint_id}) - - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project['id']) - r = self.post('/auth/tokens', body=auth_data) - self.assertValidProjectScopedTokenResponse( - r, - require_catalog=True, - endpoint_filter=True, - ep_filter_assoc=1) - self.assertEqual(self.project['id'], - r.result['token']['project']['id']) - - # Ensure name of the service exists - self.assertIn('name', r.result['token']['catalog'][0]) - - # region and region_id should be the same in endpoints - endpoint = r.result['token']['catalog'][0]['endpoints'][0] - self.assertIn('region', endpoint) - self.assertIn('region_id', endpoint) - self.assertEqual(endpoint['region'], endpoint['region_id']) - - def test_scoped_token_with_no_catalog_using_endpoint_filter(self): - """Verify endpoint filter does not affect no catalog.""" - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.project['id'], - 'endpoint_id': self.endpoint_id}) - - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project['id']) - r = self.post('/auth/tokens?nocatalog', body=auth_data) - self.assertValidProjectScopedTokenResponse( - r, - require_catalog=False) - self.assertEqual(self.project['id'], - r.result['token']['project']['id']) - - def test_invalid_endpoint_project_association(self): 
- """Verify an invalid endpoint-project association is handled.""" - # add first endpoint to default project - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.project['id'], - 'endpoint_id': self.endpoint_id}) - - # create a second temporary endpoint - endpoint_id2 = uuid.uuid4().hex - endpoint2 = unit.new_endpoint_ref(service_id=self.service_id, - region_id=self.region_id, - interface='public', - id=endpoint_id2) - self.catalog_api.create_endpoint(endpoint_id2, endpoint2.copy()) - - # add second endpoint to default project - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.project['id'], - 'endpoint_id': endpoint_id2}) - - # remove the temporary reference - # this will create inconsistency in the endpoint filter table - # which is fixed during the catalog creation for token request - self.catalog_api.delete_endpoint(endpoint_id2) - - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project['id']) - r = self.post('/auth/tokens', body=auth_data) - self.assertValidProjectScopedTokenResponse( - r, - require_catalog=True, - endpoint_filter=True, - ep_filter_assoc=1) - self.assertEqual(self.project['id'], - r.result['token']['project']['id']) - - def test_disabled_endpoint(self): - """Test that a disabled endpoint is handled.""" - # Add an enabled endpoint to the default project - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.project['id'], - 'endpoint_id': self.endpoint_id}) - - # Add a disabled endpoint to the default project. - - # Create a disabled endpoint that's like the enabled one. 
- disabled_endpoint_ref = copy.copy(self.endpoint) - disabled_endpoint_id = uuid.uuid4().hex - disabled_endpoint_ref.update({ - 'id': disabled_endpoint_id, - 'enabled': False, - 'interface': 'internal' - }) - self.catalog_api.create_endpoint(disabled_endpoint_id, - disabled_endpoint_ref) - - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.project['id'], - 'endpoint_id': disabled_endpoint_id}) - - # Authenticate to get token with catalog - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project['id']) - r = self.post('/auth/tokens', body=auth_data) - - endpoints = r.result['token']['catalog'][0]['endpoints'] - endpoint_ids = [ep['id'] for ep in endpoints] - self.assertEqual([self.endpoint_id], endpoint_ids) - - def test_multiple_endpoint_project_associations(self): - - def _create_an_endpoint(): - endpoint_ref = unit.new_endpoint_ref(service_id=self.service_id, - interface='public', - region_id=self.region_id) - r = self.post('/endpoints', body={'endpoint': endpoint_ref}) - return r.result['endpoint']['id'] - - # create three endpoints - endpoint_id1 = _create_an_endpoint() - endpoint_id2 = _create_an_endpoint() - _create_an_endpoint() - - # only associate two endpoints with project - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.project['id'], - 'endpoint_id': endpoint_id1}) - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.project['id'], - 'endpoint_id': endpoint_id2}) - - # there should be only two endpoints in token catalog - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project['id']) - r = self.post('/auth/tokens', body=auth_data) - self.assertValidProjectScopedTokenResponse( - r, - require_catalog=True, - endpoint_filter=True, - 
ep_filter_assoc=2) - - def test_get_auth_catalog_using_endpoint_filter(self): - # add one endpoint to default project - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.project['id'], - 'endpoint_id': self.endpoint_id}) - - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project['id']) - token_data = self.post('/auth/tokens', body=auth_data) - self.assertValidProjectScopedTokenResponse( - token_data, - require_catalog=True, - endpoint_filter=True, - ep_filter_assoc=1) - - auth_catalog = self.get('/auth/catalog', - token=token_data.headers['X-Subject-Token']) - self.assertEqual(token_data.result['token']['catalog'], - auth_catalog.result['catalog']) - - -class JsonHomeTests(EndpointFilterTestCase, test_v3.JsonHomeTestMixin): - JSON_HOME_DATA = { - 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/' - '1.0/rel/endpoint_projects': { - 'href-template': '/OS-EP-FILTER/endpoints/{endpoint_id}/projects', - 'href-vars': { - 'endpoint_id': - 'http://docs.openstack.org/api/openstack-identity/3/param/' - 'endpoint_id', - }, - }, - 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/' - '1.0/rel/endpoint_groups': { - 'href': '/OS-EP-FILTER/endpoint_groups', - }, - 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/' - '1.0/rel/endpoint_group': { - 'href-template': '/OS-EP-FILTER/endpoint_groups/' - '{endpoint_group_id}', - 'href-vars': { - 'endpoint_group_id': - 'http://docs.openstack.org/api/openstack-identity/3/' - 'ext/OS-EP-FILTER/1.0/param/endpoint_group_id', - }, - }, - 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/' - '1.0/rel/endpoint_group_to_project_association': { - 'href-template': '/OS-EP-FILTER/endpoint_groups/' - '{endpoint_group_id}/projects/{project_id}', - 'href-vars': { - 'project_id': - 'http://docs.openstack.org/api/openstack-identity/3/param/' - 
'project_id', - 'endpoint_group_id': - 'http://docs.openstack.org/api/openstack-identity/3/' - 'ext/OS-EP-FILTER/1.0/param/endpoint_group_id', - }, - }, - 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/' - '1.0/rel/projects_associated_with_endpoint_group': { - 'href-template': '/OS-EP-FILTER/endpoint_groups/' - '{endpoint_group_id}/projects', - 'href-vars': { - 'endpoint_group_id': - 'http://docs.openstack.org/api/openstack-identity/3/' - 'ext/OS-EP-FILTER/1.0/param/endpoint_group_id', - }, - }, - 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/' - '1.0/rel/endpoints_in_endpoint_group': { - 'href-template': '/OS-EP-FILTER/endpoint_groups/' - '{endpoint_group_id}/endpoints', - 'href-vars': { - 'endpoint_group_id': - 'http://docs.openstack.org/api/openstack-identity/3/' - 'ext/OS-EP-FILTER/1.0/param/endpoint_group_id', - }, - }, - 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-EP-FILTER/' - '1.0/rel/project_endpoint_groups': { - 'href-template': '/OS-EP-FILTER/projects/{project_id}/' - 'endpoint_groups', - 'href-vars': { - 'project_id': - 'http://docs.openstack.org/api/openstack-identity/3/param/' - 'project_id', - }, - }, - } - - -class EndpointGroupCRUDTestCase(EndpointFilterTestCase): - - DEFAULT_ENDPOINT_GROUP_BODY = { - 'endpoint_group': { - 'description': 'endpoint group description', - 'filters': { - 'interface': 'admin' - }, - 'name': 'endpoint_group_name' - } - } - - DEFAULT_ENDPOINT_GROUP_URL = '/OS-EP-FILTER/endpoint_groups' - - def test_create_endpoint_group(self): - """POST /OS-EP-FILTER/endpoint_groups - - Valid endpoint group test case. 
- - """ - r = self.post(self.DEFAULT_ENDPOINT_GROUP_URL, - body=self.DEFAULT_ENDPOINT_GROUP_BODY) - expected_filters = (self.DEFAULT_ENDPOINT_GROUP_BODY - ['endpoint_group']['filters']) - expected_name = (self.DEFAULT_ENDPOINT_GROUP_BODY - ['endpoint_group']['name']) - self.assertEqual(expected_filters, - r.result['endpoint_group']['filters']) - self.assertEqual(expected_name, r.result['endpoint_group']['name']) - self.assertThat( - r.result['endpoint_group']['links']['self'], - matchers.EndsWith( - '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % { - 'endpoint_group_id': r.result['endpoint_group']['id']})) - - def test_create_invalid_endpoint_group(self): - """POST /OS-EP-FILTER/endpoint_groups - - Invalid endpoint group creation test case. - - """ - invalid_body = copy.deepcopy(self.DEFAULT_ENDPOINT_GROUP_BODY) - invalid_body['endpoint_group']['filters'] = {'foobar': 'admin'} - self.post(self.DEFAULT_ENDPOINT_GROUP_URL, - body=invalid_body, - expected_status=http_client.BAD_REQUEST) - - def test_get_endpoint_group(self): - """GET /OS-EP-FILTER/endpoint_groups/{endpoint_group} - - Valid endpoint group test case. 
- - """ - # create an endpoint group to work with - response = self.post(self.DEFAULT_ENDPOINT_GROUP_URL, - body=self.DEFAULT_ENDPOINT_GROUP_BODY) - endpoint_group_id = response.result['endpoint_group']['id'] - endpoint_group_filters = response.result['endpoint_group']['filters'] - endpoint_group_name = response.result['endpoint_group']['name'] - url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % { - 'endpoint_group_id': endpoint_group_id} - self.get(url) - self.assertEqual(endpoint_group_id, - response.result['endpoint_group']['id']) - self.assertEqual(endpoint_group_filters, - response.result['endpoint_group']['filters']) - self.assertEqual(endpoint_group_name, - response.result['endpoint_group']['name']) - self.assertThat(response.result['endpoint_group']['links']['self'], - matchers.EndsWith(url)) - - def test_get_invalid_endpoint_group(self): - """GET /OS-EP-FILTER/endpoint_groups/{endpoint_group} - - Invalid endpoint group test case. - - """ - endpoint_group_id = 'foobar' - url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % { - 'endpoint_group_id': endpoint_group_id} - self.get(url, expected_status=http_client.NOT_FOUND) - - def test_check_endpoint_group(self): - """HEAD /OS-EP-FILTER/endpoint_groups/{endpoint_group_id} - - Valid endpoint_group_id test case. - - """ - # create an endpoint group to work with - endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) - url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % { - 'endpoint_group_id': endpoint_group_id} - self.head(url, expected_status=http_client.OK) - - def test_check_invalid_endpoint_group(self): - """HEAD /OS-EP-FILTER/endpoint_groups/{endpoint_group_id} - - Invalid endpoint_group_id test case. 
- - """ - endpoint_group_id = 'foobar' - url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % { - 'endpoint_group_id': endpoint_group_id} - self.head(url, expected_status=http_client.NOT_FOUND) - - def test_patch_endpoint_group(self): - """PATCH /OS-EP-FILTER/endpoint_groups/{endpoint_group} - - Valid endpoint group patch test case. - - """ - body = copy.deepcopy(self.DEFAULT_ENDPOINT_GROUP_BODY) - body['endpoint_group']['filters'] = {'region_id': 'UK'} - body['endpoint_group']['name'] = 'patch_test' - # create an endpoint group to work with - endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) - url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % { - 'endpoint_group_id': endpoint_group_id} - r = self.patch(url, body=body) - self.assertEqual(endpoint_group_id, - r.result['endpoint_group']['id']) - self.assertEqual(body['endpoint_group']['filters'], - r.result['endpoint_group']['filters']) - self.assertThat(r.result['endpoint_group']['links']['self'], - matchers.EndsWith(url)) - - def test_patch_nonexistent_endpoint_group(self): - """PATCH /OS-EP-FILTER/endpoint_groups/{endpoint_group} - - Invalid endpoint group patch test case. - - """ - body = { - 'endpoint_group': { - 'name': 'patch_test' - } - } - url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % { - 'endpoint_group_id': 'ABC'} - self.patch(url, body=body, expected_status=http_client.NOT_FOUND) - - def test_patch_invalid_endpoint_group(self): - """PATCH /OS-EP-FILTER/endpoint_groups/{endpoint_group} - - Valid endpoint group patch test case. 
- - """ - body = { - 'endpoint_group': { - 'description': 'endpoint group description', - 'filters': { - 'region': 'UK' - }, - 'name': 'patch_test' - } - } - # create an endpoint group to work with - endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) - url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % { - 'endpoint_group_id': endpoint_group_id} - self.patch(url, body=body, expected_status=http_client.BAD_REQUEST) - - # Perform a GET call to ensure that the content remains - # the same (as DEFAULT_ENDPOINT_GROUP_BODY) after attempting to update - # with an invalid filter - url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % { - 'endpoint_group_id': endpoint_group_id} - r = self.get(url) - del r.result['endpoint_group']['id'] - del r.result['endpoint_group']['links'] - self.assertDictEqual(self.DEFAULT_ENDPOINT_GROUP_BODY, r.result) - - def test_delete_endpoint_group(self): - """GET /OS-EP-FILTER/endpoint_groups/{endpoint_group} - - Valid endpoint group test case. - - """ - # create an endpoint group to work with - endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) - url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % { - 'endpoint_group_id': endpoint_group_id} - self.delete(url) - self.get(url, expected_status=http_client.NOT_FOUND) - - def test_delete_invalid_endpoint_group(self): - """GET /OS-EP-FILTER/endpoint_groups/{endpoint_group} - - Invalid endpoint group test case. 
- - """ - endpoint_group_id = 'foobar' - url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % { - 'endpoint_group_id': endpoint_group_id} - self.delete(url, expected_status=http_client.NOT_FOUND) - - def test_add_endpoint_group_to_project(self): - """Create a valid endpoint group and project association.""" - endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) - self._create_endpoint_group_project_association(endpoint_group_id, - self.project_id) - - def test_add_endpoint_group_to_project_with_invalid_project_id(self): - """Create an invalid endpoint group and project association.""" - # create an endpoint group to work with - endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) - - # associate endpoint group with project - project_id = uuid.uuid4().hex - url = self._get_project_endpoint_group_url( - endpoint_group_id, project_id) - self.put(url, expected_status=http_client.NOT_FOUND) - - def test_get_endpoint_group_in_project(self): - """Test retrieving project endpoint group association.""" - # create an endpoint group to work with - endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) - - # associate endpoint group with project - url = self._get_project_endpoint_group_url( - endpoint_group_id, self.project_id) - self.put(url) - response = self.get(url) - self.assertEqual( - endpoint_group_id, - response.result['project_endpoint_group']['endpoint_group_id']) - self.assertEqual( - self.project_id, - response.result['project_endpoint_group']['project_id']) - - def test_get_invalid_endpoint_group_in_project(self): - """Test retrieving project endpoint group association.""" - endpoint_group_id = uuid.uuid4().hex - project_id = uuid.uuid4().hex - url = self._get_project_endpoint_group_url( - endpoint_group_id, project_id) - self.get(url, 
expected_status=http_client.NOT_FOUND) - - def test_list_endpoint_groups_in_project(self): - """GET /OS-EP-FILTER/projects/{project_id}/endpoint_groups.""" - # create an endpoint group to work with - endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) - - # associate endpoint group with project - url = self._get_project_endpoint_group_url( - endpoint_group_id, self.project_id) - self.put(url) - - url = ('/OS-EP-FILTER/projects/%(project_id)s/endpoint_groups' % - {'project_id': self.project_id}) - response = self.get(url) - - self.assertEqual( - endpoint_group_id, - response.result['endpoint_groups'][0]['id']) - - def test_list_endpoint_groups_in_invalid_project(self): - """Test retrieving from invalid project.""" - project_id = uuid.uuid4().hex - url = ('/OS-EP-FILTER/projects/%(project_id)s/endpoint_groups' % - {'project_id': project_id}) - self.get(url, expected_status=http_client.NOT_FOUND) - - def test_empty_endpoint_groups_in_project(self): - """Test when no endpoint groups associated with the project.""" - url = ('/OS-EP-FILTER/projects/%(project_id)s/endpoint_groups' % - {'project_id': self.project_id}) - response = self.get(url) - - self.assertEqual(0, len(response.result['endpoint_groups'])) - - def test_check_endpoint_group_to_project(self): - """Test HEAD with a valid endpoint group and project association.""" - endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) - self._create_endpoint_group_project_association(endpoint_group_id, - self.project_id) - url = self._get_project_endpoint_group_url( - endpoint_group_id, self.project_id) - self.head(url, expected_status=http_client.OK) - - def test_check_endpoint_group_to_project_with_invalid_project_id(self): - """Test HEAD with an invalid endpoint group and project association.""" - # create an endpoint group to work with - endpoint_group_id = 
self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) - - # create an endpoint group to project association - url = self._get_project_endpoint_group_url( - endpoint_group_id, self.project_id) - self.put(url) - - # send a head request with an invalid project id - project_id = uuid.uuid4().hex - url = self._get_project_endpoint_group_url( - endpoint_group_id, project_id) - self.head(url, expected_status=http_client.NOT_FOUND) - - def test_list_endpoint_groups(self): - """GET /OS-EP-FILTER/endpoint_groups.""" - # create an endpoint group to work with - endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) - - # recover all endpoint groups - url = '/OS-EP-FILTER/endpoint_groups' - r = self.get(url) - self.assertNotEmpty(r.result['endpoint_groups']) - self.assertEqual(endpoint_group_id, - r.result['endpoint_groups'][0].get('id')) - - def test_list_projects_associated_with_endpoint_group(self): - """GET /OS-EP-FILTER/endpoint_groups/{endpoint_group}/projects - - Valid endpoint group test case. - - """ - # create an endpoint group to work with - endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) - - # associate endpoint group with project - self._create_endpoint_group_project_association(endpoint_group_id, - self.project_id) - - # recover list of projects associated with endpoint group - url = ('/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' - '/projects' % - {'endpoint_group_id': endpoint_group_id}) - self.get(url) - - def test_list_endpoints_associated_with_endpoint_group(self): - """GET /OS-EP-FILTER/endpoint_groups/{endpoint_group}/endpoints - - Valid endpoint group test case. 
- - """ - # create a service - service_ref = unit.new_service_ref() - response = self.post( - '/services', - body={'service': service_ref}) - - service_id = response.result['service']['id'] - - # create an endpoint - endpoint_ref = unit.new_endpoint_ref(service_id=service_id, - interface='public', - region_id=self.region_id) - response = self.post('/endpoints', body={'endpoint': endpoint_ref}) - endpoint_id = response.result['endpoint']['id'] - - # create an endpoint group - body = copy.deepcopy(self.DEFAULT_ENDPOINT_GROUP_BODY) - body['endpoint_group']['filters'] = {'service_id': service_id} - endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, body) - - # create association - self._create_endpoint_group_project_association(endpoint_group_id, - self.project_id) - - # recover list of endpoints associated with endpoint group - url = ('/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' - '/endpoints' % {'endpoint_group_id': endpoint_group_id}) - r = self.get(url) - self.assertNotEmpty(r.result['endpoints']) - self.assertEqual(endpoint_id, r.result['endpoints'][0].get('id')) - - def test_list_endpoints_associated_with_project_endpoint_group(self): - """GET /OS-EP-FILTER/projects/{project_id}/endpoints - - Valid project, endpoint id, and endpoint group test case. 
- - """ - # create a temporary service - service_ref = unit.new_service_ref() - response = self.post('/services', body={'service': service_ref}) - service_id2 = response.result['service']['id'] - - # create additional endpoints - self._create_endpoint_and_associations( - self.default_domain_project_id, service_id2) - self._create_endpoint_and_associations( - self.default_domain_project_id) - - # create project and endpoint association with default endpoint: - self.put(self.default_request_url) - - # create an endpoint group that contains a different endpoint - body = copy.deepcopy(self.DEFAULT_ENDPOINT_GROUP_BODY) - body['endpoint_group']['filters'] = {'service_id': service_id2} - endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, body) - - # associate endpoint group with project - self._create_endpoint_group_project_association( - endpoint_group_id, self.default_domain_project_id) - - # Now get a list of the filtered endpoints - endpoints_url = '/OS-EP-FILTER/projects/%(project_id)s/endpoints' % { - 'project_id': self.default_domain_project_id} - r = self.get(endpoints_url) - endpoints = self.assertValidEndpointListResponse(r) - self.assertEqual(2, len(endpoints)) - - # Ensure catalog includes the endpoints from endpoint_group project - # association, this is needed when a project scoped token is issued - # and "endpoint_filter.sql" backend driver is in place. 
- user_id = uuid.uuid4().hex - catalog_list = self.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) - self.assertEqual(2, len(catalog_list)) - - # Now remove project endpoint group association - url = self._get_project_endpoint_group_url( - endpoint_group_id, self.default_domain_project_id) - self.delete(url) - - # Now remove endpoint group - url = '/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' % { - 'endpoint_group_id': endpoint_group_id} - self.delete(url) - - r = self.get(endpoints_url) - endpoints = self.assertValidEndpointListResponse(r) - self.assertEqual(1, len(endpoints)) - - catalog_list = self.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) - self.assertEqual(1, len(catalog_list)) - - def test_endpoint_group_project_cleanup_with_project(self): - # create endpoint group - endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) - - # create new project and associate with endpoint_group - project_ref = unit.new_project_ref(domain_id=self.domain_id) - r = self.post('/projects', body={'project': project_ref}) - project = self.assertValidProjectResponse(r, project_ref) - url = self._get_project_endpoint_group_url(endpoint_group_id, - project['id']) - self.put(url) - - # check that we can recover the project endpoint group association - self.get(url) - - # Now delete the project and then try and retrieve the project - # endpoint group association again - self.delete('/projects/%(project_id)s' % { - 'project_id': project['id']}) - self.get(url, expected_status=http_client.NOT_FOUND) - - def test_endpoint_group_project_cleanup_with_endpoint_group(self): - # create endpoint group - endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) - - # create new project and associate with endpoint_group - project_ref = unit.new_project_ref(domain_id=self.domain_id) - r = 
self.post('/projects', body={'project': project_ref}) - project = self.assertValidProjectResponse(r, project_ref) - url = self._get_project_endpoint_group_url(endpoint_group_id, - project['id']) - self.put(url) - - # check that we can recover the project endpoint group association - self.get(url) - - # now remove the project endpoint group association - self.delete(url) - self.get(url, expected_status=http_client.NOT_FOUND) - - def test_removing_an_endpoint_group_project(self): - # create an endpoint group - endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) - - # create an endpoint_group project - url = self._get_project_endpoint_group_url( - endpoint_group_id, self.default_domain_project_id) - self.put(url) - - # remove the endpoint group project - self.delete(url) - self.get(url, expected_status=http_client.NOT_FOUND) - - def test_remove_endpoint_group_with_project_association(self): - # create an endpoint group - endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) - - # create an endpoint_group project - project_endpoint_group_url = self._get_project_endpoint_group_url( - endpoint_group_id, self.default_domain_project_id) - self.put(project_endpoint_group_url) - - # remove endpoint group, the associated endpoint_group project will - # be removed as well. 
- endpoint_group_url = ('/OS-EP-FILTER/endpoint_groups/' - '%(endpoint_group_id)s' - % {'endpoint_group_id': endpoint_group_id}) - self.delete(endpoint_group_url) - self.get(endpoint_group_url, expected_status=http_client.NOT_FOUND) - self.get(project_endpoint_group_url, - expected_status=http_client.NOT_FOUND) - - @unit.skip_if_cache_disabled('catalog') - def test_add_endpoint_group_to_project_invalidates_catalog_cache(self): - # create another endpoint with 'admin' interface which matches - # 'filters' definition in endpoint group, then there should be two - # endpoints returned when retrieving v3 catalog if cache works as - # expected. - # this should be done at first since `create_endpoint` will also - # invalidate cache. - endpoint_id2 = uuid.uuid4().hex - endpoint2 = unit.new_endpoint_ref(service_id=self.service_id, - region_id=self.region_id, - interface='admin', - id=endpoint_id2) - self.catalog_api.create_endpoint(endpoint_id2, endpoint2) - - # create a project and endpoint association. - self.put(self.default_request_url) - - # there is only one endpoint associated with the default project. - user_id = uuid.uuid4().hex - catalog = self.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) - - self.assertThat(catalog[0]['endpoints'], matchers.HasLength(1)) - - # create an endpoint group. - endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) - - # add the endpoint group to default project, bypassing - # catalog_api API manager. - self.catalog_api.driver.add_endpoint_group_to_project( - endpoint_group_id, - self.default_domain_project_id) - - # can get back only one endpoint from the cache, since the catalog - # is pulled out from cache. 
- invalid_catalog = self.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) - - self.assertThat(invalid_catalog[0]['endpoints'], - matchers.HasLength(1)) - self.assertEqual(catalog, invalid_catalog) - - # remove the endpoint group from default project, and add it again via - # catalog_api API manager. - self.catalog_api.driver.remove_endpoint_group_from_project( - endpoint_group_id, - self.default_domain_project_id) - - # add the endpoint group to default project. - self.catalog_api.add_endpoint_group_to_project( - endpoint_group_id, - self.default_domain_project_id) - - catalog = self.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) - - # now, it will return 2 endpoints since the cache has been - # invalidated. - self.assertThat(catalog[0]['endpoints'], matchers.HasLength(2)) - - ep_id_list = [catalog[0]['endpoints'][0]['id'], - catalog[0]['endpoints'][1]['id']] - self.assertItemsEqual([self.endpoint_id, endpoint_id2], ep_id_list) - - @unit.skip_if_cache_disabled('catalog') - def test_remove_endpoint_group_from_project_invalidates_cache(self): - # create another endpoint with 'admin' interface which matches - # 'filters' definition in endpoint group, then there should be two - # endpoints returned when retrieving v3 catalog. But only one - # endpoint will return after the endpoint group's deletion if cache - # works as expected. - # this should be done at first since `create_endpoint` will also - # invalidate cache. - endpoint_id2 = uuid.uuid4().hex - endpoint2 = unit.new_endpoint_ref(service_id=self.service_id, - region_id=self.region_id, - interface='admin', - id=endpoint_id2) - self.catalog_api.create_endpoint(endpoint_id2, endpoint2) - - # create project and endpoint association. - self.put(self.default_request_url) - - # create an endpoint group. 
- endpoint_group_id = self._create_valid_endpoint_group( - self.DEFAULT_ENDPOINT_GROUP_URL, self.DEFAULT_ENDPOINT_GROUP_BODY) - - # add the endpoint group to default project. - self.catalog_api.add_endpoint_group_to_project( - endpoint_group_id, - self.default_domain_project_id) - - # should get back two endpoints, one from endpoint project - # association, the other one is from endpoint_group project - # association. - user_id = uuid.uuid4().hex - catalog = self.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) - - self.assertThat(catalog[0]['endpoints'], matchers.HasLength(2)) - - ep_id_list = [catalog[0]['endpoints'][0]['id'], - catalog[0]['endpoints'][1]['id']] - self.assertItemsEqual([self.endpoint_id, endpoint_id2], ep_id_list) - - # remove endpoint_group project association, bypassing - # catalog_api API manager. - self.catalog_api.driver.remove_endpoint_group_from_project( - endpoint_group_id, - self.default_domain_project_id) - - # still get back two endpoints, since the catalog is pulled out - # from cache and the cache haven't been invalidated. - invalid_catalog = self.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) - - self.assertThat(invalid_catalog[0]['endpoints'], - matchers.HasLength(2)) - self.assertEqual(catalog, invalid_catalog) - - # add back the endpoint_group project association and remove it from - # manager. - self.catalog_api.driver.add_endpoint_group_to_project( - endpoint_group_id, - self.default_domain_project_id) - - self.catalog_api.remove_endpoint_group_from_project( - endpoint_group_id, - self.default_domain_project_id) - - # should only get back one endpoint since the cache has been - # invalidated after the endpoint_group project association was - # removed. 
- catalog = self.catalog_api.get_v3_catalog( - user_id, - self.default_domain_project_id) - - self.assertThat(catalog[0]['endpoints'], matchers.HasLength(1)) - self.assertEqual(self.endpoint_id, catalog[0]['endpoints'][0]['id']) - - def _create_valid_endpoint_group(self, url, body): - r = self.post(url, body=body) - return r.result['endpoint_group']['id'] - - def _create_endpoint_group_project_association(self, - endpoint_group_id, - project_id): - url = self._get_project_endpoint_group_url(endpoint_group_id, - project_id) - self.put(url) - - def _get_project_endpoint_group_url(self, - endpoint_group_id, - project_id): - return ('/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s' - '/projects/%(project_id)s' % - {'endpoint_group_id': endpoint_group_id, - 'project_id': project_id}) - - def _create_endpoint_and_associations(self, project_id, service_id=None): - """Creates an endpoint associated with service and project.""" - if not service_id: - # create a new service - service_ref = unit.new_service_ref() - response = self.post( - '/services', body={'service': service_ref}) - service_id = response.result['service']['id'] - - # create endpoint - endpoint_ref = unit.new_endpoint_ref(service_id=service_id, - interface='public', - region_id=self.region_id) - response = self.post('/endpoints', body={'endpoint': endpoint_ref}) - endpoint = response.result['endpoint'] - - # now add endpoint to project - self.put('/OS-EP-FILTER/projects/%(project_id)s' - '/endpoints/%(endpoint_id)s' % { - 'project_id': self.project['id'], - 'endpoint_id': endpoint['id']}) - return endpoint diff --git a/keystone-moon/keystone/tests/unit/test_auth.py b/keystone-moon/keystone/tests/unit/test_auth.py deleted file mode 100644 index 6f44b316..00000000 --- a/keystone-moon/keystone/tests/unit/test_auth.py +++ /dev/null @@ -1,1446 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance 
with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import copy -import datetime -import random -import string -import uuid - -import mock -from oslo_config import cfg -import oslo_utils.fixture -from oslo_utils import timeutils -import six -from testtools import matchers - -from keystone import assignment -from keystone import auth -from keystone.common import authorization -from keystone.common import config -from keystone import exception -from keystone.models import token_model -from keystone.tests import unit -from keystone.tests.unit import default_fixtures -from keystone.tests.unit import ksfixtures -from keystone.tests.unit.ksfixtures import database -from keystone import token -from keystone.token import provider -from keystone import trust - - -CONF = cfg.CONF -TIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ' - -HOST = ''.join(random.choice(string.ascii_lowercase) for x in range( - random.randint(5, 15))) -HOST_URL = 'http://%s' % (HOST) - - -def _build_user_auth(token=None, user_id=None, username=None, - password=None, tenant_id=None, tenant_name=None, - trust_id=None): - """Build auth dictionary. - - It will create an auth dictionary based on all the arguments - that it receives. 
- """ - auth_json = {} - if token is not None: - auth_json['token'] = token - if username or password: - auth_json['passwordCredentials'] = {} - if username is not None: - auth_json['passwordCredentials']['username'] = username - if user_id is not None: - auth_json['passwordCredentials']['userId'] = user_id - if password is not None: - auth_json['passwordCredentials']['password'] = password - if tenant_name is not None: - auth_json['tenantName'] = tenant_name - if tenant_id is not None: - auth_json['tenantId'] = tenant_id - if trust_id is not None: - auth_json['trust_id'] = trust_id - return auth_json - - -class AuthTest(unit.TestCase): - def setUp(self): - self.useFixture(database.Database()) - super(AuthTest, self).setUp() - self.time_fixture = self.useFixture(oslo_utils.fixture.TimeFixture()) - - self.load_backends() - self.load_fixtures(default_fixtures) - - self.context_with_remote_user = {'environment': - {'REMOTE_USER': 'FOO', - 'AUTH_TYPE': 'Negotiate'}} - self.empty_context = {'environment': {}} - - self.controller = token.controllers.Auth() - - def assertEqualTokens(self, a, b, enforce_audit_ids=True): - """Assert that two tokens are equal. - - Compare two tokens except for their ids. This also truncates - the time in the comparison. 
- """ - def normalize(token): - token['access']['token']['id'] = 'dummy' - del token['access']['token']['expires'] - del token['access']['token']['issued_at'] - del token['access']['token']['audit_ids'] - return token - - self.assertCloseEnoughForGovernmentWork( - timeutils.parse_isotime(a['access']['token']['expires']), - timeutils.parse_isotime(b['access']['token']['expires'])) - self.assertCloseEnoughForGovernmentWork( - timeutils.parse_isotime(a['access']['token']['issued_at']), - timeutils.parse_isotime(b['access']['token']['issued_at'])) - if enforce_audit_ids: - self.assertIn(a['access']['token']['audit_ids'][0], - b['access']['token']['audit_ids']) - self.assertThat(len(a['access']['token']['audit_ids']), - matchers.LessThan(3)) - self.assertThat(len(b['access']['token']['audit_ids']), - matchers.LessThan(3)) - - return self.assertDictEqual(normalize(a), normalize(b)) - - -class AuthBadRequests(AuthTest): - def test_no_external_auth(self): - """Verify that _authenticate_external() raises exception if N/A.""" - self.assertRaises( - token.controllers.ExternalAuthNotApplicable, - self.controller._authenticate_external, - context={}, auth={}) - - def test_empty_remote_user(self): - """Verify exception is raised when REMOTE_USER is an empty string.""" - context = {'environment': {'REMOTE_USER': ''}} - self.assertRaises( - token.controllers.ExternalAuthNotApplicable, - self.controller._authenticate_external, - context=context, auth={}) - - def test_no_token_in_auth(self): - """Verify that _authenticate_token() raises exception if no token.""" - self.assertRaises( - exception.ValidationError, - self.controller._authenticate_token, - None, {}) - - def test_no_credentials_in_auth(self): - """Verify that _authenticate_local() raises exception if no creds.""" - self.assertRaises( - exception.ValidationError, - self.controller._authenticate_local, - None, {}) - - def test_empty_username_and_userid_in_auth(self): - """Verify that empty username and userID raises 
ValidationError.""" - self.assertRaises( - exception.ValidationError, - self.controller._authenticate_local, - None, {'passwordCredentials': {'password': 'abc', - 'userId': '', 'username': ''}}) - - def test_authenticate_blank_request_body(self): - """Verify sending empty json dict raises the right exception.""" - self.assertRaises(exception.ValidationError, - self.controller.authenticate, - {}, {}) - - def test_authenticate_blank_auth(self): - """Verify sending blank 'auth' raises the right exception.""" - body_dict = _build_user_auth() - self.assertRaises(exception.ValidationError, - self.controller.authenticate, - {}, body_dict) - - def test_authenticate_invalid_auth_content(self): - """Verify sending invalid 'auth' raises the right exception.""" - self.assertRaises(exception.ValidationError, - self.controller.authenticate, - {}, {'auth': 'abcd'}) - - def test_authenticate_user_id_too_large(self): - """Verify sending large 'userId' raises the right exception.""" - body_dict = _build_user_auth(user_id='0' * 65, username='FOO', - password='foo2') - self.assertRaises(exception.ValidationSizeError, - self.controller.authenticate, - {}, body_dict) - - def test_authenticate_username_too_large(self): - """Verify sending large 'username' raises the right exception.""" - body_dict = _build_user_auth(username='0' * 65, password='foo2') - self.assertRaises(exception.ValidationSizeError, - self.controller.authenticate, - {}, body_dict) - - def test_authenticate_tenant_id_too_large(self): - """Verify sending large 'tenantId' raises the right exception.""" - body_dict = _build_user_auth(username='FOO', password='foo2', - tenant_id='0' * 65) - self.assertRaises(exception.ValidationSizeError, - self.controller.authenticate, - {}, body_dict) - - def test_authenticate_tenant_name_too_large(self): - """Verify sending large 'tenantName' raises the right exception.""" - body_dict = _build_user_auth(username='FOO', password='foo2', - tenant_name='0' * 65) - 
self.assertRaises(exception.ValidationSizeError, - self.controller.authenticate, - {}, body_dict) - - def test_authenticate_token_too_large(self): - """Verify sending large 'token' raises the right exception.""" - body_dict = _build_user_auth(token={'id': '0' * 8193}) - self.assertRaises(exception.ValidationSizeError, - self.controller.authenticate, - {}, body_dict) - - def test_authenticate_password_too_large(self): - """Verify sending large 'password' raises the right exception.""" - length = CONF.identity.max_password_length + 1 - body_dict = _build_user_auth(username='FOO', password='0' * length) - self.assertRaises(exception.ValidationSizeError, - self.controller.authenticate, - {}, body_dict) - - def test_authenticate_fails_if_project_unsafe(self): - """Verify authenticate to a project with unsafe name fails.""" - # Start with url name restrictions off, so we can create the unsafe - # named project - self.config_fixture.config(group='resource', - project_name_url_safe='off') - unsafe_name = 'i am not / safe' - project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id, name=unsafe_name) - self.resource_api.create_project(project['id'], project) - self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], project['id'], self.role_member['id']) - no_context = {} - - body_dict = _build_user_auth( - username=self.user_foo['name'], - password=self.user_foo['password'], - tenant_name=project['name']) - - # Since name url restriction is off, we should be able to autenticate - self.controller.authenticate(no_context, body_dict) - - # Set the name url restriction to strict and we should fail to - # authenticate - self.config_fixture.config(group='resource', - project_name_url_safe='strict') - self.assertRaises(exception.Unauthorized, - self.controller.authenticate, - no_context, body_dict) - - -class AuthWithToken(AuthTest): - def test_unscoped_token(self): - """Verify getting an unscoped token with password creds.""" - body_dict = 
_build_user_auth(username='FOO', - password='foo2') - unscoped_token = self.controller.authenticate({}, body_dict) - self.assertNotIn('tenant', unscoped_token['access']['token']) - - def test_auth_invalid_token(self): - """Verify exception is raised if invalid token.""" - body_dict = _build_user_auth(token={"id": uuid.uuid4().hex}) - self.assertRaises( - exception.Unauthorized, - self.controller.authenticate, - {}, body_dict) - - def test_auth_bad_formatted_token(self): - """Verify exception is raised if invalid token.""" - body_dict = _build_user_auth(token={}) - self.assertRaises( - exception.ValidationError, - self.controller.authenticate, - {}, body_dict) - - def test_auth_unscoped_token_no_project(self): - """Verify getting an unscoped token with an unscoped token.""" - body_dict = _build_user_auth( - username='FOO', - password='foo2') - unscoped_token = self.controller.authenticate({}, body_dict) - - body_dict = _build_user_auth( - token=unscoped_token["access"]["token"]) - unscoped_token_2 = self.controller.authenticate({}, body_dict) - - self.assertEqualTokens(unscoped_token, unscoped_token_2) - - def test_auth_unscoped_token_project(self): - """Verify getting a token in a tenant with an unscoped token.""" - # Add a role in so we can check we get this back - self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], - self.tenant_bar['id'], - self.role_member['id']) - # Get an unscoped token - body_dict = _build_user_auth( - username='FOO', - password='foo2') - unscoped_token = self.controller.authenticate({}, body_dict) - # Get a token on BAR tenant using the unscoped token - body_dict = _build_user_auth( - token=unscoped_token["access"]["token"], - tenant_name="BAR") - scoped_token = self.controller.authenticate({}, body_dict) - - tenant = scoped_token["access"]["token"]["tenant"] - roles = scoped_token["access"]["metadata"]["roles"] - self.assertEqual(self.tenant_bar['id'], tenant["id"]) - self.assertThat(roles, 
matchers.Contains(self.role_member['id'])) - - def test_auth_scoped_token_bad_project_with_debug(self): - """Authenticating with an invalid project fails.""" - # Bug 1379952 reports poor user feedback, even in insecure_debug mode, - # when the user accidentally passes a project name as an ID. - # This test intentionally does exactly that. - body_dict = _build_user_auth( - username=self.user_foo['name'], - password=self.user_foo['password'], - tenant_id=self.tenant_bar['name']) - - # with insecure_debug enabled, this produces a friendly exception. - self.config_fixture.config(debug=True, insecure_debug=True) - e = self.assertRaises( - exception.Unauthorized, - self.controller.authenticate, - {}, body_dict) - # explicitly verify that the error message shows that a *name* is - # found where an *ID* is expected - self.assertIn( - 'Project ID not found: %s' % self.tenant_bar['name'], - six.text_type(e)) - - def test_auth_scoped_token_bad_project_without_debug(self): - """Authenticating with an invalid project fails.""" - # Bug 1379952 reports poor user feedback, even in insecure_debug mode, - # when the user accidentally passes a project name as an ID. - # This test intentionally does exactly that. - body_dict = _build_user_auth( - username=self.user_foo['name'], - password=self.user_foo['password'], - tenant_id=self.tenant_bar['name']) - - # with insecure_debug disabled (the default), authentication failure - # details are suppressed. - e = self.assertRaises( - exception.Unauthorized, - self.controller.authenticate, - {}, body_dict) - # explicitly verify that the error message details above have been - # suppressed. 
- self.assertNotIn( - 'Project ID not found: %s' % self.tenant_bar['name'], - six.text_type(e)) - - def test_auth_token_project_group_role(self): - """Verify getting a token in a tenant with group roles.""" - # Add a v2 style role in so we can check we get this back - self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], - self.tenant_bar['id'], - self.role_member['id']) - # Now create a group role for this user as well - domain1 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - new_group = unit.new_group_ref(domain_id=domain1['id']) - new_group = self.identity_api.create_group(new_group) - self.identity_api.add_user_to_group(self.user_foo['id'], - new_group['id']) - self.assignment_api.create_grant( - group_id=new_group['id'], - project_id=self.tenant_bar['id'], - role_id=self.role_admin['id']) - - # Get a scoped token for the tenant - body_dict = _build_user_auth( - username='FOO', - password='foo2', - tenant_name="BAR") - - scoped_token = self.controller.authenticate({}, body_dict) - - tenant = scoped_token["access"]["token"]["tenant"] - roles = scoped_token["access"]["metadata"]["roles"] - self.assertEqual(self.tenant_bar['id'], tenant["id"]) - self.assertIn(self.role_member['id'], roles) - self.assertIn(self.role_admin['id'], roles) - - def test_belongs_to_no_tenant(self): - r = self.controller.authenticate( - {}, - auth={ - 'passwordCredentials': { - 'username': self.user_foo['name'], - 'password': self.user_foo['password'] - } - }) - unscoped_token_id = r['access']['token']['id'] - self.assertRaises( - exception.Unauthorized, - self.controller.validate_token, - dict(is_admin=True, query_string={'belongsTo': 'BAR'}), - token_id=unscoped_token_id) - - def test_belongs_to(self): - body_dict = _build_user_auth( - username='FOO', - password='foo2', - tenant_name="BAR") - - scoped_token = self.controller.authenticate({}, body_dict) - scoped_token_id = scoped_token['access']['token']['id'] - - self.assertRaises( 
- exception.Unauthorized, - self.controller.validate_token, - dict(is_admin=True, query_string={'belongsTo': 'me'}), - token_id=scoped_token_id) - - self.assertRaises( - exception.Unauthorized, - self.controller.validate_token, - dict(is_admin=True, query_string={'belongsTo': 'BAR'}), - token_id=scoped_token_id) - - def test_token_auth_with_binding(self): - self.config_fixture.config(group='token', bind=['kerberos']) - body_dict = _build_user_auth() - unscoped_token = self.controller.authenticate( - self.context_with_remote_user, body_dict) - - # the token should have bind information in it - bind = unscoped_token['access']['token']['bind'] - self.assertEqual('FOO', bind['kerberos']) - - body_dict = _build_user_auth( - token=unscoped_token['access']['token'], - tenant_name='BAR') - - # using unscoped token without remote user context fails - self.assertRaises( - exception.Unauthorized, - self.controller.authenticate, - self.empty_context, body_dict) - - # using token with remote user context succeeds - scoped_token = self.controller.authenticate( - self.context_with_remote_user, body_dict) - - # the bind information should be carried over from the original token - bind = scoped_token['access']['token']['bind'] - self.assertEqual('FOO', bind['kerberos']) - - def test_deleting_role_revokes_token(self): - role_controller = assignment.controllers.Role() - project1 = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - self.resource_api.create_project(project1['id'], project1) - role_one = unit.new_role_ref(id='role_one') - self.role_api.create_role(role_one['id'], role_one) - self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], project1['id'], role_one['id']) - no_context = {} - - # Get a scoped token for the tenant - body_dict = _build_user_auth( - username=self.user_foo['name'], - password=self.user_foo['password'], - tenant_name=project1['name']) - token = self.controller.authenticate(no_context, body_dict) - # Ensure it is valid 
- token_id = token['access']['token']['id'] - self.controller.validate_token( - dict(is_admin=True, query_string={}), - token_id=token_id) - - # Delete the role, which should invalidate the token - role_controller.delete_role( - dict(is_admin=True, query_string={}), role_one['id']) - - # Check the token is now invalid - self.assertRaises( - exception.TokenNotFound, - self.controller.validate_token, - dict(is_admin=True, query_string={}), - token_id=token_id) - - def test_deleting_role_assignment_does_not_revoke_unscoped_token(self): - no_context = {} - admin_context = dict(is_admin=True, query_string={}) - - project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - self.resource_api.create_project(project['id'], project) - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], project['id'], role['id']) - - # Get an unscoped token. - token = self.controller.authenticate(no_context, _build_user_auth( - username=self.user_foo['name'], - password=self.user_foo['password'])) - token_id = token['access']['token']['id'] - - # Ensure it is valid - self.controller.validate_token(admin_context, token_id=token_id) - - # Delete the role assignment, which should not invalidate the token, - # because we're not consuming it with just an unscoped token. 
- self.assignment_api.remove_role_from_user_and_project( - self.user_foo['id'], project['id'], role['id']) - - # Ensure it is still valid - self.controller.validate_token(admin_context, token_id=token_id) - - def test_only_original_audit_id_is_kept(self): - context = {} - - def get_audit_ids(token): - return token['access']['token']['audit_ids'] - - # get a token - body_dict = _build_user_auth(username='FOO', password='foo2') - unscoped_token = self.controller.authenticate(context, body_dict) - starting_audit_id = get_audit_ids(unscoped_token)[0] - self.assertIsNotNone(starting_audit_id) - - # get another token to ensure the correct parent audit_id is set - body_dict = _build_user_auth(token=unscoped_token["access"]["token"]) - unscoped_token_2 = self.controller.authenticate(context, body_dict) - audit_ids = get_audit_ids(unscoped_token_2) - self.assertThat(audit_ids, matchers.HasLength(2)) - self.assertThat(audit_ids[-1], matchers.Equals(starting_audit_id)) - - # get another token from token 2 and ensure the correct parent - # audit_id is set - body_dict = _build_user_auth(token=unscoped_token_2["access"]["token"]) - unscoped_token_3 = self.controller.authenticate(context, body_dict) - audit_ids = get_audit_ids(unscoped_token_3) - self.assertThat(audit_ids, matchers.HasLength(2)) - self.assertThat(audit_ids[-1], matchers.Equals(starting_audit_id)) - - def test_revoke_by_audit_chain_id_original_token(self): - self.config_fixture.config(group='token', revoke_by_id=False) - context = {} - - # get a token - body_dict = _build_user_auth(username='FOO', password='foo2') - unscoped_token = self.controller.authenticate(context, body_dict) - token_id = unscoped_token['access']['token']['id'] - self.time_fixture.advance_time_seconds(1) - - # get a second token - body_dict = _build_user_auth(token=unscoped_token["access"]["token"]) - unscoped_token_2 = self.controller.authenticate(context, body_dict) - token_2_id = unscoped_token_2['access']['token']['id'] - 
self.time_fixture.advance_time_seconds(1) - - self.token_provider_api.revoke_token(token_id, revoke_chain=True) - - self.assertRaises(exception.TokenNotFound, - self.token_provider_api.validate_v2_token, - token_id=token_id) - self.assertRaises(exception.TokenNotFound, - self.token_provider_api.validate_v2_token, - token_id=token_2_id) - - def test_revoke_by_audit_chain_id_chained_token(self): - self.config_fixture.config(group='token', revoke_by_id=False) - context = {} - - # get a token - body_dict = _build_user_auth(username='FOO', password='foo2') - unscoped_token = self.controller.authenticate(context, body_dict) - token_id = unscoped_token['access']['token']['id'] - self.time_fixture.advance_time_seconds(1) - - # get a second token - body_dict = _build_user_auth(token=unscoped_token["access"]["token"]) - unscoped_token_2 = self.controller.authenticate(context, body_dict) - token_2_id = unscoped_token_2['access']['token']['id'] - self.time_fixture.advance_time_seconds(1) - - self.token_provider_api.revoke_token(token_2_id, revoke_chain=True) - - self.assertRaises(exception.TokenNotFound, - self.token_provider_api.validate_v2_token, - token_id=token_id) - self.assertRaises(exception.TokenNotFound, - self.token_provider_api.validate_v2_token, - token_id=token_2_id) - - def _mock_audit_info(self, parent_audit_id): - # NOTE(morgainfainberg): The token model and other cases that are - # extracting the audit id expect 'None' if the audit id doesn't - # exist. This ensures that the audit_id is None and the - # audit_chain_id will also return None. 
- return [None, None] - - def test_revoke_with_no_audit_info(self): - self.config_fixture.config(group='token', revoke_by_id=False) - context = {} - - with mock.patch.object(provider, 'audit_info', self._mock_audit_info): - # get a token - body_dict = _build_user_auth(username='FOO', password='foo2') - unscoped_token = self.controller.authenticate(context, body_dict) - token_id = unscoped_token['access']['token']['id'] - self.time_fixture.advance_time_seconds(1) - - # get a second token - body_dict = _build_user_auth( - token=unscoped_token['access']['token']) - unscoped_token_2 = self.controller.authenticate(context, body_dict) - token_2_id = unscoped_token_2['access']['token']['id'] - self.time_fixture.advance_time_seconds(1) - - self.token_provider_api.revoke_token(token_id, revoke_chain=True) - self.time_fixture.advance_time_seconds(1) - - revoke_events = self.revoke_api.list_events() - self.assertThat(revoke_events, matchers.HasLength(1)) - revoke_event = revoke_events[0].to_dict() - self.assertIn('expires_at', revoke_event) - self.assertEqual(unscoped_token_2['access']['token']['expires'], - revoke_event['expires_at']) - - self.assertRaises(exception.TokenNotFound, - self.token_provider_api.validate_v2_token, - token_id=token_id) - self.assertRaises(exception.TokenNotFound, - self.token_provider_api.validate_v2_token, - token_id=token_2_id) - - # get a new token, with no audit info - body_dict = _build_user_auth(username='FOO', password='foo2') - unscoped_token = self.controller.authenticate(context, body_dict) - token_id = unscoped_token['access']['token']['id'] - self.time_fixture.advance_time_seconds(1) - # get a second token - body_dict = _build_user_auth( - token=unscoped_token['access']['token']) - unscoped_token_2 = self.controller.authenticate(context, body_dict) - token_2_id = unscoped_token_2['access']['token']['id'] - self.time_fixture.advance_time_seconds(1) - - # Revoke by audit_id, no audit_info means both parent and child - # token are revoked. 
- self.token_provider_api.revoke_token(token_id) - self.time_fixture.advance_time_seconds(1) - - revoke_events = self.revoke_api.list_events() - self.assertThat(revoke_events, matchers.HasLength(2)) - revoke_event = revoke_events[1].to_dict() - self.assertIn('expires_at', revoke_event) - self.assertEqual(unscoped_token_2['access']['token']['expires'], - revoke_event['expires_at']) - - self.assertRaises(exception.TokenNotFound, - self.token_provider_api.validate_v2_token, - token_id=token_id) - self.assertRaises(exception.TokenNotFound, - self.token_provider_api.validate_v2_token, - token_id=token_2_id) - - -class FernetAuthWithToken(AuthWithToken): - def config_overrides(self): - super(FernetAuthWithToken, self).config_overrides() - self.config_fixture.config(group='token', provider='fernet') - self.useFixture(ksfixtures.KeyRepository(self.config_fixture)) - - def test_token_auth_with_binding(self): - self.config_fixture.config(group='token', bind=['kerberos']) - body_dict = _build_user_auth() - self.assertRaises(exception.NotImplemented, - self.controller.authenticate, - self.context_with_remote_user, - body_dict) - - def test_revoke_with_no_audit_info(self): - self.skipTest('Fernet with v2.0 and revocation is broken') - - def test_deleting_role_revokes_token(self): - self.skipTest('Fernet with v2.0 and revocation is broken') - - -class AuthWithPasswordCredentials(AuthTest): - def test_auth_invalid_user(self): - """Verify exception is raised if invalid user.""" - body_dict = _build_user_auth( - username=uuid.uuid4().hex, - password=uuid.uuid4().hex) - self.assertRaises( - exception.Unauthorized, - self.controller.authenticate, - {}, body_dict) - - def test_auth_valid_user_invalid_password(self): - """Verify exception is raised if invalid password.""" - body_dict = _build_user_auth( - username="FOO", - password=uuid.uuid4().hex) - self.assertRaises( - exception.Unauthorized, - self.controller.authenticate, - {}, body_dict) - - def test_auth_empty_password(self): - 
"""Verify exception is raised if empty password.""" - body_dict = _build_user_auth( - username="FOO", - password="") - self.assertRaises( - exception.Unauthorized, - self.controller.authenticate, - {}, body_dict) - - def test_auth_no_password(self): - """Verify exception is raised if empty password.""" - body_dict = _build_user_auth(username="FOO") - self.assertRaises( - exception.ValidationError, - self.controller.authenticate, - {}, body_dict) - - def test_authenticate_blank_password_credentials(self): - """Sending empty dict as passwordCredentials raises 400 Bad Requset.""" - body_dict = {'passwordCredentials': {}, 'tenantName': 'demo'} - self.assertRaises(exception.ValidationError, - self.controller.authenticate, - {}, body_dict) - - def test_authenticate_no_username(self): - """Verify skipping username raises the right exception.""" - body_dict = _build_user_auth(password="pass", - tenant_name="demo") - self.assertRaises(exception.ValidationError, - self.controller.authenticate, - {}, body_dict) - - def test_bind_without_remote_user(self): - self.config_fixture.config(group='token', bind=['kerberos']) - body_dict = _build_user_auth(username='FOO', password='foo2', - tenant_name='BAR') - token = self.controller.authenticate({}, body_dict) - self.assertNotIn('bind', token['access']['token']) - - def test_change_default_domain_id(self): - # If the default_domain_id config option is not the default then the - # user in auth data is from the new default domain. - - # 1) Create a new domain. - new_domain = unit.new_domain_ref() - new_domain_id = new_domain['id'] - - self.resource_api.create_domain(new_domain_id, new_domain) - - # 2) Create user "foo" in new domain with different password than - # default-domain foo. 
- new_user = unit.create_user(self.identity_api, - name=self.user_foo['name'], - domain_id=new_domain_id) - - # 3) Update the default_domain_id config option to the new domain - - self.config_fixture.config(group='identity', - default_domain_id=new_domain_id) - - # 4) Authenticate as "foo" using the password in the new domain. - - body_dict = _build_user_auth( - username=self.user_foo['name'], - password=new_user['password']) - - # The test is successful if this doesn't raise, so no need to assert. - self.controller.authenticate({}, body_dict) - - -class AuthWithRemoteUser(AuthTest): - def test_unscoped_remote_authn(self): - """Verify getting an unscoped token with external authn.""" - body_dict = _build_user_auth( - username='FOO', - password='foo2') - local_token = self.controller.authenticate( - {}, body_dict) - - body_dict = _build_user_auth() - remote_token = self.controller.authenticate( - self.context_with_remote_user, body_dict) - - self.assertEqualTokens(local_token, remote_token, - enforce_audit_ids=False) - - def test_unscoped_remote_authn_jsonless(self): - """Verify that external auth with invalid request fails.""" - self.assertRaises( - exception.ValidationError, - self.controller.authenticate, - {'REMOTE_USER': 'FOO'}, - None) - - def test_scoped_remote_authn(self): - """Verify getting a token with external authn.""" - body_dict = _build_user_auth( - username='FOO', - password='foo2', - tenant_name='BAR') - local_token = self.controller.authenticate( - {}, body_dict) - - body_dict = _build_user_auth( - tenant_name='BAR') - remote_token = self.controller.authenticate( - self.context_with_remote_user, body_dict) - - self.assertEqualTokens(local_token, remote_token, - enforce_audit_ids=False) - - def test_scoped_nometa_remote_authn(self): - """Verify getting a token with external authn and no metadata.""" - body_dict = _build_user_auth( - username='TWO', - password='two2', - tenant_name='BAZ') - local_token = self.controller.authenticate( - {}, 
body_dict) - - body_dict = _build_user_auth(tenant_name='BAZ') - remote_token = self.controller.authenticate( - {'environment': {'REMOTE_USER': 'TWO'}}, body_dict) - - self.assertEqualTokens(local_token, remote_token, - enforce_audit_ids=False) - - def test_scoped_remote_authn_invalid_user(self): - """Verify that external auth with invalid user fails.""" - body_dict = _build_user_auth(tenant_name="BAR") - self.assertRaises( - exception.Unauthorized, - self.controller.authenticate, - {'environment': {'REMOTE_USER': uuid.uuid4().hex}}, - body_dict) - - def test_bind_with_kerberos(self): - self.config_fixture.config(group='token', bind=['kerberos']) - body_dict = _build_user_auth(tenant_name="BAR") - token = self.controller.authenticate(self.context_with_remote_user, - body_dict) - self.assertEqual('FOO', token['access']['token']['bind']['kerberos']) - - def test_bind_without_config_opt(self): - self.config_fixture.config(group='token', bind=['x509']) - body_dict = _build_user_auth(tenant_name='BAR') - token = self.controller.authenticate(self.context_with_remote_user, - body_dict) - self.assertNotIn('bind', token['access']['token']) - - -class AuthWithTrust(AuthTest): - def setUp(self): - super(AuthWithTrust, self).setUp() - - self.trust_controller = trust.controllers.TrustV3() - self.auth_v3_controller = auth.controllers.Auth() - self.trustor = self.user_foo - self.trustee = self.user_two - self.assigned_roles = [self.role_member['id'], - self.role_browser['id']] - for assigned_role in self.assigned_roles: - self.assignment_api.add_role_to_user_and_project( - self.trustor['id'], self.tenant_bar['id'], assigned_role) - - self.sample_data = {'trustor_user_id': self.trustor['id'], - 'trustee_user_id': self.trustee['id'], - 'project_id': self.tenant_bar['id'], - 'impersonation': True, - 'roles': [{'id': self.role_browser['id']}, - {'name': self.role_member['name']}]} - - def config_overrides(self): - super(AuthWithTrust, self).config_overrides() - 
self.config_fixture.config(group='trust', enabled=True) - - def _create_auth_context(self, token_id): - token_ref = token_model.KeystoneToken( - token_id=token_id, - token_data=self.token_provider_api.validate_token(token_id)) - auth_context = authorization.token_to_auth_context(token_ref) - # NOTE(gyee): if public_endpoint and admin_endpoint are not set, which - # is the default, the base url will be constructed from the environment - # variables wsgi.url_scheme, SERVER_NAME, SERVER_PORT, and SCRIPT_NAME. - # We have to set them in the context so the base url can be constructed - # accordingly. - return {'environment': {authorization.AUTH_CONTEXT_ENV: auth_context, - 'wsgi.url_scheme': 'http', - 'SCRIPT_NAME': '/v3', - 'SERVER_PORT': '80', - 'SERVER_NAME': HOST}, - 'token_id': token_id, - 'host_url': HOST_URL} - - def create_trust(self, trust_data, trustor_name, expires_at=None, - impersonation=True): - username = trustor_name - password = 'foo2' - unscoped_token = self.get_unscoped_token(username, password) - context = self._create_auth_context( - unscoped_token['access']['token']['id']) - trust_data_copy = copy.deepcopy(trust_data) - trust_data_copy['expires_at'] = expires_at - trust_data_copy['impersonation'] = impersonation - - return self.trust_controller.create_trust( - context, trust=trust_data_copy)['trust'] - - def get_unscoped_token(self, username, password='foo2'): - body_dict = _build_user_auth(username=username, password=password) - return self.controller.authenticate({}, body_dict) - - def build_v2_token_request(self, username, password, trust, - tenant_id=None): - if not tenant_id: - tenant_id = self.tenant_bar['id'] - unscoped_token = self.get_unscoped_token(username, password) - unscoped_token_id = unscoped_token['access']['token']['id'] - request_body = _build_user_auth(token={'id': unscoped_token_id}, - trust_id=trust['id'], - tenant_id=tenant_id) - return request_body - - def test_create_trust_bad_data_fails(self): - unscoped_token = 
self.get_unscoped_token(self.trustor['name']) - context = self._create_auth_context( - unscoped_token['access']['token']['id']) - bad_sample_data = {'trustor_user_id': self.trustor['id'], - 'project_id': self.tenant_bar['id'], - 'roles': [{'id': self.role_browser['id']}]} - - self.assertRaises(exception.ValidationError, - self.trust_controller.create_trust, - context, trust=bad_sample_data) - - def test_create_trust_no_roles(self): - unscoped_token = self.get_unscoped_token(self.trustor['name']) - context = {'token_id': unscoped_token['access']['token']['id']} - self.sample_data['roles'] = [] - self.assertRaises(exception.Forbidden, - self.trust_controller.create_trust, - context, trust=self.sample_data) - - def test_create_trust(self): - expires_at = (timeutils.utcnow() + - datetime.timedelta(minutes=10)).strftime(TIME_FORMAT) - new_trust = self.create_trust(self.sample_data, self.trustor['name'], - expires_at=expires_at) - self.assertEqual(self.trustor['id'], new_trust['trustor_user_id']) - self.assertEqual(self.trustee['id'], new_trust['trustee_user_id']) - role_ids = [self.role_browser['id'], self.role_member['id']] - self.assertTrue(timeutils.parse_strtime(new_trust['expires_at'], - fmt=TIME_FORMAT)) - self.assertIn('%s/v3/OS-TRUST/' % HOST_URL, - new_trust['links']['self']) - self.assertIn('%s/v3/OS-TRUST/' % HOST_URL, - new_trust['roles_links']['self']) - - for role in new_trust['roles']: - self.assertIn(role['id'], role_ids) - - def test_create_trust_expires_bad(self): - self.assertRaises(exception.ValidationTimeStampError, - self.create_trust, self.sample_data, - self.trustor['name'], expires_at="bad") - self.assertRaises(exception.ValidationTimeStampError, - self.create_trust, self.sample_data, - self.trustor['name'], expires_at="") - self.assertRaises(exception.ValidationTimeStampError, - self.create_trust, self.sample_data, - self.trustor['name'], expires_at="Z") - - def test_create_trust_expires_older_than_now(self): - 
self.assertRaises(exception.ValidationExpirationError, - self.create_trust, self.sample_data, - self.trustor['name'], - expires_at="2010-06-04T08:44:31.999999Z") - - def test_create_trust_without_project_id(self): - """Verify that trust can be created without project id. - - Also, token can be generated with that trust. - """ - unscoped_token = self.get_unscoped_token(self.trustor['name']) - context = self._create_auth_context( - unscoped_token['access']['token']['id']) - self.sample_data['project_id'] = None - self.sample_data['roles'] = [] - new_trust = self.trust_controller.create_trust( - context, trust=self.sample_data)['trust'] - self.assertEqual(self.trustor['id'], new_trust['trustor_user_id']) - self.assertEqual(self.trustee['id'], new_trust['trustee_user_id']) - self.assertIs(new_trust['impersonation'], True) - auth_response = self.fetch_v2_token_from_trust(new_trust) - token_user = auth_response['access']['user'] - self.assertEqual(token_user['id'], new_trust['trustor_user_id']) - - def test_get_trust(self): - unscoped_token = self.get_unscoped_token(self.trustor['name']) - context = self._create_auth_context( - unscoped_token['access']['token']['id']) - new_trust = self.trust_controller.create_trust( - context, trust=self.sample_data)['trust'] - trust = self.trust_controller.get_trust(context, - new_trust['id'])['trust'] - self.assertEqual(self.trustor['id'], trust['trustor_user_id']) - self.assertEqual(self.trustee['id'], trust['trustee_user_id']) - role_ids = [self.role_browser['id'], self.role_member['id']] - for role in new_trust['roles']: - self.assertIn(role['id'], role_ids) - - def test_get_trust_without_auth_context(self): - """Verify a trust cannot be retrieved if auth context is missing.""" - unscoped_token = self.get_unscoped_token(self.trustor['name']) - context = self._create_auth_context( - unscoped_token['access']['token']['id']) - new_trust = self.trust_controller.create_trust( - context, trust=self.sample_data)['trust'] - # Delete the 
auth context before calling get_trust(). - del context['environment'][authorization.AUTH_CONTEXT_ENV] - self.assertRaises(exception.Forbidden, - self.trust_controller.get_trust, context, - new_trust['id']) - - def test_create_trust_no_impersonation(self): - new_trust = self.create_trust(self.sample_data, self.trustor['name'], - expires_at=None, impersonation=False) - self.assertEqual(self.trustor['id'], new_trust['trustor_user_id']) - self.assertEqual(self.trustee['id'], new_trust['trustee_user_id']) - self.assertIs(new_trust['impersonation'], False) - auth_response = self.fetch_v2_token_from_trust(new_trust) - token_user = auth_response['access']['user'] - self.assertEqual(token_user['id'], new_trust['trustee_user_id']) - - def test_create_trust_impersonation(self): - new_trust = self.create_trust(self.sample_data, self.trustor['name']) - self.assertEqual(self.trustor['id'], new_trust['trustor_user_id']) - self.assertEqual(self.trustee['id'], new_trust['trustee_user_id']) - self.assertIs(new_trust['impersonation'], True) - auth_response = self.fetch_v2_token_from_trust(new_trust) - token_user = auth_response['access']['user'] - self.assertEqual(token_user['id'], new_trust['trustor_user_id']) - - def test_token_from_trust_wrong_user_fails(self): - new_trust = self.create_trust(self.sample_data, self.trustor['name']) - request_body = self.build_v2_token_request('FOO', 'foo2', new_trust) - self.assertRaises(exception.Forbidden, self.controller.authenticate, - {}, request_body) - - def test_token_from_trust_wrong_project_fails(self): - for assigned_role in self.assigned_roles: - self.assignment_api.add_role_to_user_and_project( - self.trustor['id'], self.tenant_baz['id'], assigned_role) - new_trust = self.create_trust(self.sample_data, self.trustor['name']) - request_body = self.build_v2_token_request('TWO', 'two2', new_trust, - self.tenant_baz['id']) - self.assertRaises(exception.Forbidden, self.controller.authenticate, - {}, request_body) - - def 
fetch_v2_token_from_trust(self, trust): - request_body = self.build_v2_token_request('TWO', 'two2', trust) - auth_response = self.controller.authenticate({}, request_body) - return auth_response - - def fetch_v3_token_from_trust(self, trust, trustee): - v3_password_data = { - 'identity': { - "methods": ["password"], - "password": { - "user": { - "id": trustee["id"], - "password": trustee["password"] - } - } - }, - 'scope': { - 'project': { - 'id': self.tenant_baz['id'] - } - } - } - auth_response = (self.auth_v3_controller.authenticate_for_token - ({'environment': {}, - 'query_string': {}}, - v3_password_data)) - token = auth_response.headers['X-Subject-Token'] - - v3_req_with_trust = { - "identity": { - "methods": ["token"], - "token": {"id": token}}, - "scope": { - "OS-TRUST:trust": {"id": trust['id']}}} - token_auth_response = (self.auth_v3_controller.authenticate_for_token - ({'environment': {}, - 'query_string': {}}, - v3_req_with_trust)) - return token_auth_response - - def test_create_v3_token_from_trust(self): - new_trust = self.create_trust(self.sample_data, self.trustor['name']) - auth_response = self.fetch_v3_token_from_trust(new_trust, self.trustee) - - trust_token_user = auth_response.json['token']['user'] - self.assertEqual(self.trustor['id'], trust_token_user['id']) - - trust_token_trust = auth_response.json['token']['OS-TRUST:trust'] - self.assertEqual(trust_token_trust['id'], new_trust['id']) - self.assertEqual(self.trustor['id'], - trust_token_trust['trustor_user']['id']) - self.assertEqual(self.trustee['id'], - trust_token_trust['trustee_user']['id']) - - trust_token_roles = auth_response.json['token']['roles'] - self.assertEqual(2, len(trust_token_roles)) - - def test_v3_trust_token_get_token_fails(self): - new_trust = self.create_trust(self.sample_data, self.trustor['name']) - auth_response = self.fetch_v3_token_from_trust(new_trust, self.trustee) - trust_token = auth_response.headers['X-Subject-Token'] - v3_token_data = {'identity': { - 
'methods': ['token'], - 'token': {'id': trust_token} - }} - self.assertRaises( - exception.Forbidden, - self.auth_v3_controller.authenticate_for_token, - {'environment': {}, - 'query_string': {}}, v3_token_data) - - def test_token_from_trust(self): - new_trust = self.create_trust(self.sample_data, self.trustor['name']) - auth_response = self.fetch_v2_token_from_trust(new_trust) - - self.assertIsNotNone(auth_response) - self.assertEqual(2, - len(auth_response['access']['metadata']['roles']), - "user_foo has three roles, but the token should" - " only get the two roles specified in the trust.") - - def assert_token_count_for_trust(self, trust, expected_value): - tokens = self.token_provider_api._persistence._list_tokens( - self.trustee['id'], trust_id=trust['id']) - token_count = len(tokens) - self.assertEqual(expected_value, token_count) - - def test_delete_tokens_for_user_invalidates_tokens_from_trust(self): - new_trust = self.create_trust(self.sample_data, self.trustor['name']) - self.assert_token_count_for_trust(new_trust, 0) - self.fetch_v2_token_from_trust(new_trust) - self.assert_token_count_for_trust(new_trust, 1) - self.token_provider_api._persistence.delete_tokens_for_user( - self.trustee['id']) - self.assert_token_count_for_trust(new_trust, 0) - - def test_token_from_trust_cant_get_another_token(self): - new_trust = self.create_trust(self.sample_data, self.trustor['name']) - auth_response = self.fetch_v2_token_from_trust(new_trust) - trust_token_id = auth_response['access']['token']['id'] - request_body = _build_user_auth(token={'id': trust_token_id}, - tenant_id=self.tenant_bar['id']) - self.assertRaises( - exception.Unauthorized, - self.controller.authenticate, {}, request_body) - - def test_delete_trust_revokes_token(self): - unscoped_token = self.get_unscoped_token(self.trustor['name']) - new_trust = self.create_trust(self.sample_data, self.trustor['name']) - context = self._create_auth_context( - unscoped_token['access']['token']['id']) - 
self.fetch_v2_token_from_trust(new_trust) - trust_id = new_trust['id'] - tokens = self.token_provider_api._persistence._list_tokens( - self.trustor['id'], - trust_id=trust_id) - self.assertEqual(1, len(tokens)) - self.trust_controller.delete_trust(context, trust_id=trust_id) - tokens = self.token_provider_api._persistence._list_tokens( - self.trustor['id'], - trust_id=trust_id) - self.assertEqual(0, len(tokens)) - - def test_token_from_trust_with_no_role_fails(self): - new_trust = self.create_trust(self.sample_data, self.trustor['name']) - for assigned_role in self.assigned_roles: - self.assignment_api.remove_role_from_user_and_project( - self.trustor['id'], self.tenant_bar['id'], assigned_role) - request_body = self.build_v2_token_request('TWO', 'two2', new_trust) - self.assertRaises( - exception.Forbidden, - self.controller.authenticate, {}, request_body) - - def test_expired_trust_get_token_fails(self): - expires_at = (timeutils.utcnow() + - datetime.timedelta(minutes=5)).strftime(TIME_FORMAT) - time_expired = timeutils.utcnow() + datetime.timedelta(minutes=10) - new_trust = self.create_trust(self.sample_data, self.trustor['name'], - expires_at) - with mock.patch.object(timeutils, 'utcnow') as mock_now: - mock_now.return_value = time_expired - request_body = self.build_v2_token_request('TWO', 'two2', - new_trust) - self.assertRaises( - exception.Forbidden, - self.controller.authenticate, {}, request_body) - - def test_token_from_trust_with_wrong_role_fails(self): - new_trust = self.create_trust(self.sample_data, self.trustor['name']) - self.assignment_api.add_role_to_user_and_project( - self.trustor['id'], - self.tenant_bar['id'], - self.role_other['id']) - for assigned_role in self.assigned_roles: - self.assignment_api.remove_role_from_user_and_project( - self.trustor['id'], self.tenant_bar['id'], assigned_role) - - request_body = self.build_v2_token_request('TWO', 'two2', new_trust) - - self.assertRaises( - exception.Forbidden, - self.controller.authenticate, 
{}, request_body) - - def test_do_not_consume_remaining_uses_when_get_token_fails(self): - trust_data = copy.deepcopy(self.sample_data) - trust_data['remaining_uses'] = 3 - new_trust = self.create_trust(trust_data, self.trustor['name']) - - for assigned_role in self.assigned_roles: - self.assignment_api.remove_role_from_user_and_project( - self.trustor['id'], self.tenant_bar['id'], assigned_role) - - request_body = self.build_v2_token_request('TWO', 'two2', new_trust) - self.assertRaises(exception.Forbidden, - self.controller.authenticate, {}, request_body) - - unscoped_token = self.get_unscoped_token(self.trustor['name']) - context = self._create_auth_context( - unscoped_token['access']['token']['id']) - trust = self.trust_controller.get_trust(context, - new_trust['id'])['trust'] - self.assertEqual(3, trust['remaining_uses']) - - def disable_user(self, user): - user['enabled'] = False - self.identity_api.update_user(user['id'], user) - - def test_trust_get_token_fails_if_trustor_disabled(self): - new_trust = self.create_trust(self.sample_data, self.trustor['name']) - request_body = self.build_v2_token_request(self.trustee['name'], - self.trustee['password'], - new_trust) - self.disable_user(self.trustor) - self.assertRaises( - exception.Forbidden, - self.controller.authenticate, {}, request_body) - - def test_trust_get_token_fails_if_trustee_disabled(self): - new_trust = self.create_trust(self.sample_data, self.trustor['name']) - request_body = self.build_v2_token_request(self.trustee['name'], - self.trustee['password'], - new_trust) - self.disable_user(self.trustee) - self.assertRaises( - exception.Unauthorized, - self.controller.authenticate, {}, request_body) - - -class TokenExpirationTest(AuthTest): - - @mock.patch.object(timeutils, 'utcnow') - def _maintain_token_expiration(self, mock_utcnow): - """Token expiration should be maintained after re-auth & validation.""" - now = datetime.datetime.utcnow() - mock_utcnow.return_value = now - - r = 
self.controller.authenticate( - {}, - auth={ - 'passwordCredentials': { - 'username': self.user_foo['name'], - 'password': self.user_foo['password'] - } - }) - unscoped_token_id = r['access']['token']['id'] - original_expiration = r['access']['token']['expires'] - - mock_utcnow.return_value = now + datetime.timedelta(seconds=1) - - r = self.controller.validate_token( - dict(is_admin=True, query_string={}), - token_id=unscoped_token_id) - self.assertEqual(original_expiration, r['access']['token']['expires']) - - mock_utcnow.return_value = now + datetime.timedelta(seconds=2) - - r = self.controller.authenticate( - {}, - auth={ - 'token': { - 'id': unscoped_token_id, - }, - 'tenantId': self.tenant_bar['id'], - }) - scoped_token_id = r['access']['token']['id'] - self.assertEqual(original_expiration, r['access']['token']['expires']) - - mock_utcnow.return_value = now + datetime.timedelta(seconds=3) - - r = self.controller.validate_token( - dict(is_admin=True, query_string={}), - token_id=scoped_token_id) - self.assertEqual(original_expiration, r['access']['token']['expires']) - - def test_maintain_uuid_token_expiration(self): - self.config_fixture.config(group='token', provider='uuid') - self._maintain_token_expiration() - - -class AuthCatalog(unit.SQLDriverOverrides, AuthTest): - """Tests for the catalog provided in the auth response.""" - - def config_files(self): - config_files = super(AuthCatalog, self).config_files() - # We need to use a backend that supports disabled endpoints, like the - # SQL backend. 
- config_files.append(unit.dirs.tests_conf('backend_sql.conf')) - return config_files - - def _create_endpoints(self): - def create_region(**kwargs): - ref = unit.new_region_ref(**kwargs) - self.catalog_api.create_region(ref) - return ref - - def create_endpoint(service_id, region, **kwargs): - endpoint = unit.new_endpoint_ref(region_id=region, - service_id=service_id, **kwargs) - - self.catalog_api.create_endpoint(endpoint['id'], endpoint) - return endpoint - - # Create a service for use with the endpoints. - def create_service(**kwargs): - ref = unit.new_service_ref(**kwargs) - self.catalog_api.create_service(ref['id'], ref) - return ref - - enabled_service_ref = create_service(enabled=True) - disabled_service_ref = create_service(enabled=False) - - region = create_region() - - # Create endpoints - enabled_endpoint_ref = create_endpoint( - enabled_service_ref['id'], region['id']) - create_endpoint( - enabled_service_ref['id'], region['id'], enabled=False, - interface='internal') - create_endpoint( - disabled_service_ref['id'], region['id']) - - return enabled_endpoint_ref - - def test_auth_catalog_disabled_endpoint(self): - """On authenticate, get a catalog that excludes disabled endpoints.""" - endpoint_ref = self._create_endpoints() - - # Authenticate - body_dict = _build_user_auth( - username='FOO', - password='foo2', - tenant_name="BAR") - - token = self.controller.authenticate({}, body_dict) - - # Check the catalog - self.assertEqual(1, len(token['access']['serviceCatalog'])) - endpoint = token['access']['serviceCatalog'][0]['endpoints'][0] - self.assertEqual( - 1, len(token['access']['serviceCatalog'][0]['endpoints'])) - - exp_endpoint = { - 'id': endpoint_ref['id'], - 'publicURL': endpoint_ref['url'], - 'region': endpoint_ref['region_id'], - } - - self.assertEqual(exp_endpoint, endpoint) - - def test_validate_catalog_disabled_endpoint(self): - """On validate, get back a catalog that excludes disabled endpoints.""" - endpoint_ref = self._create_endpoints() 
- - # Authenticate - body_dict = _build_user_auth( - username='FOO', - password='foo2', - tenant_name="BAR") - - token = self.controller.authenticate({}, body_dict) - - # Validate - token_id = token['access']['token']['id'] - validate_ref = self.controller.validate_token( - dict(is_admin=True, query_string={}), - token_id=token_id) - - # Check the catalog - self.assertEqual(1, len(token['access']['serviceCatalog'])) - endpoint = validate_ref['access']['serviceCatalog'][0]['endpoints'][0] - self.assertEqual( - 1, len(token['access']['serviceCatalog'][0]['endpoints'])) - - exp_endpoint = { - 'id': endpoint_ref['id'], - 'publicURL': endpoint_ref['url'], - 'region': endpoint_ref['region_id'], - } - - self.assertEqual(exp_endpoint, endpoint) - - -class NonDefaultAuthTest(unit.TestCase): - - def test_add_non_default_auth_method(self): - self.config_fixture.config(group='auth', - methods=['password', 'token', 'custom']) - config.setup_authentication() - self.assertTrue(hasattr(CONF.auth, 'custom')) diff --git a/keystone-moon/keystone/tests/unit/test_auth_plugin.py b/keystone-moon/keystone/tests/unit/test_auth_plugin.py deleted file mode 100644 index f0862ed6..00000000 --- a/keystone-moon/keystone/tests/unit/test_auth_plugin.py +++ /dev/null @@ -1,190 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import uuid - -import mock - -from keystone import auth -from keystone import exception -from keystone.tests import unit - - -# for testing purposes only -METHOD_NAME = 'simple_challenge_response' -EXPECTED_RESPONSE = uuid.uuid4().hex -DEMO_USER_ID = uuid.uuid4().hex - - -class SimpleChallengeResponse(auth.AuthMethodHandler): - def authenticate(self, context, auth_payload, user_context): - if 'response' in auth_payload: - if auth_payload['response'] != EXPECTED_RESPONSE: - raise exception.Unauthorized('Wrong answer') - user_context['user_id'] = DEMO_USER_ID - else: - return {"challenge": "What's the name of your high school?"} - - -class TestAuthPlugin(unit.SQLDriverOverrides, unit.TestCase): - def setUp(self): - super(TestAuthPlugin, self).setUp() - self.load_backends() - - self.api = auth.controllers.Auth() - - def config_overrides(self): - super(TestAuthPlugin, self).config_overrides() - method_opts = { - METHOD_NAME: - 'keystone.tests.unit.test_auth_plugin.SimpleChallengeResponse', - } - - self.auth_plugin_config_override( - methods=['external', 'password', 'token', METHOD_NAME], - **method_opts) - - def test_unsupported_auth_method(self): - method_name = uuid.uuid4().hex - auth_data = {'methods': [method_name]} - auth_data[method_name] = {'test': 'test'} - auth_data = {'identity': auth_data} - self.assertRaises(exception.AuthMethodNotSupported, - auth.controllers.AuthInfo.create, - None, - auth_data) - - def test_addition_auth_steps(self): - auth_data = {'methods': [METHOD_NAME]} - auth_data[METHOD_NAME] = { - 'test': 'test'} - auth_data = {'identity': auth_data} - auth_info = auth.controllers.AuthInfo.create(None, auth_data) - auth_context = {'extras': {}, 'method_names': []} - try: - self.api.authenticate({'environment': {}}, auth_info, auth_context) - except exception.AdditionalAuthRequired as e: - self.assertIn('methods', e.authentication) - self.assertIn(METHOD_NAME, e.authentication['methods']) - self.assertIn(METHOD_NAME, e.authentication) - 
self.assertIn('challenge', e.authentication[METHOD_NAME]) - - # test correct response - auth_data = {'methods': [METHOD_NAME]} - auth_data[METHOD_NAME] = { - 'response': EXPECTED_RESPONSE} - auth_data = {'identity': auth_data} - auth_info = auth.controllers.AuthInfo.create(None, auth_data) - auth_context = {'extras': {}, 'method_names': []} - self.api.authenticate({'environment': {}}, auth_info, auth_context) - self.assertEqual(DEMO_USER_ID, auth_context['user_id']) - - # test incorrect response - auth_data = {'methods': [METHOD_NAME]} - auth_data[METHOD_NAME] = { - 'response': uuid.uuid4().hex} - auth_data = {'identity': auth_data} - auth_info = auth.controllers.AuthInfo.create(None, auth_data) - auth_context = {'extras': {}, 'method_names': []} - self.assertRaises(exception.Unauthorized, - self.api.authenticate, - {'environment': {}}, - auth_info, - auth_context) - - def test_duplicate_method(self): - # Having the same method twice doesn't cause load_auth_methods to fail. - self.auth_plugin_config_override( - methods=['external', 'external']) - self.clear_auth_plugin_registry() - auth.controllers.load_auth_methods() - self.assertIn('external', auth.controllers.AUTH_METHODS) - - -class TestAuthPluginDynamicOptions(TestAuthPlugin): - def config_overrides(self): - super(TestAuthPluginDynamicOptions, self).config_overrides() - # Clear the override for the [auth] ``methods`` option so it is - # possible to load the options from the config file. 
- self.config_fixture.conf.clear_override('methods', group='auth') - - def config_files(self): - config_files = super(TestAuthPluginDynamicOptions, self).config_files() - config_files.append(unit.dirs.tests_conf('test_auth_plugin.conf')) - return config_files - - -class TestMapped(unit.TestCase): - def setUp(self): - super(TestMapped, self).setUp() - self.load_backends() - - self.api = auth.controllers.Auth() - - def config_files(self): - config_files = super(TestMapped, self).config_files() - config_files.append(unit.dirs.tests_conf('test_auth_plugin.conf')) - return config_files - - def auth_plugin_config_override(self, methods=None, **method_classes): - # Do not apply the auth plugin overrides so that the config file is - # tested - pass - - def _test_mapped_invocation_with_method_name(self, method_name): - with mock.patch.object(auth.plugins.mapped.Mapped, - 'authenticate', - return_value=None) as authenticate: - context = {'environment': {}} - auth_data = { - 'identity': { - 'methods': [method_name], - method_name: {'protocol': method_name}, - } - } - auth_info = auth.controllers.AuthInfo.create(context, auth_data) - auth_context = {'extras': {}, - 'method_names': [], - 'user_id': uuid.uuid4().hex} - self.api.authenticate(context, auth_info, auth_context) - # make sure Mapped plugin got invoked with the correct payload - ((context, auth_payload, auth_context), - kwargs) = authenticate.call_args - self.assertEqual(method_name, auth_payload['protocol']) - - def test_mapped_with_remote_user(self): - with mock.patch.object(auth.plugins.mapped.Mapped, - 'authenticate', - return_value=None) as authenticate: - # external plugin should fail and pass to mapped plugin - method_name = 'saml2' - auth_data = {'methods': [method_name]} - # put the method name in the payload so its easier to correlate - # method name with payload - auth_data[method_name] = {'protocol': method_name} - auth_data = {'identity': auth_data} - auth_info = auth.controllers.AuthInfo.create(None, 
auth_data) - auth_context = {'extras': {}, - 'method_names': [], - 'user_id': uuid.uuid4().hex} - environment = {'environment': {'REMOTE_USER': 'foo@idp.com'}} - self.api.authenticate(environment, auth_info, auth_context) - # make sure Mapped plugin got invoked with the correct payload - ((context, auth_payload, auth_context), - kwargs) = authenticate.call_args - self.assertEqual(method_name, auth_payload['protocol']) - - def test_supporting_multiple_methods(self): - for method_name in ['saml2', 'openid', 'x509']: - self._test_mapped_invocation_with_method_name(method_name) diff --git a/keystone-moon/keystone/tests/unit/test_backend.py b/keystone-moon/keystone/tests/unit/test_backend.py deleted file mode 100644 index 302fc2c2..00000000 --- a/keystone-moon/keystone/tests/unit/test_backend.py +++ /dev/null @@ -1,6851 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import copy -import datetime -import hashlib -import uuid - -from keystoneclient.common import cms -import mock -from oslo_config import cfg -from oslo_utils import timeutils -import six -from six.moves import range -from testtools import matchers - -from keystone.catalog import core -from keystone.common import driver_hints -from keystone import exception -from keystone.tests import unit -from keystone.tests.unit import default_fixtures -from keystone.tests.unit import filtering -from keystone.tests.unit import utils as test_utils -from keystone.token import provider - - -CONF = cfg.CONF -DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id -NULL_OBJECT = object() - - -class AssignmentTestHelperMixin(object): - """Mixin class to aid testing of assignments. - - This class supports data driven test plans that enable: - - - Creation of initial entities, such as domains, users, groups, projects - and roles - - Creation of assignments referencing the above entities - - A set of input parameters and expected outputs to list_role_assignments - based on the above test data - - A test plan is a dict of the form: - - test_plan = { - entities: details and number of entities, - group_memberships: group-user entity memberships, - assignments: list of assignments to create, - tests: list of pairs of input params and expected outputs} - - An example test plan: - - test_plan = { - # First, create the entities required. Entities are specified by - # a dict with the key being the entity type and the value an - # entity specification which can be one of: - # - # - a simple number, e.g. {'users': 3} creates 3 users - # - a dict where more information regarding the contents of the entity - # is required, e.g. {'domains' : {'users : 3}} creates a domain - # with three users - # - a list of entity specifications if multiple are required - # - # The following creates a domain that contains a single user, group and - # project, as well as creating three roles. 
- - 'entities': {'domains': {'users': 1, 'groups': 1, 'projects': 1}, - 'roles': 3}, - - # If it is required that an existing domain be used for the new - # entities, then the id of that domain can be included in the - # domain dict. For example, if alternatively we wanted to add 3 users - # to the default domain, add a second domain containing 3 projects as - # well as 5 additional empty domains, the entities would be defined as: - # - # 'entities': {'domains': [{'id': DEFAULT_DOMAIN, 'users': 3}, - # {'projects': 3}, 5]}, - # - # A project hierarchy can be specified within the 'projects' section by - # nesting the 'project' key, for example to create a project with three - # sub-projects you would use: - - 'projects': {'project': 3} - - # A more complex hierarchy can also be defined, for example the - # following would define three projects each containing a - # sub-project, each of which contain a further three sub-projects. - - 'projects': [{'project': {'project': 3}}, - {'project': {'project': 3}}, - {'project': {'project': 3}}] - - # A list of groups and their members. In this case make users with - # index 0 and 1 members of group with index 0. Users and Groups are - # indexed in the order they appear in the 'entities' key above. - - 'group_memberships': [{'group': 0, 'users': [0, 1]}] - - # Next, create assignments between the entities, referencing the - # entities by index, i.e. 'user': 0 refers to user[0]. Entities are - # indexed in the order they appear in the 'entities' key above within - # their entity type. - - 'assignments': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}, - {'group': 0, 'role': 2, 'domain': 0}, - {'user': 0, 'role': 2, 'project': 0}], - - # Finally, define an array of tests where list_role_assignment() is - # called with the given input parameters and the results are then - # confirmed to be as given in 'results'. Again, all entities are - # referenced by index. 
- - 'tests': [ - {'params': {}, - 'results': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}, - {'group': 0, 'role': 2, 'domain': 0}, - {'user': 0, 'role': 2, 'project': 0}]}, - {'params': {'role': 2}, - 'results': [{'group': 0, 'role': 2, 'domain': 0}, - {'user': 0, 'role': 2, 'project': 0}]}] - - # The 'params' key also supports the 'effective' and - # 'inherited_to_projects' options to list_role_assignments.} - - """ - def _handle_project_spec(self, test_data, domain_id, project_spec, - parent_id=None): - """Handle the creation of a project or hierarchy of projects. - - project_spec may either be a count of the number of projects to - create, or it may be a list of the form: - - [{'project': project_spec}, {'project': project_spec}, ...] - - This method is called recursively to handle the creation of a - hierarchy of projects. - - """ - def _create_project(domain_id, parent_id): - new_project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain_id, 'parent_id': parent_id} - new_project = self.resource_api.create_project(new_project['id'], - new_project) - return new_project - - if isinstance(project_spec, list): - for this_spec in project_spec: - self._handle_project_spec( - test_data, domain_id, this_spec, parent_id=parent_id) - elif isinstance(project_spec, dict): - new_proj = _create_project(domain_id, parent_id) - test_data['projects'].append(new_proj) - self._handle_project_spec( - test_data, domain_id, project_spec['project'], - parent_id=new_proj['id']) - else: - for _ in range(project_spec): - test_data['projects'].append( - _create_project(domain_id, parent_id)) - - def _handle_domain_spec(self, test_data, domain_spec): - """Handle the creation of domains and their contents. - - domain_spec may either be a count of the number of empty domains to - create, a dict describing the domain contents, or a list of - domain_specs. 
- - In the case when a list is provided, this method calls itself - recursively to handle the list elements. - - This method will insert any entities created into test_data - - """ - def _create_domain(domain_id=None): - if domain_id is None: - new_domain = {'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex} - self.resource_api.create_domain(new_domain['id'], - new_domain) - return new_domain - else: - # The test plan specified an existing domain to use - return self.resource_api.get_domain(domain_id) - - def _create_entity_in_domain(entity_type, domain_id): - """Create a user or group entity in the domain.""" - - new_entity = {'name': uuid.uuid4().hex, 'domain_id': domain_id} - if entity_type == 'users': - new_entity = self.identity_api.create_user(new_entity) - elif entity_type == 'groups': - new_entity = self.identity_api.create_group(new_entity) - else: - # Must be a bad test plan - raise exception.NotImplemented() - return new_entity - - if isinstance(domain_spec, list): - for x in domain_spec: - self._handle_domain_spec(test_data, x) - elif isinstance(domain_spec, dict): - # If there is a domain ID specified, then use it - the_domain = _create_domain(domain_spec.get('id')) - test_data['domains'].append(the_domain) - for entity_type, value in domain_spec.items(): - if entity_type == 'id': - # We already used this above to determine whether to - # use and existing domain - continue - if entity_type == 'projects': - # If it's projects, we need to handle the potential - # specification of a project hierarchy - self._handle_project_spec( - test_data, the_domain['id'], value) - else: - # It's a count of number of entities - for _ in range(value): - test_data[entity_type].append( - _create_entity_in_domain( - entity_type, the_domain['id'])) - else: - for _ in range(domain_spec): - test_data['domains'].append(_create_domain()) - - def create_entities(self, entity_pattern): - """Create the entities specified in the test plan. 
- - Process the 'entities' key in the test plan, creating the requested - entities. Each created entity will be added to the array of entities - stored in the returned test_data object, e.g.: - - test_data['users'] = [user[0], user[1]....] - - """ - def _create_role(): - new_role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - return self.role_api.create_role(new_role['id'], new_role) - - test_data = {} - for entity in ['users', 'groups', 'domains', 'projects', 'roles']: - test_data[entity] = [] - - # Create any domains requested and, if specified, any entities within - # those domains - if 'domains' in entity_pattern: - self._handle_domain_spec(test_data, entity_pattern['domains']) - - # Create any roles requested - if 'roles' in entity_pattern: - for _ in range(entity_pattern['roles']): - test_data['roles'].append(_create_role()) - - return test_data - - def _convert_entity_shorthand(self, key, shorthand_data, reference_data): - """Convert a shorthand entity description into a full ID reference. - - In test plan definitions, we allow a shorthand for referencing to an - entity of the form: - - 'user': 0 - - which is actually shorthand for: - - 'user_id': reference_data['users'][0]['id'] - - This method converts the shorthand version into the full reference. - - """ - expanded_key = '%s_id' % key - reference_index = '%ss' % key - index_value = ( - reference_data[reference_index][shorthand_data[key]]['id']) - return expanded_key, index_value - - def create_group_memberships(self, group_pattern, test_data): - """Create the group memberships specified in the test plan.""" - - for group_spec in group_pattern: - # Each membership specification is a dict of the form: - # - # {'group': 0, 'users': [list of user indexes]} - # - # Add all users in the list to the specified group, first - # converting from index to full entity ID. 
- group_value = test_data['groups'][group_spec['group']]['id'] - for user_index in group_spec['users']: - user_value = test_data['users'][user_index]['id'] - self.identity_api.add_user_to_group(user_value, group_value) - return test_data - - def create_assignments(self, assignment_pattern, test_data): - """Create the assignments specified in the test plan.""" - - # First store how many assignments are already in the system, - # so during the tests we can check the number of new assignments - # created. - test_data['initial_assignment_count'] = ( - len(self.assignment_api.list_role_assignments())) - - # Now create the new assignments in the test plan - for assignment in assignment_pattern: - # Each assignment is a dict of the form: - # - # { 'user': 0, 'project':1, 'role': 6} - # - # where the value of each item is the index into the array of - # entities created earlier. - # - # We process the assignment dict to create the args required to - # make the create_grant() call. - args = {} - for param in assignment: - if param == 'inherited_to_projects': - args[param] = assignment[param] - else: - # Turn 'entity : 0' into 'entity_id = ac6736ba873d' - # where entity in user, group, project or domain - key, value = self._convert_entity_shorthand( - param, assignment, test_data) - args[key] = value - self.assignment_api.create_grant(**args) - return test_data - - def execute_assignment_tests(self, test_plan, test_data): - """Execute the test plan, based on the created test_data.""" - - def check_results(expected, actual, param_arg_count): - if param_arg_count == 0: - # It was an unfiltered call, so default fixture assignments - # might be polluting our answer - so we take into account - # how many assignments there were before the test. 
- self.assertEqual( - len(expected) + test_data['initial_assignment_count'], - len(actual)) - else: - self.assertThat(actual, matchers.HasLength(len(expected))) - - for each_expected in expected: - expected_assignment = {} - for param in each_expected: - if param == 'inherited_to_projects': - expected_assignment[param] = each_expected[param] - elif param == 'indirect': - # We're expecting the result to contain an indirect - # dict with the details how the role came to be placed - # on this entity - so convert the key/value pairs of - # that dict into real entity references. - indirect_term = {} - for indirect_param in each_expected[param]: - key, value = self._convert_entity_shorthand( - indirect_param, each_expected[param], - test_data) - indirect_term[key] = value - expected_assignment[param] = indirect_term - else: - # Convert a simple shorthand entry into a full - # entity reference - key, value = self._convert_entity_shorthand( - param, each_expected, test_data) - expected_assignment[key] = value - self.assertIn(expected_assignment, actual) - - # Go through each test in the array, processing the input params, which - # we build into an args dict, and then call list_role_assignments. Then - # check the results against those specified in the test plan. - for test in test_plan.get('tests', []): - args = {} - for param in test['params']: - if param in ['effective', 'inherited']: - # Just pass the value into the args - args[param] = test['params'][param] - else: - # Turn 'entity : 0' into 'entity_id = ac6736ba873d' - # where entity in user, group, project or domain - key, value = self._convert_entity_shorthand( - param, test['params'], test_data) - args[key] = value - results = self.assignment_api.list_role_assignments(**args) - check_results(test['results'], results, len(args)) - - def execute_assignment_test_plan(self, test_plan): - """Create entities, assignments and execute the test plan. 
- - The standard method to call to create entities and assignments and - execute the tests as specified in the test_plan. The test_data - dict is returned so that, if required, the caller can execute - additional manual tests with the entities and assignments created. - - """ - test_data = self.create_entities(test_plan['entities']) - if 'group_memberships' in test_plan: - self.create_group_memberships(test_plan['group_memberships'], - test_data) - if 'assignments' in test_plan: - test_data = self.create_assignments(test_plan['assignments'], - test_data) - self.execute_assignment_tests(test_plan, test_data) - return test_data - - -class IdentityTests(AssignmentTestHelperMixin): - def _get_domain_fixture(self): - domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain['id'], domain) - return domain - - def _set_domain_scope(self, domain_id): - # We only provide a domain scope if we have multiple drivers - if CONF.identity.domain_specific_drivers_enabled: - return domain_id - - def test_project_add_and_remove_user_role(self): - user_ids = self.assignment_api.list_user_ids_for_project( - self.tenant_bar['id']) - self.assertNotIn(self.user_two['id'], user_ids) - - self.assignment_api.add_role_to_user_and_project( - tenant_id=self.tenant_bar['id'], - user_id=self.user_two['id'], - role_id=self.role_other['id']) - user_ids = self.assignment_api.list_user_ids_for_project( - self.tenant_bar['id']) - self.assertIn(self.user_two['id'], user_ids) - - self.assignment_api.remove_role_from_user_and_project( - tenant_id=self.tenant_bar['id'], - user_id=self.user_two['id'], - role_id=self.role_other['id']) - - user_ids = self.assignment_api.list_user_ids_for_project( - self.tenant_bar['id']) - self.assertNotIn(self.user_two['id'], user_ids) - - def test_remove_user_role_not_assigned(self): - # Expect failure if attempt to remove a role that was never assigned to - # the user. 
- self.assertRaises(exception.RoleNotFound, - self.assignment_api. - remove_role_from_user_and_project, - tenant_id=self.tenant_bar['id'], - user_id=self.user_two['id'], - role_id=self.role_other['id']) - - def test_authenticate_bad_user(self): - self.assertRaises(AssertionError, - self.identity_api.authenticate, - context={}, - user_id=uuid.uuid4().hex, - password=self.user_foo['password']) - - def test_authenticate_bad_password(self): - self.assertRaises(AssertionError, - self.identity_api.authenticate, - context={}, - user_id=self.user_foo['id'], - password=uuid.uuid4().hex) - - def test_authenticate(self): - user_ref = self.identity_api.authenticate( - context={}, - user_id=self.user_sna['id'], - password=self.user_sna['password']) - # NOTE(termie): the password field is left in user_sna to make - # it easier to authenticate in tests, but should - # not be returned by the api - self.user_sna.pop('password') - self.user_sna['enabled'] = True - self.assertDictEqual(user_ref, self.user_sna) - - def test_authenticate_and_get_roles_no_metadata(self): - user = { - 'name': 'NO_META', - 'domain_id': DEFAULT_DOMAIN_ID, - 'password': 'no_meta2', - } - new_user = self.identity_api.create_user(user) - self.assignment_api.add_user_to_project(self.tenant_baz['id'], - new_user['id']) - user_ref = self.identity_api.authenticate( - context={}, - user_id=new_user['id'], - password=user['password']) - self.assertNotIn('password', user_ref) - # NOTE(termie): the password field is left in user_sna to make - # it easier to authenticate in tests, but should - # not be returned by the api - user.pop('password') - self.assertDictContainsSubset(user, user_ref) - role_list = self.assignment_api.get_roles_for_user_and_project( - new_user['id'], self.tenant_baz['id']) - self.assertEqual(1, len(role_list)) - self.assertIn(CONF.member_role_id, role_list) - - def test_authenticate_if_no_password_set(self): - id_ = uuid.uuid4().hex - user = { - 'name': uuid.uuid4().hex, - 'domain_id': 
DEFAULT_DOMAIN_ID, - } - self.identity_api.create_user(user) - - self.assertRaises(AssertionError, - self.identity_api.authenticate, - context={}, - user_id=id_, - password='password') - - def test_create_unicode_user_name(self): - unicode_name = u'name \u540d\u5b57' - user = {'name': unicode_name, - 'domain_id': DEFAULT_DOMAIN_ID, - 'password': uuid.uuid4().hex} - ref = self.identity_api.create_user(user) - self.assertEqual(unicode_name, ref['name']) - - def test_get_project(self): - tenant_ref = self.resource_api.get_project(self.tenant_bar['id']) - self.assertDictEqual(tenant_ref, self.tenant_bar) - - def test_get_project_404(self): - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - uuid.uuid4().hex) - - def test_get_project_by_name(self): - tenant_ref = self.resource_api.get_project_by_name( - self.tenant_bar['name'], - DEFAULT_DOMAIN_ID) - self.assertDictEqual(tenant_ref, self.tenant_bar) - - def test_get_project_by_name_404(self): - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project_by_name, - uuid.uuid4().hex, - DEFAULT_DOMAIN_ID) - - def test_list_user_ids_for_project(self): - user_ids = self.assignment_api.list_user_ids_for_project( - self.tenant_baz['id']) - self.assertEqual(2, len(user_ids)) - self.assertIn(self.user_two['id'], user_ids) - self.assertIn(self.user_badguy['id'], user_ids) - - def test_list_user_ids_for_project_no_duplicates(self): - # Create user - user_ref = { - 'name': uuid.uuid4().hex, - 'domain_id': DEFAULT_DOMAIN_ID, - 'password': uuid.uuid4().hex, - 'enabled': True} - user_ref = self.identity_api.create_user(user_ref) - # Create project - project_ref = { - 'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'domain_id': DEFAULT_DOMAIN_ID} - self.resource_api.create_project( - project_ref['id'], project_ref) - # Create 2 roles and give user each role in project - for i in range(2): - role_ref = { - 'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex} - 
self.role_api.create_role(role_ref['id'], role_ref) - self.assignment_api.add_role_to_user_and_project( - user_id=user_ref['id'], - tenant_id=project_ref['id'], - role_id=role_ref['id']) - # Get the list of user_ids in project - user_ids = self.assignment_api.list_user_ids_for_project( - project_ref['id']) - # Ensure the user is only returned once - self.assertEqual(1, len(user_ids)) - - def test_get_project_user_ids_404(self): - self.assertRaises(exception.ProjectNotFound, - self.assignment_api.list_user_ids_for_project, - uuid.uuid4().hex) - - def test_get_user(self): - user_ref = self.identity_api.get_user(self.user_foo['id']) - # NOTE(termie): the password field is left in user_foo to make - # it easier to authenticate in tests, but should - # not be returned by the api - self.user_foo.pop('password') - self.assertDictEqual(user_ref, self.user_foo) - - @unit.skip_if_cache_disabled('identity') - def test_cache_layer_get_user(self): - user = { - 'name': uuid.uuid4().hex.lower(), - 'domain_id': DEFAULT_DOMAIN_ID - } - self.identity_api.create_user(user) - ref = self.identity_api.get_user_by_name(user['name'], - user['domain_id']) - # cache the result. - self.identity_api.get_user(ref['id']) - # delete bypassing identity api - domain_id, driver, entity_id = ( - self.identity_api._get_domain_driver_and_entity_id(ref['id'])) - driver.delete_user(entity_id) - - self.assertDictEqual(ref, self.identity_api.get_user(ref['id'])) - self.identity_api.get_user.invalidate(self.identity_api, ref['id']) - self.assertRaises(exception.UserNotFound, - self.identity_api.get_user, ref['id']) - user = { - 'name': uuid.uuid4().hex.lower(), - 'domain_id': DEFAULT_DOMAIN_ID - } - self.identity_api.create_user(user) - ref = self.identity_api.get_user_by_name(user['name'], - user['domain_id']) - user['description'] = uuid.uuid4().hex - # cache the result. - self.identity_api.get_user(ref['id']) - # update using identity api and get back updated user. 
- user_updated = self.identity_api.update_user(ref['id'], user) - self.assertDictContainsSubset(self.identity_api.get_user(ref['id']), - user_updated) - self.assertDictContainsSubset( - self.identity_api.get_user_by_name(ref['name'], ref['domain_id']), - user_updated) - - def test_get_user_404(self): - self.assertRaises(exception.UserNotFound, - self.identity_api.get_user, - uuid.uuid4().hex) - - def test_get_user_by_name(self): - user_ref = self.identity_api.get_user_by_name( - self.user_foo['name'], DEFAULT_DOMAIN_ID) - # NOTE(termie): the password field is left in user_foo to make - # it easier to authenticate in tests, but should - # not be returned by the api - self.user_foo.pop('password') - self.assertDictEqual(user_ref, self.user_foo) - - @unit.skip_if_cache_disabled('identity') - def test_cache_layer_get_user_by_name(self): - user = { - 'name': uuid.uuid4().hex.lower(), - 'domain_id': DEFAULT_DOMAIN_ID - } - self.identity_api.create_user(user) - ref = self.identity_api.get_user_by_name(user['name'], - user['domain_id']) - # delete bypassing the identity api. 
- domain_id, driver, entity_id = ( - self.identity_api._get_domain_driver_and_entity_id(ref['id'])) - driver.delete_user(entity_id) - - self.assertDictEqual(ref, self.identity_api.get_user_by_name( - user['name'], DEFAULT_DOMAIN_ID)) - self.identity_api.get_user_by_name.invalidate( - self.identity_api, user['name'], DEFAULT_DOMAIN_ID) - self.assertRaises(exception.UserNotFound, - self.identity_api.get_user_by_name, - user['name'], DEFAULT_DOMAIN_ID) - user = { - 'name': uuid.uuid4().hex.lower(), - 'domain_id': DEFAULT_DOMAIN_ID - } - self.identity_api.create_user(user) - ref = self.identity_api.get_user_by_name(user['name'], - user['domain_id']) - user['description'] = uuid.uuid4().hex - user_updated = self.identity_api.update_user(ref['id'], user) - self.assertDictContainsSubset(self.identity_api.get_user(ref['id']), - user_updated) - self.assertDictContainsSubset( - self.identity_api.get_user_by_name(ref['name'], ref['domain_id']), - user_updated) - - def test_get_user_by_name_404(self): - self.assertRaises(exception.UserNotFound, - self.identity_api.get_user_by_name, - uuid.uuid4().hex, - DEFAULT_DOMAIN_ID) - - def test_create_duplicate_user_name_fails(self): - user = {'name': 'fake1', - 'domain_id': DEFAULT_DOMAIN_ID, - 'password': 'fakepass', - 'tenants': ['bar']} - user = self.identity_api.create_user(user) - self.assertRaises(exception.Conflict, - self.identity_api.create_user, - user) - - def test_create_duplicate_user_name_in_different_domains(self): - new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(new_domain['id'], new_domain) - user1 = {'name': uuid.uuid4().hex, - 'domain_id': DEFAULT_DOMAIN_ID, - 'password': uuid.uuid4().hex} - user2 = {'name': user1['name'], - 'domain_id': new_domain['id'], - 'password': uuid.uuid4().hex} - self.identity_api.create_user(user1) - self.identity_api.create_user(user2) - - def test_move_user_between_domains(self): - domain1 = {'id': uuid.uuid4().hex, 'name': 
uuid.uuid4().hex} - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain2['id'], domain2) - user = {'name': uuid.uuid4().hex, - 'domain_id': domain1['id'], - 'password': uuid.uuid4().hex} - user = self.identity_api.create_user(user) - user['domain_id'] = domain2['id'] - self.identity_api.update_user(user['id'], user) - - def test_move_user_between_domains_with_clashing_names_fails(self): - domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain2['id'], domain2) - # First, create a user in domain1 - user1 = {'name': uuid.uuid4().hex, - 'domain_id': domain1['id'], - 'password': uuid.uuid4().hex} - user1 = self.identity_api.create_user(user1) - # Now create a user in domain2 with a potentially clashing - # name - which should work since we have domain separation - user2 = {'name': user1['name'], - 'domain_id': domain2['id'], - 'password': uuid.uuid4().hex} - user2 = self.identity_api.create_user(user2) - # Now try and move user1 into the 2nd domain - which should - # fail since the names clash - user1['domain_id'] = domain2['id'] - self.assertRaises(exception.Conflict, - self.identity_api.update_user, - user1['id'], - user1) - - def test_rename_duplicate_user_name_fails(self): - user1 = {'name': 'fake1', - 'domain_id': DEFAULT_DOMAIN_ID, - 'password': 'fakepass', - 'tenants': ['bar']} - user2 = {'name': 'fake2', - 'domain_id': DEFAULT_DOMAIN_ID, - 'password': 'fakepass', - 'tenants': ['bar']} - self.identity_api.create_user(user1) - user2 = self.identity_api.create_user(user2) - user2['name'] = 'fake1' - self.assertRaises(exception.Conflict, - self.identity_api.update_user, - user2['id'], - user2) - - def test_update_user_id_fails(self): - user = {'name': 'fake1', - 'domain_id': DEFAULT_DOMAIN_ID, - 
'password': 'fakepass', - 'tenants': ['bar']} - user = self.identity_api.create_user(user) - original_id = user['id'] - user['id'] = 'fake2' - self.assertRaises(exception.ValidationError, - self.identity_api.update_user, - original_id, - user) - user_ref = self.identity_api.get_user(original_id) - self.assertEqual(original_id, user_ref['id']) - self.assertRaises(exception.UserNotFound, - self.identity_api.get_user, - 'fake2') - - def test_create_duplicate_project_id_fails(self): - tenant = {'id': 'fake1', 'name': 'fake1', - 'domain_id': DEFAULT_DOMAIN_ID} - self.resource_api.create_project('fake1', tenant) - tenant['name'] = 'fake2' - self.assertRaises(exception.Conflict, - self.resource_api.create_project, - 'fake1', - tenant) - - def test_create_duplicate_project_name_fails(self): - tenant = {'id': 'fake1', 'name': 'fake', - 'domain_id': DEFAULT_DOMAIN_ID} - self.resource_api.create_project('fake1', tenant) - tenant['id'] = 'fake2' - self.assertRaises(exception.Conflict, - self.resource_api.create_project, - 'fake1', - tenant) - - def test_create_duplicate_project_name_in_different_domains(self): - new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(new_domain['id'], new_domain) - tenant1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': DEFAULT_DOMAIN_ID} - tenant2 = {'id': uuid.uuid4().hex, 'name': tenant1['name'], - 'domain_id': new_domain['id']} - self.resource_api.create_project(tenant1['id'], tenant1) - self.resource_api.create_project(tenant2['id'], tenant2) - - def test_move_project_between_domains(self): - domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain2['id'], domain2) - project = {'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'domain_id': domain1['id']} - self.resource_api.create_project(project['id'], 
project) - project['domain_id'] = domain2['id'] - self.resource_api.update_project(project['id'], project) - - def test_move_project_between_domains_with_clashing_names_fails(self): - domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain2['id'], domain2) - # First, create a project in domain1 - project1 = {'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'domain_id': domain1['id']} - self.resource_api.create_project(project1['id'], project1) - # Now create a project in domain2 with a potentially clashing - # name - which should work since we have domain separation - project2 = {'id': uuid.uuid4().hex, - 'name': project1['name'], - 'domain_id': domain2['id']} - self.resource_api.create_project(project2['id'], project2) - # Now try and move project1 into the 2nd domain - which should - # fail since the names clash - project1['domain_id'] = domain2['id'] - self.assertRaises(exception.Conflict, - self.resource_api.update_project, - project1['id'], - project1) - - def test_rename_duplicate_project_name_fails(self): - tenant1 = {'id': 'fake1', 'name': 'fake1', - 'domain_id': DEFAULT_DOMAIN_ID} - tenant2 = {'id': 'fake2', 'name': 'fake2', - 'domain_id': DEFAULT_DOMAIN_ID} - self.resource_api.create_project('fake1', tenant1) - self.resource_api.create_project('fake2', tenant2) - tenant2['name'] = 'fake1' - self.assertRaises(exception.Error, - self.resource_api.update_project, - 'fake2', - tenant2) - - def test_update_project_id_does_nothing(self): - tenant = {'id': 'fake1', 'name': 'fake1', - 'domain_id': DEFAULT_DOMAIN_ID} - self.resource_api.create_project('fake1', tenant) - tenant['id'] = 'fake2' - self.resource_api.update_project('fake1', tenant) - tenant_ref = self.resource_api.get_project('fake1') - self.assertEqual('fake1', tenant_ref['id']) - self.assertRaises(exception.ProjectNotFound, - 
self.resource_api.get_project, - 'fake2') - - def test_list_role_assignments_unfiltered(self): - """Test unfiltered listing of role assignments.""" - - test_plan = { - # Create a domain, with a user, group & project - 'entities': {'domains': {'users': 1, 'groups': 1, 'projects': 1}, - 'roles': 3}, - # Create a grant of each type (user/group on project/domain) - 'assignments': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}, - {'group': 0, 'role': 2, 'domain': 0}, - {'group': 0, 'role': 2, 'project': 0}], - 'tests': [ - # Check that we get back the 4 assignments - {'params': {}, - 'results': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}, - {'group': 0, 'role': 2, 'domain': 0}, - {'group': 0, 'role': 2, 'project': 0}]} - ] - } - self.execute_assignment_test_plan(test_plan) - - def test_list_role_assignments_filtered_by_role(self): - """Test listing of role assignments filtered by role ID.""" - - test_plan = { - # Create a user, group & project in the default domain - 'entities': {'domains': {'id': DEFAULT_DOMAIN_ID, - 'users': 1, 'groups': 1, 'projects': 1}, - 'roles': 3}, - # Create a grant of each type (user/group on project/domain) - 'assignments': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}, - {'group': 0, 'role': 2, 'domain': 0}, - {'group': 0, 'role': 2, 'project': 0}], - 'tests': [ - # Check that when filtering by role, we only get back those - # that match - {'params': {'role': 2}, - 'results': [{'group': 0, 'role': 2, 'domain': 0}, - {'group': 0, 'role': 2, 'project': 0}]} - ] - } - test_data = self.execute_assignment_test_plan(test_plan) - - # Also test that list_role_assignments_for_role() gives the same answer - assignment_list = self.assignment_api.list_role_assignments_for_role( - role_id=test_data['roles'][2]['id']) - self.assertThat(assignment_list, matchers.HasLength(2)) - - # Now check that each of our two new entries are in the list - self.assertIn( - 
{'group_id': test_data['groups'][0]['id'], - 'domain_id': DEFAULT_DOMAIN_ID, - 'role_id': test_data['roles'][2]['id']}, - assignment_list) - self.assertIn( - {'group_id': test_data['groups'][0]['id'], - 'project_id': test_data['projects'][0]['id'], - 'role_id': test_data['roles'][2]['id']}, - assignment_list) - - def test_list_group_role_assignment(self): - # When a group role assignment is created and the role assignments are - # listed then the group role assignment is included in the list. - - test_plan = { - 'entities': {'domains': {'id': DEFAULT_DOMAIN_ID, - 'groups': 1, 'projects': 1}, - 'roles': 1}, - 'assignments': [{'group': 0, 'role': 0, 'project': 0}], - 'tests': [ - {'params': {}, - 'results': [{'group': 0, 'role': 0, 'project': 0}]} - ] - } - self.execute_assignment_test_plan(test_plan) - - def test_list_role_assignments_bad_role(self): - assignment_list = self.assignment_api.list_role_assignments_for_role( - role_id=uuid.uuid4().hex) - self.assertEqual([], assignment_list) - - def test_add_duplicate_role_grant(self): - roles_ref = self.assignment_api.get_roles_for_user_and_project( - self.user_foo['id'], self.tenant_bar['id']) - self.assertNotIn(self.role_admin['id'], roles_ref) - self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], self.tenant_bar['id'], self.role_admin['id']) - self.assertRaises(exception.Conflict, - self.assignment_api.add_role_to_user_and_project, - self.user_foo['id'], - self.tenant_bar['id'], - self.role_admin['id']) - - def test_get_role_by_user_and_project_with_user_in_group(self): - """Test for get role by user and project, user was added into a group. 
- - Test Plan: - - - Create a user, a project & a group, add this user to group - - Create roles and grant them to user and project - - Check the role list get by the user and project was as expected - - """ - user_ref = {'name': uuid.uuid4().hex, - 'domain_id': DEFAULT_DOMAIN_ID, - 'password': uuid.uuid4().hex, - 'enabled': True} - user_ref = self.identity_api.create_user(user_ref) - - project_ref = {'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'domain_id': DEFAULT_DOMAIN_ID} - self.resource_api.create_project(project_ref['id'], project_ref) - - group = {'name': uuid.uuid4().hex, - 'domain_id': DEFAULT_DOMAIN_ID} - group_id = self.identity_api.create_group(group)['id'] - self.identity_api.add_user_to_group(user_ref['id'], group_id) - - role_ref_list = [] - for i in range(2): - role_ref = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.role_api.create_role(role_ref['id'], role_ref) - role_ref_list.append(role_ref) - - self.assignment_api.add_role_to_user_and_project( - user_id=user_ref['id'], - tenant_id=project_ref['id'], - role_id=role_ref['id']) - - role_list = self.assignment_api.get_roles_for_user_and_project( - user_id=user_ref['id'], - tenant_id=project_ref['id']) - - self.assertEqual(set(role_list), - set([r['id'] for r in role_ref_list])) - - def test_get_role_by_user_and_project(self): - roles_ref = self.assignment_api.get_roles_for_user_and_project( - self.user_foo['id'], self.tenant_bar['id']) - self.assertNotIn(self.role_admin['id'], roles_ref) - self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], self.tenant_bar['id'], self.role_admin['id']) - roles_ref = self.assignment_api.get_roles_for_user_and_project( - self.user_foo['id'], self.tenant_bar['id']) - self.assertIn(self.role_admin['id'], roles_ref) - self.assertNotIn('member', roles_ref) - - self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], self.tenant_bar['id'], 'member') - roles_ref = self.assignment_api.get_roles_for_user_and_project( 
- self.user_foo['id'], self.tenant_bar['id']) - self.assertIn(self.role_admin['id'], roles_ref) - self.assertIn('member', roles_ref) - - def test_get_roles_for_user_and_domain(self): - """Test for getting roles for user on a domain. - - Test Plan: - - - Create a domain, with 2 users - - Check no roles yet exit - - Give user1 two roles on the domain, user2 one role - - Get roles on user1 and the domain - maybe sure we only - get back the 2 roles on user1 - - Delete both roles from user1 - - Check we get no roles back for user1 on domain - - """ - new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(new_domain['id'], new_domain) - new_user1 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex, - 'enabled': True, 'domain_id': new_domain['id']} - new_user1 = self.identity_api.create_user(new_user1) - new_user2 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex, - 'enabled': True, 'domain_id': new_domain['id']} - new_user2 = self.identity_api.create_user(new_user2) - roles_ref = self.assignment_api.list_grants( - user_id=new_user1['id'], - domain_id=new_domain['id']) - self.assertEqual(0, len(roles_ref)) - # Now create the grants (roles are defined in default_fixtures) - self.assignment_api.create_grant(user_id=new_user1['id'], - domain_id=new_domain['id'], - role_id='member') - self.assignment_api.create_grant(user_id=new_user1['id'], - domain_id=new_domain['id'], - role_id='other') - self.assignment_api.create_grant(user_id=new_user2['id'], - domain_id=new_domain['id'], - role_id='admin') - # Read back the roles for user1 on domain - roles_ids = self.assignment_api.get_roles_for_user_and_domain( - new_user1['id'], new_domain['id']) - self.assertEqual(2, len(roles_ids)) - self.assertIn(self.role_member['id'], roles_ids) - self.assertIn(self.role_other['id'], roles_ids) - - # Now delete both grants for user1 - self.assignment_api.delete_grant(user_id=new_user1['id'], - domain_id=new_domain['id'], - 
role_id='member') - self.assignment_api.delete_grant(user_id=new_user1['id'], - domain_id=new_domain['id'], - role_id='other') - roles_ref = self.assignment_api.list_grants( - user_id=new_user1['id'], - domain_id=new_domain['id']) - self.assertEqual(0, len(roles_ref)) - - def test_get_roles_for_user_and_domain_404(self): - """Test errors raised when getting roles for user on a domain. - - Test Plan: - - - Check non-existing user gives UserNotFound - - Check non-existing domain gives DomainNotFound - - """ - new_domain = self._get_domain_fixture() - new_user1 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex, - 'enabled': True, 'domain_id': new_domain['id']} - new_user1 = self.identity_api.create_user(new_user1) - - self.assertRaises(exception.UserNotFound, - self.assignment_api.get_roles_for_user_and_domain, - uuid.uuid4().hex, - new_domain['id']) - - self.assertRaises(exception.DomainNotFound, - self.assignment_api.get_roles_for_user_and_domain, - new_user1['id'], - uuid.uuid4().hex) - - def test_get_roles_for_user_and_project_404(self): - self.assertRaises(exception.UserNotFound, - self.assignment_api.get_roles_for_user_and_project, - uuid.uuid4().hex, - self.tenant_bar['id']) - - self.assertRaises(exception.ProjectNotFound, - self.assignment_api.get_roles_for_user_and_project, - self.user_foo['id'], - uuid.uuid4().hex) - - def test_add_role_to_user_and_project_404(self): - self.assertRaises(exception.ProjectNotFound, - self.assignment_api.add_role_to_user_and_project, - self.user_foo['id'], - uuid.uuid4().hex, - self.role_admin['id']) - - self.assertRaises(exception.RoleNotFound, - self.assignment_api.add_role_to_user_and_project, - self.user_foo['id'], - self.tenant_bar['id'], - uuid.uuid4().hex) - - def test_add_role_to_user_and_project_no_user(self): - # If add_role_to_user_and_project and the user doesn't exist, then - # no error. 
- user_id_not_exist = uuid.uuid4().hex - self.assignment_api.add_role_to_user_and_project( - user_id_not_exist, self.tenant_bar['id'], self.role_admin['id']) - - def test_remove_role_from_user_and_project(self): - self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], self.tenant_bar['id'], 'member') - self.assignment_api.remove_role_from_user_and_project( - self.user_foo['id'], self.tenant_bar['id'], 'member') - roles_ref = self.assignment_api.get_roles_for_user_and_project( - self.user_foo['id'], self.tenant_bar['id']) - self.assertNotIn('member', roles_ref) - self.assertRaises(exception.NotFound, - self.assignment_api. - remove_role_from_user_and_project, - self.user_foo['id'], - self.tenant_bar['id'], - 'member') - - def test_get_role_grant_by_user_and_project(self): - roles_ref = self.assignment_api.list_grants( - user_id=self.user_foo['id'], - project_id=self.tenant_bar['id']) - self.assertEqual(1, len(roles_ref)) - self.assignment_api.create_grant(user_id=self.user_foo['id'], - project_id=self.tenant_bar['id'], - role_id=self.role_admin['id']) - roles_ref = self.assignment_api.list_grants( - user_id=self.user_foo['id'], - project_id=self.tenant_bar['id']) - self.assertIn(self.role_admin['id'], - [role_ref['id'] for role_ref in roles_ref]) - - self.assignment_api.create_grant(user_id=self.user_foo['id'], - project_id=self.tenant_bar['id'], - role_id='member') - roles_ref = self.assignment_api.list_grants( - user_id=self.user_foo['id'], - project_id=self.tenant_bar['id']) - - roles_ref_ids = [] - for ref in roles_ref: - roles_ref_ids.append(ref['id']) - self.assertIn(self.role_admin['id'], roles_ref_ids) - self.assertIn('member', roles_ref_ids) - - def test_remove_role_grant_from_user_and_project(self): - self.assignment_api.create_grant(user_id=self.user_foo['id'], - project_id=self.tenant_baz['id'], - role_id='member') - roles_ref = self.assignment_api.list_grants( - user_id=self.user_foo['id'], - project_id=self.tenant_baz['id']) - 
self.assertDictEqual(roles_ref[0], self.role_member) - - self.assignment_api.delete_grant(user_id=self.user_foo['id'], - project_id=self.tenant_baz['id'], - role_id='member') - roles_ref = self.assignment_api.list_grants( - user_id=self.user_foo['id'], - project_id=self.tenant_baz['id']) - self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.delete_grant, - user_id=self.user_foo['id'], - project_id=self.tenant_baz['id'], - role_id='member') - - def test_get_role_assignment_by_project_not_found(self): - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.check_grant_role_id, - user_id=self.user_foo['id'], - project_id=self.tenant_baz['id'], - role_id='member') - - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.check_grant_role_id, - group_id=uuid.uuid4().hex, - project_id=self.tenant_baz['id'], - role_id='member') - - def test_get_role_assignment_by_domain_not_found(self): - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.check_grant_role_id, - user_id=self.user_foo['id'], - domain_id=self.domain_default['id'], - role_id='member') - - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.check_grant_role_id, - group_id=uuid.uuid4().hex, - domain_id=self.domain_default['id'], - role_id='member') - - def test_del_role_assignment_by_project_not_found(self): - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.delete_grant, - user_id=self.user_foo['id'], - project_id=self.tenant_baz['id'], - role_id='member') - - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.delete_grant, - group_id=uuid.uuid4().hex, - project_id=self.tenant_baz['id'], - role_id='member') - - def test_del_role_assignment_by_domain_not_found(self): - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.delete_grant, - user_id=self.user_foo['id'], - 
domain_id=self.domain_default['id'], - role_id='member') - - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.delete_grant, - group_id=uuid.uuid4().hex, - domain_id=self.domain_default['id'], - role_id='member') - - def test_get_and_remove_role_grant_by_group_and_project(self): - new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(new_domain['id'], new_domain) - new_group = {'domain_id': new_domain['id'], 'name': uuid.uuid4().hex} - new_group = self.identity_api.create_group(new_group) - new_user = {'name': 'new_user', 'password': 'secret', - 'enabled': True, 'domain_id': new_domain['id']} - new_user = self.identity_api.create_user(new_user) - self.identity_api.add_user_to_group(new_user['id'], - new_group['id']) - roles_ref = self.assignment_api.list_grants( - group_id=new_group['id'], - project_id=self.tenant_bar['id']) - self.assertEqual(0, len(roles_ref)) - self.assignment_api.create_grant(group_id=new_group['id'], - project_id=self.tenant_bar['id'], - role_id='member') - roles_ref = self.assignment_api.list_grants( - group_id=new_group['id'], - project_id=self.tenant_bar['id']) - self.assertDictEqual(roles_ref[0], self.role_member) - - self.assignment_api.delete_grant(group_id=new_group['id'], - project_id=self.tenant_bar['id'], - role_id='member') - roles_ref = self.assignment_api.list_grants( - group_id=new_group['id'], - project_id=self.tenant_bar['id']) - self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.delete_grant, - group_id=new_group['id'], - project_id=self.tenant_bar['id'], - role_id='member') - - def test_get_and_remove_role_grant_by_group_and_domain(self): - new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(new_domain['id'], new_domain) - new_group = {'domain_id': new_domain['id'], 'name': uuid.uuid4().hex} - new_group = self.identity_api.create_group(new_group) - 
new_user = {'name': 'new_user', 'password': uuid.uuid4().hex, - 'enabled': True, 'domain_id': new_domain['id']} - new_user = self.identity_api.create_user(new_user) - self.identity_api.add_user_to_group(new_user['id'], - new_group['id']) - - roles_ref = self.assignment_api.list_grants( - group_id=new_group['id'], - domain_id=new_domain['id']) - self.assertEqual(0, len(roles_ref)) - - self.assignment_api.create_grant(group_id=new_group['id'], - domain_id=new_domain['id'], - role_id='member') - - roles_ref = self.assignment_api.list_grants( - group_id=new_group['id'], - domain_id=new_domain['id']) - self.assertDictEqual(roles_ref[0], self.role_member) - - self.assignment_api.delete_grant(group_id=new_group['id'], - domain_id=new_domain['id'], - role_id='member') - roles_ref = self.assignment_api.list_grants( - group_id=new_group['id'], - domain_id=new_domain['id']) - self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.delete_grant, - group_id=new_group['id'], - domain_id=new_domain['id'], - role_id='member') - - def test_get_and_remove_correct_role_grant_from_a_mix(self): - new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(new_domain['id'], new_domain) - new_project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': new_domain['id']} - self.resource_api.create_project(new_project['id'], new_project) - new_group = {'domain_id': new_domain['id'], 'name': uuid.uuid4().hex} - new_group = self.identity_api.create_group(new_group) - new_group2 = {'domain_id': new_domain['id'], 'name': uuid.uuid4().hex} - new_group2 = self.identity_api.create_group(new_group2) - new_user = {'name': 'new_user', 'password': uuid.uuid4().hex, - 'enabled': True, 'domain_id': new_domain['id']} - new_user = self.identity_api.create_user(new_user) - new_user2 = {'name': 'new_user2', 'password': uuid.uuid4().hex, - 'enabled': True, 'domain_id': new_domain['id']} - new_user2 = 
self.identity_api.create_user(new_user2) - self.identity_api.add_user_to_group(new_user['id'], - new_group['id']) - # First check we have no grants - roles_ref = self.assignment_api.list_grants( - group_id=new_group['id'], - domain_id=new_domain['id']) - self.assertEqual(0, len(roles_ref)) - # Now add the grant we are going to test for, and some others as - # well just to make sure we get back the right one - self.assignment_api.create_grant(group_id=new_group['id'], - domain_id=new_domain['id'], - role_id='member') - - self.assignment_api.create_grant(group_id=new_group2['id'], - domain_id=new_domain['id'], - role_id=self.role_admin['id']) - self.assignment_api.create_grant(user_id=new_user2['id'], - domain_id=new_domain['id'], - role_id=self.role_admin['id']) - self.assignment_api.create_grant(group_id=new_group['id'], - project_id=new_project['id'], - role_id=self.role_admin['id']) - - roles_ref = self.assignment_api.list_grants( - group_id=new_group['id'], - domain_id=new_domain['id']) - self.assertDictEqual(roles_ref[0], self.role_member) - - self.assignment_api.delete_grant(group_id=new_group['id'], - domain_id=new_domain['id'], - role_id='member') - roles_ref = self.assignment_api.list_grants( - group_id=new_group['id'], - domain_id=new_domain['id']) - self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.delete_grant, - group_id=new_group['id'], - domain_id=new_domain['id'], - role_id='member') - - def test_get_and_remove_role_grant_by_user_and_domain(self): - new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(new_domain['id'], new_domain) - new_user = {'name': 'new_user', 'password': 'secret', - 'enabled': True, 'domain_id': new_domain['id']} - new_user = self.identity_api.create_user(new_user) - roles_ref = self.assignment_api.list_grants( - user_id=new_user['id'], - domain_id=new_domain['id']) - self.assertEqual(0, len(roles_ref)) - 
self.assignment_api.create_grant(user_id=new_user['id'], - domain_id=new_domain['id'], - role_id='member') - roles_ref = self.assignment_api.list_grants( - user_id=new_user['id'], - domain_id=new_domain['id']) - self.assertDictEqual(roles_ref[0], self.role_member) - - self.assignment_api.delete_grant(user_id=new_user['id'], - domain_id=new_domain['id'], - role_id='member') - roles_ref = self.assignment_api.list_grants( - user_id=new_user['id'], - domain_id=new_domain['id']) - self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.delete_grant, - user_id=new_user['id'], - domain_id=new_domain['id'], - role_id='member') - - def test_get_and_remove_role_grant_by_group_and_cross_domain(self): - group1_domain1_role = {'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex} - self.role_api.create_role(group1_domain1_role['id'], - group1_domain1_role) - group1_domain2_role = {'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex} - self.role_api.create_role(group1_domain2_role['id'], - group1_domain2_role) - domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain2['id'], domain2) - group1 = {'domain_id': domain1['id'], 'name': uuid.uuid4().hex} - group1 = self.identity_api.create_group(group1) - roles_ref = self.assignment_api.list_grants( - group_id=group1['id'], - domain_id=domain1['id']) - self.assertEqual(0, len(roles_ref)) - roles_ref = self.assignment_api.list_grants( - group_id=group1['id'], - domain_id=domain2['id']) - self.assertEqual(0, len(roles_ref)) - self.assignment_api.create_grant(group_id=group1['id'], - domain_id=domain1['id'], - role_id=group1_domain1_role['id']) - self.assignment_api.create_grant(group_id=group1['id'], - domain_id=domain2['id'], - role_id=group1_domain2_role['id']) - roles_ref = self.assignment_api.list_grants( - 
group_id=group1['id'], - domain_id=domain1['id']) - self.assertDictEqual(roles_ref[0], group1_domain1_role) - roles_ref = self.assignment_api.list_grants( - group_id=group1['id'], - domain_id=domain2['id']) - self.assertDictEqual(roles_ref[0], group1_domain2_role) - - self.assignment_api.delete_grant(group_id=group1['id'], - domain_id=domain2['id'], - role_id=group1_domain2_role['id']) - roles_ref = self.assignment_api.list_grants( - group_id=group1['id'], - domain_id=domain2['id']) - self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.delete_grant, - group_id=group1['id'], - domain_id=domain2['id'], - role_id=group1_domain2_role['id']) - - def test_get_and_remove_role_grant_by_user_and_cross_domain(self): - user1_domain1_role = {'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex} - self.role_api.create_role(user1_domain1_role['id'], user1_domain1_role) - user1_domain2_role = {'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex} - self.role_api.create_role(user1_domain2_role['id'], user1_domain2_role) - domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain2['id'], domain2) - user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'], - 'password': uuid.uuid4().hex, 'enabled': True} - user1 = self.identity_api.create_user(user1) - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - domain_id=domain1['id']) - self.assertEqual(0, len(roles_ref)) - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - domain_id=domain2['id']) - self.assertEqual(0, len(roles_ref)) - self.assignment_api.create_grant(user_id=user1['id'], - domain_id=domain1['id'], - role_id=user1_domain1_role['id']) - self.assignment_api.create_grant(user_id=user1['id'], - domain_id=domain2['id'], - role_id=user1_domain2_role['id']) - roles_ref 
= self.assignment_api.list_grants( - user_id=user1['id'], - domain_id=domain1['id']) - self.assertDictEqual(roles_ref[0], user1_domain1_role) - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - domain_id=domain2['id']) - self.assertDictEqual(roles_ref[0], user1_domain2_role) - - self.assignment_api.delete_grant(user_id=user1['id'], - domain_id=domain2['id'], - role_id=user1_domain2_role['id']) - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - domain_id=domain2['id']) - self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.delete_grant, - user_id=user1['id'], - domain_id=domain2['id'], - role_id=user1_domain2_role['id']) - - def test_role_grant_by_group_and_cross_domain_project(self): - role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.role_api.create_role(role1['id'], role1) - role2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.role_api.create_role(role2['id'], role2) - domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain2['id'], domain2) - group1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'], - 'enabled': True} - group1 = self.identity_api.create_group(group1) - project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain2['id']} - self.resource_api.create_project(project1['id'], project1) - roles_ref = self.assignment_api.list_grants( - group_id=group1['id'], - project_id=project1['id']) - self.assertEqual(0, len(roles_ref)) - self.assignment_api.create_grant(group_id=group1['id'], - project_id=project1['id'], - role_id=role1['id']) - self.assignment_api.create_grant(group_id=group1['id'], - project_id=project1['id'], - role_id=role2['id']) - roles_ref = self.assignment_api.list_grants( - group_id=group1['id'], - 
project_id=project1['id']) - - roles_ref_ids = [] - for ref in roles_ref: - roles_ref_ids.append(ref['id']) - self.assertIn(role1['id'], roles_ref_ids) - self.assertIn(role2['id'], roles_ref_ids) - - self.assignment_api.delete_grant(group_id=group1['id'], - project_id=project1['id'], - role_id=role1['id']) - roles_ref = self.assignment_api.list_grants( - group_id=group1['id'], - project_id=project1['id']) - self.assertEqual(1, len(roles_ref)) - self.assertDictEqual(roles_ref[0], role2) - - def test_role_grant_by_user_and_cross_domain_project(self): - role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.role_api.create_role(role1['id'], role1) - role2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.role_api.create_role(role2['id'], role2) - domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain2['id'], domain2) - user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'], - 'password': uuid.uuid4().hex, 'enabled': True} - user1 = self.identity_api.create_user(user1) - project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain2['id']} - self.resource_api.create_project(project1['id'], project1) - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) - self.assertEqual(0, len(roles_ref)) - self.assignment_api.create_grant(user_id=user1['id'], - project_id=project1['id'], - role_id=role1['id']) - self.assignment_api.create_grant(user_id=user1['id'], - project_id=project1['id'], - role_id=role2['id']) - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) - - roles_ref_ids = [] - for ref in roles_ref: - roles_ref_ids.append(ref['id']) - self.assertIn(role1['id'], roles_ref_ids) - self.assertIn(role2['id'], roles_ref_ids) - - 
self.assignment_api.delete_grant(user_id=user1['id'], - project_id=project1['id'], - role_id=role1['id']) - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) - self.assertEqual(1, len(roles_ref)) - self.assertDictEqual(roles_ref[0], role2) - - def test_delete_user_grant_no_user(self): - # Can delete a grant where the user doesn't exist. - role_id = uuid.uuid4().hex - role = {'id': role_id, 'name': uuid.uuid4().hex} - self.role_api.create_role(role_id, role) - - user_id = uuid.uuid4().hex - - self.assignment_api.create_grant(role_id, user_id=user_id, - project_id=self.tenant_bar['id']) - - self.assignment_api.delete_grant(role_id, user_id=user_id, - project_id=self.tenant_bar['id']) - - def test_delete_group_grant_no_group(self): - # Can delete a grant where the group doesn't exist. - role_id = uuid.uuid4().hex - role = {'id': role_id, 'name': uuid.uuid4().hex} - self.role_api.create_role(role_id, role) - - group_id = uuid.uuid4().hex - - self.assignment_api.create_grant(role_id, group_id=group_id, - project_id=self.tenant_bar['id']) - - self.assignment_api.delete_grant(role_id, group_id=group_id, - project_id=self.tenant_bar['id']) - - def test_grant_crud_throws_exception_if_invalid_role(self): - """Ensure RoleNotFound thrown if role does not exist.""" - - def assert_role_not_found_exception(f, **kwargs): - self.assertRaises(exception.RoleNotFound, f, - role_id=uuid.uuid4().hex, **kwargs) - - user = {'name': uuid.uuid4().hex, 'domain_id': DEFAULT_DOMAIN_ID, - 'password': uuid.uuid4().hex, 'enabled': True} - user_resp = self.identity_api.create_user(user) - group = {'name': uuid.uuid4().hex, 'domain_id': DEFAULT_DOMAIN_ID, - 'enabled': True} - group_resp = self.identity_api.create_group(group) - project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': DEFAULT_DOMAIN_ID} - project_resp = self.resource_api.create_project(project['id'], project) - - for manager_call in [self.assignment_api.create_grant, 
- self.assignment_api.get_grant, - self.assignment_api.delete_grant]: - assert_role_not_found_exception( - manager_call, - user_id=user_resp['id'], project_id=project_resp['id']) - assert_role_not_found_exception( - manager_call, - group_id=group_resp['id'], project_id=project_resp['id']) - assert_role_not_found_exception( - manager_call, - user_id=user_resp['id'], domain_id=DEFAULT_DOMAIN_ID) - assert_role_not_found_exception( - manager_call, - group_id=group_resp['id'], domain_id=DEFAULT_DOMAIN_ID) - - def test_multi_role_grant_by_user_group_on_project_domain(self): - role_list = [] - for _ in range(10): - role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.role_api.create_role(role['id'], role) - role_list.append(role) - domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain1['id'], domain1) - user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'], - 'password': uuid.uuid4().hex, 'enabled': True} - user1 = self.identity_api.create_user(user1) - group1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'], - 'enabled': True} - group1 = self.identity_api.create_group(group1) - group2 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'], - 'enabled': True} - group2 = self.identity_api.create_group(group2) - project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain1['id']} - self.resource_api.create_project(project1['id'], project1) - - self.identity_api.add_user_to_group(user1['id'], - group1['id']) - self.identity_api.add_user_to_group(user1['id'], - group2['id']) - - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) - self.assertEqual(0, len(roles_ref)) - self.assignment_api.create_grant(user_id=user1['id'], - domain_id=domain1['id'], - role_id=role_list[0]['id']) - self.assignment_api.create_grant(user_id=user1['id'], - domain_id=domain1['id'], - role_id=role_list[1]['id']) - 
self.assignment_api.create_grant(group_id=group1['id'], - domain_id=domain1['id'], - role_id=role_list[2]['id']) - self.assignment_api.create_grant(group_id=group1['id'], - domain_id=domain1['id'], - role_id=role_list[3]['id']) - self.assignment_api.create_grant(user_id=user1['id'], - project_id=project1['id'], - role_id=role_list[4]['id']) - self.assignment_api.create_grant(user_id=user1['id'], - project_id=project1['id'], - role_id=role_list[5]['id']) - self.assignment_api.create_grant(group_id=group1['id'], - project_id=project1['id'], - role_id=role_list[6]['id']) - self.assignment_api.create_grant(group_id=group1['id'], - project_id=project1['id'], - role_id=role_list[7]['id']) - roles_ref = self.assignment_api.list_grants(user_id=user1['id'], - domain_id=domain1['id']) - self.assertEqual(2, len(roles_ref)) - self.assertIn(role_list[0], roles_ref) - self.assertIn(role_list[1], roles_ref) - roles_ref = self.assignment_api.list_grants(group_id=group1['id'], - domain_id=domain1['id']) - self.assertEqual(2, len(roles_ref)) - self.assertIn(role_list[2], roles_ref) - self.assertIn(role_list[3], roles_ref) - roles_ref = self.assignment_api.list_grants(user_id=user1['id'], - project_id=project1['id']) - self.assertEqual(2, len(roles_ref)) - self.assertIn(role_list[4], roles_ref) - self.assertIn(role_list[5], roles_ref) - roles_ref = self.assignment_api.list_grants(group_id=group1['id'], - project_id=project1['id']) - self.assertEqual(2, len(roles_ref)) - self.assertIn(role_list[6], roles_ref) - self.assertIn(role_list[7], roles_ref) - - # Now test the alternate way of getting back lists of grants, - # where user and group roles are combined. These should match - # the above results. 
- combined_list = self.assignment_api.get_roles_for_user_and_project( - user1['id'], project1['id']) - self.assertEqual(4, len(combined_list)) - self.assertIn(role_list[4]['id'], combined_list) - self.assertIn(role_list[5]['id'], combined_list) - self.assertIn(role_list[6]['id'], combined_list) - self.assertIn(role_list[7]['id'], combined_list) - - combined_role_list = self.assignment_api.get_roles_for_user_and_domain( - user1['id'], domain1['id']) - self.assertEqual(4, len(combined_role_list)) - self.assertIn(role_list[0]['id'], combined_role_list) - self.assertIn(role_list[1]['id'], combined_role_list) - self.assertIn(role_list[2]['id'], combined_role_list) - self.assertIn(role_list[3]['id'], combined_role_list) - - def test_multi_group_grants_on_project_domain(self): - """Test multiple group roles for user on project and domain. - - Test Plan: - - - Create 6 roles - - Create a domain, with a project, user and two groups - - Make the user a member of both groups - - Check no roles yet exit - - Assign a role to each user and both groups on both the - project and domain - - Get a list of effective roles for the user on both the - project and domain, checking we get back the correct three - roles - - """ - role_list = [] - for _ in range(6): - role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.role_api.create_role(role['id'], role) - role_list.append(role) - domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain1['id'], domain1) - user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'], - 'password': uuid.uuid4().hex, 'enabled': True} - user1 = self.identity_api.create_user(user1) - group1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'], - 'enabled': True} - group1 = self.identity_api.create_group(group1) - group2 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'], - 'enabled': True} - group2 = self.identity_api.create_group(group2) - project1 = {'id': uuid.uuid4().hex, 'name': 
uuid.uuid4().hex, - 'domain_id': domain1['id']} - self.resource_api.create_project(project1['id'], project1) - - self.identity_api.add_user_to_group(user1['id'], - group1['id']) - self.identity_api.add_user_to_group(user1['id'], - group2['id']) - - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) - self.assertEqual(0, len(roles_ref)) - self.assignment_api.create_grant(user_id=user1['id'], - domain_id=domain1['id'], - role_id=role_list[0]['id']) - self.assignment_api.create_grant(group_id=group1['id'], - domain_id=domain1['id'], - role_id=role_list[1]['id']) - self.assignment_api.create_grant(group_id=group2['id'], - domain_id=domain1['id'], - role_id=role_list[2]['id']) - self.assignment_api.create_grant(user_id=user1['id'], - project_id=project1['id'], - role_id=role_list[3]['id']) - self.assignment_api.create_grant(group_id=group1['id'], - project_id=project1['id'], - role_id=role_list[4]['id']) - self.assignment_api.create_grant(group_id=group2['id'], - project_id=project1['id'], - role_id=role_list[5]['id']) - - # Read by the roles, ensuring we get the correct 3 roles for - # both project and domain - combined_list = self.assignment_api.get_roles_for_user_and_project( - user1['id'], project1['id']) - self.assertEqual(3, len(combined_list)) - self.assertIn(role_list[3]['id'], combined_list) - self.assertIn(role_list[4]['id'], combined_list) - self.assertIn(role_list[5]['id'], combined_list) - - combined_role_list = self.assignment_api.get_roles_for_user_and_domain( - user1['id'], domain1['id']) - self.assertEqual(3, len(combined_role_list)) - self.assertIn(role_list[0]['id'], combined_role_list) - self.assertIn(role_list[1]['id'], combined_role_list) - self.assertIn(role_list[2]['id'], combined_role_list) - - def test_delete_role_with_user_and_group_grants(self): - role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.role_api.create_role(role1['id'], role1) - domain1 = {'id': uuid.uuid4().hex, 'name': 
uuid.uuid4().hex} - self.resource_api.create_domain(domain1['id'], domain1) - project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain1['id']} - self.resource_api.create_project(project1['id'], project1) - user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'], - 'password': uuid.uuid4().hex, 'enabled': True} - user1 = self.identity_api.create_user(user1) - group1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'], - 'enabled': True} - group1 = self.identity_api.create_group(group1) - self.assignment_api.create_grant(user_id=user1['id'], - project_id=project1['id'], - role_id=role1['id']) - self.assignment_api.create_grant(user_id=user1['id'], - domain_id=domain1['id'], - role_id=role1['id']) - self.assignment_api.create_grant(group_id=group1['id'], - project_id=project1['id'], - role_id=role1['id']) - self.assignment_api.create_grant(group_id=group1['id'], - domain_id=domain1['id'], - role_id=role1['id']) - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) - self.assertEqual(1, len(roles_ref)) - roles_ref = self.assignment_api.list_grants( - group_id=group1['id'], - project_id=project1['id']) - self.assertEqual(1, len(roles_ref)) - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - domain_id=domain1['id']) - self.assertEqual(1, len(roles_ref)) - roles_ref = self.assignment_api.list_grants( - group_id=group1['id'], - domain_id=domain1['id']) - self.assertEqual(1, len(roles_ref)) - self.role_api.delete_role(role1['id']) - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) - self.assertEqual(0, len(roles_ref)) - roles_ref = self.assignment_api.list_grants( - group_id=group1['id'], - project_id=project1['id']) - self.assertEqual(0, len(roles_ref)) - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - domain_id=domain1['id']) - self.assertEqual(0, len(roles_ref)) - roles_ref = 
self.assignment_api.list_grants( - group_id=group1['id'], - domain_id=domain1['id']) - self.assertEqual(0, len(roles_ref)) - - def test_delete_user_with_group_project_domain_links(self): - role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.role_api.create_role(role1['id'], role1) - domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain1['id'], domain1) - project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain1['id']} - self.resource_api.create_project(project1['id'], project1) - user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'], - 'password': uuid.uuid4().hex, 'enabled': True} - user1 = self.identity_api.create_user(user1) - group1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'], - 'enabled': True} - group1 = self.identity_api.create_group(group1) - self.assignment_api.create_grant(user_id=user1['id'], - project_id=project1['id'], - role_id=role1['id']) - self.assignment_api.create_grant(user_id=user1['id'], - domain_id=domain1['id'], - role_id=role1['id']) - self.identity_api.add_user_to_group(user_id=user1['id'], - group_id=group1['id']) - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) - self.assertEqual(1, len(roles_ref)) - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - domain_id=domain1['id']) - self.assertEqual(1, len(roles_ref)) - self.identity_api.check_user_in_group( - user_id=user1['id'], - group_id=group1['id']) - self.identity_api.delete_user(user1['id']) - self.assertRaises(exception.NotFound, - self.identity_api.check_user_in_group, - user1['id'], - group1['id']) - - def test_delete_group_with_user_project_domain_links(self): - role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.role_api.create_role(role1['id'], role1) - domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain1['id'], domain1) - project1 = {'id': 
uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain1['id']} - self.resource_api.create_project(project1['id'], project1) - user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'], - 'password': uuid.uuid4().hex, 'enabled': True} - user1 = self.identity_api.create_user(user1) - group1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'], - 'enabled': True} - group1 = self.identity_api.create_group(group1) - - self.assignment_api.create_grant(group_id=group1['id'], - project_id=project1['id'], - role_id=role1['id']) - self.assignment_api.create_grant(group_id=group1['id'], - domain_id=domain1['id'], - role_id=role1['id']) - self.identity_api.add_user_to_group(user_id=user1['id'], - group_id=group1['id']) - roles_ref = self.assignment_api.list_grants( - group_id=group1['id'], - project_id=project1['id']) - self.assertEqual(1, len(roles_ref)) - roles_ref = self.assignment_api.list_grants( - group_id=group1['id'], - domain_id=domain1['id']) - self.assertEqual(1, len(roles_ref)) - self.identity_api.check_user_in_group( - user_id=user1['id'], - group_id=group1['id']) - self.identity_api.delete_group(group1['id']) - self.identity_api.get_user(user1['id']) - - def test_list_role_assignment_by_domain(self): - """Test listing of role assignment filtered by domain.""" - - test_plan = { - # A domain with 3 users, 1 group, a spoiler domain and 2 roles. - 'entities': {'domains': [{'users': 3, 'groups': 1}, 1], - 'roles': 2}, - # Users 1 & 2 are in the group - 'group_memberships': [{'group': 0, 'users': [1, 2]}], - # Assign a role for user 0 and the group - 'assignments': [{'user': 0, 'role': 0, 'domain': 0}, - {'group': 0, 'role': 1, 'domain': 0}], - 'tests': [ - # List all effective assignments for domain[0]. - # Should get one direct user role and user roles for each of - # the users in the group. 
- {'params': {'domain': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 1, 'role': 1, 'domain': 0, - 'indirect': {'group': 0}}, - {'user': 2, 'role': 1, 'domain': 0, - 'indirect': {'group': 0}} - ]}, - # Using domain[1] should return nothing - {'params': {'domain': 1, 'effective': True}, - 'results': []}, - ] - } - self.execute_assignment_test_plan(test_plan) - - def test_list_role_assignment_by_user_with_domain_group_roles(self): - """Test listing assignments by user, with group roles on a domain.""" - - test_plan = { - # A domain with 3 users, 3 groups, a spoiler domain - # plus 3 roles. - 'entities': {'domains': [{'users': 3, 'groups': 3}, 1], - 'roles': 3}, - # Users 1 & 2 are in the group 0, User 1 also in group 1 - 'group_memberships': [{'group': 0, 'users': [0, 1]}, - {'group': 1, 'users': [0]}], - 'assignments': [{'user': 0, 'role': 0, 'domain': 0}, - {'group': 0, 'role': 1, 'domain': 0}, - {'group': 1, 'role': 2, 'domain': 0}, - # ...and two spoiler assignments - {'user': 1, 'role': 1, 'domain': 0}, - {'group': 2, 'role': 2, 'domain': 0}], - 'tests': [ - # List all effective assignments for user[0]. 
- # Should get one direct user role and a user roles for each of - # groups 0 and 1 - {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'domain': 0, - 'indirect': {'group': 0}}, - {'user': 0, 'role': 2, 'domain': 0, - 'indirect': {'group': 1}} - ]}, - # Adding domain[0] as a filter should return the same data - {'params': {'user': 0, 'domain': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'domain': 0, - 'indirect': {'group': 0}}, - {'user': 0, 'role': 2, 'domain': 0, - 'indirect': {'group': 1}} - ]}, - # Using domain[1] should return nothing - {'params': {'user': 0, 'domain': 1, 'effective': True}, - 'results': []}, - # Using user[2] should return nothing - {'params': {'user': 2, 'domain': 0, 'effective': True}, - 'results': []}, - ] - } - self.execute_assignment_test_plan(test_plan) - - def test_delete_domain_with_user_group_project_links(self): - # TODO(chungg):add test case once expected behaviour defined - pass - - def test_add_user_to_project(self): - self.assignment_api.add_user_to_project(self.tenant_baz['id'], - self.user_foo['id']) - tenants = self.assignment_api.list_projects_for_user( - self.user_foo['id']) - self.assertIn(self.tenant_baz, tenants) - - def test_add_user_to_project_missing_default_role(self): - self.role_api.delete_role(CONF.member_role_id) - self.assertRaises(exception.RoleNotFound, - self.role_api.get_role, - CONF.member_role_id) - self.assignment_api.add_user_to_project(self.tenant_baz['id'], - self.user_foo['id']) - tenants = ( - self.assignment_api.list_projects_for_user(self.user_foo['id'])) - self.assertIn(self.tenant_baz, tenants) - default_role = self.role_api.get_role(CONF.member_role_id) - self.assertIsNotNone(default_role) - - def test_add_user_to_project_404(self): - self.assertRaises(exception.ProjectNotFound, - self.assignment_api.add_user_to_project, - uuid.uuid4().hex, - self.user_foo['id']) - - def 
test_add_user_to_project_no_user(self): - # If add_user_to_project and the user doesn't exist, then - # no error. - user_id_not_exist = uuid.uuid4().hex - self.assignment_api.add_user_to_project(self.tenant_bar['id'], - user_id_not_exist) - - def test_remove_user_from_project(self): - self.assignment_api.add_user_to_project(self.tenant_baz['id'], - self.user_foo['id']) - self.assignment_api.remove_user_from_project(self.tenant_baz['id'], - self.user_foo['id']) - tenants = self.assignment_api.list_projects_for_user( - self.user_foo['id']) - self.assertNotIn(self.tenant_baz, tenants) - - def test_remove_user_from_project_race_delete_role(self): - self.assignment_api.add_user_to_project(self.tenant_baz['id'], - self.user_foo['id']) - self.assignment_api.add_role_to_user_and_project( - tenant_id=self.tenant_baz['id'], - user_id=self.user_foo['id'], - role_id=self.role_other['id']) - - # Mock a race condition, delete a role after - # get_roles_for_user_and_project() is called in - # remove_user_from_project(). 
- roles = self.assignment_api.get_roles_for_user_and_project( - self.user_foo['id'], self.tenant_baz['id']) - self.role_api.delete_role(self.role_other['id']) - self.assignment_api.get_roles_for_user_and_project = mock.Mock( - return_value=roles) - self.assignment_api.remove_user_from_project(self.tenant_baz['id'], - self.user_foo['id']) - tenants = self.assignment_api.list_projects_for_user( - self.user_foo['id']) - self.assertNotIn(self.tenant_baz, tenants) - - def test_remove_user_from_project_404(self): - self.assertRaises(exception.ProjectNotFound, - self.assignment_api.remove_user_from_project, - uuid.uuid4().hex, - self.user_foo['id']) - - self.assertRaises(exception.UserNotFound, - self.assignment_api.remove_user_from_project, - self.tenant_bar['id'], - uuid.uuid4().hex) - - self.assertRaises(exception.NotFound, - self.assignment_api.remove_user_from_project, - self.tenant_baz['id'], - self.user_foo['id']) - - def test_list_user_project_ids_404(self): - self.assertRaises(exception.UserNotFound, - self.assignment_api.list_projects_for_user, - uuid.uuid4().hex) - - def test_update_project_404(self): - self.assertRaises(exception.ProjectNotFound, - self.resource_api.update_project, - uuid.uuid4().hex, - dict()) - - def test_delete_project_404(self): - self.assertRaises(exception.ProjectNotFound, - self.resource_api.delete_project, - uuid.uuid4().hex) - - def test_update_user_404(self): - user_id = uuid.uuid4().hex - self.assertRaises(exception.UserNotFound, - self.identity_api.update_user, - user_id, - {'id': user_id, - 'domain_id': DEFAULT_DOMAIN_ID}) - - def test_delete_user_with_project_association(self): - user = {'name': uuid.uuid4().hex, - 'domain_id': DEFAULT_DOMAIN_ID, - 'password': uuid.uuid4().hex} - user = self.identity_api.create_user(user) - self.assignment_api.add_user_to_project(self.tenant_bar['id'], - user['id']) - self.identity_api.delete_user(user['id']) - self.assertRaises(exception.UserNotFound, - 
self.assignment_api.list_projects_for_user, - user['id']) - - def test_delete_user_with_project_roles(self): - user = {'name': uuid.uuid4().hex, - 'domain_id': DEFAULT_DOMAIN_ID, - 'password': uuid.uuid4().hex} - user = self.identity_api.create_user(user) - self.assignment_api.add_role_to_user_and_project( - user['id'], - self.tenant_bar['id'], - self.role_member['id']) - self.identity_api.delete_user(user['id']) - self.assertRaises(exception.UserNotFound, - self.assignment_api.list_projects_for_user, - user['id']) - - def test_delete_user_404(self): - self.assertRaises(exception.UserNotFound, - self.identity_api.delete_user, - uuid.uuid4().hex) - - def test_delete_role_404(self): - self.assertRaises(exception.RoleNotFound, - self.role_api.delete_role, - uuid.uuid4().hex) - - def test_create_update_delete_unicode_project(self): - unicode_project_name = u'name \u540d\u5b57' - project = {'id': uuid.uuid4().hex, - 'name': unicode_project_name, - 'description': uuid.uuid4().hex, - 'domain_id': CONF.identity.default_domain_id} - self.resource_api.create_project(project['id'], project) - self.resource_api.update_project(project['id'], project) - self.resource_api.delete_project(project['id']) - - def test_create_project_with_no_enabled_field(self): - ref = { - 'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex.lower(), - 'domain_id': DEFAULT_DOMAIN_ID} - self.resource_api.create_project(ref['id'], ref) - - project = self.resource_api.get_project(ref['id']) - self.assertIs(project['enabled'], True) - - def test_create_project_long_name_fails(self): - tenant = {'id': 'fake1', 'name': 'a' * 65, - 'domain_id': DEFAULT_DOMAIN_ID} - self.assertRaises(exception.ValidationError, - self.resource_api.create_project, - tenant['id'], - tenant) - - def test_create_project_blank_name_fails(self): - tenant = {'id': 'fake1', 'name': '', - 'domain_id': DEFAULT_DOMAIN_ID} - self.assertRaises(exception.ValidationError, - self.resource_api.create_project, - tenant['id'], - tenant) - - def 
test_create_project_invalid_name_fails(self): - tenant = {'id': 'fake1', 'name': None, - 'domain_id': DEFAULT_DOMAIN_ID} - self.assertRaises(exception.ValidationError, - self.resource_api.create_project, - tenant['id'], - tenant) - tenant = {'id': 'fake1', 'name': 123, - 'domain_id': DEFAULT_DOMAIN_ID} - self.assertRaises(exception.ValidationError, - self.resource_api.create_project, - tenant['id'], - tenant) - - def test_update_project_blank_name_fails(self): - tenant = {'id': 'fake1', 'name': 'fake1', - 'domain_id': DEFAULT_DOMAIN_ID} - self.resource_api.create_project('fake1', tenant) - tenant['name'] = '' - self.assertRaises(exception.ValidationError, - self.resource_api.update_project, - tenant['id'], - tenant) - - def test_update_project_long_name_fails(self): - tenant = {'id': 'fake1', 'name': 'fake1', - 'domain_id': DEFAULT_DOMAIN_ID} - self.resource_api.create_project('fake1', tenant) - tenant['name'] = 'a' * 65 - self.assertRaises(exception.ValidationError, - self.resource_api.update_project, - tenant['id'], - tenant) - - def test_update_project_invalid_name_fails(self): - tenant = {'id': 'fake1', 'name': 'fake1', - 'domain_id': DEFAULT_DOMAIN_ID} - self.resource_api.create_project('fake1', tenant) - tenant['name'] = None - self.assertRaises(exception.ValidationError, - self.resource_api.update_project, - tenant['id'], - tenant) - - tenant['name'] = 123 - self.assertRaises(exception.ValidationError, - self.resource_api.update_project, - tenant['id'], - tenant) - - def test_create_user_long_name_fails(self): - user = {'name': 'a' * 256, - 'domain_id': DEFAULT_DOMAIN_ID} - self.assertRaises(exception.ValidationError, - self.identity_api.create_user, - user) - - def test_create_user_blank_name_fails(self): - user = {'name': '', - 'domain_id': DEFAULT_DOMAIN_ID} - self.assertRaises(exception.ValidationError, - self.identity_api.create_user, - user) - - def test_create_user_missed_password(self): - user = {'name': 'fake1', - 'domain_id': DEFAULT_DOMAIN_ID} - 
user = self.identity_api.create_user(user) - self.identity_api.get_user(user['id']) - # Make sure the user is not allowed to login - # with a password that is empty string or None - self.assertRaises(AssertionError, - self.identity_api.authenticate, - context={}, - user_id=user['id'], - password='') - self.assertRaises(AssertionError, - self.identity_api.authenticate, - context={}, - user_id=user['id'], - password=None) - - def test_create_user_none_password(self): - user = {'name': 'fake1', 'password': None, - 'domain_id': DEFAULT_DOMAIN_ID} - user = self.identity_api.create_user(user) - self.identity_api.get_user(user['id']) - # Make sure the user is not allowed to login - # with a password that is empty string or None - self.assertRaises(AssertionError, - self.identity_api.authenticate, - context={}, - user_id=user['id'], - password='') - self.assertRaises(AssertionError, - self.identity_api.authenticate, - context={}, - user_id=user['id'], - password=None) - - def test_create_user_invalid_name_fails(self): - user = {'name': None, - 'domain_id': DEFAULT_DOMAIN_ID} - self.assertRaises(exception.ValidationError, - self.identity_api.create_user, - user) - - user = {'name': 123, - 'domain_id': DEFAULT_DOMAIN_ID} - self.assertRaises(exception.ValidationError, - self.identity_api.create_user, - user) - - def test_update_project_invalid_enabled_type_string(self): - project = {'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'enabled': True, - 'domain_id': DEFAULT_DOMAIN_ID} - self.resource_api.create_project(project['id'], project) - project_ref = self.resource_api.get_project(project['id']) - self.assertEqual(True, project_ref['enabled']) - - # Strings are not valid boolean values - project['enabled'] = "false" - self.assertRaises(exception.ValidationError, - self.resource_api.update_project, - project['id'], - project) - - def test_create_project_invalid_enabled_type_string(self): - project = {'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'domain_id': 
DEFAULT_DOMAIN_ID, - # invalid string value - 'enabled': "true"} - self.assertRaises(exception.ValidationError, - self.resource_api.create_project, - project['id'], - project) - - def test_create_project_invalid_domain_id(self): - project = {'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'domain_id': uuid.uuid4().hex, - 'enabled': True} - self.assertRaises(exception.DomainNotFound, - self.resource_api.create_project, - project['id'], - project) - - def test_create_user_invalid_enabled_type_string(self): - user = {'name': uuid.uuid4().hex, - 'domain_id': DEFAULT_DOMAIN_ID, - 'password': uuid.uuid4().hex, - # invalid string value - 'enabled': "true"} - self.assertRaises(exception.ValidationError, - self.identity_api.create_user, - user) - - def test_update_user_long_name_fails(self): - user = {'name': 'fake1', - 'domain_id': DEFAULT_DOMAIN_ID} - user = self.identity_api.create_user(user) - user['name'] = 'a' * 256 - self.assertRaises(exception.ValidationError, - self.identity_api.update_user, - user['id'], - user) - - def test_update_user_blank_name_fails(self): - user = {'name': 'fake1', - 'domain_id': DEFAULT_DOMAIN_ID} - user = self.identity_api.create_user(user) - user['name'] = '' - self.assertRaises(exception.ValidationError, - self.identity_api.update_user, - user['id'], - user) - - def test_update_user_invalid_name_fails(self): - user = {'name': 'fake1', - 'domain_id': DEFAULT_DOMAIN_ID} - user = self.identity_api.create_user(user) - - user['name'] = None - self.assertRaises(exception.ValidationError, - self.identity_api.update_user, - user['id'], - user) - - user['name'] = 123 - self.assertRaises(exception.ValidationError, - self.identity_api.update_user, - user['id'], - user) - - def test_list_users(self): - users = self.identity_api.list_users( - domain_scope=self._set_domain_scope(DEFAULT_DOMAIN_ID)) - self.assertEqual(len(default_fixtures.USERS), len(users)) - user_ids = set(user['id'] for user in users) - expected_user_ids = set(getattr(self, 
'user_%s' % user['id'])['id'] - for user in default_fixtures.USERS) - for user_ref in users: - self.assertNotIn('password', user_ref) - self.assertEqual(expected_user_ids, user_ids) - - def test_list_groups(self): - group1 = { - 'domain_id': DEFAULT_DOMAIN_ID, - 'name': uuid.uuid4().hex} - group2 = { - 'domain_id': DEFAULT_DOMAIN_ID, - 'name': uuid.uuid4().hex} - group1 = self.identity_api.create_group(group1) - group2 = self.identity_api.create_group(group2) - groups = self.identity_api.list_groups( - domain_scope=self._set_domain_scope(DEFAULT_DOMAIN_ID)) - self.assertEqual(2, len(groups)) - group_ids = [] - for group in groups: - group_ids.append(group.get('id')) - self.assertIn(group1['id'], group_ids) - self.assertIn(group2['id'], group_ids) - - def test_list_domains(self): - domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain1['id'], domain1) - self.resource_api.create_domain(domain2['id'], domain2) - domains = self.resource_api.list_domains() - self.assertEqual(3, len(domains)) - domain_ids = [] - for domain in domains: - domain_ids.append(domain.get('id')) - self.assertIn(DEFAULT_DOMAIN_ID, domain_ids) - self.assertIn(domain1['id'], domain_ids) - self.assertIn(domain2['id'], domain_ids) - - def test_list_projects(self): - projects = self.resource_api.list_projects() - self.assertEqual(4, len(projects)) - project_ids = [] - for project in projects: - project_ids.append(project.get('id')) - self.assertIn(self.tenant_bar['id'], project_ids) - self.assertIn(self.tenant_baz['id'], project_ids) - - def test_list_projects_with_multiple_filters(self): - # Create a project - project = {'id': uuid.uuid4().hex, 'domain_id': DEFAULT_DOMAIN_ID, - 'name': uuid.uuid4().hex, 'description': uuid.uuid4().hex, - 'enabled': True, 'parent_id': None, 'is_domain': False} - self.resource_api.create_project(project['id'], project) - - # Build driver hints with the 
project's name and inexistent description - hints = driver_hints.Hints() - hints.add_filter('name', project['name']) - hints.add_filter('description', uuid.uuid4().hex) - - # Retrieve projects based on hints and check an empty list is returned - projects = self.resource_api.list_projects(hints) - self.assertEqual([], projects) - - # Build correct driver hints - hints = driver_hints.Hints() - hints.add_filter('name', project['name']) - hints.add_filter('description', project['description']) - - # Retrieve projects based on hints - projects = self.resource_api.list_projects(hints) - - # Check that the returned list contains only the first project - self.assertEqual(1, len(projects)) - self.assertEqual(project, projects[0]) - - def test_list_projects_for_domain(self): - project_ids = ([x['id'] for x in - self.resource_api.list_projects_in_domain( - DEFAULT_DOMAIN_ID)]) - self.assertEqual(4, len(project_ids)) - self.assertIn(self.tenant_bar['id'], project_ids) - self.assertIn(self.tenant_baz['id'], project_ids) - self.assertIn(self.tenant_mtu['id'], project_ids) - self.assertIn(self.tenant_service['id'], project_ids) - - @unit.skip_if_no_multiple_domains_support - def test_list_projects_for_alternate_domain(self): - domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain1['id'], domain1) - project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain1['id']} - self.resource_api.create_project(project1['id'], project1) - project2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain1['id']} - self.resource_api.create_project(project2['id'], project2) - project_ids = ([x['id'] for x in - self.resource_api.list_projects_in_domain( - domain1['id'])]) - self.assertEqual(2, len(project_ids)) - self.assertIn(project1['id'], project_ids) - self.assertIn(project2['id'], project_ids) - - def _create_projects_hierarchy(self, hierarchy_size=2, - domain_id=DEFAULT_DOMAIN_ID, - is_domain=False): 
- """Creates a project hierarchy with specified size. - - :param hierarchy_size: the desired hierarchy size, default is 2 - - a project with one child. - :param domain_id: domain where the projects hierarchy will be created. - :param is_domain: if the hierarchy will have the is_domain flag active - or not. - - :returns projects: a list of the projects in the created hierarchy. - - """ - project_id = uuid.uuid4().hex - project = {'id': project_id, - 'description': '', - 'enabled': True, - 'name': uuid.uuid4().hex, - 'parent_id': None, - 'domain_id': domain_id, - 'is_domain': is_domain} - self.resource_api.create_project(project_id, project) - - projects = [project] - for i in range(1, hierarchy_size): - new_project = {'id': uuid.uuid4().hex, - 'description': '', - 'enabled': True, - 'name': uuid.uuid4().hex, - 'parent_id': project_id, - 'is_domain': is_domain} - new_project['domain_id'] = domain_id - - self.resource_api.create_project(new_project['id'], new_project) - projects.append(new_project) - project_id = new_project['id'] - - return projects - - @unit.skip_if_no_multiple_domains_support - def test_create_domain_with_project_api(self): - project_id = uuid.uuid4().hex - project = {'id': project_id, - 'description': '', - 'domain_id': DEFAULT_DOMAIN_ID, - 'enabled': True, - 'name': uuid.uuid4().hex, - 'parent_id': None, - 'is_domain': True} - ref = self.resource_api.create_project(project['id'], project) - self.assertTrue(ref['is_domain']) - self.assertEqual(DEFAULT_DOMAIN_ID, ref['domain_id']) - - @unit.skip_if_no_multiple_domains_support - @test_utils.wip('waiting for projects acting as domains implementation') - def test_is_domain_sub_project_has_parent_domain_id(self): - project = {'id': uuid.uuid4().hex, - 'description': '', - 'domain_id': DEFAULT_DOMAIN_ID, - 'enabled': True, - 'name': uuid.uuid4().hex, - 'parent_id': None, - 'is_domain': True} - self.resource_api.create_project(project['id'], project) - - sub_project_id = uuid.uuid4().hex - sub_project = 
{'id': sub_project_id, - 'description': '', - 'domain_id': project['id'], - 'enabled': True, - 'name': uuid.uuid4().hex, - 'parent_id': project['id'], - 'is_domain': True} - ref = self.resource_api.create_project(sub_project['id'], sub_project) - self.assertTrue(ref['is_domain']) - self.assertEqual(project['id'], ref['parent_id']) - self.assertEqual(project['id'], ref['domain_id']) - - @unit.skip_if_no_multiple_domains_support - @test_utils.wip('waiting for projects acting as domains implementation') - def test_delete_domain_with_project_api(self): - project_id = uuid.uuid4().hex - project = {'id': project_id, - 'description': '', - 'domain_id': None, - 'enabled': True, - 'name': uuid.uuid4().hex, - 'parent_id': None, - 'is_domain': True} - self.resource_api.create_project(project['id'], project) - - # Try to delete is_domain project that is enabled - self.assertRaises(exception.ValidationError, - self.resource_api.delete_project, - project['id']) - - # Disable the project - project['enabled'] = False - self.resource_api.update_project(project['id'], project) - - # Successfuly delete the project - self.resource_api.delete_project(project['id']) - - @unit.skip_if_no_multiple_domains_support - @test_utils.wip('waiting for projects acting as domains implementation') - def test_create_domain_under_regular_project_hierarchy_fails(self): - # Creating a regular project hierarchy. Projects acting as domains - # can't have a parent that is a regular project. 
- projects_hierarchy = self._create_projects_hierarchy() - parent = projects_hierarchy[1] - project_id = uuid.uuid4().hex - project = {'id': project_id, - 'description': '', - 'domain_id': parent['id'], - 'enabled': True, - 'name': uuid.uuid4().hex, - 'parent_id': parent['id'], - 'is_domain': True} - - self.assertRaises(exception.ValidationError, - self.resource_api.create_project, - project['id'], project) - - @unit.skip_if_no_multiple_domains_support - @test_utils.wip('waiting for projects acting as domains implementation') - def test_create_project_under_domain_hierarchy(self): - projects_hierarchy = self._create_projects_hierarchy(is_domain=True) - parent = projects_hierarchy[1] - project = {'id': uuid.uuid4().hex, - 'description': '', - 'domain_id': parent['id'], - 'enabled': True, - 'name': uuid.uuid4().hex, - 'parent_id': parent['id'], - 'is_domain': False} - - ref = self.resource_api.create_project(project['id'], project) - self.assertFalse(ref['is_domain']) - self.assertEqual(parent['id'], ref['parent_id']) - self.assertEqual(parent['id'], ref['domain_id']) - - def test_create_project_without_is_domain_flag(self): - project = {'id': uuid.uuid4().hex, - 'description': '', - 'domain_id': DEFAULT_DOMAIN_ID, - 'enabled': True, - 'name': uuid.uuid4().hex, - 'parent_id': None} - - ref = self.resource_api.create_project(project['id'], project) - # The is_domain flag should be False by default - self.assertFalse(ref['is_domain']) - - def test_create_is_domain_project(self): - project = {'id': uuid.uuid4().hex, - 'description': '', - 'domain_id': DEFAULT_DOMAIN_ID, - 'enabled': True, - 'name': uuid.uuid4().hex, - 'parent_id': None, - 'is_domain': True} - - ref = self.resource_api.create_project(project['id'], project) - self.assertTrue(ref['is_domain']) - - @test_utils.wip('waiting for projects acting as domains implementation') - def test_create_project_with_parent_id_and_without_domain_id(self): - project = {'id': uuid.uuid4().hex, - 'description': '', - 
'domain_id': None, - 'enabled': True, - 'name': uuid.uuid4().hex, - 'parent_id': None} - self.resource_api.create_project(project['id'], project) - - sub_project = {'id': uuid.uuid4().hex, - 'description': '', - 'enabled': True, - 'name': uuid.uuid4().hex, - 'parent_id': project['id']} - ref = self.resource_api.create_project(sub_project['id'], sub_project) - - # The domain_id should be set to the parent domain_id - self.assertEqual(project['domain_id'], ref['domain_id']) - - @test_utils.wip('waiting for projects acting as domains implementation') - def test_create_project_with_domain_id_and_without_parent_id(self): - project = {'id': uuid.uuid4().hex, - 'description': '', - 'domain_id': None, - 'enabled': True, - 'name': uuid.uuid4().hex, - 'parent_id': None} - self.resource_api.create_project(project['id'], project) - - sub_project = {'id': uuid.uuid4().hex, - 'description': '', - 'enabled': True, - 'domain_id': project['id'], - 'name': uuid.uuid4().hex} - ref = self.resource_api.create_project(sub_project['id'], sub_project) - - # The parent_id should be set to the domain_id - self.assertEqual(ref['parent_id'], project['id']) - - def test_check_leaf_projects(self): - projects_hierarchy = self._create_projects_hierarchy() - root_project = projects_hierarchy[0] - leaf_project = projects_hierarchy[1] - - self.assertFalse(self.resource_api.is_leaf_project( - root_project['id'])) - self.assertTrue(self.resource_api.is_leaf_project( - leaf_project['id'])) - - # Delete leaf_project - self.resource_api.delete_project(leaf_project['id']) - - # Now, root_project should be leaf - self.assertTrue(self.resource_api.is_leaf_project( - root_project['id'])) - - def test_list_projects_in_subtree(self): - projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3) - project1 = projects_hierarchy[0] - project2 = projects_hierarchy[1] - project3 = projects_hierarchy[2] - project4 = {'id': uuid.uuid4().hex, - 'description': '', - 'domain_id': DEFAULT_DOMAIN_ID, - 
'enabled': True, - 'name': uuid.uuid4().hex, - 'parent_id': project2['id'], - 'is_domain': False} - self.resource_api.create_project(project4['id'], project4) - - subtree = self.resource_api.list_projects_in_subtree(project1['id']) - self.assertEqual(3, len(subtree)) - self.assertIn(project2, subtree) - self.assertIn(project3, subtree) - self.assertIn(project4, subtree) - - subtree = self.resource_api.list_projects_in_subtree(project2['id']) - self.assertEqual(2, len(subtree)) - self.assertIn(project3, subtree) - self.assertIn(project4, subtree) - - subtree = self.resource_api.list_projects_in_subtree(project3['id']) - self.assertEqual(0, len(subtree)) - - def test_list_projects_in_subtree_with_circular_reference(self): - project1_id = uuid.uuid4().hex - project2_id = uuid.uuid4().hex - - project1 = {'id': project1_id, - 'description': '', - 'domain_id': DEFAULT_DOMAIN_ID, - 'enabled': True, - 'name': uuid.uuid4().hex} - self.resource_api.create_project(project1['id'], project1) - - project2 = {'id': project2_id, - 'description': '', - 'domain_id': DEFAULT_DOMAIN_ID, - 'enabled': True, - 'name': uuid.uuid4().hex, - 'parent_id': project1_id} - self.resource_api.create_project(project2['id'], project2) - - project1['parent_id'] = project2_id # Adds cyclic reference - - # NOTE(dstanek): The manager does not allow parent_id to be updated. - # Instead will directly use the driver to create the cyclic - # reference. - self.resource_api.driver.update_project(project1_id, project1) - - subtree = self.resource_api.list_projects_in_subtree(project1_id) - - # NOTE(dstanek): If a cyclic refence is detected the code bails - # and returns None instead of falling into the infinite - # recursion trap. 
- self.assertIsNone(subtree) - - def test_list_projects_in_subtree_invalid_project_id(self): - self.assertRaises(exception.ValidationError, - self.resource_api.list_projects_in_subtree, - None) - - self.assertRaises(exception.ProjectNotFound, - self.resource_api.list_projects_in_subtree, - uuid.uuid4().hex) - - def test_list_project_parents(self): - projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3) - project1 = projects_hierarchy[0] - project2 = projects_hierarchy[1] - project3 = projects_hierarchy[2] - project4 = {'id': uuid.uuid4().hex, - 'description': '', - 'domain_id': DEFAULT_DOMAIN_ID, - 'enabled': True, - 'name': uuid.uuid4().hex, - 'parent_id': project2['id'], - 'is_domain': False} - self.resource_api.create_project(project4['id'], project4) - - parents1 = self.resource_api.list_project_parents(project3['id']) - self.assertEqual(2, len(parents1)) - self.assertIn(project1, parents1) - self.assertIn(project2, parents1) - - parents2 = self.resource_api.list_project_parents(project4['id']) - self.assertEqual(parents1, parents2) - - parents = self.resource_api.list_project_parents(project1['id']) - self.assertEqual(0, len(parents)) - - def test_list_project_parents_invalid_project_id(self): - self.assertRaises(exception.ValidationError, - self.resource_api.list_project_parents, - None) - - self.assertRaises(exception.ProjectNotFound, - self.resource_api.list_project_parents, - uuid.uuid4().hex) - - def test_delete_project_with_role_assignments(self): - tenant = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': DEFAULT_DOMAIN_ID} - self.resource_api.create_project(tenant['id'], tenant) - self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], tenant['id'], 'member') - self.resource_api.delete_project(tenant['id']) - self.assertRaises(exception.NotFound, - self.resource_api.get_project, - tenant['id']) - - def test_delete_role_check_role_grant(self): - role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} 
- alt_role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.role_api.create_role(role['id'], role) - self.role_api.create_role(alt_role['id'], alt_role) - self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], self.tenant_bar['id'], role['id']) - self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], self.tenant_bar['id'], alt_role['id']) - self.role_api.delete_role(role['id']) - roles_ref = self.assignment_api.get_roles_for_user_and_project( - self.user_foo['id'], self.tenant_bar['id']) - self.assertNotIn(role['id'], roles_ref) - self.assertIn(alt_role['id'], roles_ref) - - def test_create_project_doesnt_modify_passed_in_dict(self): - new_project = {'id': 'tenant_id', 'name': uuid.uuid4().hex, - 'domain_id': DEFAULT_DOMAIN_ID} - original_project = new_project.copy() - self.resource_api.create_project('tenant_id', new_project) - self.assertDictEqual(original_project, new_project) - - def test_create_user_doesnt_modify_passed_in_dict(self): - new_user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex, - 'domain_id': DEFAULT_DOMAIN_ID} - original_user = new_user.copy() - self.identity_api.create_user(new_user) - self.assertDictEqual(original_user, new_user) - - def test_update_user_enable(self): - user = {'name': 'fake1', 'enabled': True, - 'domain_id': DEFAULT_DOMAIN_ID} - user = self.identity_api.create_user(user) - user_ref = self.identity_api.get_user(user['id']) - self.assertEqual(True, user_ref['enabled']) - - user['enabled'] = False - self.identity_api.update_user(user['id'], user) - user_ref = self.identity_api.get_user(user['id']) - self.assertEqual(user['enabled'], user_ref['enabled']) - - # If not present, enabled field should not be updated - del user['enabled'] - self.identity_api.update_user(user['id'], user) - user_ref = self.identity_api.get_user(user['id']) - self.assertEqual(False, user_ref['enabled']) - - user['enabled'] = True - self.identity_api.update_user(user['id'], user) - user_ref = 
self.identity_api.get_user(user['id']) - self.assertEqual(user['enabled'], user_ref['enabled']) - - del user['enabled'] - self.identity_api.update_user(user['id'], user) - user_ref = self.identity_api.get_user(user['id']) - self.assertEqual(True, user_ref['enabled']) - - # Integers are valid Python's booleans. Explicitly test it. - user['enabled'] = 0 - self.identity_api.update_user(user['id'], user) - user_ref = self.identity_api.get_user(user['id']) - self.assertEqual(False, user_ref['enabled']) - - # Any integers other than 0 are interpreted as True - user['enabled'] = -42 - self.identity_api.update_user(user['id'], user) - user_ref = self.identity_api.get_user(user['id']) - self.assertEqual(True, user_ref['enabled']) - - def test_update_user_name(self): - user = {'name': uuid.uuid4().hex, - 'enabled': True, - 'domain_id': DEFAULT_DOMAIN_ID} - user = self.identity_api.create_user(user) - user_ref = self.identity_api.get_user(user['id']) - self.assertEqual(user['name'], user_ref['name']) - - changed_name = user_ref['name'] + '_changed' - user_ref['name'] = changed_name - updated_user = self.identity_api.update_user(user_ref['id'], user_ref) - - # NOTE(dstanek): the SQL backend adds an 'extra' field containing a - # dictionary of the extra fields in addition to the - # fields in the object. 
For the details see: - # SqlIdentity.test_update_project_returns_extra - updated_user.pop('extra', None) - - self.assertDictEqual(user_ref, updated_user) - - user_ref = self.identity_api.get_user(user_ref['id']) - self.assertEqual(changed_name, user_ref['name']) - - def test_update_user_enable_fails(self): - user = {'name': 'fake1', 'enabled': True, - 'domain_id': DEFAULT_DOMAIN_ID} - user = self.identity_api.create_user(user) - user_ref = self.identity_api.get_user(user['id']) - self.assertEqual(True, user_ref['enabled']) - - # Strings are not valid boolean values - user['enabled'] = "false" - self.assertRaises(exception.ValidationError, - self.identity_api.update_user, - user['id'], - user) - - def test_update_project_enable(self): - tenant = {'id': 'fake1', 'name': 'fake1', 'enabled': True, - 'domain_id': DEFAULT_DOMAIN_ID} - self.resource_api.create_project('fake1', tenant) - tenant_ref = self.resource_api.get_project('fake1') - self.assertEqual(True, tenant_ref['enabled']) - - tenant['enabled'] = False - self.resource_api.update_project('fake1', tenant) - tenant_ref = self.resource_api.get_project('fake1') - self.assertEqual(tenant['enabled'], tenant_ref['enabled']) - - # If not present, enabled field should not be updated - del tenant['enabled'] - self.resource_api.update_project('fake1', tenant) - tenant_ref = self.resource_api.get_project('fake1') - self.assertEqual(False, tenant_ref['enabled']) - - tenant['enabled'] = True - self.resource_api.update_project('fake1', tenant) - tenant_ref = self.resource_api.get_project('fake1') - self.assertEqual(tenant['enabled'], tenant_ref['enabled']) - - del tenant['enabled'] - self.resource_api.update_project('fake1', tenant) - tenant_ref = self.resource_api.get_project('fake1') - self.assertEqual(True, tenant_ref['enabled']) - - def test_add_user_to_group(self): - domain = self._get_domain_fixture() - new_group = {'domain_id': domain['id'], 'name': uuid.uuid4().hex} - new_group = 
self.identity_api.create_group(new_group) - new_user = {'name': 'new_user', 'password': uuid.uuid4().hex, - 'enabled': True, 'domain_id': domain['id']} - new_user = self.identity_api.create_user(new_user) - self.identity_api.add_user_to_group(new_user['id'], - new_group['id']) - groups = self.identity_api.list_groups_for_user(new_user['id']) - - found = False - for x in groups: - if (x['id'] == new_group['id']): - found = True - self.assertTrue(found) - - def test_add_user_to_group_404(self): - domain = self._get_domain_fixture() - new_user = {'name': 'new_user', 'password': uuid.uuid4().hex, - 'enabled': True, 'domain_id': domain['id']} - new_user = self.identity_api.create_user(new_user) - self.assertRaises(exception.GroupNotFound, - self.identity_api.add_user_to_group, - new_user['id'], - uuid.uuid4().hex) - - new_group = {'domain_id': domain['id'], 'name': uuid.uuid4().hex} - new_group = self.identity_api.create_group(new_group) - self.assertRaises(exception.UserNotFound, - self.identity_api.add_user_to_group, - uuid.uuid4().hex, - new_group['id']) - - self.assertRaises(exception.NotFound, - self.identity_api.add_user_to_group, - uuid.uuid4().hex, - uuid.uuid4().hex) - - def test_check_user_in_group(self): - domain = self._get_domain_fixture() - new_group = {'domain_id': domain['id'], 'name': uuid.uuid4().hex} - new_group = self.identity_api.create_group(new_group) - new_user = {'name': 'new_user', 'password': uuid.uuid4().hex, - 'enabled': True, 'domain_id': domain['id']} - new_user = self.identity_api.create_user(new_user) - self.identity_api.add_user_to_group(new_user['id'], - new_group['id']) - self.identity_api.check_user_in_group(new_user['id'], new_group['id']) - - def test_create_invalid_domain_fails(self): - new_group = {'domain_id': "doesnotexist", 'name': uuid.uuid4().hex} - self.assertRaises(exception.DomainNotFound, - self.identity_api.create_group, - new_group) - new_user = {'name': 'new_user', 'password': uuid.uuid4().hex, - 'enabled': True, 
'domain_id': "doesnotexist"} - self.assertRaises(exception.DomainNotFound, - self.identity_api.create_user, - new_user) - - def test_check_user_not_in_group(self): - new_group = { - 'domain_id': DEFAULT_DOMAIN_ID, - 'name': uuid.uuid4().hex} - new_group = self.identity_api.create_group(new_group) - - new_user = {'name': 'new_user', 'password': uuid.uuid4().hex, - 'enabled': True, 'domain_id': DEFAULT_DOMAIN_ID} - new_user = self.identity_api.create_user(new_user) - - self.assertRaises(exception.NotFound, - self.identity_api.check_user_in_group, - new_user['id'], - new_group['id']) - - def test_check_user_in_group_404(self): - new_user = {'name': 'new_user', 'password': uuid.uuid4().hex, - 'enabled': True, 'domain_id': DEFAULT_DOMAIN_ID} - new_user = self.identity_api.create_user(new_user) - - new_group = { - 'domain_id': DEFAULT_DOMAIN_ID, - 'name': uuid.uuid4().hex} - new_group = self.identity_api.create_group(new_group) - - self.assertRaises(exception.UserNotFound, - self.identity_api.check_user_in_group, - uuid.uuid4().hex, - new_group['id']) - - self.assertRaises(exception.GroupNotFound, - self.identity_api.check_user_in_group, - new_user['id'], - uuid.uuid4().hex) - - self.assertRaises(exception.NotFound, - self.identity_api.check_user_in_group, - uuid.uuid4().hex, - uuid.uuid4().hex) - - def test_list_users_in_group(self): - domain = self._get_domain_fixture() - new_group = {'domain_id': domain['id'], 'name': uuid.uuid4().hex} - new_group = self.identity_api.create_group(new_group) - # Make sure we get an empty list back on a new group, not an error. - user_refs = self.identity_api.list_users_in_group(new_group['id']) - self.assertEqual([], user_refs) - # Make sure we get the correct users back once they have been added - # to the group. 
- new_user = {'name': 'new_user', 'password': uuid.uuid4().hex, - 'enabled': True, 'domain_id': domain['id']} - new_user = self.identity_api.create_user(new_user) - self.identity_api.add_user_to_group(new_user['id'], - new_group['id']) - user_refs = self.identity_api.list_users_in_group(new_group['id']) - found = False - for x in user_refs: - if (x['id'] == new_user['id']): - found = True - self.assertNotIn('password', x) - self.assertTrue(found) - - def test_list_users_in_group_404(self): - self.assertRaises(exception.GroupNotFound, - self.identity_api.list_users_in_group, - uuid.uuid4().hex) - - def test_list_groups_for_user(self): - domain = self._get_domain_fixture() - test_groups = [] - test_users = [] - GROUP_COUNT = 3 - USER_COUNT = 2 - - for x in range(0, USER_COUNT): - new_user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex, - 'enabled': True, 'domain_id': domain['id']} - new_user = self.identity_api.create_user(new_user) - test_users.append(new_user) - positive_user = test_users[0] - negative_user = test_users[1] - - for x in range(0, USER_COUNT): - group_refs = self.identity_api.list_groups_for_user( - test_users[x]['id']) - self.assertEqual(0, len(group_refs)) - - for x in range(0, GROUP_COUNT): - before_count = x - after_count = x + 1 - new_group = {'domain_id': domain['id'], - 'name': uuid.uuid4().hex} - new_group = self.identity_api.create_group(new_group) - test_groups.append(new_group) - - # add the user to the group and ensure that the - # group count increases by one for each - group_refs = self.identity_api.list_groups_for_user( - positive_user['id']) - self.assertEqual(before_count, len(group_refs)) - self.identity_api.add_user_to_group( - positive_user['id'], - new_group['id']) - group_refs = self.identity_api.list_groups_for_user( - positive_user['id']) - self.assertEqual(after_count, len(group_refs)) - - # Make sure the group count for the unrelated user did not change - group_refs = self.identity_api.list_groups_for_user( - 
negative_user['id']) - self.assertEqual(0, len(group_refs)) - - # remove the user from each group and ensure that - # the group count reduces by one for each - for x in range(0, 3): - before_count = GROUP_COUNT - x - after_count = GROUP_COUNT - x - 1 - group_refs = self.identity_api.list_groups_for_user( - positive_user['id']) - self.assertEqual(before_count, len(group_refs)) - self.identity_api.remove_user_from_group( - positive_user['id'], - test_groups[x]['id']) - group_refs = self.identity_api.list_groups_for_user( - positive_user['id']) - self.assertEqual(after_count, len(group_refs)) - # Make sure the group count for the unrelated user - # did not change - group_refs = self.identity_api.list_groups_for_user( - negative_user['id']) - self.assertEqual(0, len(group_refs)) - - def test_remove_user_from_group(self): - domain = self._get_domain_fixture() - new_group = {'domain_id': domain['id'], 'name': uuid.uuid4().hex} - new_group = self.identity_api.create_group(new_group) - new_user = {'name': 'new_user', 'password': uuid.uuid4().hex, - 'enabled': True, 'domain_id': domain['id']} - new_user = self.identity_api.create_user(new_user) - self.identity_api.add_user_to_group(new_user['id'], - new_group['id']) - groups = self.identity_api.list_groups_for_user(new_user['id']) - self.assertIn(new_group['id'], [x['id'] for x in groups]) - self.identity_api.remove_user_from_group(new_user['id'], - new_group['id']) - groups = self.identity_api.list_groups_for_user(new_user['id']) - self.assertNotIn(new_group['id'], [x['id'] for x in groups]) - - def test_remove_user_from_group_404(self): - domain = self._get_domain_fixture() - new_user = {'name': 'new_user', 'password': uuid.uuid4().hex, - 'enabled': True, 'domain_id': domain['id']} - new_user = self.identity_api.create_user(new_user) - new_group = {'domain_id': domain['id'], 'name': uuid.uuid4().hex} - new_group = self.identity_api.create_group(new_group) - self.assertRaises(exception.GroupNotFound, - 
self.identity_api.remove_user_from_group, - new_user['id'], - uuid.uuid4().hex) - - self.assertRaises(exception.UserNotFound, - self.identity_api.remove_user_from_group, - uuid.uuid4().hex, - new_group['id']) - - self.assertRaises(exception.NotFound, - self.identity_api.remove_user_from_group, - uuid.uuid4().hex, - uuid.uuid4().hex) - - def test_group_crud(self): - domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain['id'], domain) - group = {'domain_id': domain['id'], 'name': uuid.uuid4().hex} - group = self.identity_api.create_group(group) - group_ref = self.identity_api.get_group(group['id']) - self.assertDictContainsSubset(group, group_ref) - - group['name'] = uuid.uuid4().hex - self.identity_api.update_group(group['id'], group) - group_ref = self.identity_api.get_group(group['id']) - self.assertDictContainsSubset(group, group_ref) - - self.identity_api.delete_group(group['id']) - self.assertRaises(exception.GroupNotFound, - self.identity_api.get_group, - group['id']) - - def test_get_group_by_name(self): - group_name = uuid.uuid4().hex - group = {'domain_id': DEFAULT_DOMAIN_ID, 'name': group_name} - group = self.identity_api.create_group(group) - spoiler = {'domain_id': DEFAULT_DOMAIN_ID, 'name': uuid.uuid4().hex} - self.identity_api.create_group(spoiler) - - group_ref = self.identity_api.get_group_by_name( - group_name, DEFAULT_DOMAIN_ID) - self.assertDictEqual(group_ref, group) - - def test_get_group_by_name_404(self): - self.assertRaises(exception.GroupNotFound, - self.identity_api.get_group_by_name, - uuid.uuid4().hex, - DEFAULT_DOMAIN_ID) - - @unit.skip_if_cache_disabled('identity') - def test_cache_layer_group_crud(self): - group = {'domain_id': DEFAULT_DOMAIN_ID, 'name': uuid.uuid4().hex} - group = self.identity_api.create_group(group) - # cache the result - group_ref = self.identity_api.get_group(group['id']) - # delete the group bypassing identity api. 
- domain_id, driver, entity_id = ( - self.identity_api._get_domain_driver_and_entity_id(group['id'])) - driver.delete_group(entity_id) - - self.assertEqual(group_ref, self.identity_api.get_group(group['id'])) - self.identity_api.get_group.invalidate(self.identity_api, group['id']) - self.assertRaises(exception.GroupNotFound, - self.identity_api.get_group, group['id']) - - group = {'domain_id': DEFAULT_DOMAIN_ID, 'name': uuid.uuid4().hex} - group = self.identity_api.create_group(group) - # cache the result - self.identity_api.get_group(group['id']) - group['name'] = uuid.uuid4().hex - group_ref = self.identity_api.update_group(group['id'], group) - # after updating through identity api, get updated group - self.assertDictContainsSubset(self.identity_api.get_group(group['id']), - group_ref) - - def test_create_duplicate_group_name_fails(self): - group1 = {'domain_id': DEFAULT_DOMAIN_ID, 'name': uuid.uuid4().hex} - group2 = {'domain_id': DEFAULT_DOMAIN_ID, 'name': group1['name']} - group1 = self.identity_api.create_group(group1) - self.assertRaises(exception.Conflict, - self.identity_api.create_group, - group2) - - def test_create_duplicate_group_name_in_different_domains(self): - new_domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(new_domain['id'], new_domain) - group1 = {'domain_id': DEFAULT_DOMAIN_ID, 'name': uuid.uuid4().hex} - group2 = {'domain_id': new_domain['id'], 'name': group1['name']} - group1 = self.identity_api.create_group(group1) - group2 = self.identity_api.create_group(group2) - - def test_move_group_between_domains(self): - domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain2['id'], domain2) - group = {'name': uuid.uuid4().hex, - 'domain_id': domain1['id']} - group = self.identity_api.create_group(group) - group['domain_id'] = 
domain2['id'] - self.identity_api.update_group(group['id'], group) - - def test_move_group_between_domains_with_clashing_names_fails(self): - domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain2['id'], domain2) - # First, create a group in domain1 - group1 = {'name': uuid.uuid4().hex, - 'domain_id': domain1['id']} - group1 = self.identity_api.create_group(group1) - # Now create a group in domain2 with a potentially clashing - # name - which should work since we have domain separation - group2 = {'name': group1['name'], - 'domain_id': domain2['id']} - group2 = self.identity_api.create_group(group2) - # Now try and move group1 into the 2nd domain - which should - # fail since the names clash - group1['domain_id'] = domain2['id'] - self.assertRaises(exception.Conflict, - self.identity_api.update_group, - group1['id'], - group1) - - @unit.skip_if_no_multiple_domains_support - def test_project_crud(self): - domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'enabled': True} - self.resource_api.create_domain(domain['id'], domain) - project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain['id']} - self.resource_api.create_project(project['id'], project) - project_ref = self.resource_api.get_project(project['id']) - self.assertDictContainsSubset(project, project_ref) - - project['name'] = uuid.uuid4().hex - self.resource_api.update_project(project['id'], project) - project_ref = self.resource_api.get_project(project['id']) - self.assertDictContainsSubset(project, project_ref) - - self.resource_api.delete_project(project['id']) - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - project['id']) - - def test_domain_delete_hierarchy(self): - domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'enabled': True} - 
self.resource_api.create_domain(domain['id'], domain) - - # Creating a root and a leaf project inside the domain - projects_hierarchy = self._create_projects_hierarchy( - domain_id=domain['id']) - root_project = projects_hierarchy[0] - leaf_project = projects_hierarchy[0] - - # Disable the domain - domain['enabled'] = False - self.resource_api.update_domain(domain['id'], domain) - - # Delete the domain - self.resource_api.delete_domain(domain['id']) - - # Make sure the domain no longer exists - self.assertRaises(exception.DomainNotFound, - self.resource_api.get_domain, - domain['id']) - - # Make sure the root project no longer exists - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - root_project['id']) - - # Make sure the leaf project no longer exists - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - leaf_project['id']) - - def test_hierarchical_projects_crud(self): - # create a hierarchy with just a root project (which is a leaf as well) - projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=1) - root_project1 = projects_hierarchy[0] - - # create a hierarchy with one root project and one leaf project - projects_hierarchy = self._create_projects_hierarchy() - root_project2 = projects_hierarchy[0] - leaf_project = projects_hierarchy[1] - - # update description from leaf_project - leaf_project['description'] = 'new description' - self.resource_api.update_project(leaf_project['id'], leaf_project) - proj_ref = self.resource_api.get_project(leaf_project['id']) - self.assertDictEqual(proj_ref, leaf_project) - - # update the parent_id is not allowed - leaf_project['parent_id'] = root_project1['id'] - self.assertRaises(exception.ForbiddenAction, - self.resource_api.update_project, - leaf_project['id'], - leaf_project) - - # delete root_project1 - self.resource_api.delete_project(root_project1['id']) - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - 
root_project1['id']) - - # delete root_project2 is not allowed since it is not a leaf project - self.assertRaises(exception.ForbiddenAction, - self.resource_api.delete_project, - root_project2['id']) - - def test_create_project_with_invalid_parent(self): - project = {'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'description': '', - 'domain_id': DEFAULT_DOMAIN_ID, - 'enabled': True, - 'parent_id': 'fake', - 'is_domain': False} - self.assertRaises(exception.ProjectNotFound, - self.resource_api.create_project, - project['id'], - project) - - @unit.skip_if_no_multiple_domains_support - def test_create_leaf_project_with_different_domain(self): - root_project = {'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'description': '', - 'domain_id': DEFAULT_DOMAIN_ID, - 'enabled': True, - 'parent_id': None, - 'is_domain': False} - self.resource_api.create_project(root_project['id'], root_project) - - domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'enabled': True} - self.resource_api.create_domain(domain['id'], domain) - leaf_project = {'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'description': '', - 'domain_id': domain['id'], - 'enabled': True, - 'parent_id': root_project['id'], - 'is_domain': False} - - self.assertRaises(exception.ValidationError, - self.resource_api.create_project, - leaf_project['id'], - leaf_project) - - def test_delete_hierarchical_leaf_project(self): - projects_hierarchy = self._create_projects_hierarchy() - root_project = projects_hierarchy[0] - leaf_project = projects_hierarchy[1] - - self.resource_api.delete_project(leaf_project['id']) - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - leaf_project['id']) - - self.resource_api.delete_project(root_project['id']) - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - root_project['id']) - - def test_delete_hierarchical_not_leaf_project(self): - projects_hierarchy = self._create_projects_hierarchy() - 
root_project = projects_hierarchy[0] - - self.assertRaises(exception.ForbiddenAction, - self.resource_api.delete_project, - root_project['id']) - - def test_update_project_parent(self): - projects_hierarchy = self._create_projects_hierarchy(hierarchy_size=3) - project1 = projects_hierarchy[0] - project2 = projects_hierarchy[1] - project3 = projects_hierarchy[2] - - # project2 is the parent from project3 - self.assertEqual(project3.get('parent_id'), project2['id']) - - # try to update project3 parent to parent1 - project3['parent_id'] = project1['id'] - self.assertRaises(exception.ForbiddenAction, - self.resource_api.update_project, - project3['id'], - project3) - - def test_create_project_under_disabled_one(self): - project1 = {'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'domain_id': DEFAULT_DOMAIN_ID, - 'enabled': False, - 'parent_id': None, - 'is_domain': False} - self.resource_api.create_project(project1['id'], project1) - - project2 = {'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'domain_id': DEFAULT_DOMAIN_ID, - 'parent_id': project1['id'], - 'is_domain': False} - - # It's not possible to create a project under a disabled one in the - # hierarchy - self.assertRaises(exception.ValidationError, - self.resource_api.create_project, - project2['id'], - project2) - - def test_disable_hierarchical_leaf_project(self): - projects_hierarchy = self._create_projects_hierarchy() - leaf_project = projects_hierarchy[1] - - leaf_project['enabled'] = False - self.resource_api.update_project(leaf_project['id'], leaf_project) - - project_ref = self.resource_api.get_project(leaf_project['id']) - self.assertEqual(project_ref['enabled'], leaf_project['enabled']) - - def test_disable_hierarchical_not_leaf_project(self): - projects_hierarchy = self._create_projects_hierarchy() - root_project = projects_hierarchy[0] - - root_project['enabled'] = False - self.assertRaises(exception.ForbiddenAction, - self.resource_api.update_project, - root_project['id'], - 
root_project) - - def test_enable_project_with_disabled_parent(self): - projects_hierarchy = self._create_projects_hierarchy() - root_project = projects_hierarchy[0] - leaf_project = projects_hierarchy[1] - - # Disable leaf and root - leaf_project['enabled'] = False - self.resource_api.update_project(leaf_project['id'], leaf_project) - root_project['enabled'] = False - self.resource_api.update_project(root_project['id'], root_project) - - # Try to enable the leaf project, it's not possible since it has - # a disabled parent - leaf_project['enabled'] = True - self.assertRaises(exception.ForbiddenAction, - self.resource_api.update_project, - leaf_project['id'], - leaf_project) - - def _get_hierarchy_depth(self, project_id): - return len(self.resource_api.list_project_parents(project_id)) + 1 - - def test_check_hierarchy_depth(self): - # First create a hierarchy with the max allowed depth - projects_hierarchy = self._create_projects_hierarchy( - CONF.max_project_tree_depth) - leaf_project = projects_hierarchy[CONF.max_project_tree_depth - 1] - - depth = self._get_hierarchy_depth(leaf_project['id']) - self.assertEqual(CONF.max_project_tree_depth, depth) - - # Creating another project in the hierarchy shouldn't be allowed - project_id = uuid.uuid4().hex - project = { - 'id': project_id, - 'name': uuid.uuid4().hex, - 'domain_id': DEFAULT_DOMAIN_ID, - 'parent_id': leaf_project['id'], - 'is_domain': False} - self.assertRaises(exception.ForbiddenAction, - self.resource_api.create_project, - project_id, - project) - - def test_project_update_missing_attrs_with_a_value(self): - # Creating a project with no description attribute. - project = {'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'domain_id': DEFAULT_DOMAIN_ID, - 'enabled': True, - 'parent_id': None, - 'is_domain': False} - self.resource_api.create_project(project['id'], project) - - # Add a description attribute. 
- project['description'] = uuid.uuid4().hex - self.resource_api.update_project(project['id'], project) - - project_ref = self.resource_api.get_project(project['id']) - self.assertDictEqual(project_ref, project) - - def test_project_update_missing_attrs_with_a_falsey_value(self): - # Creating a project with no description attribute. - project = {'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'domain_id': DEFAULT_DOMAIN_ID, - 'enabled': True, - 'parent_id': None, - 'is_domain': False} - self.resource_api.create_project(project['id'], project) - - # Add a description attribute. - project['description'] = '' - self.resource_api.update_project(project['id'], project) - - project_ref = self.resource_api.get_project(project['id']) - self.assertDictEqual(project_ref, project) - - def test_domain_crud(self): - domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'enabled': True} - self.resource_api.create_domain(domain['id'], domain) - domain_ref = self.resource_api.get_domain(domain['id']) - self.assertDictEqual(domain_ref, domain) - - domain['name'] = uuid.uuid4().hex - self.resource_api.update_domain(domain['id'], domain) - domain_ref = self.resource_api.get_domain(domain['id']) - self.assertDictEqual(domain_ref, domain) - - # Ensure an 'enabled' domain cannot be deleted - self.assertRaises(exception.ForbiddenAction, - self.resource_api.delete_domain, - domain_id=domain['id']) - - # Disable the domain - domain['enabled'] = False - self.resource_api.update_domain(domain['id'], domain) - - # Delete the domain - self.resource_api.delete_domain(domain['id']) - - # Make sure the domain no longer exists - self.assertRaises(exception.DomainNotFound, - self.resource_api.get_domain, - domain['id']) - - @unit.skip_if_no_multiple_domains_support - def test_create_domain_case_sensitivity(self): - # create a ref with a lowercase name - ref = { - 'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex.lower()} - self.resource_api.create_domain(ref['id'], ref) - - # assign a 
new ID with the same name, but this time in uppercase - ref['id'] = uuid.uuid4().hex - ref['name'] = ref['name'].upper() - self.resource_api.create_domain(ref['id'], ref) - - def test_attribute_update(self): - project = { - 'domain_id': DEFAULT_DOMAIN_ID, - 'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex} - self.resource_api.create_project(project['id'], project) - - # pick a key known to be non-existent - key = 'description' - - def assert_key_equals(value): - project_ref = self.resource_api.update_project( - project['id'], project) - self.assertEqual(value, project_ref[key]) - project_ref = self.resource_api.get_project(project['id']) - self.assertEqual(value, project_ref[key]) - - def assert_get_key_is(value): - project_ref = self.resource_api.update_project( - project['id'], project) - self.assertIs(project_ref.get(key), value) - project_ref = self.resource_api.get_project(project['id']) - self.assertIs(project_ref.get(key), value) - - # add an attribute that doesn't exist, set it to a falsey value - value = '' - project[key] = value - assert_key_equals(value) - - # set an attribute with a falsey value to null - value = None - project[key] = value - assert_get_key_is(value) - - # do it again, in case updating from this situation is handled oddly - value = None - project[key] = value - assert_get_key_is(value) - - # set a possibly-null value to a falsey value - value = '' - project[key] = value - assert_key_equals(value) - - # set a falsey value to a truthy value - value = uuid.uuid4().hex - project[key] = value - assert_key_equals(value) - - def test_user_crud(self): - user_dict = {'domain_id': DEFAULT_DOMAIN_ID, - 'name': uuid.uuid4().hex, 'password': 'passw0rd'} - user = self.identity_api.create_user(user_dict) - user_ref = self.identity_api.get_user(user['id']) - del user_dict['password'] - user_ref_dict = {x: user_ref[x] for x in user_ref} - self.assertDictContainsSubset(user_dict, user_ref_dict) - - user_dict['password'] = uuid.uuid4().hex - 
self.identity_api.update_user(user['id'], user_dict) - user_ref = self.identity_api.get_user(user['id']) - del user_dict['password'] - user_ref_dict = {x: user_ref[x] for x in user_ref} - self.assertDictContainsSubset(user_dict, user_ref_dict) - - self.identity_api.delete_user(user['id']) - self.assertRaises(exception.UserNotFound, - self.identity_api.get_user, - user['id']) - - def test_list_projects_for_user(self): - domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain['id'], domain) - user1 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex, - 'domain_id': domain['id'], 'enabled': True} - user1 = self.identity_api.create_user(user1) - user_projects = self.assignment_api.list_projects_for_user(user1['id']) - self.assertEqual(0, len(user_projects)) - self.assignment_api.create_grant(user_id=user1['id'], - project_id=self.tenant_bar['id'], - role_id=self.role_member['id']) - self.assignment_api.create_grant(user_id=user1['id'], - project_id=self.tenant_baz['id'], - role_id=self.role_member['id']) - user_projects = self.assignment_api.list_projects_for_user(user1['id']) - self.assertEqual(2, len(user_projects)) - - def test_list_projects_for_user_with_grants(self): - # Create two groups each with a role on a different project, and - # make user1 a member of both groups. Both these new projects - # should now be included, along with any direct user grants. 
- domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain['id'], domain) - user1 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex, - 'domain_id': domain['id'], 'enabled': True} - user1 = self.identity_api.create_user(user1) - group1 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']} - group1 = self.identity_api.create_group(group1) - group2 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']} - group2 = self.identity_api.create_group(group2) - project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain['id']} - self.resource_api.create_project(project1['id'], project1) - project2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain['id']} - self.resource_api.create_project(project2['id'], project2) - self.identity_api.add_user_to_group(user1['id'], group1['id']) - self.identity_api.add_user_to_group(user1['id'], group2['id']) - - # Create 3 grants, one user grant, the other two as group grants - self.assignment_api.create_grant(user_id=user1['id'], - project_id=self.tenant_bar['id'], - role_id=self.role_member['id']) - self.assignment_api.create_grant(group_id=group1['id'], - project_id=project1['id'], - role_id=self.role_admin['id']) - self.assignment_api.create_grant(group_id=group2['id'], - project_id=project2['id'], - role_id=self.role_admin['id']) - user_projects = self.assignment_api.list_projects_for_user(user1['id']) - self.assertEqual(3, len(user_projects)) - - @unit.skip_if_cache_disabled('resource') - @unit.skip_if_no_multiple_domains_support - def test_domain_rename_invalidates_get_domain_by_name_cache(self): - domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'enabled': True} - domain_id = domain['id'] - domain_name = domain['name'] - self.resource_api.create_domain(domain_id, domain) - domain_ref = self.resource_api.get_domain_by_name(domain_name) - domain_ref['name'] = uuid.uuid4().hex - self.resource_api.update_domain(domain_id, 
domain_ref) - self.assertRaises(exception.DomainNotFound, - self.resource_api.get_domain_by_name, - domain_name) - - @unit.skip_if_cache_disabled('resource') - def test_cache_layer_domain_crud(self): - domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'enabled': True} - domain_id = domain['id'] - # Create Domain - self.resource_api.create_domain(domain_id, domain) - domain_ref = self.resource_api.get_domain(domain_id) - updated_domain_ref = copy.deepcopy(domain_ref) - updated_domain_ref['name'] = uuid.uuid4().hex - # Update domain, bypassing resource api manager - self.resource_api.driver.update_domain(domain_id, updated_domain_ref) - # Verify get_domain still returns the domain - self.assertDictContainsSubset( - domain_ref, self.resource_api.get_domain(domain_id)) - # Invalidate cache - self.resource_api.get_domain.invalidate(self.resource_api, - domain_id) - # Verify get_domain returns the updated domain - self.assertDictContainsSubset( - updated_domain_ref, self.resource_api.get_domain(domain_id)) - # Update the domain back to original ref, using the assignment api - # manager - self.resource_api.update_domain(domain_id, domain_ref) - self.assertDictContainsSubset( - domain_ref, self.resource_api.get_domain(domain_id)) - # Make sure domain is 'disabled', bypass resource api manager - domain_ref_disabled = domain_ref.copy() - domain_ref_disabled['enabled'] = False - self.resource_api.driver.update_domain(domain_id, - domain_ref_disabled) - # Delete domain, bypassing resource api manager - self.resource_api.driver.delete_domain(domain_id) - # Verify get_domain still returns the domain - self.assertDictContainsSubset( - domain_ref, self.resource_api.get_domain(domain_id)) - # Invalidate cache - self.resource_api.get_domain.invalidate(self.resource_api, - domain_id) - # Verify get_domain now raises DomainNotFound - self.assertRaises(exception.DomainNotFound, - self.resource_api.get_domain, domain_id) - # Recreate Domain - 
self.resource_api.create_domain(domain_id, domain) - self.resource_api.get_domain(domain_id) - # Make sure domain is 'disabled', bypass resource api manager - domain['enabled'] = False - self.resource_api.driver.update_domain(domain_id, domain) - # Delete domain - self.resource_api.delete_domain(domain_id) - # verify DomainNotFound raised - self.assertRaises(exception.DomainNotFound, - self.resource_api.get_domain, - domain_id) - - @unit.skip_if_cache_disabled('resource') - @unit.skip_if_no_multiple_domains_support - def test_project_rename_invalidates_get_project_by_name_cache(self): - domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'enabled': True} - project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain['id']} - project_id = project['id'] - project_name = project['name'] - self.resource_api.create_domain(domain['id'], domain) - # Create a project - self.resource_api.create_project(project_id, project) - self.resource_api.get_project_by_name(project_name, domain['id']) - project['name'] = uuid.uuid4().hex - self.resource_api.update_project(project_id, project) - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project_by_name, - project_name, - domain['id']) - - @unit.skip_if_cache_disabled('resource') - @unit.skip_if_no_multiple_domains_support - def test_cache_layer_project_crud(self): - domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'enabled': True} - project = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain['id']} - project_id = project['id'] - self.resource_api.create_domain(domain['id'], domain) - # Create a project - self.resource_api.create_project(project_id, project) - self.resource_api.get_project(project_id) - updated_project = copy.deepcopy(project) - updated_project['name'] = uuid.uuid4().hex - # Update project, bypassing resource manager - self.resource_api.driver.update_project(project_id, - updated_project) - # Verify get_project still returns 
the original project_ref - self.assertDictContainsSubset( - project, self.resource_api.get_project(project_id)) - # Invalidate cache - self.resource_api.get_project.invalidate(self.resource_api, - project_id) - # Verify get_project now returns the new project - self.assertDictContainsSubset( - updated_project, - self.resource_api.get_project(project_id)) - # Update project using the resource_api manager back to original - self.resource_api.update_project(project['id'], project) - # Verify get_project returns the original project_ref - self.assertDictContainsSubset( - project, self.resource_api.get_project(project_id)) - # Delete project bypassing resource - self.resource_api.driver.delete_project(project_id) - # Verify get_project still returns the project_ref - self.assertDictContainsSubset( - project, self.resource_api.get_project(project_id)) - # Invalidate cache - self.resource_api.get_project.invalidate(self.resource_api, - project_id) - # Verify ProjectNotFound now raised - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - project_id) - # recreate project - self.resource_api.create_project(project_id, project) - self.resource_api.get_project(project_id) - # delete project - self.resource_api.delete_project(project_id) - # Verify ProjectNotFound is raised - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - project_id) - - def create_user_dict(self, **attributes): - user_dict = {'name': uuid.uuid4().hex, - 'domain_id': DEFAULT_DOMAIN_ID, - 'enabled': True} - user_dict.update(attributes) - return user_dict - - def test_arbitrary_attributes_are_returned_from_create_user(self): - attr_value = uuid.uuid4().hex - user_data = self.create_user_dict(arbitrary_attr=attr_value) - - user = self.identity_api.create_user(user_data) - - self.assertEqual(attr_value, user['arbitrary_attr']) - - def test_arbitrary_attributes_are_returned_from_get_user(self): - attr_value = uuid.uuid4().hex - user_data = 
self.create_user_dict(arbitrary_attr=attr_value) - - user_data = self.identity_api.create_user(user_data) - - user = self.identity_api.get_user(user_data['id']) - self.assertEqual(attr_value, user['arbitrary_attr']) - - def test_new_arbitrary_attributes_are_returned_from_update_user(self): - user_data = self.create_user_dict() - - user = self.identity_api.create_user(user_data) - attr_value = uuid.uuid4().hex - user['arbitrary_attr'] = attr_value - updated_user = self.identity_api.update_user(user['id'], user) - - self.assertEqual(attr_value, updated_user['arbitrary_attr']) - - def test_updated_arbitrary_attributes_are_returned_from_update_user(self): - attr_value = uuid.uuid4().hex - user_data = self.create_user_dict(arbitrary_attr=attr_value) - - new_attr_value = uuid.uuid4().hex - user = self.identity_api.create_user(user_data) - user['arbitrary_attr'] = new_attr_value - updated_user = self.identity_api.update_user(user['id'], user) - - self.assertEqual(new_attr_value, updated_user['arbitrary_attr']) - - def test_create_grant_no_user(self): - # If call create_grant with a user that doesn't exist, doesn't fail. - self.assignment_api.create_grant( - self.role_other['id'], - user_id=uuid.uuid4().hex, - project_id=self.tenant_bar['id']) - - def test_create_grant_no_group(self): - # If call create_grant with a group that doesn't exist, doesn't fail. 
- self.assignment_api.create_grant( - self.role_other['id'], - group_id=uuid.uuid4().hex, - project_id=self.tenant_bar['id']) - - @unit.skip_if_no_multiple_domains_support - def test_get_default_domain_by_name(self): - domain_name = 'default' - - domain = {'id': uuid.uuid4().hex, 'name': domain_name, 'enabled': True} - self.resource_api.create_domain(domain['id'], domain) - - domain_ref = self.resource_api.get_domain_by_name(domain_name) - self.assertEqual(domain, domain_ref) - - def test_get_not_default_domain_by_name(self): - domain_name = 'foo' - self.assertRaises(exception.DomainNotFound, - self.resource_api.get_domain_by_name, - domain_name) - - def test_project_update_and_project_get_return_same_response(self): - project = { - 'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'domain_id': CONF.identity.default_domain_id, - 'description': uuid.uuid4().hex, - 'enabled': True} - - self.resource_api.create_project(project['id'], project) - - updated_project = {'enabled': False} - updated_project_ref = self.resource_api.update_project( - project['id'], updated_project) - - # SQL backend adds 'extra' field - updated_project_ref.pop('extra', None) - - self.assertIs(False, updated_project_ref['enabled']) - - project_ref = self.resource_api.get_project(project['id']) - self.assertDictEqual(project_ref, updated_project_ref) - - def test_user_update_and_user_get_return_same_response(self): - user = { - 'name': uuid.uuid4().hex, - 'domain_id': CONF.identity.default_domain_id, - 'description': uuid.uuid4().hex, - 'enabled': True} - - user = self.identity_api.create_user(user) - - updated_user = {'enabled': False} - updated_user_ref = self.identity_api.update_user( - user['id'], updated_user) - - # SQL backend adds 'extra' field - updated_user_ref.pop('extra', None) - - self.assertIs(False, updated_user_ref['enabled']) - - user_ref = self.identity_api.get_user(user['id']) - self.assertDictEqual(user_ref, updated_user_ref) - - def 
test_delete_group_removes_role_assignments(self): - # When a group is deleted any role assignments for the group are - # removed. - - MEMBER_ROLE_ID = 'member' - - def get_member_assignments(): - assignments = self.assignment_api.list_role_assignments() - return [x for x in assignments if x['role_id'] == MEMBER_ROLE_ID] - - orig_member_assignments = get_member_assignments() - - # Create a group. - new_group = { - 'domain_id': DEFAULT_DOMAIN_ID, - 'name': self.getUniqueString(prefix='tdgrra')} - new_group = self.identity_api.create_group(new_group) - - # Create a project. - new_project = { - 'id': uuid.uuid4().hex, - 'name': self.getUniqueString(prefix='tdgrra'), - 'domain_id': DEFAULT_DOMAIN_ID} - self.resource_api.create_project(new_project['id'], new_project) - - # Assign a role to the group. - self.assignment_api.create_grant( - group_id=new_group['id'], project_id=new_project['id'], - role_id=MEMBER_ROLE_ID) - - # Delete the group. - self.identity_api.delete_group(new_group['id']) - - # Check that the role assignment for the group is gone - member_assignments = get_member_assignments() - - self.assertThat(member_assignments, - matchers.Equals(orig_member_assignments)) - - def test_get_roles_for_groups_on_domain(self): - """Test retrieving group domain roles. 
- - Test Plan: - - - Create a domain, three groups and three roles - - Assign one an inherited and the others a non-inherited group role - to the domain - - Ensure that only the non-inherited roles are returned on the domain - - """ - domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain1['id'], domain1) - group_list = [] - group_id_list = [] - role_list = [] - for _ in range(3): - group = {'name': uuid.uuid4().hex, 'domain_id': domain1['id']} - group = self.identity_api.create_group(group) - group_list.append(group) - group_id_list.append(group['id']) - - role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.role_api.create_role(role['id'], role) - role_list.append(role) - - # Assign the roles - one is inherited - self.assignment_api.create_grant(group_id=group_list[0]['id'], - domain_id=domain1['id'], - role_id=role_list[0]['id']) - self.assignment_api.create_grant(group_id=group_list[1]['id'], - domain_id=domain1['id'], - role_id=role_list[1]['id']) - self.assignment_api.create_grant(group_id=group_list[2]['id'], - domain_id=domain1['id'], - role_id=role_list[2]['id'], - inherited_to_projects=True) - - # Now get the effective roles for the groups on the domain project. We - # shouldn't get back the inherited role. - - role_refs = self.assignment_api.get_roles_for_groups( - group_id_list, domain_id=domain1['id']) - - self.assertThat(role_refs, matchers.HasLength(2)) - self.assertIn(role_list[0], role_refs) - self.assertIn(role_list[1], role_refs) - - def test_get_roles_for_groups_on_project(self): - """Test retrieving group project roles. 
- - Test Plan: - - - Create two domains, two projects, six groups and six roles - - Project1 is in Domain1, Project2 is in Domain2 - - Domain2/Project2 are spoilers - - Assign a different direct group role to each project as well - as both an inherited and non-inherited role to each domain - - Get the group roles for Project 1 - depending on whether we have - enabled inheritance, we should either get back just the direct role - or both the direct one plus the inherited domain role from Domain 1 - - """ - domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain2['id'], domain2) - project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain1['id']} - self.resource_api.create_project(project1['id'], project1) - project2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain2['id']} - self.resource_api.create_project(project2['id'], project2) - group_list = [] - group_id_list = [] - role_list = [] - for _ in range(6): - group = {'name': uuid.uuid4().hex, 'domain_id': domain1['id']} - group = self.identity_api.create_group(group) - group_list.append(group) - group_id_list.append(group['id']) - - role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.role_api.create_role(role['id'], role) - role_list.append(role) - - # Assign the roles - one inherited and one non-inherited on Domain1, - # plus one on Project1 - self.assignment_api.create_grant(group_id=group_list[0]['id'], - domain_id=domain1['id'], - role_id=role_list[0]['id']) - self.assignment_api.create_grant(group_id=group_list[1]['id'], - domain_id=domain1['id'], - role_id=role_list[1]['id'], - inherited_to_projects=True) - self.assignment_api.create_grant(group_id=group_list[2]['id'], - project_id=project1['id'], - role_id=role_list[2]['id']) - - # ...and a duplicate set of spoiler assignments to 
Domain2/Project2 - self.assignment_api.create_grant(group_id=group_list[3]['id'], - domain_id=domain2['id'], - role_id=role_list[3]['id']) - self.assignment_api.create_grant(group_id=group_list[4]['id'], - domain_id=domain2['id'], - role_id=role_list[4]['id'], - inherited_to_projects=True) - self.assignment_api.create_grant(group_id=group_list[5]['id'], - project_id=project2['id'], - role_id=role_list[5]['id']) - - # Now get the effective roles for all groups on the Project1. With - # inheritance off, we should only get back the direct role. - - self.config_fixture.config(group='os_inherit', enabled=False) - role_refs = self.assignment_api.get_roles_for_groups( - group_id_list, project_id=project1['id']) - - self.assertThat(role_refs, matchers.HasLength(1)) - self.assertIn(role_list[2], role_refs) - - # With inheritance on, we should also get back the inherited role from - # its owning domain. - - self.config_fixture.config(group='os_inherit', enabled=True) - role_refs = self.assignment_api.get_roles_for_groups( - group_id_list, project_id=project1['id']) - - self.assertThat(role_refs, matchers.HasLength(2)) - self.assertIn(role_list[1], role_refs) - self.assertIn(role_list[2], role_refs) - - def test_list_domains_for_groups(self): - """Test retrieving domains for a list of groups. 
- - Test Plan: - - - Create three domains, three groups and one role - - Assign a non-inherited group role to two domains, and an inherited - group role to the third - - Ensure only the domains with non-inherited roles are returned - - """ - domain_list = [] - group_list = [] - group_id_list = [] - for _ in range(3): - domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain['id'], domain) - domain_list.append(domain) - - group = {'name': uuid.uuid4().hex, 'domain_id': domain['id']} - group = self.identity_api.create_group(group) - group_list.append(group) - group_id_list.append(group['id']) - - role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.role_api.create_role(role1['id'], role1) - - # Assign the roles - one is inherited - self.assignment_api.create_grant(group_id=group_list[0]['id'], - domain_id=domain_list[0]['id'], - role_id=role1['id']) - self.assignment_api.create_grant(group_id=group_list[1]['id'], - domain_id=domain_list[1]['id'], - role_id=role1['id']) - self.assignment_api.create_grant(group_id=group_list[2]['id'], - domain_id=domain_list[2]['id'], - role_id=role1['id'], - inherited_to_projects=True) - - # Now list the domains that have roles for any of the 3 groups - # We shouldn't get back domain[2] since that had an inherited role. - - domain_refs = ( - self.assignment_api.list_domains_for_groups(group_id_list)) - - self.assertThat(domain_refs, matchers.HasLength(2)) - self.assertIn(domain_list[0], domain_refs) - self.assertIn(domain_list[1], domain_refs) - - def test_list_projects_for_groups(self): - """Test retrieving projects for a list of groups. 
- - Test Plan: - - - Create two domains, four projects, seven groups and seven roles - - Project1-3 are in Domain1, Project4 is in Domain2 - - Domain2/Project4 are spoilers - - Project1 and 2 have direct group roles, Project3 has no direct - roles but should inherit a group role from Domain1 - - Get the projects for the group roles that are assigned to Project1 - Project2 and the inherited one on Domain1. Depending on whether we - have enabled inheritance, we should either get back just the projects - with direct roles (Project 1 and 2) or also Project3 due to its - inherited role from Domain1. - - """ - domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain2['id'], domain2) - project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain1['id'], 'is_domain': False} - project1 = self.resource_api.create_project(project1['id'], project1) - project2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain1['id'], 'is_domain': False} - project2 = self.resource_api.create_project(project2['id'], project2) - project3 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain1['id'], 'is_domain': False} - project3 = self.resource_api.create_project(project3['id'], project3) - project4 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain2['id'], 'is_domain': False} - project4 = self.resource_api.create_project(project4['id'], project4) - group_list = [] - role_list = [] - for _ in range(7): - group = {'name': uuid.uuid4().hex, 'domain_id': domain1['id']} - group = self.identity_api.create_group(group) - group_list.append(group) - - role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.role_api.create_role(role['id'], role) - role_list.append(role) - - # Assign the roles - one inherited and one non-inherited on Domain1, - 
# plus one on Project1 and Project2 - self.assignment_api.create_grant(group_id=group_list[0]['id'], - domain_id=domain1['id'], - role_id=role_list[0]['id']) - self.assignment_api.create_grant(group_id=group_list[1]['id'], - domain_id=domain1['id'], - role_id=role_list[1]['id'], - inherited_to_projects=True) - self.assignment_api.create_grant(group_id=group_list[2]['id'], - project_id=project1['id'], - role_id=role_list[2]['id']) - self.assignment_api.create_grant(group_id=group_list[3]['id'], - project_id=project2['id'], - role_id=role_list[3]['id']) - - # ...and a few of spoiler assignments to Domain2/Project4 - self.assignment_api.create_grant(group_id=group_list[4]['id'], - domain_id=domain2['id'], - role_id=role_list[4]['id']) - self.assignment_api.create_grant(group_id=group_list[5]['id'], - domain_id=domain2['id'], - role_id=role_list[5]['id'], - inherited_to_projects=True) - self.assignment_api.create_grant(group_id=group_list[6]['id'], - project_id=project4['id'], - role_id=role_list[6]['id']) - - # Now get the projects for the groups that have roles on Project1, - # Project2 and the inherited role on Domain!. With inheritance off, - # we should only get back the projects with direct role. - - self.config_fixture.config(group='os_inherit', enabled=False) - group_id_list = [group_list[1]['id'], group_list[2]['id'], - group_list[3]['id']] - project_refs = ( - self.assignment_api.list_projects_for_groups(group_id_list)) - - self.assertThat(project_refs, matchers.HasLength(2)) - self.assertIn(project1, project_refs) - self.assertIn(project2, project_refs) - - # With inheritance on, we should also get back the Project3 due to the - # inherited role from its owning domain. 
- - self.config_fixture.config(group='os_inherit', enabled=True) - project_refs = ( - self.assignment_api.list_projects_for_groups(group_id_list)) - - self.assertThat(project_refs, matchers.HasLength(3)) - self.assertIn(project1, project_refs) - self.assertIn(project2, project_refs) - self.assertIn(project3, project_refs) - - def test_update_role_no_name(self): - # A user can update a role and not include the name. - - # description is picked just because it's not name. - self.role_api.update_role(self.role_member['id'], - {'description': uuid.uuid4().hex}) - # If the previous line didn't raise an exception then the test passes. - - def test_update_role_same_name(self): - # A user can update a role and set the name to be the same as it was. - - self.role_api.update_role(self.role_member['id'], - {'name': self.role_member['name']}) - # If the previous line didn't raise an exception then the test passes. - - -class TokenTests(object): - def _create_token_id(self): - # Use a token signed by the cms module - token_id = "" - for i in range(1, 20): - token_id += uuid.uuid4().hex - return cms.cms_sign_token(token_id, - CONF.signing.certfile, - CONF.signing.keyfile) - - def _assert_revoked_token_list_matches_token_persistence( - self, revoked_token_id_list): - # Assert that the list passed in matches the list returned by the - # token persistence service - persistence_list = [ - x['id'] - for x in self.token_provider_api.list_revoked_tokens() - ] - self.assertEqual(persistence_list, revoked_token_id_list) - - def test_token_crud(self): - token_id = self._create_token_id() - data = {'id': token_id, 'a': 'b', - 'trust_id': None, - 'user': {'id': 'testuserid'}} - data_ref = self.token_provider_api._persistence.create_token(token_id, - data) - expires = data_ref.pop('expires') - data_ref.pop('user_id') - self.assertIsInstance(expires, datetime.datetime) - data_ref.pop('id') - data.pop('id') - self.assertDictEqual(data_ref, data) - - new_data_ref = 
self.token_provider_api._persistence.get_token(token_id) - expires = new_data_ref.pop('expires') - self.assertIsInstance(expires, datetime.datetime) - new_data_ref.pop('user_id') - new_data_ref.pop('id') - - self.assertEqual(data, new_data_ref) - - self.token_provider_api._persistence.delete_token(token_id) - self.assertRaises( - exception.TokenNotFound, - self.token_provider_api._persistence.get_token, token_id) - self.assertRaises( - exception.TokenNotFound, - self.token_provider_api._persistence.delete_token, token_id) - - def create_token_sample_data(self, token_id=None, tenant_id=None, - trust_id=None, user_id=None, expires=None): - if token_id is None: - token_id = self._create_token_id() - if user_id is None: - user_id = 'testuserid' - # FIXME(morganfainberg): These tokens look nothing like "Real" tokens. - # This should be fixed when token issuance is cleaned up. - data = {'id': token_id, 'a': 'b', - 'user': {'id': user_id}} - if tenant_id is not None: - data['tenant'] = {'id': tenant_id, 'name': tenant_id} - if tenant_id is NULL_OBJECT: - data['tenant'] = None - if expires is not None: - data['expires'] = expires - if trust_id is not None: - data['trust_id'] = trust_id - data.setdefault('access', {}).setdefault('trust', {}) - # Testuserid2 is used here since a trustee will be different in - # the cases of impersonation and therefore should not match the - # token's user_id. - data['access']['trust']['trustee_user_id'] = 'testuserid2' - data['token_version'] = provider.V2 - # Issue token stores a copy of all token data at token['token_data']. - # This emulates that assumption as part of the test. 
- data['token_data'] = copy.deepcopy(data) - new_token = self.token_provider_api._persistence.create_token(token_id, - data) - return new_token['id'], data - - def test_delete_tokens(self): - tokens = self.token_provider_api._persistence._list_tokens( - 'testuserid') - self.assertEqual(0, len(tokens)) - token_id1, data = self.create_token_sample_data( - tenant_id='testtenantid') - token_id2, data = self.create_token_sample_data( - tenant_id='testtenantid') - token_id3, data = self.create_token_sample_data( - tenant_id='testtenantid', - user_id='testuserid1') - tokens = self.token_provider_api._persistence._list_tokens( - 'testuserid') - self.assertEqual(2, len(tokens)) - self.assertIn(token_id2, tokens) - self.assertIn(token_id1, tokens) - self.token_provider_api._persistence.delete_tokens( - user_id='testuserid', - tenant_id='testtenantid') - tokens = self.token_provider_api._persistence._list_tokens( - 'testuserid') - self.assertEqual(0, len(tokens)) - self.assertRaises(exception.TokenNotFound, - self.token_provider_api._persistence.get_token, - token_id1) - self.assertRaises(exception.TokenNotFound, - self.token_provider_api._persistence.get_token, - token_id2) - - self.token_provider_api._persistence.get_token(token_id3) - - def test_delete_tokens_trust(self): - tokens = self.token_provider_api._persistence._list_tokens( - user_id='testuserid') - self.assertEqual(0, len(tokens)) - token_id1, data = self.create_token_sample_data( - tenant_id='testtenantid', - trust_id='testtrustid') - token_id2, data = self.create_token_sample_data( - tenant_id='testtenantid', - user_id='testuserid1', - trust_id='testtrustid1') - tokens = self.token_provider_api._persistence._list_tokens( - 'testuserid') - self.assertEqual(1, len(tokens)) - self.assertIn(token_id1, tokens) - self.token_provider_api._persistence.delete_tokens( - user_id='testuserid', - tenant_id='testtenantid', - trust_id='testtrustid') - self.assertRaises(exception.TokenNotFound, - 
self.token_provider_api._persistence.get_token, - token_id1) - self.token_provider_api._persistence.get_token(token_id2) - - def _test_token_list(self, token_list_fn): - tokens = token_list_fn('testuserid') - self.assertEqual(0, len(tokens)) - token_id1, data = self.create_token_sample_data() - tokens = token_list_fn('testuserid') - self.assertEqual(1, len(tokens)) - self.assertIn(token_id1, tokens) - token_id2, data = self.create_token_sample_data() - tokens = token_list_fn('testuserid') - self.assertEqual(2, len(tokens)) - self.assertIn(token_id2, tokens) - self.assertIn(token_id1, tokens) - self.token_provider_api._persistence.delete_token(token_id1) - tokens = token_list_fn('testuserid') - self.assertIn(token_id2, tokens) - self.assertNotIn(token_id1, tokens) - self.token_provider_api._persistence.delete_token(token_id2) - tokens = token_list_fn('testuserid') - self.assertNotIn(token_id2, tokens) - self.assertNotIn(token_id1, tokens) - - # tenant-specific tokens - tenant1 = uuid.uuid4().hex - tenant2 = uuid.uuid4().hex - token_id3, data = self.create_token_sample_data(tenant_id=tenant1) - token_id4, data = self.create_token_sample_data(tenant_id=tenant2) - # test for existing but empty tenant (LP:1078497) - token_id5, data = self.create_token_sample_data(tenant_id=NULL_OBJECT) - tokens = token_list_fn('testuserid') - self.assertEqual(3, len(tokens)) - self.assertNotIn(token_id1, tokens) - self.assertNotIn(token_id2, tokens) - self.assertIn(token_id3, tokens) - self.assertIn(token_id4, tokens) - self.assertIn(token_id5, tokens) - tokens = token_list_fn('testuserid', tenant2) - self.assertEqual(1, len(tokens)) - self.assertNotIn(token_id1, tokens) - self.assertNotIn(token_id2, tokens) - self.assertNotIn(token_id3, tokens) - self.assertIn(token_id4, tokens) - - def test_token_list(self): - self._test_token_list( - self.token_provider_api._persistence._list_tokens) - - def test_token_list_trust(self): - trust_id = uuid.uuid4().hex - token_id5, data = 
self.create_token_sample_data(trust_id=trust_id) - tokens = self.token_provider_api._persistence._list_tokens( - 'testuserid', trust_id=trust_id) - self.assertEqual(1, len(tokens)) - self.assertIn(token_id5, tokens) - - def test_get_token_404(self): - self.assertRaises(exception.TokenNotFound, - self.token_provider_api._persistence.get_token, - uuid.uuid4().hex) - self.assertRaises(exception.TokenNotFound, - self.token_provider_api._persistence.get_token, - None) - - def test_delete_token_404(self): - self.assertRaises(exception.TokenNotFound, - self.token_provider_api._persistence.delete_token, - uuid.uuid4().hex) - - def test_expired_token(self): - token_id = uuid.uuid4().hex - expire_time = timeutils.utcnow() - datetime.timedelta(minutes=1) - data = {'id_hash': token_id, 'id': token_id, 'a': 'b', - 'expires': expire_time, - 'trust_id': None, - 'user': {'id': 'testuserid'}} - data_ref = self.token_provider_api._persistence.create_token(token_id, - data) - data_ref.pop('user_id') - self.assertDictEqual(data_ref, data) - self.assertRaises(exception.TokenNotFound, - self.token_provider_api._persistence.get_token, - token_id) - - def test_null_expires_token(self): - token_id = uuid.uuid4().hex - data = {'id': token_id, 'id_hash': token_id, 'a': 'b', 'expires': None, - 'user': {'id': 'testuserid'}} - data_ref = self.token_provider_api._persistence.create_token(token_id, - data) - self.assertIsNotNone(data_ref['expires']) - new_data_ref = self.token_provider_api._persistence.get_token(token_id) - - # MySQL doesn't store microseconds, so discard them before testing - data_ref['expires'] = data_ref['expires'].replace(microsecond=0) - new_data_ref['expires'] = new_data_ref['expires'].replace( - microsecond=0) - - self.assertEqual(data_ref, new_data_ref) - - def check_list_revoked_tokens(self, token_ids): - revoked_ids = [x['id'] - for x in self.token_provider_api.list_revoked_tokens()] - self._assert_revoked_token_list_matches_token_persistence(revoked_ids) - for token_id 
in token_ids: - self.assertIn(token_id, revoked_ids) - - def delete_token(self): - token_id = uuid.uuid4().hex - data = {'id_hash': token_id, 'id': token_id, 'a': 'b', - 'user': {'id': 'testuserid'}} - data_ref = self.token_provider_api._persistence.create_token(token_id, - data) - self.token_provider_api._persistence.delete_token(token_id) - self.assertRaises( - exception.TokenNotFound, - self.token_provider_api._persistence.get_token, - data_ref['id']) - self.assertRaises( - exception.TokenNotFound, - self.token_provider_api._persistence.delete_token, - data_ref['id']) - return token_id - - def test_list_revoked_tokens_returns_empty_list(self): - revoked_ids = [x['id'] - for x in self.token_provider_api.list_revoked_tokens()] - self._assert_revoked_token_list_matches_token_persistence(revoked_ids) - self.assertEqual([], revoked_ids) - - def test_list_revoked_tokens_for_single_token(self): - self.check_list_revoked_tokens([self.delete_token()]) - - def test_list_revoked_tokens_for_multiple_tokens(self): - self.check_list_revoked_tokens([self.delete_token() - for x in six.moves.range(2)]) - - def test_flush_expired_token(self): - token_id = uuid.uuid4().hex - expire_time = timeutils.utcnow() - datetime.timedelta(minutes=1) - data = {'id_hash': token_id, 'id': token_id, 'a': 'b', - 'expires': expire_time, - 'trust_id': None, - 'user': {'id': 'testuserid'}} - data_ref = self.token_provider_api._persistence.create_token(token_id, - data) - data_ref.pop('user_id') - self.assertDictEqual(data_ref, data) - - token_id = uuid.uuid4().hex - expire_time = timeutils.utcnow() + datetime.timedelta(minutes=1) - data = {'id_hash': token_id, 'id': token_id, 'a': 'b', - 'expires': expire_time, - 'trust_id': None, - 'user': {'id': 'testuserid'}} - data_ref = self.token_provider_api._persistence.create_token(token_id, - data) - data_ref.pop('user_id') - self.assertDictEqual(data_ref, data) - - self.token_provider_api._persistence.flush_expired_tokens() - tokens = 
self.token_provider_api._persistence._list_tokens( - 'testuserid') - self.assertEqual(1, len(tokens)) - self.assertIn(token_id, tokens) - - @unit.skip_if_cache_disabled('token') - def test_revocation_list_cache(self): - expire_time = timeutils.utcnow() + datetime.timedelta(minutes=10) - token_id = uuid.uuid4().hex - token_data = {'id_hash': token_id, 'id': token_id, 'a': 'b', - 'expires': expire_time, - 'trust_id': None, - 'user': {'id': 'testuserid'}} - token2_id = uuid.uuid4().hex - token2_data = {'id_hash': token2_id, 'id': token2_id, 'a': 'b', - 'expires': expire_time, - 'trust_id': None, - 'user': {'id': 'testuserid'}} - # Create 2 Tokens. - self.token_provider_api._persistence.create_token(token_id, - token_data) - self.token_provider_api._persistence.create_token(token2_id, - token2_data) - # Verify the revocation list is empty. - self.assertEqual( - [], self.token_provider_api._persistence.list_revoked_tokens()) - self.assertEqual([], self.token_provider_api.list_revoked_tokens()) - # Delete a token directly, bypassing the manager. - self.token_provider_api._persistence.driver.delete_token(token_id) - # Verify the revocation list is still empty. - self.assertEqual( - [], self.token_provider_api._persistence.list_revoked_tokens()) - self.assertEqual([], self.token_provider_api.list_revoked_tokens()) - # Invalidate the revocation list. - self.token_provider_api._persistence.invalidate_revocation_list() - # Verify the deleted token is in the revocation list. 
- revoked_ids = [x['id'] - for x in self.token_provider_api.list_revoked_tokens()] - self._assert_revoked_token_list_matches_token_persistence(revoked_ids) - self.assertIn(token_id, revoked_ids) - # Delete the second token, through the manager - self.token_provider_api._persistence.delete_token(token2_id) - revoked_ids = [x['id'] - for x in self.token_provider_api.list_revoked_tokens()] - self._assert_revoked_token_list_matches_token_persistence(revoked_ids) - # Verify both tokens are in the revocation list. - self.assertIn(token_id, revoked_ids) - self.assertIn(token2_id, revoked_ids) - - def _test_predictable_revoked_pki_token_id(self, hash_fn): - token_id = self._create_token_id() - token_id_hash = hash_fn(token_id).hexdigest() - token = {'user': {'id': uuid.uuid4().hex}} - - self.token_provider_api._persistence.create_token(token_id, token) - self.token_provider_api._persistence.delete_token(token_id) - - revoked_ids = [x['id'] - for x in self.token_provider_api.list_revoked_tokens()] - self._assert_revoked_token_list_matches_token_persistence(revoked_ids) - self.assertIn(token_id_hash, revoked_ids) - self.assertNotIn(token_id, revoked_ids) - for t in self.token_provider_api._persistence.list_revoked_tokens(): - self.assertIn('expires', t) - - def test_predictable_revoked_pki_token_id_default(self): - self._test_predictable_revoked_pki_token_id(hashlib.md5) - - def test_predictable_revoked_pki_token_id_sha256(self): - self.config_fixture.config(group='token', hash_algorithm='sha256') - self._test_predictable_revoked_pki_token_id(hashlib.sha256) - - def test_predictable_revoked_uuid_token_id(self): - token_id = uuid.uuid4().hex - token = {'user': {'id': uuid.uuid4().hex}} - - self.token_provider_api._persistence.create_token(token_id, token) - self.token_provider_api._persistence.delete_token(token_id) - - revoked_tokens = self.token_provider_api.list_revoked_tokens() - revoked_ids = [x['id'] for x in revoked_tokens] - 
self._assert_revoked_token_list_matches_token_persistence(revoked_ids) - self.assertIn(token_id, revoked_ids) - for t in revoked_tokens: - self.assertIn('expires', t) - - def test_create_unicode_token_id(self): - token_id = six.text_type(self._create_token_id()) - self.create_token_sample_data(token_id=token_id) - self.token_provider_api._persistence.get_token(token_id) - - def test_create_unicode_user_id(self): - user_id = six.text_type(uuid.uuid4().hex) - token_id, data = self.create_token_sample_data(user_id=user_id) - self.token_provider_api._persistence.get_token(token_id) - - def test_token_expire_timezone(self): - - @test_utils.timezone - def _create_token(expire_time): - token_id = uuid.uuid4().hex - user_id = six.text_type(uuid.uuid4().hex) - return self.create_token_sample_data(token_id=token_id, - user_id=user_id, - expires=expire_time) - - for d in ['+0', '-11', '-8', '-5', '+5', '+8', '+14']: - test_utils.TZ = 'UTC' + d - expire_time = timeutils.utcnow() + datetime.timedelta(minutes=1) - token_id, data_in = _create_token(expire_time) - data_get = self.token_provider_api._persistence.get_token(token_id) - - self.assertEqual(data_in['id'], data_get['id'], - 'TZ=%s' % test_utils.TZ) - - expire_time_expired = ( - timeutils.utcnow() + datetime.timedelta(minutes=-1)) - token_id, data_in = _create_token(expire_time_expired) - self.assertRaises(exception.TokenNotFound, - self.token_provider_api._persistence.get_token, - data_in['id']) - - -class TokenCacheInvalidation(object): - def _create_test_data(self): - self.user = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'password': uuid.uuid4().hex, - 'domain_id': DEFAULT_DOMAIN_ID, 'enabled': True} - self.tenant = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': DEFAULT_DOMAIN_ID, 'enabled': True} - - # Create an equivalent of a scoped token - token_dict = {'user': self.user, 'tenant': self.tenant, - 'metadata': {}, 'id': 'placeholder'} - token_id, data = 
self.token_provider_api.issue_v2_token(token_dict) - self.scoped_token_id = token_id - - # ..and an un-scoped one - token_dict = {'user': self.user, 'tenant': None, - 'metadata': {}, 'id': 'placeholder'} - token_id, data = self.token_provider_api.issue_v2_token(token_dict) - self.unscoped_token_id = token_id - - # Validate them, in the various ways possible - this will load the - # responses into the token cache. - self._check_scoped_tokens_are_valid() - self._check_unscoped_tokens_are_valid() - - def _check_unscoped_tokens_are_invalid(self): - self.assertRaises( - exception.TokenNotFound, - self.token_provider_api.validate_token, - self.unscoped_token_id) - self.assertRaises( - exception.TokenNotFound, - self.token_provider_api.validate_v2_token, - self.unscoped_token_id) - - def _check_scoped_tokens_are_invalid(self): - self.assertRaises( - exception.TokenNotFound, - self.token_provider_api.validate_token, - self.scoped_token_id) - self.assertRaises( - exception.TokenNotFound, - self.token_provider_api.validate_token, - self.scoped_token_id, - self.tenant['id']) - self.assertRaises( - exception.TokenNotFound, - self.token_provider_api.validate_v2_token, - self.scoped_token_id) - self.assertRaises( - exception.TokenNotFound, - self.token_provider_api.validate_v2_token, - self.scoped_token_id, - self.tenant['id']) - - def _check_scoped_tokens_are_valid(self): - self.token_provider_api.validate_token(self.scoped_token_id) - self.token_provider_api.validate_token( - self.scoped_token_id, belongs_to=self.tenant['id']) - self.token_provider_api.validate_v2_token(self.scoped_token_id) - self.token_provider_api.validate_v2_token( - self.scoped_token_id, belongs_to=self.tenant['id']) - - def _check_unscoped_tokens_are_valid(self): - self.token_provider_api.validate_token(self.unscoped_token_id) - self.token_provider_api.validate_v2_token(self.unscoped_token_id) - - def test_delete_unscoped_token(self): - self.token_provider_api._persistence.delete_token( - 
self.unscoped_token_id) - self._check_unscoped_tokens_are_invalid() - self._check_scoped_tokens_are_valid() - - def test_delete_scoped_token_by_id(self): - self.token_provider_api._persistence.delete_token(self.scoped_token_id) - self._check_scoped_tokens_are_invalid() - self._check_unscoped_tokens_are_valid() - - def test_delete_scoped_token_by_user(self): - self.token_provider_api._persistence.delete_tokens(self.user['id']) - # Since we are deleting all tokens for this user, they should all - # now be invalid. - self._check_scoped_tokens_are_invalid() - self._check_unscoped_tokens_are_invalid() - - def test_delete_scoped_token_by_user_and_tenant(self): - self.token_provider_api._persistence.delete_tokens( - self.user['id'], - tenant_id=self.tenant['id']) - self._check_scoped_tokens_are_invalid() - self._check_unscoped_tokens_are_valid() - - -class TrustTests(object): - def create_sample_trust(self, new_id, remaining_uses=None): - self.trustor = self.user_foo - self.trustee = self.user_two - trust_data = (self.trust_api.create_trust - (new_id, - {'trustor_user_id': self.trustor['id'], - 'trustee_user_id': self.user_two['id'], - 'project_id': self.tenant_bar['id'], - 'expires_at': timeutils. 
- parse_isotime('2031-02-18T18:10:00Z'), - 'impersonation': True, - 'remaining_uses': remaining_uses}, - roles=[{"id": "member"}, - {"id": "other"}, - {"id": "browser"}])) - return trust_data - - def test_delete_trust(self): - new_id = uuid.uuid4().hex - trust_data = self.create_sample_trust(new_id) - trust_id = trust_data['id'] - self.assertIsNotNone(trust_data) - trust_data = self.trust_api.get_trust(trust_id) - self.assertEqual(new_id, trust_data['id']) - self.trust_api.delete_trust(trust_id) - self.assertRaises(exception.TrustNotFound, - self.trust_api.get_trust, - trust_id) - - def test_delete_trust_not_found(self): - trust_id = uuid.uuid4().hex - self.assertRaises(exception.TrustNotFound, - self.trust_api.delete_trust, - trust_id) - - def test_get_trust(self): - new_id = uuid.uuid4().hex - trust_data = self.create_sample_trust(new_id) - trust_id = trust_data['id'] - self.assertIsNotNone(trust_data) - trust_data = self.trust_api.get_trust(trust_id) - self.assertEqual(new_id, trust_data['id']) - self.trust_api.delete_trust(trust_data['id']) - - def test_get_deleted_trust(self): - new_id = uuid.uuid4().hex - trust_data = self.create_sample_trust(new_id) - self.assertIsNotNone(trust_data) - self.assertIsNone(trust_data['deleted_at']) - self.trust_api.delete_trust(new_id) - self.assertRaises(exception.TrustNotFound, - self.trust_api.get_trust, - new_id) - deleted_trust = self.trust_api.get_trust(trust_data['id'], - deleted=True) - self.assertEqual(trust_data['id'], deleted_trust['id']) - self.assertIsNotNone(deleted_trust.get('deleted_at')) - - def test_create_trust(self): - new_id = uuid.uuid4().hex - trust_data = self.create_sample_trust(new_id) - - self.assertEqual(new_id, trust_data['id']) - self.assertEqual(self.trustee['id'], trust_data['trustee_user_id']) - self.assertEqual(self.trustor['id'], trust_data['trustor_user_id']) - self.assertTrue(timeutils.normalize_time(trust_data['expires_at']) > - timeutils.utcnow()) - - self.assertEqual([{'id': 'member'}, - 
{'id': 'other'}, - {'id': 'browser'}], trust_data['roles']) - - def test_list_trust_by_trustee(self): - for i in range(3): - self.create_sample_trust(uuid.uuid4().hex) - trusts = self.trust_api.list_trusts_for_trustee(self.trustee['id']) - self.assertEqual(3, len(trusts)) - self.assertEqual(trusts[0]["trustee_user_id"], self.trustee['id']) - trusts = self.trust_api.list_trusts_for_trustee(self.trustor['id']) - self.assertEqual(0, len(trusts)) - - def test_list_trust_by_trustor(self): - for i in range(3): - self.create_sample_trust(uuid.uuid4().hex) - trusts = self.trust_api.list_trusts_for_trustor(self.trustor['id']) - self.assertEqual(3, len(trusts)) - self.assertEqual(trusts[0]["trustor_user_id"], self.trustor['id']) - trusts = self.trust_api.list_trusts_for_trustor(self.trustee['id']) - self.assertEqual(0, len(trusts)) - - def test_list_trusts(self): - for i in range(3): - self.create_sample_trust(uuid.uuid4().hex) - trusts = self.trust_api.list_trusts() - self.assertEqual(3, len(trusts)) - - def test_trust_has_remaining_uses_positive(self): - # create a trust with limited uses, check that we have uses left - trust_data = self.create_sample_trust(uuid.uuid4().hex, - remaining_uses=5) - self.assertEqual(5, trust_data['remaining_uses']) - # create a trust with unlimited uses, check that we have uses left - trust_data = self.create_sample_trust(uuid.uuid4().hex) - self.assertIsNone(trust_data['remaining_uses']) - - def test_trust_has_remaining_uses_negative(self): - # try to create a trust with no remaining uses, check that it fails - self.assertRaises(exception.ValidationError, - self.create_sample_trust, - uuid.uuid4().hex, - remaining_uses=0) - # try to create a trust with negative remaining uses, - # check that it fails - self.assertRaises(exception.ValidationError, - self.create_sample_trust, - uuid.uuid4().hex, - remaining_uses=-12) - - def test_consume_use(self): - # consume a trust repeatedly until it has no uses anymore - trust_data = 
self.create_sample_trust(uuid.uuid4().hex, - remaining_uses=2) - self.trust_api.consume_use(trust_data['id']) - t = self.trust_api.get_trust(trust_data['id']) - self.assertEqual(1, t['remaining_uses']) - self.trust_api.consume_use(trust_data['id']) - # This was the last use, the trust isn't available anymore - self.assertRaises(exception.TrustNotFound, - self.trust_api.get_trust, - trust_data['id']) - - -class CatalogTests(object): - - _legacy_endpoint_id_in_endpoint = False - _enabled_default_to_true_when_creating_endpoint = False - - def test_region_crud(self): - # create - region_id = '0' * 255 - new_region = { - 'id': region_id, - 'description': uuid.uuid4().hex, - } - res = self.catalog_api.create_region( - new_region.copy()) - # Ensure that we don't need to have a - # parent_region_id in the original supplied - # ref dict, but that it will be returned from - # the endpoint, with None value. - expected_region = new_region.copy() - expected_region['parent_region_id'] = None - self.assertDictEqual(res, expected_region) - - # Test adding another region with the one above - # as its parent. We will check below whether deleting - # the parent successfully deletes any child regions. 
- parent_region_id = region_id - region_id = uuid.uuid4().hex - new_region = { - 'id': region_id, - 'description': uuid.uuid4().hex, - 'parent_region_id': parent_region_id, - } - res = self.catalog_api.create_region( - new_region.copy()) - self.assertDictEqual(new_region, res) - - # list - regions = self.catalog_api.list_regions() - self.assertThat(regions, matchers.HasLength(2)) - region_ids = [x['id'] for x in regions] - self.assertIn(parent_region_id, region_ids) - self.assertIn(region_id, region_ids) - - # update - region_desc_update = {'description': uuid.uuid4().hex} - res = self.catalog_api.update_region(region_id, region_desc_update) - expected_region = new_region.copy() - expected_region['description'] = region_desc_update['description'] - self.assertDictEqual(expected_region, res) - - # delete - self.catalog_api.delete_region(parent_region_id) - self.assertRaises(exception.RegionNotFound, - self.catalog_api.delete_region, - parent_region_id) - self.assertRaises(exception.RegionNotFound, - self.catalog_api.get_region, - parent_region_id) - # Ensure the child is also gone... 
- self.assertRaises(exception.RegionNotFound, - self.catalog_api.get_region, - region_id) - - def _create_region_with_parent_id(self, parent_id=None): - new_region = { - 'id': uuid.uuid4().hex, - 'description': uuid.uuid4().hex, - 'parent_region_id': parent_id - } - self.catalog_api.create_region( - new_region) - return new_region - - def test_list_regions_filtered_by_parent_region_id(self): - new_region = self._create_region_with_parent_id() - parent_id = new_region['id'] - new_region = self._create_region_with_parent_id(parent_id) - new_region = self._create_region_with_parent_id(parent_id) - - # filter by parent_region_id - hints = driver_hints.Hints() - hints.add_filter('parent_region_id', parent_id) - regions = self.catalog_api.list_regions(hints) - for region in regions: - self.assertEqual(parent_id, region['parent_region_id']) - - @unit.skip_if_cache_disabled('catalog') - def test_cache_layer_region_crud(self): - region_id = uuid.uuid4().hex - new_region = { - 'id': region_id, - 'description': uuid.uuid4().hex, - } - self.catalog_api.create_region(new_region.copy()) - updated_region = copy.deepcopy(new_region) - updated_region['description'] = uuid.uuid4().hex - # cache the result - self.catalog_api.get_region(region_id) - # update the region bypassing catalog_api - self.catalog_api.driver.update_region(region_id, updated_region) - self.assertDictContainsSubset(new_region, - self.catalog_api.get_region(region_id)) - self.catalog_api.get_region.invalidate(self.catalog_api, region_id) - self.assertDictContainsSubset(updated_region, - self.catalog_api.get_region(region_id)) - # delete the region - self.catalog_api.driver.delete_region(region_id) - # still get the old region - self.assertDictContainsSubset(updated_region, - self.catalog_api.get_region(region_id)) - self.catalog_api.get_region.invalidate(self.catalog_api, region_id) - self.assertRaises(exception.RegionNotFound, - self.catalog_api.get_region, region_id) - - @unit.skip_if_cache_disabled('catalog') 
- def test_invalidate_cache_when_updating_region(self): - region_id = uuid.uuid4().hex - new_region = { - 'id': region_id, - 'description': uuid.uuid4().hex - } - self.catalog_api.create_region(new_region) - - # cache the region - self.catalog_api.get_region(region_id) - - # update the region via catalog_api - new_description = {'description': uuid.uuid4().hex} - self.catalog_api.update_region(region_id, new_description) - - # assert that we can get the new region - current_region = self.catalog_api.get_region(region_id) - self.assertEqual(new_description['description'], - current_region['description']) - - def test_create_region_with_duplicate_id(self): - region_id = uuid.uuid4().hex - new_region = { - 'id': region_id, - 'description': uuid.uuid4().hex - } - self.catalog_api.create_region(new_region) - # Create region again with duplicate id - self.assertRaises(exception.Conflict, - self.catalog_api.create_region, - new_region) - - def test_get_region_404(self): - self.assertRaises(exception.RegionNotFound, - self.catalog_api.get_region, - uuid.uuid4().hex) - - def test_delete_region_404(self): - self.assertRaises(exception.RegionNotFound, - self.catalog_api.delete_region, - uuid.uuid4().hex) - - def test_create_region_invalid_parent_region_404(self): - region_id = uuid.uuid4().hex - new_region = { - 'id': region_id, - 'description': uuid.uuid4().hex, - 'parent_region_id': 'nonexisting' - } - self.assertRaises(exception.RegionNotFound, - self.catalog_api.create_region, - new_region) - - def test_avoid_creating_circular_references_in_regions_update(self): - region_one = self._create_region_with_parent_id() - - # self circle: region_one->region_one - self.assertRaises(exception.CircularRegionHierarchyError, - self.catalog_api.update_region, - region_one['id'], - {'parent_region_id': region_one['id']}) - - # region_one->region_two->region_one - region_two = self._create_region_with_parent_id(region_one['id']) - 
self.assertRaises(exception.CircularRegionHierarchyError, - self.catalog_api.update_region, - region_one['id'], - {'parent_region_id': region_two['id']}) - - # region_one region_two->region_three->region_four->region_two - region_three = self._create_region_with_parent_id(region_two['id']) - region_four = self._create_region_with_parent_id(region_three['id']) - self.assertRaises(exception.CircularRegionHierarchyError, - self.catalog_api.update_region, - region_two['id'], - {'parent_region_id': region_four['id']}) - - @mock.patch.object(core.CatalogDriverV8, - "_ensure_no_circle_in_hierarchical_regions") - def test_circular_regions_can_be_deleted(self, mock_ensure_on_circle): - # turn off the enforcement so that cycles can be created for the test - mock_ensure_on_circle.return_value = None - - region_one = self._create_region_with_parent_id() - - # self circle: region_one->region_one - self.catalog_api.update_region( - region_one['id'], - {'parent_region_id': region_one['id']}) - self.catalog_api.delete_region(region_one['id']) - self.assertRaises(exception.RegionNotFound, - self.catalog_api.get_region, - region_one['id']) - - # region_one->region_two->region_one - region_one = self._create_region_with_parent_id() - region_two = self._create_region_with_parent_id(region_one['id']) - self.catalog_api.update_region( - region_one['id'], - {'parent_region_id': region_two['id']}) - self.catalog_api.delete_region(region_one['id']) - self.assertRaises(exception.RegionNotFound, - self.catalog_api.get_region, - region_one['id']) - self.assertRaises(exception.RegionNotFound, - self.catalog_api.get_region, - region_two['id']) - - # region_one->region_two->region_three->region_one - region_one = self._create_region_with_parent_id() - region_two = self._create_region_with_parent_id(region_one['id']) - region_three = self._create_region_with_parent_id(region_two['id']) - self.catalog_api.update_region( - region_one['id'], - {'parent_region_id': region_three['id']}) - 
self.catalog_api.delete_region(region_two['id']) - self.assertRaises(exception.RegionNotFound, - self.catalog_api.get_region, - region_two['id']) - self.assertRaises(exception.RegionNotFound, - self.catalog_api.get_region, - region_one['id']) - self.assertRaises(exception.RegionNotFound, - self.catalog_api.get_region, - region_three['id']) - - def test_service_crud(self): - # create - service_id = uuid.uuid4().hex - new_service = { - 'id': service_id, - 'type': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'description': uuid.uuid4().hex, - } - res = self.catalog_api.create_service( - service_id, - new_service.copy()) - new_service['enabled'] = True - self.assertDictEqual(new_service, res) - - # list - services = self.catalog_api.list_services() - self.assertIn(service_id, [x['id'] for x in services]) - - # update - service_name_update = {'name': uuid.uuid4().hex} - res = self.catalog_api.update_service(service_id, service_name_update) - expected_service = new_service.copy() - expected_service['name'] = service_name_update['name'] - self.assertDictEqual(expected_service, res) - - # delete - self.catalog_api.delete_service(service_id) - self.assertRaises(exception.ServiceNotFound, - self.catalog_api.delete_service, - service_id) - self.assertRaises(exception.ServiceNotFound, - self.catalog_api.get_service, - service_id) - - def _create_random_service(self): - service_id = uuid.uuid4().hex - new_service = { - 'id': service_id, - 'type': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'description': uuid.uuid4().hex, - } - return self.catalog_api.create_service(service_id, new_service.copy()) - - def test_service_filtering(self): - target_service = self._create_random_service() - unrelated_service1 = self._create_random_service() - unrelated_service2 = self._create_random_service() - - # filter by type - hint_for_type = driver_hints.Hints() - hint_for_type.add_filter(name="type", value=target_service['type']) - services = 
self.catalog_api.list_services(hint_for_type) - - self.assertEqual(1, len(services)) - filtered_service = services[0] - self.assertEqual(target_service['type'], filtered_service['type']) - self.assertEqual(target_service['id'], filtered_service['id']) - - # filter should have been removed, since it was already used by the - # backend - self.assertEqual(0, len(hint_for_type.filters)) - - # the backend shouldn't filter by name, since this is handled by the - # front end - hint_for_name = driver_hints.Hints() - hint_for_name.add_filter(name="name", value=target_service['name']) - services = self.catalog_api.list_services(hint_for_name) - - self.assertEqual(3, len(services)) - - # filter should still be there, since it wasn't used by the backend - self.assertEqual(1, len(hint_for_name.filters)) - - self.catalog_api.delete_service(target_service['id']) - self.catalog_api.delete_service(unrelated_service1['id']) - self.catalog_api.delete_service(unrelated_service2['id']) - - @unit.skip_if_cache_disabled('catalog') - def test_cache_layer_service_crud(self): - service_id = uuid.uuid4().hex - new_service = { - 'id': service_id, - 'type': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'description': uuid.uuid4().hex, - } - res = self.catalog_api.create_service( - service_id, - new_service.copy()) - new_service['enabled'] = True - self.assertDictEqual(new_service, res) - self.catalog_api.get_service(service_id) - updated_service = copy.deepcopy(new_service) - updated_service['description'] = uuid.uuid4().hex - # update bypassing catalog api - self.catalog_api.driver.update_service(service_id, updated_service) - self.assertDictContainsSubset(new_service, - self.catalog_api.get_service(service_id)) - self.catalog_api.get_service.invalidate(self.catalog_api, service_id) - self.assertDictContainsSubset(updated_service, - self.catalog_api.get_service(service_id)) - - # delete bypassing catalog api - self.catalog_api.driver.delete_service(service_id) - 
self.assertDictContainsSubset(updated_service, - self.catalog_api.get_service(service_id)) - self.catalog_api.get_service.invalidate(self.catalog_api, service_id) - self.assertRaises(exception.ServiceNotFound, - self.catalog_api.delete_service, - service_id) - self.assertRaises(exception.ServiceNotFound, - self.catalog_api.get_service, - service_id) - - @unit.skip_if_cache_disabled('catalog') - def test_invalidate_cache_when_updating_service(self): - service_id = uuid.uuid4().hex - new_service = { - 'id': service_id, - 'type': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'description': uuid.uuid4().hex, - } - self.catalog_api.create_service( - service_id, - new_service.copy()) - - # cache the service - self.catalog_api.get_service(service_id) - - # update the service via catalog api - new_type = {'type': uuid.uuid4().hex} - self.catalog_api.update_service(service_id, new_type) - - # assert that we can get the new service - current_service = self.catalog_api.get_service(service_id) - self.assertEqual(new_type['type'], current_service['type']) - - def test_delete_service_with_endpoint(self): - # create a service - service = { - 'id': uuid.uuid4().hex, - 'type': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'description': uuid.uuid4().hex, - } - self.catalog_api.create_service(service['id'], service) - - # create an endpoint attached to the service - endpoint = { - 'id': uuid.uuid4().hex, - 'region': uuid.uuid4().hex, - 'interface': uuid.uuid4().hex[:8], - 'url': uuid.uuid4().hex, - 'service_id': service['id'], - } - self.catalog_api.create_endpoint(endpoint['id'], endpoint) - - # deleting the service should also delete the endpoint - self.catalog_api.delete_service(service['id']) - self.assertRaises(exception.EndpointNotFound, - self.catalog_api.get_endpoint, - endpoint['id']) - self.assertRaises(exception.EndpointNotFound, - self.catalog_api.delete_endpoint, - endpoint['id']) - - def test_cache_layer_delete_service_with_endpoint(self): - service = { - 'id': 
uuid.uuid4().hex, - 'type': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'description': uuid.uuid4().hex, - } - self.catalog_api.create_service(service['id'], service) - - # create an endpoint attached to the service - endpoint = { - 'id': uuid.uuid4().hex, - 'region_id': None, - 'interface': uuid.uuid4().hex[:8], - 'url': uuid.uuid4().hex, - 'service_id': service['id'], - } - self.catalog_api.create_endpoint(endpoint['id'], endpoint) - # cache the result - self.catalog_api.get_service(service['id']) - self.catalog_api.get_endpoint(endpoint['id']) - # delete the service bypassing catalog api - self.catalog_api.driver.delete_service(service['id']) - self.assertDictContainsSubset(endpoint, - self.catalog_api. - get_endpoint(endpoint['id'])) - self.assertDictContainsSubset(service, - self.catalog_api. - get_service(service['id'])) - self.catalog_api.get_endpoint.invalidate(self.catalog_api, - endpoint['id']) - self.assertRaises(exception.EndpointNotFound, - self.catalog_api.get_endpoint, - endpoint['id']) - self.assertRaises(exception.EndpointNotFound, - self.catalog_api.delete_endpoint, - endpoint['id']) - # multiple endpoints associated with a service - second_endpoint = { - 'id': uuid.uuid4().hex, - 'region_id': None, - 'interface': uuid.uuid4().hex[:8], - 'url': uuid.uuid4().hex, - 'service_id': service['id'], - } - self.catalog_api.create_service(service['id'], service) - self.catalog_api.create_endpoint(endpoint['id'], endpoint) - self.catalog_api.create_endpoint(second_endpoint['id'], - second_endpoint) - self.catalog_api.delete_service(service['id']) - self.assertRaises(exception.EndpointNotFound, - self.catalog_api.get_endpoint, - endpoint['id']) - self.assertRaises(exception.EndpointNotFound, - self.catalog_api.delete_endpoint, - endpoint['id']) - self.assertRaises(exception.EndpointNotFound, - self.catalog_api.get_endpoint, - second_endpoint['id']) - self.assertRaises(exception.EndpointNotFound, - self.catalog_api.delete_endpoint, - 
second_endpoint['id']) - - def test_get_service_404(self): - self.assertRaises(exception.ServiceNotFound, - self.catalog_api.get_service, - uuid.uuid4().hex) - - def test_delete_service_404(self): - self.assertRaises(exception.ServiceNotFound, - self.catalog_api.delete_service, - uuid.uuid4().hex) - - def test_create_endpoint_nonexistent_service(self): - endpoint = { - 'id': uuid.uuid4().hex, - 'service_id': uuid.uuid4().hex, - } - self.assertRaises(exception.ValidationError, - self.catalog_api.create_endpoint, - endpoint['id'], - endpoint) - - def test_update_endpoint_nonexistent_service(self): - dummy_service, enabled_endpoint, dummy_disabled_endpoint = ( - self._create_endpoints()) - new_endpoint = { - 'service_id': uuid.uuid4().hex, - } - self.assertRaises(exception.ValidationError, - self.catalog_api.update_endpoint, - enabled_endpoint['id'], - new_endpoint) - - def test_create_endpoint_nonexistent_region(self): - service = { - 'id': uuid.uuid4().hex, - 'type': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'description': uuid.uuid4().hex, - } - self.catalog_api.create_service(service['id'], service.copy()) - - endpoint = { - 'id': uuid.uuid4().hex, - 'service_id': service['id'], - 'interface': 'public', - 'url': uuid.uuid4().hex, - 'region_id': uuid.uuid4().hex, - } - self.assertRaises(exception.ValidationError, - self.catalog_api.create_endpoint, - endpoint['id'], - endpoint) - - def test_update_endpoint_nonexistent_region(self): - dummy_service, enabled_endpoint, dummy_disabled_endpoint = ( - self._create_endpoints()) - new_endpoint = { - 'region_id': uuid.uuid4().hex, - } - self.assertRaises(exception.ValidationError, - self.catalog_api.update_endpoint, - enabled_endpoint['id'], - new_endpoint) - - def test_get_endpoint_404(self): - self.assertRaises(exception.EndpointNotFound, - self.catalog_api.get_endpoint, - uuid.uuid4().hex) - - def test_delete_endpoint_404(self): - self.assertRaises(exception.EndpointNotFound, - self.catalog_api.delete_endpoint, - 
uuid.uuid4().hex) - - def test_create_endpoint(self): - service = { - 'id': uuid.uuid4().hex, - 'type': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'description': uuid.uuid4().hex, - } - self.catalog_api.create_service(service['id'], service.copy()) - - endpoint = { - 'id': uuid.uuid4().hex, - 'region_id': None, - 'service_id': service['id'], - 'interface': 'public', - 'url': uuid.uuid4().hex, - } - self.catalog_api.create_endpoint(endpoint['id'], endpoint.copy()) - - def test_update_endpoint(self): - dummy_service_ref, endpoint_ref, dummy_disabled_endpoint_ref = ( - self._create_endpoints()) - res = self.catalog_api.update_endpoint(endpoint_ref['id'], - {'interface': 'private'}) - expected_endpoint = endpoint_ref.copy() - expected_endpoint['interface'] = 'private' - if self._legacy_endpoint_id_in_endpoint: - expected_endpoint['legacy_endpoint_id'] = None - if self._enabled_default_to_true_when_creating_endpoint: - expected_endpoint['enabled'] = True - self.assertDictEqual(expected_endpoint, res) - - def _create_endpoints(self): - # Creates a service and 2 endpoints for the service in the same region. - # The 'public' interface is enabled and the 'internal' interface is - # disabled. - - def create_endpoint(service_id, region, **kwargs): - id_ = uuid.uuid4().hex - ref = { - 'id': id_, - 'interface': 'public', - 'region_id': region, - 'service_id': service_id, - 'url': 'http://localhost/%s' % uuid.uuid4().hex, - } - ref.update(kwargs) - self.catalog_api.create_endpoint(id_, ref) - return ref - - # Create a service for use with the endpoints. 
- service_id = uuid.uuid4().hex - service_ref = { - 'id': service_id, - 'name': uuid.uuid4().hex, - 'type': uuid.uuid4().hex, - } - self.catalog_api.create_service(service_id, service_ref) - - region = {'id': uuid.uuid4().hex} - self.catalog_api.create_region(region) - - # Create endpoints - enabled_endpoint_ref = create_endpoint(service_id, region['id']) - disabled_endpoint_ref = create_endpoint( - service_id, region['id'], enabled=False, interface='internal') - - return service_ref, enabled_endpoint_ref, disabled_endpoint_ref - - def test_list_endpoints(self): - service = { - 'id': uuid.uuid4().hex, - 'type': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'description': uuid.uuid4().hex, - } - self.catalog_api.create_service(service['id'], service.copy()) - - expected_ids = set([uuid.uuid4().hex for _ in range(3)]) - for endpoint_id in expected_ids: - endpoint = { - 'id': endpoint_id, - 'region_id': None, - 'service_id': service['id'], - 'interface': 'public', - 'url': uuid.uuid4().hex, - } - self.catalog_api.create_endpoint(endpoint['id'], endpoint.copy()) - - endpoints = self.catalog_api.list_endpoints() - self.assertEqual(expected_ids, set(e['id'] for e in endpoints)) - - def test_get_catalog_endpoint_disabled(self): - """Get back only enabled endpoints when get the v2 catalog.""" - - service_ref, enabled_endpoint_ref, dummy_disabled_endpoint_ref = ( - self._create_endpoints()) - - user_id = uuid.uuid4().hex - project_id = uuid.uuid4().hex - catalog = self.catalog_api.get_catalog(user_id, project_id) - - exp_entry = { - 'id': enabled_endpoint_ref['id'], - 'name': service_ref['name'], - 'publicURL': enabled_endpoint_ref['url'], - } - - region = enabled_endpoint_ref['region_id'] - self.assertEqual(exp_entry, catalog[region][service_ref['type']]) - - def test_get_v3_catalog_endpoint_disabled(self): - """Get back only enabled endpoints when get the v3 catalog.""" - - enabled_endpoint_ref = self._create_endpoints()[1] - - user_id = uuid.uuid4().hex - project_id = 
uuid.uuid4().hex - catalog = self.catalog_api.get_v3_catalog(user_id, project_id) - - endpoint_ids = [x['id'] for x in catalog[0]['endpoints']] - self.assertEqual([enabled_endpoint_ref['id']], endpoint_ids) - - @unit.skip_if_cache_disabled('catalog') - def test_invalidate_cache_when_updating_endpoint(self): - service = { - 'id': uuid.uuid4().hex, - 'type': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'description': uuid.uuid4().hex, - } - self.catalog_api.create_service(service['id'], service) - - # create an endpoint attached to the service - endpoint_id = uuid.uuid4().hex - endpoint = { - 'id': endpoint_id, - 'region_id': None, - 'interface': uuid.uuid4().hex[:8], - 'url': uuid.uuid4().hex, - 'service_id': service['id'], - } - self.catalog_api.create_endpoint(endpoint_id, endpoint) - - # cache the endpoint - self.catalog_api.get_endpoint(endpoint_id) - - # update the endpoint via catalog api - new_url = {'url': uuid.uuid4().hex} - self.catalog_api.update_endpoint(endpoint_id, new_url) - - # assert that we can get the new endpoint - current_endpoint = self.catalog_api.get_endpoint(endpoint_id) - self.assertEqual(new_url['url'], current_endpoint['url']) - - -class PolicyTests(object): - def _new_policy_ref(self): - return { - 'id': uuid.uuid4().hex, - 'policy': uuid.uuid4().hex, - 'type': uuid.uuid4().hex, - 'endpoint_id': uuid.uuid4().hex, - } - - def assertEqualPolicies(self, a, b): - self.assertEqual(a['id'], b['id']) - self.assertEqual(a['endpoint_id'], b['endpoint_id']) - self.assertEqual(a['policy'], b['policy']) - self.assertEqual(a['type'], b['type']) - - def test_create(self): - ref = self._new_policy_ref() - res = self.policy_api.create_policy(ref['id'], ref) - self.assertEqualPolicies(ref, res) - - def test_get(self): - ref = self._new_policy_ref() - res = self.policy_api.create_policy(ref['id'], ref) - - res = self.policy_api.get_policy(ref['id']) - self.assertEqualPolicies(ref, res) - - def test_list(self): - ref = self._new_policy_ref() - 
self.policy_api.create_policy(ref['id'], ref) - - res = self.policy_api.list_policies() - res = [x for x in res if x['id'] == ref['id']][0] - self.assertEqualPolicies(ref, res) - - def test_update(self): - ref = self._new_policy_ref() - self.policy_api.create_policy(ref['id'], ref) - orig = ref - - ref = self._new_policy_ref() - - # (cannot change policy ID) - self.assertRaises(exception.ValidationError, - self.policy_api.update_policy, - orig['id'], - ref) - - ref['id'] = orig['id'] - res = self.policy_api.update_policy(orig['id'], ref) - self.assertEqualPolicies(ref, res) - - def test_delete(self): - ref = self._new_policy_ref() - self.policy_api.create_policy(ref['id'], ref) - - self.policy_api.delete_policy(ref['id']) - self.assertRaises(exception.PolicyNotFound, - self.policy_api.delete_policy, - ref['id']) - self.assertRaises(exception.PolicyNotFound, - self.policy_api.get_policy, - ref['id']) - res = self.policy_api.list_policies() - self.assertFalse(len([x for x in res if x['id'] == ref['id']])) - - def test_get_policy_404(self): - self.assertRaises(exception.PolicyNotFound, - self.policy_api.get_policy, - uuid.uuid4().hex) - - def test_update_policy_404(self): - ref = self._new_policy_ref() - self.assertRaises(exception.PolicyNotFound, - self.policy_api.update_policy, - ref['id'], - ref) - - def test_delete_policy_404(self): - self.assertRaises(exception.PolicyNotFound, - self.policy_api.delete_policy, - uuid.uuid4().hex) - - -class InheritanceTests(AssignmentTestHelperMixin): - - def test_role_assignments_user_domain_to_project_inheritance(self): - test_plan = { - 'entities': {'domains': {'users': 2, 'projects': 1}, - 'roles': 3}, - 'assignments': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}, - {'user': 0, 'role': 2, 'domain': 0, - 'inherited_to_projects': True}, - {'user': 1, 'role': 1, 'project': 0}], - 'tests': [ - # List all direct assignments for user[0] - {'params': {'user': 0}, - 'results': [{'user': 0, 'role': 0, 
'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}, - {'user': 0, 'role': 2, 'domain': 0, - 'inherited_to_projects': 'projects'}]}, - # Now the effective ones - so the domain role should turn into - # a project role - {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}, - {'user': 0, 'role': 2, 'project': 0, - 'indirect': {'domain': 0}}]}, - # Narrow down to effective roles for user[0] and project[0] - {'params': {'user': 0, 'project': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 1, 'project': 0}, - {'user': 0, 'role': 2, 'project': 0, - 'indirect': {'domain': 0}}]} - ] - } - self.config_fixture.config(group='os_inherit', enabled=True) - self.execute_assignment_test_plan(test_plan) - - def test_inherited_role_assignments_excluded_if_os_inherit_false(self): - test_plan = { - 'entities': {'domains': {'users': 2, 'groups': 1, 'projects': 1}, - 'roles': 4}, - 'group_memberships': [{'group': 0, 'users': [0]}], - 'assignments': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}, - {'user': 0, 'role': 2, 'domain': 0, - 'inherited_to_projects': True}, - {'user': 1, 'role': 1, 'project': 0}, - {'group': 0, 'role': 3, 'project': 0}], - 'tests': [ - # List all direct assignments for user[0], since os-inherit is - # disabled, we should not see the inherited role - {'params': {'user': 0}, - 'results': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}]}, - # Same in effective mode - inherited roles should not be - # included or expanded...but the group role should now - # turn up as a user role, since group expansion is not - # part of os-inherit. 
- {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'domain': 0}, - {'user': 0, 'role': 1, 'project': 0}, - {'user': 0, 'role': 3, 'project': 0, - 'indirect': {'group': 0}}]}, - ] - } - self.config_fixture.config(group='os_inherit', enabled=False) - self.execute_assignment_test_plan(test_plan) - - def _test_crud_inherited_and_direct_assignment(self, **kwargs): - """Tests inherited and direct assignments for the actor and target - - Ensure it is possible to create both inherited and direct role - assignments for the same actor on the same target. The actor and the - target are specified in the kwargs as ('user_id' or 'group_id') and - ('project_id' or 'domain_id'), respectively. - - """ - - # Create a new role to avoid assignments loaded from default fixtures - role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - role = self.role_api.create_role(role['id'], role) - - # Define the common assigment entity - assignment_entity = {'role_id': role['id']} - assignment_entity.update(kwargs) - - # Define assignments under test - direct_assignment_entity = assignment_entity.copy() - inherited_assignment_entity = assignment_entity.copy() - inherited_assignment_entity['inherited_to_projects'] = 'projects' - - # Create direct assignment and check grants - self.assignment_api.create_grant(inherited_to_projects=False, - **assignment_entity) - - grants = self.assignment_api.list_role_assignments_for_role(role['id']) - self.assertThat(grants, matchers.HasLength(1)) - self.assertIn(direct_assignment_entity, grants) - - # Now add inherited assignment and check grants - self.assignment_api.create_grant(inherited_to_projects=True, - **assignment_entity) - - grants = self.assignment_api.list_role_assignments_for_role(role['id']) - self.assertThat(grants, matchers.HasLength(2)) - self.assertIn(direct_assignment_entity, grants) - self.assertIn(inherited_assignment_entity, grants) - - # Delete both and check grants - 
self.assignment_api.delete_grant(inherited_to_projects=False, - **assignment_entity) - self.assignment_api.delete_grant(inherited_to_projects=True, - **assignment_entity) - - grants = self.assignment_api.list_role_assignments_for_role(role['id']) - self.assertEqual([], grants) - - def test_crud_inherited_and_direct_assignment_for_user_on_domain(self): - self._test_crud_inherited_and_direct_assignment( - user_id=self.user_foo['id'], domain_id=DEFAULT_DOMAIN_ID) - - def test_crud_inherited_and_direct_assignment_for_group_on_domain(self): - group = {'name': uuid.uuid4().hex, 'domain_id': DEFAULT_DOMAIN_ID} - group = self.identity_api.create_group(group) - - self._test_crud_inherited_and_direct_assignment( - group_id=group['id'], domain_id=DEFAULT_DOMAIN_ID) - - def test_crud_inherited_and_direct_assignment_for_user_on_project(self): - self._test_crud_inherited_and_direct_assignment( - user_id=self.user_foo['id'], project_id=self.tenant_baz['id']) - - def test_crud_inherited_and_direct_assignment_for_group_on_project(self): - group = {'name': uuid.uuid4().hex, 'domain_id': DEFAULT_DOMAIN_ID} - group = self.identity_api.create_group(group) - - self._test_crud_inherited_and_direct_assignment( - group_id=group['id'], project_id=self.tenant_baz['id']) - - def test_inherited_role_grants_for_user(self): - """Test inherited user roles. 
- - Test Plan: - - - Enable OS-INHERIT extension - - Create 3 roles - - Create a domain, with a project and a user - - Check no roles yet exit - - Assign a direct user role to the project and a (non-inherited) - user role to the domain - - Get a list of effective roles - should only get the one direct role - - Now add an inherited user role to the domain - - Get a list of effective roles - should have two roles, one - direct and one by virtue of the inherited user role - - Also get effective roles for the domain - the role marked as - inherited should not show up - - """ - self.config_fixture.config(group='os_inherit', enabled=True) - role_list = [] - for _ in range(3): - role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.role_api.create_role(role['id'], role) - role_list.append(role) - domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain1['id'], domain1) - user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'], - 'password': uuid.uuid4().hex, 'enabled': True} - user1 = self.identity_api.create_user(user1) - project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain1['id']} - self.resource_api.create_project(project1['id'], project1) - - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) - self.assertEqual(0, len(roles_ref)) - - # Create the first two roles - the domain one is not inherited - self.assignment_api.create_grant(user_id=user1['id'], - project_id=project1['id'], - role_id=role_list[0]['id']) - self.assignment_api.create_grant(user_id=user1['id'], - domain_id=domain1['id'], - role_id=role_list[1]['id']) - - # Now get the effective roles for the user and project, this - # should only include the direct role assignment on the project - combined_list = self.assignment_api.get_roles_for_user_and_project( - user1['id'], project1['id']) - self.assertEqual(1, len(combined_list)) - self.assertIn(role_list[0]['id'], 
combined_list) - - # Now add an inherited role on the domain - self.assignment_api.create_grant(user_id=user1['id'], - domain_id=domain1['id'], - role_id=role_list[2]['id'], - inherited_to_projects=True) - - # Now get the effective roles for the user and project again, this - # should now include the inherited role on the domain - combined_list = self.assignment_api.get_roles_for_user_and_project( - user1['id'], project1['id']) - self.assertEqual(2, len(combined_list)) - self.assertIn(role_list[0]['id'], combined_list) - self.assertIn(role_list[2]['id'], combined_list) - - # Finally, check that the inherited role does not appear as a valid - # directly assigned role on the domain itself - combined_role_list = self.assignment_api.get_roles_for_user_and_domain( - user1['id'], domain1['id']) - self.assertEqual(1, len(combined_role_list)) - self.assertIn(role_list[1]['id'], combined_role_list) - - # TODO(henry-nash): The test above uses get_roles_for_user_and_project - # and get_roles_for_user_and_domain, which will, in a subsequent patch, - # be re-implemeted to simply call list_role_assignments (see blueprint - # remove-role-metadata). - # - # The test plan below therefore mirrors this test, to ensure that - # list_role_assignments works the same. Once get_roles_for_user_and - # project/domain have been re-implemented then the manual tests above - # can be refactored to simply ensure it gives the same answers. - test_plan = { - # A domain with a user & project, plus 3 roles. - 'entities': {'domains': {'users': 1, 'projects': 1}, - 'roles': 3}, - 'assignments': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 1, 'domain': 0}, - {'user': 0, 'role': 2, 'domain': 0, - 'inherited_to_projects': True}], - 'tests': [ - # List all effective assignments for user[0] on project[0]. - # Should get one direct role and one inherited role. 
- {'params': {'user': 0, 'project': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 2, 'project': 0, - 'indirect': {'domain': 0}}]}, - # Ensure effective mode on the domain does not list the - # inherited role on that domain - {'params': {'user': 0, 'domain': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 1, 'domain': 0}]}, - # Ensure non-inherited mode also only returns the non-inherited - # role on the domain - {'params': {'user': 0, 'domain': 0, 'inherited': False}, - 'results': [{'user': 0, 'role': 1, 'domain': 0}]}, - ] - } - self.execute_assignment_test_plan(test_plan) - - def test_inherited_role_grants_for_group(self): - """Test inherited group roles. - - Test Plan: - - - Enable OS-INHERIT extension - - Create 4 roles - - Create a domain, with a project, user and two groups - - Make the user a member of both groups - - Check no roles yet exit - - Assign a direct user role to the project and a (non-inherited) - group role on the domain - - Get a list of effective roles - should only get the one direct role - - Now add two inherited group roles to the domain - - Get a list of effective roles - should have three roles, one - direct and two by virtue of inherited group roles - - """ - self.config_fixture.config(group='os_inherit', enabled=True) - role_list = [] - for _ in range(4): - role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.role_api.create_role(role['id'], role) - role_list.append(role) - domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain1['id'], domain1) - user1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'], - 'password': uuid.uuid4().hex, 'enabled': True} - user1 = self.identity_api.create_user(user1) - group1 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'], - 'enabled': True} - group1 = self.identity_api.create_group(group1) - group2 = {'name': uuid.uuid4().hex, 'domain_id': domain1['id'], - 'enabled': True} - 
group2 = self.identity_api.create_group(group2) - project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain1['id']} - self.resource_api.create_project(project1['id'], project1) - - self.identity_api.add_user_to_group(user1['id'], - group1['id']) - self.identity_api.add_user_to_group(user1['id'], - group2['id']) - - roles_ref = self.assignment_api.list_grants( - user_id=user1['id'], - project_id=project1['id']) - self.assertEqual(0, len(roles_ref)) - - # Create two roles - the domain one is not inherited - self.assignment_api.create_grant(user_id=user1['id'], - project_id=project1['id'], - role_id=role_list[0]['id']) - self.assignment_api.create_grant(group_id=group1['id'], - domain_id=domain1['id'], - role_id=role_list[1]['id']) - - # Now get the effective roles for the user and project, this - # should only include the direct role assignment on the project - combined_list = self.assignment_api.get_roles_for_user_and_project( - user1['id'], project1['id']) - self.assertEqual(1, len(combined_list)) - self.assertIn(role_list[0]['id'], combined_list) - - # Now add to more group roles, both inherited, to the domain - self.assignment_api.create_grant(group_id=group2['id'], - domain_id=domain1['id'], - role_id=role_list[2]['id'], - inherited_to_projects=True) - self.assignment_api.create_grant(group_id=group2['id'], - domain_id=domain1['id'], - role_id=role_list[3]['id'], - inherited_to_projects=True) - - # Now get the effective roles for the user and project again, this - # should now include the inherited roles on the domain - combined_list = self.assignment_api.get_roles_for_user_and_project( - user1['id'], project1['id']) - self.assertEqual(3, len(combined_list)) - self.assertIn(role_list[0]['id'], combined_list) - self.assertIn(role_list[2]['id'], combined_list) - self.assertIn(role_list[3]['id'], combined_list) - - # TODO(henry-nash): The test above uses get_roles_for_user_and_project - # which will, in a subsequent patch, be re-implemeted 
to simply call - # list_role_assignments (see blueprint remove-role-metadata). - # - # The test plan below therefore mirrors this test, to ensure that - # list_role_assignments works the same. Once - # get_roles_for_user_and_project has been re-implemented then the - # manual tests above can be refactored to simply ensure it gives - # the same answers. - test_plan = { - # A domain with a user and project, 2 groups, plus 4 roles. - 'entities': {'domains': {'users': 1, 'projects': 1, 'groups': 2}, - 'roles': 4}, - 'group_memberships': [{'group': 0, 'users': [0]}, - {'group': 1, 'users': [0]}], - 'assignments': [{'user': 0, 'role': 0, 'project': 0}, - {'group': 0, 'role': 1, 'domain': 0}, - {'group': 1, 'role': 2, 'domain': 0, - 'inherited_to_projects': True}, - {'group': 1, 'role': 3, 'domain': 0, - 'inherited_to_projects': True}], - 'tests': [ - # List all effective assignments for user[0] on project[0]. - # Should get one direct role and both inherited roles, but - # not the direct one on domain[0], even though user[0] is - # in group[0]. - {'params': {'user': 0, 'project': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 2, 'project': 0, - 'indirect': {'domain': 0, 'group': 1}}, - {'user': 0, 'role': 3, 'project': 0, - 'indirect': {'domain': 0, 'group': 1}}]} - ] - } - self.execute_assignment_test_plan(test_plan) - - def test_list_projects_for_user_with_inherited_grants(self): - """Test inherited user roles. 
- - Test Plan: - - - Enable OS-INHERIT extension - - Create a domain, with two projects and a user - - Assign an inherited user role on the domain, as well as a direct - user role to a separate project in a different domain - - Get a list of projects for user, should return all three projects - - """ - self.config_fixture.config(group='os_inherit', enabled=True) - domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain['id'], domain) - user1 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex, - 'domain_id': domain['id'], 'enabled': True} - user1 = self.identity_api.create_user(user1) - project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain['id']} - self.resource_api.create_project(project1['id'], project1) - project2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain['id']} - self.resource_api.create_project(project2['id'], project2) - - # Create 2 grants, one on a project and one inherited grant - # on the domain - self.assignment_api.create_grant(user_id=user1['id'], - project_id=self.tenant_bar['id'], - role_id=self.role_member['id']) - self.assignment_api.create_grant(user_id=user1['id'], - domain_id=domain['id'], - role_id=self.role_admin['id'], - inherited_to_projects=True) - # Should get back all three projects, one by virtue of the direct - # grant, plus both projects in the domain - user_projects = self.assignment_api.list_projects_for_user(user1['id']) - self.assertEqual(3, len(user_projects)) - - # TODO(henry-nash): The test above uses list_projects_for_user - # which may, in a subsequent patch, be re-implemeted to call - # list_role_assignments and then report only the distinct projects. - # - # The test plan below therefore mirrors this test, to ensure that - # list_role_assignments works the same. Once list_projects_for_user - # has been re-implemented then the manual tests above can be - # refactored. 
- test_plan = { - # A domain with 1 project, plus a second domain with 2 projects, - # as well as a user. Also, create 2 roles. - 'entities': {'domains': [{'projects': 1}, - {'users': 1, 'projects': 2}], - 'roles': 2}, - 'assignments': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 1, 'domain': 1, - 'inherited_to_projects': True}], - 'tests': [ - # List all effective assignments for user[0] - # Should get one direct role plus one inherited role for each - # project in domain - {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 1, 'project': 1, - 'indirect': {'domain': 1}}, - {'user': 0, 'role': 1, 'project': 2, - 'indirect': {'domain': 1}}]} - ] - } - self.execute_assignment_test_plan(test_plan) - - def test_list_projects_for_user_with_inherited_user_project_grants(self): - """Test inherited role assignments for users on nested projects. - - Test Plan: - - - Enable OS-INHERIT extension - - Create a hierarchy of projects with one root and one leaf project - - Assign an inherited user role on root project - - Assign a non-inherited user role on root project - - Get a list of projects for user, should return both projects - - Disable OS-INHERIT extension - - Get a list of projects for user, should return only root project - - """ - # Enable OS-INHERIT extension - self.config_fixture.config(group='os_inherit', enabled=True) - root_project = {'id': uuid.uuid4().hex, - 'description': '', - 'domain_id': DEFAULT_DOMAIN_ID, - 'enabled': True, - 'name': uuid.uuid4().hex, - 'parent_id': None, - 'is_domain': False} - self.resource_api.create_project(root_project['id'], root_project) - leaf_project = {'id': uuid.uuid4().hex, - 'description': '', - 'domain_id': DEFAULT_DOMAIN_ID, - 'enabled': True, - 'name': uuid.uuid4().hex, - 'parent_id': root_project['id'], - 'is_domain': False} - self.resource_api.create_project(leaf_project['id'], leaf_project) - - user = {'name': uuid.uuid4().hex, 'password': 
uuid.uuid4().hex, - 'domain_id': DEFAULT_DOMAIN_ID, 'enabled': True} - user = self.identity_api.create_user(user) - - # Grant inherited user role - self.assignment_api.create_grant(user_id=user['id'], - project_id=root_project['id'], - role_id=self.role_admin['id'], - inherited_to_projects=True) - # Grant non-inherited user role - self.assignment_api.create_grant(user_id=user['id'], - project_id=root_project['id'], - role_id=self.role_member['id']) - # Should get back both projects: because the direct role assignment for - # the root project and inherited role assignment for leaf project - user_projects = self.assignment_api.list_projects_for_user(user['id']) - self.assertEqual(2, len(user_projects)) - self.assertIn(root_project, user_projects) - self.assertIn(leaf_project, user_projects) - - # Disable OS-INHERIT extension - self.config_fixture.config(group='os_inherit', enabled=False) - # Should get back just root project - due the direct role assignment - user_projects = self.assignment_api.list_projects_for_user(user['id']) - self.assertEqual(1, len(user_projects)) - self.assertIn(root_project, user_projects) - - # TODO(henry-nash): The test above uses list_projects_for_user - # which may, in a subsequent patch, be re-implemeted to call - # list_role_assignments and then report only the distinct projects. - # - # The test plan below therefore mirrors this test, to ensure that - # list_role_assignments works the same. Once list_projects_for_user - # has been re-implemented then the manual tests above can be - # refactored. - test_plan = { - # A domain with a project and sub-project, plus a user. - # Also, create 2 roles. 
- 'entities': { - 'domains': {'id': DEFAULT_DOMAIN_ID, 'users': 1, - 'projects': {'project': 1}}, - 'roles': 2}, - # A direct role and an inherited role on the parent - 'assignments': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 1, 'project': 0, - 'inherited_to_projects': True}], - 'tests': [ - # List all effective assignments for user[0] - should get back - # one direct role plus one inherited role. - {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 1, 'project': 1, - 'indirect': {'project': 0}}]} - ] - } - - test_plan_with_os_inherit_disabled = { - 'tests': [ - # List all effective assignments for user[0] - should only get - # back the one direct role. - {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0}]} - ] - } - self.config_fixture.config(group='os_inherit', enabled=True) - test_data = self.execute_assignment_test_plan(test_plan) - self.config_fixture.config(group='os_inherit', enabled=False) - # Pass the existing test data in to allow execution of 2nd test plan - self.execute_assignment_tests( - test_plan_with_os_inherit_disabled, test_data) - - def test_list_projects_for_user_with_inherited_group_grants(self): - """Test inherited group roles. 
- - Test Plan: - - - Enable OS-INHERIT extension - - Create two domains, each with two projects - - Create a user and group - - Make the user a member of the group - - Assign a user role two projects, an inherited - group role to one domain and an inherited regular role on - the other domain - - Get a list of projects for user, should return both pairs of projects - from the domain, plus the one separate project - - """ - self.config_fixture.config(group='os_inherit', enabled=True) - domain = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain['id'], domain) - domain2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(domain2['id'], domain2) - project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain['id']} - self.resource_api.create_project(project1['id'], project1) - project2 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain['id']} - self.resource_api.create_project(project2['id'], project2) - project3 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain2['id']} - self.resource_api.create_project(project3['id'], project3) - project4 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex, - 'domain_id': domain2['id']} - self.resource_api.create_project(project4['id'], project4) - user1 = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex, - 'domain_id': domain['id'], 'enabled': True} - user1 = self.identity_api.create_user(user1) - group1 = {'name': uuid.uuid4().hex, 'domain_id': domain['id']} - group1 = self.identity_api.create_group(group1) - self.identity_api.add_user_to_group(user1['id'], group1['id']) - - # Create 4 grants: - # - one user grant on a project in domain2 - # - one user grant on a project in the default domain - # - one inherited user grant on domain - # - one inherited group grant on domain2 - self.assignment_api.create_grant(user_id=user1['id'], - project_id=project3['id'], - 
role_id=self.role_member['id']) - self.assignment_api.create_grant(user_id=user1['id'], - project_id=self.tenant_bar['id'], - role_id=self.role_member['id']) - self.assignment_api.create_grant(user_id=user1['id'], - domain_id=domain['id'], - role_id=self.role_admin['id'], - inherited_to_projects=True) - self.assignment_api.create_grant(group_id=group1['id'], - domain_id=domain2['id'], - role_id=self.role_admin['id'], - inherited_to_projects=True) - # Should get back all five projects, but without a duplicate for - # project3 (since it has both a direct user role and an inherited role) - user_projects = self.assignment_api.list_projects_for_user(user1['id']) - self.assertEqual(5, len(user_projects)) - - # TODO(henry-nash): The test above uses list_projects_for_user - # which may, in a subsequent patch, be re-implemeted to call - # list_role_assignments and then report only the distinct projects. - # - # The test plan below therefore mirrors this test, to ensure that - # list_role_assignments works the same. Once list_projects_for_user - # has been re-implemented then the manual tests above can be - # refactored. - test_plan = { - # A domain with a 1 project, plus a second domain with 2 projects, - # as well as a user & group and a 3rd domain with 2 projects. - # Also, created 2 roles. - 'entities': {'domains': [{'projects': 1}, - {'users': 1, 'groups': 1, 'projects': 2}, - {'projects': 2}], - 'roles': 2}, - 'group_memberships': [{'group': 0, 'users': [0]}], - 'assignments': [{'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 0, 'project': 3}, - {'user': 0, 'role': 1, 'domain': 1, - 'inherited_to_projects': True}, - {'user': 0, 'role': 1, 'domain': 2, - 'inherited_to_projects': True}], - 'tests': [ - # List all effective assignments for user[0] - # Should get back both direct roles plus roles on both projects - # from each domain. Duplicates should not be fitered out. 
- {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 3}, - {'user': 0, 'role': 0, 'project': 0}, - {'user': 0, 'role': 1, 'project': 1, - 'indirect': {'domain': 1}}, - {'user': 0, 'role': 1, 'project': 2, - 'indirect': {'domain': 1}}, - {'user': 0, 'role': 1, 'project': 3, - 'indirect': {'domain': 2}}, - {'user': 0, 'role': 1, 'project': 4, - 'indirect': {'domain': 2}}]} - ] - } - self.execute_assignment_test_plan(test_plan) - - def test_list_projects_for_user_with_inherited_group_project_grants(self): - """Test inherited role assignments for groups on nested projects. - - Test Plan: - - - Enable OS-INHERIT extension - - Create a hierarchy of projects with one root and one leaf project - - Assign an inherited group role on root project - - Assign a non-inherited group role on root project - - Get a list of projects for user, should return both projects - - Disable OS-INHERIT extension - - Get a list of projects for user, should return only root project - - """ - self.config_fixture.config(group='os_inherit', enabled=True) - root_project = {'id': uuid.uuid4().hex, - 'description': '', - 'domain_id': DEFAULT_DOMAIN_ID, - 'enabled': True, - 'name': uuid.uuid4().hex, - 'parent_id': None, - 'is_domain': False} - self.resource_api.create_project(root_project['id'], root_project) - leaf_project = {'id': uuid.uuid4().hex, - 'description': '', - 'domain_id': DEFAULT_DOMAIN_ID, - 'enabled': True, - 'name': uuid.uuid4().hex, - 'parent_id': root_project['id'], - 'is_domain': False} - self.resource_api.create_project(leaf_project['id'], leaf_project) - - user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex, - 'domain_id': DEFAULT_DOMAIN_ID, 'enabled': True} - user = self.identity_api.create_user(user) - - group = {'name': uuid.uuid4().hex, 'domain_id': DEFAULT_DOMAIN_ID} - group = self.identity_api.create_group(group) - self.identity_api.add_user_to_group(user['id'], group['id']) - - # Grant inherited group role - 
self.assignment_api.create_grant(group_id=group['id'], - project_id=root_project['id'], - role_id=self.role_admin['id'], - inherited_to_projects=True) - # Grant non-inherited group role - self.assignment_api.create_grant(group_id=group['id'], - project_id=root_project['id'], - role_id=self.role_member['id']) - # Should get back both projects: because the direct role assignment for - # the root project and inherited role assignment for leaf project - user_projects = self.assignment_api.list_projects_for_user(user['id']) - self.assertEqual(2, len(user_projects)) - self.assertIn(root_project, user_projects) - self.assertIn(leaf_project, user_projects) - - # Disable OS-INHERIT extension - self.config_fixture.config(group='os_inherit', enabled=False) - # Should get back just root project - due the direct role assignment - user_projects = self.assignment_api.list_projects_for_user(user['id']) - self.assertEqual(1, len(user_projects)) - self.assertIn(root_project, user_projects) - - # TODO(henry-nash): The test above uses list_projects_for_user - # which may, in a subsequent patch, be re-implemeted to call - # list_role_assignments and then report only the distinct projects. - # - # The test plan below therefore mirrors this test, to ensure that - # list_role_assignments works the same. Once list_projects_for_user - # has been re-implemented then the manual tests above can be - # refactored. - test_plan = { - # A domain with a project ans sub-project, plus a user. - # Also, create 2 roles. - 'entities': { - 'domains': {'id': DEFAULT_DOMAIN_ID, 'users': 1, 'groups': 1, - 'projects': {'project': 1}}, - 'roles': 2}, - 'group_memberships': [{'group': 0, 'users': [0]}], - # A direct role and an inherited role on the parent - 'assignments': [{'group': 0, 'role': 0, 'project': 0}, - {'group': 0, 'role': 1, 'project': 0, - 'inherited_to_projects': True}], - 'tests': [ - # List all effective assignments for user[0] - should get back - # one direct role plus one inherited role. 
- {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0, - 'indirect': {'group': 0}}, - {'user': 0, 'role': 1, 'project': 1, - 'indirect': {'group': 0, 'project': 0}}]} - ] - } - - test_plan_with_os_inherit_disabled = { - 'tests': [ - # List all effective assignments for user[0] - should only get - # back the one direct role. - {'params': {'user': 0, 'effective': True}, - 'results': [{'user': 0, 'role': 0, 'project': 0, - 'indirect': {'group': 0}}]} - ] - } - self.config_fixture.config(group='os_inherit', enabled=True) - test_data = self.execute_assignment_test_plan(test_plan) - self.config_fixture.config(group='os_inherit', enabled=False) - # Pass the existing test data in to allow execution of 2nd test plan - self.execute_assignment_tests( - test_plan_with_os_inherit_disabled, test_data) - - -class FilterTests(filtering.FilterTests): - def test_list_entities_filtered(self): - for entity in ['user', 'group', 'project']: - # Create 20 entities - entity_list = self._create_test_data(entity, 20) - - # Try filtering to get one an exact item out of the list - hints = driver_hints.Hints() - hints.add_filter('name', entity_list[10]['name']) - entities = self._list_entities(entity)(hints=hints) - self.assertEqual(1, len(entities)) - self.assertEqual(entities[0]['id'], entity_list[10]['id']) - # Check the driver has removed the filter from the list hints - self.assertFalse(hints.get_exact_filter_by_name('name')) - self._delete_test_data(entity, entity_list) - - def test_list_users_inexact_filtered(self): - # Create 20 users, some with specific names. We set the names at create - # time (rather than updating them), since the LDAP driver does not - # support name updates. 
- user_name_data = { - # user index: name for user - 5: 'The', - 6: 'The Ministry', - 7: 'The Ministry of', - 8: 'The Ministry of Silly', - 9: 'The Ministry of Silly Walks', - # ...and one for useful case insensitivity testing - 10: 'The ministry of silly walks OF' - } - user_list = self._create_test_data( - 'user', 20, domain_id=DEFAULT_DOMAIN_ID, name_dict=user_name_data) - - hints = driver_hints.Hints() - hints.add_filter('name', 'ministry', comparator='contains') - users = self.identity_api.list_users(hints=hints) - self.assertEqual(5, len(users)) - self._match_with_list(users, user_list, - list_start=6, list_end=11) - # TODO(henry-nash) Check inexact filter has been removed. - - hints = driver_hints.Hints() - hints.add_filter('name', 'The', comparator='startswith') - users = self.identity_api.list_users(hints=hints) - self.assertEqual(6, len(users)) - self._match_with_list(users, user_list, - list_start=5, list_end=11) - # TODO(henry-nash) Check inexact filter has been removed. - - hints = driver_hints.Hints() - hints.add_filter('name', 'of', comparator='endswith') - users = self.identity_api.list_users(hints=hints) - self.assertEqual(2, len(users)) - # We can't assume we will get back the users in any particular order - self.assertIn(user_list[7]['id'], [users[0]['id'], users[1]['id']]) - self.assertIn(user_list[10]['id'], [users[0]['id'], users[1]['id']]) - # TODO(henry-nash) Check inexact filter has been removed. - - # TODO(henry-nash): Add some case sensitive tests. However, - # these would be hard to validate currently, since: - # - # For SQL, the issue is that MySQL 0.7, by default, is installed in - # case insensitive mode (which is what is run by default for our - # SQL backend tests). For production deployments. OpenStack - # assumes a case sensitive database. For these tests, therefore, we - # need to be able to check the sensitivity of the database so as to - # know whether to run case sensitive tests here. 
- # - # For LDAP/AD, although dependent on the schema being used, attributes - # are typically configured to be case aware, but not case sensitive. - - self._delete_test_data('user', user_list) - - def test_groups_for_user_filtered(self): - """Test use of filtering doesn't break groups_for_user listing. - - Some backends may use filtering to achieve the list of groups for a - user, so test that it can combine a second filter. - - Test Plan: - - - Create 10 groups, some with names we can filter on - - Create 2 users - - Assign 1 of those users to most of the groups, including some of the - well known named ones - - Assign the other user to other groups as spoilers - - Ensure that when we list groups for users with a filter on the group - name, both restrictions have been enforced on what is returned. - - """ - - number_of_groups = 10 - group_name_data = { - # entity index: name for entity - 5: 'The', - 6: 'The Ministry', - 9: 'The Ministry of Silly Walks', - } - group_list = self._create_test_data( - 'group', number_of_groups, - domain_id=DEFAULT_DOMAIN_ID, name_dict=group_name_data) - user_list = self._create_test_data('user', 2) - - for group in range(7): - # Create membership, including with two out of the three groups - # with well know names - self.identity_api.add_user_to_group(user_list[0]['id'], - group_list[group]['id']) - # ...and some spoiler memberships - for group in range(7, number_of_groups): - self.identity_api.add_user_to_group(user_list[1]['id'], - group_list[group]['id']) - - hints = driver_hints.Hints() - hints.add_filter('name', 'The', comparator='startswith') - groups = self.identity_api.list_groups_for_user( - user_list[0]['id'], hints=hints) - # We should only get back 2 out of the 3 groups that start with 'The' - # hence showing that both "filters" have been applied - self.assertThat(len(groups), matchers.Equals(2)) - self.assertIn(group_list[5]['id'], [groups[0]['id'], groups[1]['id']]) - self.assertIn(group_list[6]['id'], [groups[0]['id'], 
groups[1]['id']]) - self._delete_test_data('user', user_list) - self._delete_test_data('group', group_list) - - def _get_user_name_field_size(self): - """Return the size of the user name field for the backend. - - Subclasses can override this method to indicate that the user name - field is limited in length. The user name is the field used in the test - that validates that a filter value works even if it's longer than a - field. - - If the backend doesn't limit the value length then return None. - - """ - return None - - def test_filter_value_wider_than_field(self): - # If a filter value is given that's larger than the field in the - # backend then no values are returned. - - user_name_field_size = self._get_user_name_field_size() - - if user_name_field_size is None: - # The backend doesn't limit the size of the user name, so pass this - # test. - return - - # Create some users just to make sure would return something if the - # filter was ignored. - self._create_test_data('user', 2) - - hints = driver_hints.Hints() - value = 'A' * (user_name_field_size + 1) - hints.add_filter('name', value) - users = self.identity_api.list_users(hints=hints) - self.assertEqual([], users) - - def test_list_users_in_group_filtered(self): - number_of_users = 10 - user_name_data = { - 1: 'Arthur Conan Doyle', - 3: 'Arthur Rimbaud', - 9: 'Arthur Schopenhauer', - } - user_list = self._create_test_data( - 'user', number_of_users, - domain_id=DEFAULT_DOMAIN_ID, name_dict=user_name_data) - group = self._create_one_entity('group', - DEFAULT_DOMAIN_ID, 'Great Writers') - for i in range(7): - self.identity_api.add_user_to_group(user_list[i]['id'], - group['id']) - - hints = driver_hints.Hints() - hints.add_filter('name', 'Arthur', comparator='startswith') - users = self.identity_api.list_users_in_group(group['id'], hints=hints) - self.assertThat(len(users), matchers.Equals(2)) - self.assertIn(user_list[1]['id'], [users[0]['id'], users[1]['id']]) - self.assertIn(user_list[3]['id'], 
[users[0]['id'], users[1]['id']]) - self._delete_test_data('user', user_list) - self._delete_entity('group')(group['id']) - - -class LimitTests(filtering.FilterTests): - ENTITIES = ['user', 'group', 'project'] - - def setUp(self): - """Setup for Limit Test Cases.""" - - self.domain1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex} - self.resource_api.create_domain(self.domain1['id'], self.domain1) - self.addCleanup(self.clean_up_domain) - - self.entity_lists = {} - self.domain1_entity_lists = {} - - for entity in self.ENTITIES: - # Create 20 entities, 14 of which are in domain1 - self.entity_lists[entity] = self._create_test_data(entity, 6) - self.domain1_entity_lists[entity] = self._create_test_data( - entity, 14, self.domain1['id']) - self.addCleanup(self.clean_up_entities) - - def clean_up_domain(self): - """Clean up domain test data from Limit Test Cases.""" - - self.domain1['enabled'] = False - self.resource_api.update_domain(self.domain1['id'], self.domain1) - self.resource_api.delete_domain(self.domain1['id']) - del self.domain1 - - def clean_up_entities(self): - """Clean up entity test data from Limit Test Cases.""" - for entity in self.ENTITIES: - self._delete_test_data(entity, self.entity_lists[entity]) - self._delete_test_data(entity, self.domain1_entity_lists[entity]) - del self.entity_lists - del self.domain1_entity_lists - - def _test_list_entity_filtered_and_limited(self, entity): - self.config_fixture.config(list_limit=10) - # Should get back just 10 entities in domain1 - hints = driver_hints.Hints() - hints.add_filter('domain_id', self.domain1['id']) - entities = self._list_entities(entity)(hints=hints) - self.assertEqual(hints.limit['limit'], len(entities)) - self.assertTrue(hints.limit['truncated']) - self._match_with_list(entities, self.domain1_entity_lists[entity]) - - # Override with driver specific limit - if entity == 'project': - self.config_fixture.config(group='resource', list_limit=5) - else: - 
self.config_fixture.config(group='identity', list_limit=5) - - # Should get back just 5 users in domain1 - hints = driver_hints.Hints() - hints.add_filter('domain_id', self.domain1['id']) - entities = self._list_entities(entity)(hints=hints) - self.assertEqual(hints.limit['limit'], len(entities)) - self._match_with_list(entities, self.domain1_entity_lists[entity]) - - # Finally, let's pretend we want to get the full list of entities, - # even with the limits set, as part of some internal calculation. - # Calling the API without a hints list should achieve this, and - # return at least the 20 entries we created (there may be other - # entities lying around created by other tests/setup). - entities = self._list_entities(entity)() - self.assertTrue(len(entities) >= 20) - - def test_list_users_filtered_and_limited(self): - self._test_list_entity_filtered_and_limited('user') - - def test_list_groups_filtered_and_limited(self): - self._test_list_entity_filtered_and_limited('group') - - def test_list_projects_filtered_and_limited(self): - self._test_list_entity_filtered_and_limited('project') diff --git a/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy.py b/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy.py deleted file mode 100644 index f72cad63..00000000 --- a/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy.py +++ /dev/null @@ -1,249 +0,0 @@ -# Copyright 2014 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import uuid - -from six.moves import range -from testtools import matchers - -from keystone import exception -from keystone.tests import unit - - -class PolicyAssociationTests(object): - - def _assert_correct_policy(self, endpoint, policy): - ref = ( - self.endpoint_policy_api.get_policy_for_endpoint(endpoint['id'])) - self.assertEqual(policy['id'], ref['id']) - - def _assert_correct_endpoints(self, policy, endpoint_list): - endpoint_id_list = [ep['id'] for ep in endpoint_list] - endpoints = ( - self.endpoint_policy_api.list_endpoints_for_policy(policy['id'])) - self.assertThat(endpoints, matchers.HasLength(len(endpoint_list))) - for endpoint in endpoints: - self.assertIn(endpoint['id'], endpoint_id_list) - - def load_sample_data(self): - """Create sample data to test policy associations. - - The following data is created: - - - 3 regions, in a hierarchy, 0 -> 1 -> 2 (where 0 is top) - - 3 services - - 6 endpoints, 2 in each region, with a mixture of services: - 0 - region 0, Service 0 - 1 - region 0, Service 1 - 2 - region 1, Service 1 - 3 - region 1, Service 2 - 4 - region 2, Service 2 - 5 - region 2, Service 0 - - """ - def new_endpoint(region_id, service_id): - endpoint = unit.new_endpoint_ref(interface='test', - region_id=region_id, - service_id=service_id, - url='/url') - self.endpoint.append(self.catalog_api.create_endpoint( - endpoint['id'], endpoint)) - - self.policy = [] - self.endpoint = [] - self.service = [] - self.region = [] - - parent_region_id = None - for i in range(3): - policy = unit.new_policy_ref() - self.policy.append(self.policy_api.create_policy(policy['id'], - policy)) - service = unit.new_service_ref() - self.service.append(self.catalog_api.create_service(service['id'], - service)) - region = unit.new_region_ref(parent_region_id=parent_region_id) - # Link the regions together as a hierarchy, [0] at the top - parent_region_id = region['id'] - self.region.append(self.catalog_api.create_region(region)) - - 
new_endpoint(self.region[0]['id'], self.service[0]['id']) - new_endpoint(self.region[0]['id'], self.service[1]['id']) - new_endpoint(self.region[1]['id'], self.service[1]['id']) - new_endpoint(self.region[1]['id'], self.service[2]['id']) - new_endpoint(self.region[2]['id'], self.service[2]['id']) - new_endpoint(self.region[2]['id'], self.service[0]['id']) - - def test_policy_to_endpoint_association_crud(self): - self.endpoint_policy_api.create_policy_association( - self.policy[0]['id'], endpoint_id=self.endpoint[0]['id']) - self.endpoint_policy_api.check_policy_association( - self.policy[0]['id'], endpoint_id=self.endpoint[0]['id']) - self.endpoint_policy_api.delete_policy_association( - self.policy[0]['id'], endpoint_id=self.endpoint[0]['id']) - self.assertRaises(exception.NotFound, - self.endpoint_policy_api.check_policy_association, - self.policy[0]['id'], - endpoint_id=self.endpoint[0]['id']) - - def test_overwriting_policy_to_endpoint_association(self): - self.endpoint_policy_api.create_policy_association( - self.policy[0]['id'], endpoint_id=self.endpoint[0]['id']) - self.endpoint_policy_api.create_policy_association( - self.policy[1]['id'], endpoint_id=self.endpoint[0]['id']) - self.assertRaises(exception.NotFound, - self.endpoint_policy_api.check_policy_association, - self.policy[0]['id'], - endpoint_id=self.endpoint[0]['id']) - self.endpoint_policy_api.check_policy_association( - self.policy[1]['id'], endpoint_id=self.endpoint[0]['id']) - - def test_invalid_policy_to_endpoint_association(self): - self.assertRaises(exception.InvalidPolicyAssociation, - self.endpoint_policy_api.create_policy_association, - self.policy[0]['id']) - self.assertRaises(exception.InvalidPolicyAssociation, - self.endpoint_policy_api.create_policy_association, - self.policy[0]['id'], - endpoint_id=self.endpoint[0]['id'], - region_id=self.region[0]['id']) - self.assertRaises(exception.InvalidPolicyAssociation, - self.endpoint_policy_api.create_policy_association, - 
self.policy[0]['id'], - endpoint_id=self.endpoint[0]['id'], - service_id=self.service[0]['id']) - self.assertRaises(exception.InvalidPolicyAssociation, - self.endpoint_policy_api.create_policy_association, - self.policy[0]['id'], - region_id=self.region[0]['id']) - - def test_policy_to_explicit_endpoint_association(self): - # Associate policy 0 with endpoint 0 - self.endpoint_policy_api.create_policy_association( - self.policy[0]['id'], endpoint_id=self.endpoint[0]['id']) - self._assert_correct_policy(self.endpoint[0], self.policy[0]) - self._assert_correct_endpoints(self.policy[0], [self.endpoint[0]]) - self.assertRaises(exception.NotFound, - self.endpoint_policy_api.get_policy_for_endpoint, - uuid.uuid4().hex) - - def test_policy_to_service_association(self): - self.endpoint_policy_api.create_policy_association( - self.policy[0]['id'], service_id=self.service[0]['id']) - self.endpoint_policy_api.create_policy_association( - self.policy[1]['id'], service_id=self.service[1]['id']) - - # Endpoints 0 and 5 are part of service 0 - self._assert_correct_policy(self.endpoint[0], self.policy[0]) - self._assert_correct_policy(self.endpoint[5], self.policy[0]) - self._assert_correct_endpoints( - self.policy[0], [self.endpoint[0], self.endpoint[5]]) - - # Endpoints 1 and 2 are part of service 1 - self._assert_correct_policy(self.endpoint[1], self.policy[1]) - self._assert_correct_policy(self.endpoint[2], self.policy[1]) - self._assert_correct_endpoints( - self.policy[1], [self.endpoint[1], self.endpoint[2]]) - - def test_policy_to_region_and_service_association(self): - self.endpoint_policy_api.create_policy_association( - self.policy[0]['id'], service_id=self.service[0]['id'], - region_id=self.region[0]['id']) - self.endpoint_policy_api.create_policy_association( - self.policy[1]['id'], service_id=self.service[1]['id'], - region_id=self.region[1]['id']) - self.endpoint_policy_api.create_policy_association( - self.policy[2]['id'], service_id=self.service[2]['id'], - 
region_id=self.region[2]['id']) - - # Endpoint 0 is in region 0 with service 0, so should get policy 0 - self._assert_correct_policy(self.endpoint[0], self.policy[0]) - # Endpoint 5 is in Region 2 with service 0, so should also get - # policy 0 by searching up the tree to Region 0 - self._assert_correct_policy(self.endpoint[5], self.policy[0]) - - # Looking the other way round, policy 2 should only be in use by - # endpoint 4, since that's the only endpoint in region 2 with the - # correct service - self._assert_correct_endpoints( - self.policy[2], [self.endpoint[4]]) - # Policy 1 should only be in use by endpoint 2, since that's the only - # endpoint in region 1 (and region 2 below it) with the correct service - self._assert_correct_endpoints( - self.policy[1], [self.endpoint[2]]) - # Policy 0 should be in use by endpoint 0, as well as 5 (since 5 is - # of the correct service and in region 2 below it) - self._assert_correct_endpoints( - self.policy[0], [self.endpoint[0], self.endpoint[5]]) - - def test_delete_association_by_entity(self): - self.endpoint_policy_api.create_policy_association( - self.policy[0]['id'], endpoint_id=self.endpoint[0]['id']) - self.endpoint_policy_api.delete_association_by_endpoint( - self.endpoint[0]['id']) - self.assertRaises(exception.NotFound, - self.endpoint_policy_api.check_policy_association, - self.policy[0]['id'], - endpoint_id=self.endpoint[0]['id']) - # Make sure deleting it again is silent - since this method is used - # in response to notifications by the controller. 
- self.endpoint_policy_api.delete_association_by_endpoint( - self.endpoint[0]['id']) - - # Now try with service - ensure both combined region & service - # associations and explicit service ones are removed - self.endpoint_policy_api.create_policy_association( - self.policy[0]['id'], service_id=self.service[0]['id'], - region_id=self.region[0]['id']) - self.endpoint_policy_api.create_policy_association( - self.policy[1]['id'], service_id=self.service[0]['id'], - region_id=self.region[1]['id']) - self.endpoint_policy_api.create_policy_association( - self.policy[0]['id'], service_id=self.service[0]['id']) - - self.endpoint_policy_api.delete_association_by_service( - self.service[0]['id']) - - self.assertRaises(exception.NotFound, - self.endpoint_policy_api.check_policy_association, - self.policy[0]['id'], - service_id=self.service[0]['id'], - region_id=self.region[0]['id']) - self.assertRaises(exception.NotFound, - self.endpoint_policy_api.check_policy_association, - self.policy[1]['id'], - service_id=self.service[0]['id'], - region_id=self.region[1]['id']) - self.assertRaises(exception.NotFound, - self.endpoint_policy_api.check_policy_association, - self.policy[0]['id'], - service_id=self.service[0]['id']) - - # Finally, check delete by region - self.endpoint_policy_api.create_policy_association( - self.policy[0]['id'], service_id=self.service[0]['id'], - region_id=self.region[0]['id']) - - self.endpoint_policy_api.delete_association_by_region( - self.region[0]['id']) - - self.assertRaises(exception.NotFound, - self.endpoint_policy_api.check_policy_association, - self.policy[0]['id'], - service_id=self.service[0]['id'], - region_id=self.region[0]['id']) - self.assertRaises(exception.NotFound, - self.endpoint_policy_api.check_policy_association, - self.policy[0]['id'], - service_id=self.service[0]['id']) diff --git a/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy_sql.py b/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy_sql.py deleted 
file mode 100644 index 134a03f0..00000000 --- a/keystone-moon/keystone/tests/unit/test_backend_endpoint_policy_sql.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2014 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.common import sql -from keystone.tests.unit import test_backend_endpoint_policy -from keystone.tests.unit import test_backend_sql - - -class SqlPolicyAssociationTable(test_backend_sql.SqlModels): - """Set of tests for checking SQL Policy Association Mapping.""" - - def test_policy_association_mapping(self): - cols = (('id', sql.String, 64), - ('policy_id', sql.String, 64), - ('endpoint_id', sql.String, 64), - ('service_id', sql.String, 64), - ('region_id', sql.String, 64)) - self.assertExpectedSchema('policy_association', cols) - - -class SqlPolicyAssociationTests( - test_backend_sql.SqlTests, - test_backend_endpoint_policy.PolicyAssociationTests): - - def load_fixtures(self, fixtures): - super(SqlPolicyAssociationTests, self).load_fixtures(fixtures) - self.load_sample_data() diff --git a/keystone-moon/keystone/tests/unit/test_backend_federation_sql.py b/keystone-moon/keystone/tests/unit/test_backend_federation_sql.py deleted file mode 100644 index 995c564d..00000000 --- a/keystone-moon/keystone/tests/unit/test_backend_federation_sql.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the 
License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.common import sql -from keystone.tests.unit import test_backend_sql - - -class SqlFederation(test_backend_sql.SqlModels): - """Set of tests for checking SQL Federation.""" - - def test_identity_provider(self): - cols = (('id', sql.String, 64), - ('enabled', sql.Boolean, None), - ('description', sql.Text, None)) - self.assertExpectedSchema('identity_provider', cols) - - def test_idp_remote_ids(self): - cols = (('idp_id', sql.String, 64), - ('remote_id', sql.String, 255)) - self.assertExpectedSchema('idp_remote_ids', cols) - - def test_federated_protocol(self): - cols = (('id', sql.String, 64), - ('idp_id', sql.String, 64), - ('mapping_id', sql.String, 64)) - self.assertExpectedSchema('federation_protocol', cols) - - def test_mapping(self): - cols = (('id', sql.String, 64), - ('rules', sql.JsonBlob, None)) - self.assertExpectedSchema('mapping', cols) - - def test_service_provider(self): - cols = (('auth_url', sql.String, 256), - ('id', sql.String, 64), - ('enabled', sql.Boolean, None), - ('description', sql.Text, None), - ('relay_state_prefix', sql.String, 256), - ('sp_url', sql.String, 256)) - self.assertExpectedSchema('service_provider', cols) diff --git a/keystone-moon/keystone/tests/unit/test_backend_id_mapping_sql.py b/keystone-moon/keystone/tests/unit/test_backend_id_mapping_sql.py deleted file mode 100644 index e6635e18..00000000 --- a/keystone-moon/keystone/tests/unit/test_backend_id_mapping_sql.py +++ /dev/null @@ -1,198 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2014 IBM Corp. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import uuid - -from testtools import matchers - -from keystone.common import sql -from keystone.identity.mapping_backends import mapping -from keystone.tests import unit -from keystone.tests.unit import identity_mapping as mapping_sql -from keystone.tests.unit import test_backend_sql - - -class SqlIDMappingTable(test_backend_sql.SqlModels): - """Set of tests for checking SQL Identity ID Mapping.""" - - def test_id_mapping(self): - cols = (('public_id', sql.String, 64), - ('domain_id', sql.String, 64), - ('local_id', sql.String, 64), - ('entity_type', sql.Enum, None)) - self.assertExpectedSchema('id_mapping', cols) - - -class SqlIDMapping(test_backend_sql.SqlTests): - - def setUp(self): - super(SqlIDMapping, self).setUp() - self.load_sample_data() - - def load_sample_data(self): - self.addCleanup(self.clean_sample_data) - domainA = unit.new_domain_ref() - self.domainA = self.resource_api.create_domain(domainA['id'], domainA) - domainB = unit.new_domain_ref() - self.domainB = self.resource_api.create_domain(domainB['id'], domainB) - - def clean_sample_data(self): - if hasattr(self, 'domainA'): - self.domainA['enabled'] = False - self.resource_api.update_domain(self.domainA['id'], self.domainA) - self.resource_api.delete_domain(self.domainA['id']) - if hasattr(self, 'domainB'): - self.domainB['enabled'] = False - self.resource_api.update_domain(self.domainB['id'], self.domainB) - 
self.resource_api.delete_domain(self.domainB['id']) - - def test_invalid_public_key(self): - self.assertIsNone(self.id_mapping_api.get_id_mapping(uuid.uuid4().hex)) - - def test_id_mapping_crud(self): - initial_mappings = len(mapping_sql.list_id_mappings()) - local_id1 = uuid.uuid4().hex - local_id2 = uuid.uuid4().hex - local_entity1 = {'domain_id': self.domainA['id'], - 'local_id': local_id1, - 'entity_type': mapping.EntityType.USER} - local_entity2 = {'domain_id': self.domainB['id'], - 'local_id': local_id2, - 'entity_type': mapping.EntityType.GROUP} - - # Check no mappings for the new local entities - self.assertIsNone(self.id_mapping_api.get_public_id(local_entity1)) - self.assertIsNone(self.id_mapping_api.get_public_id(local_entity2)) - - # Create the new mappings and then read them back - public_id1 = self.id_mapping_api.create_id_mapping(local_entity1) - public_id2 = self.id_mapping_api.create_id_mapping(local_entity2) - self.assertThat(mapping_sql.list_id_mappings(), - matchers.HasLength(initial_mappings + 2)) - self.assertEqual( - public_id1, self.id_mapping_api.get_public_id(local_entity1)) - self.assertEqual( - public_id2, self.id_mapping_api.get_public_id(local_entity2)) - - local_id_ref = self.id_mapping_api.get_id_mapping(public_id1) - self.assertEqual(self.domainA['id'], local_id_ref['domain_id']) - self.assertEqual(local_id1, local_id_ref['local_id']) - self.assertEqual(mapping.EntityType.USER, local_id_ref['entity_type']) - # Check we have really created a new external ID - self.assertNotEqual(local_id1, public_id1) - - local_id_ref = self.id_mapping_api.get_id_mapping(public_id2) - self.assertEqual(self.domainB['id'], local_id_ref['domain_id']) - self.assertEqual(local_id2, local_id_ref['local_id']) - self.assertEqual(mapping.EntityType.GROUP, local_id_ref['entity_type']) - # Check we have really created a new external ID - self.assertNotEqual(local_id2, public_id2) - - # Create another mappings, this time specifying a public ID to use - 
new_public_id = uuid.uuid4().hex - public_id3 = self.id_mapping_api.create_id_mapping( - {'domain_id': self.domainB['id'], 'local_id': local_id2, - 'entity_type': mapping.EntityType.USER}, - public_id=new_public_id) - self.assertEqual(new_public_id, public_id3) - self.assertThat(mapping_sql.list_id_mappings(), - matchers.HasLength(initial_mappings + 3)) - - # Delete the mappings we created, and make sure the mapping count - # goes back to where it was - self.id_mapping_api.delete_id_mapping(public_id1) - self.id_mapping_api.delete_id_mapping(public_id2) - self.id_mapping_api.delete_id_mapping(public_id3) - self.assertThat(mapping_sql.list_id_mappings(), - matchers.HasLength(initial_mappings)) - - def test_id_mapping_handles_unicode(self): - initial_mappings = len(mapping_sql.list_id_mappings()) - local_id = u'fäké1' - local_entity = {'domain_id': self.domainA['id'], - 'local_id': local_id, - 'entity_type': mapping.EntityType.USER} - - # Check no mappings for the new local entity - self.assertIsNone(self.id_mapping_api.get_public_id(local_entity)) - - # Create the new mapping and then read it back - public_id = self.id_mapping_api.create_id_mapping(local_entity) - self.assertThat(mapping_sql.list_id_mappings(), - matchers.HasLength(initial_mappings + 1)) - self.assertEqual( - public_id, self.id_mapping_api.get_public_id(local_entity)) - - def test_delete_public_id_is_silent(self): - # Test that deleting an invalid public key is silent - self.id_mapping_api.delete_id_mapping(uuid.uuid4().hex) - - def test_purge_mappings(self): - initial_mappings = len(mapping_sql.list_id_mappings()) - local_id1 = uuid.uuid4().hex - local_id2 = uuid.uuid4().hex - local_id3 = uuid.uuid4().hex - local_id4 = uuid.uuid4().hex - local_id5 = uuid.uuid4().hex - - # Create five mappings,two in domainA, three in domainB - self.id_mapping_api.create_id_mapping( - {'domain_id': self.domainA['id'], 'local_id': local_id1, - 'entity_type': mapping.EntityType.USER}) - 
self.id_mapping_api.create_id_mapping( - {'domain_id': self.domainA['id'], 'local_id': local_id2, - 'entity_type': mapping.EntityType.USER}) - public_id3 = self.id_mapping_api.create_id_mapping( - {'domain_id': self.domainB['id'], 'local_id': local_id3, - 'entity_type': mapping.EntityType.GROUP}) - public_id4 = self.id_mapping_api.create_id_mapping( - {'domain_id': self.domainB['id'], 'local_id': local_id4, - 'entity_type': mapping.EntityType.USER}) - public_id5 = self.id_mapping_api.create_id_mapping( - {'domain_id': self.domainB['id'], 'local_id': local_id5, - 'entity_type': mapping.EntityType.USER}) - - self.assertThat(mapping_sql.list_id_mappings(), - matchers.HasLength(initial_mappings + 5)) - - # Purge mappings for domainA, should be left with those in B - self.id_mapping_api.purge_mappings( - {'domain_id': self.domainA['id']}) - self.assertThat(mapping_sql.list_id_mappings(), - matchers.HasLength(initial_mappings + 3)) - self.id_mapping_api.get_id_mapping(public_id3) - self.id_mapping_api.get_id_mapping(public_id4) - self.id_mapping_api.get_id_mapping(public_id5) - - # Purge mappings for type Group, should purge one more - self.id_mapping_api.purge_mappings( - {'entity_type': mapping.EntityType.GROUP}) - self.assertThat(mapping_sql.list_id_mappings(), - matchers.HasLength(initial_mappings + 2)) - self.id_mapping_api.get_id_mapping(public_id4) - self.id_mapping_api.get_id_mapping(public_id5) - - # Purge mapping for a specific local identifier - self.id_mapping_api.purge_mappings( - {'domain_id': self.domainB['id'], 'local_id': local_id4, - 'entity_type': mapping.EntityType.USER}) - self.assertThat(mapping_sql.list_id_mappings(), - matchers.HasLength(initial_mappings + 1)) - self.id_mapping_api.get_id_mapping(public_id5) - - # Purge mappings the remaining mappings - self.id_mapping_api.purge_mappings({}) - self.assertThat(mapping_sql.list_id_mappings(), - matchers.HasLength(initial_mappings)) diff --git a/keystone-moon/keystone/tests/unit/test_backend_kvs.py 
b/keystone-moon/keystone/tests/unit/test_backend_kvs.py deleted file mode 100644 index 36af1c36..00000000 --- a/keystone-moon/keystone/tests/unit/test_backend_kvs.py +++ /dev/null @@ -1,113 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -import datetime -import uuid - -from oslo_utils import timeutils -import six - -from keystone.common import utils -from keystone import exception -from keystone.tests import unit -from keystone.tests.unit.ksfixtures import database -from keystone.tests.unit.token import test_backends as token_tests - - -class KvsToken(unit.TestCase, token_tests.TokenTests): - def setUp(self): - super(KvsToken, self).setUp() - self.load_backends() - - def test_flush_expired_token(self): - self.assertRaises( - exception.NotImplemented, - self.token_provider_api._persistence.flush_expired_tokens) - - def _update_user_token_index_direct(self, user_key, token_id, new_data): - persistence = self.token_provider_api._persistence - token_list = persistence.driver._get_user_token_list_with_expiry( - user_key) - # Update the user-index so that the expires time is _actually_ expired - # since we do not do an explicit get on the token, we only reference - # the data in the user index (to save extra round-trips to the kvs - # backend). 
- for i, data in enumerate(token_list): - if data[0] == token_id: - token_list[i] = new_data - break - self.token_provider_api._persistence.driver._store.set(user_key, - token_list) - - def test_cleanup_user_index_on_create(self): - user_id = six.text_type(uuid.uuid4().hex) - valid_token_id, data = self.create_token_sample_data(user_id=user_id) - expired_token_id, expired_data = self.create_token_sample_data( - user_id=user_id) - - expire_delta = datetime.timedelta(seconds=86400) - - # NOTE(morganfainberg): Directly access the data cache since we need to - # get expired tokens as well as valid tokens. - token_persistence = self.token_provider_api._persistence - user_key = token_persistence.driver._prefix_user_id(user_id) - user_token_list = token_persistence.driver._store.get(user_key) - valid_token_ref = token_persistence.get_token(valid_token_id) - expired_token_ref = token_persistence.get_token(expired_token_id) - expected_user_token_list = [ - (valid_token_id, utils.isotime(valid_token_ref['expires'], - subsecond=True)), - (expired_token_id, utils.isotime(expired_token_ref['expires'], - subsecond=True))] - self.assertEqual(expected_user_token_list, user_token_list) - new_expired_data = (expired_token_id, - utils.isotime( - (timeutils.utcnow() - expire_delta), - subsecond=True)) - self._update_user_token_index_direct(user_key, expired_token_id, - new_expired_data) - valid_token_id_2, valid_data_2 = self.create_token_sample_data( - user_id=user_id) - valid_token_ref_2 = token_persistence.get_token(valid_token_id_2) - expected_user_token_list = [ - (valid_token_id, utils.isotime(valid_token_ref['expires'], - subsecond=True)), - (valid_token_id_2, utils.isotime(valid_token_ref_2['expires'], - subsecond=True))] - user_token_list = token_persistence.driver._store.get(user_key) - self.assertEqual(expected_user_token_list, user_token_list) - - # Test that revoked tokens are removed from the list on create. 
- token_persistence.delete_token(valid_token_id_2) - new_token_id, data = self.create_token_sample_data(user_id=user_id) - new_token_ref = token_persistence.get_token(new_token_id) - expected_user_token_list = [ - (valid_token_id, utils.isotime(valid_token_ref['expires'], - subsecond=True)), - (new_token_id, utils.isotime(new_token_ref['expires'], - subsecond=True))] - user_token_list = token_persistence.driver._store.get(user_key) - self.assertEqual(expected_user_token_list, user_token_list) - - -class KvsTokenCacheInvalidation(unit.TestCase, - token_tests.TokenCacheInvalidation): - def setUp(self): - super(KvsTokenCacheInvalidation, self).setUp() - self.useFixture(database.Database(self.sql_driver_version_overrides)) - self.load_backends() - self._create_test_data() - - def config_overrides(self): - super(KvsTokenCacheInvalidation, self).config_overrides() - self.config_fixture.config(group='token', driver='kvs') diff --git a/keystone-moon/keystone/tests/unit/test_backend_ldap.py b/keystone-moon/keystone/tests/unit/test_backend_ldap.py deleted file mode 100644 index cf618633..00000000 --- a/keystone-moon/keystone/tests/unit/test_backend_ldap.py +++ /dev/null @@ -1,3287 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2012 OpenStack Foundation -# Copyright 2013 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import copy -import uuid - -import ldap -import mock -from oslo_config import cfg -from oslo_log import versionutils -from oslotest import mockpatch -import pkg_resources -from six.moves import http_client -from six.moves import range -from testtools import matchers - -from keystone.common import cache -from keystone.common import driver_hints -from keystone.common import ldap as common_ldap -from keystone.common.ldap import core as common_ldap_core -from keystone import exception -from keystone import identity -from keystone.identity.mapping_backends import mapping as map -from keystone import resource -from keystone.tests import unit -from keystone.tests.unit.assignment import test_backends as assignment_tests -from keystone.tests.unit import default_fixtures -from keystone.tests.unit.identity import test_backends as identity_tests -from keystone.tests.unit import identity_mapping as mapping_sql -from keystone.tests.unit.ksfixtures import database -from keystone.tests.unit.ksfixtures import ldapdb -from keystone.tests.unit.resource import test_backends as resource_tests -from keystone.tests.unit.utils import wip - - -CONF = cfg.CONF - - -def _assert_backends(testcase, **kwargs): - - def _get_backend_cls(testcase, subsystem): - observed_backend = getattr(testcase, subsystem + '_api').driver - return observed_backend.__class__ - - def _get_domain_specific_backend_cls(manager, domain): - observed_backend = manager.domain_configs.get_domain_driver(domain) - return observed_backend.__class__ - - def _get_entrypoint_cls(subsystem, name): - entrypoint = entrypoint_map['keystone.' 
+ subsystem][name] - return entrypoint.resolve() - - def _load_domain_specific_configs(manager): - if (not manager.domain_configs.configured and - CONF.identity.domain_specific_drivers_enabled): - manager.domain_configs.setup_domain_drivers( - manager.driver, manager.resource_api) - - def _assert_equal(expected_cls, observed_cls, subsystem, - domain=None): - msg = ('subsystem %(subsystem)s expected %(expected_cls)r, ' - 'but observed %(observed_cls)r') - if domain: - subsystem = '%s[domain=%s]' % (subsystem, domain) - assert expected_cls == observed_cls, msg % { - 'expected_cls': expected_cls, - 'observed_cls': observed_cls, - 'subsystem': subsystem, - } - - env = pkg_resources.Environment() - keystone_dist = env['keystone'][0] - entrypoint_map = pkg_resources.get_entry_map(keystone_dist) - - for subsystem, entrypoint_name in kwargs.items(): - if isinstance(entrypoint_name, str): - observed_cls = _get_backend_cls(testcase, subsystem) - expected_cls = _get_entrypoint_cls(subsystem, entrypoint_name) - _assert_equal(expected_cls, observed_cls, subsystem) - - elif isinstance(entrypoint_name, dict): - manager = getattr(testcase, subsystem + '_api') - _load_domain_specific_configs(manager) - - for domain, entrypoint_name in entrypoint_name.items(): - if domain is None: - observed_cls = _get_backend_cls(testcase, subsystem) - expected_cls = _get_entrypoint_cls( - subsystem, entrypoint_name) - _assert_equal(expected_cls, observed_cls, subsystem) - continue - - observed_cls = _get_domain_specific_backend_cls( - manager, domain) - expected_cls = _get_entrypoint_cls(subsystem, entrypoint_name) - _assert_equal(expected_cls, observed_cls, subsystem, domain) - - else: - raise ValueError('%r is not an expected value for entrypoint name' - % entrypoint_name) - - -def create_group_container(identity_api): - # Create the groups base entry (ou=Groups,cn=example,cn=com) - group_api = identity_api.driver.group - conn = group_api.get_connection() - dn = 'ou=Groups,cn=example,cn=com' - 
conn.add_s(dn, [('objectclass', ['organizationalUnit']), - ('ou', ['Groups'])]) - - -class BaseLDAPIdentity(identity_tests.IdentityTests, - assignment_tests.AssignmentTests, - resource_tests.ResourceTests): - - def setUp(self): - super(BaseLDAPIdentity, self).setUp() - self.ldapdb = self.useFixture(ldapdb.LDAPDatabase()) - - self.load_backends() - self.load_fixtures(default_fixtures) - self.config_fixture.config(group='os_inherit', enabled=False) - - def _get_domain_fixture(self): - """Domains in LDAP are read-only, so just return the static one.""" - return self.resource_api.get_domain(CONF.identity.default_domain_id) - - def get_config(self, domain_id): - # Only one conf structure unless we are using separate domain backends - return CONF - - def config_overrides(self): - super(BaseLDAPIdentity, self).config_overrides() - self.config_fixture.config(group='identity', driver='ldap') - - def config_files(self): - config_files = super(BaseLDAPIdentity, self).config_files() - config_files.append(unit.dirs.tests_conf('backend_ldap.conf')) - return config_files - - def new_user_ref(self, domain_id, project_id=None, **kwargs): - ref = unit.new_user_ref(domain_id=domain_id, project_id=project_id, - **kwargs) - if 'id' not in kwargs: - del ref['id'] - return ref - - def get_user_enabled_vals(self, user): - user_dn = ( - self.identity_api.driver.user._id_to_dn_string(user['id'])) - enabled_attr_name = CONF.ldap.user_enabled_attribute - - ldap_ = self.identity_api.driver.user.get_connection() - res = ldap_.search_s(user_dn, - ldap.SCOPE_BASE, - u'(sn=%s)' % user['name']) - if enabled_attr_name in res[0][1]: - return res[0][1][enabled_attr_name] - else: - return None - - def test_build_tree(self): - """Regression test for building the tree names.""" - user_api = identity.backends.ldap.UserApi(CONF) - self.assertTrue(user_api) - self.assertEqual("ou=Users,%s" % CONF.ldap.suffix, user_api.tree_dn) - - def test_configurable_allowed_user_actions(self): - user = 
self.new_user_ref(domain_id=CONF.identity.default_domain_id) - user = self.identity_api.create_user(user) - self.identity_api.get_user(user['id']) - - user['password'] = u'fäképass2' - self.identity_api.update_user(user['id'], user) - - self.identity_api.delete_user(user['id']) - self.assertRaises(exception.UserNotFound, - self.identity_api.get_user, - user['id']) - - def test_configurable_forbidden_user_actions(self): - driver = self.identity_api._select_identity_driver( - CONF.identity.default_domain_id) - driver.user.allow_create = False - driver.user.allow_update = False - driver.user.allow_delete = False - - user = self.new_user_ref(domain_id=CONF.identity.default_domain_id) - self.assertRaises(exception.ForbiddenAction, - self.identity_api.create_user, - user) - - self.user_foo['password'] = u'fäképass2' - self.assertRaises(exception.ForbiddenAction, - self.identity_api.update_user, - self.user_foo['id'], - self.user_foo) - - self.assertRaises(exception.ForbiddenAction, - self.identity_api.delete_user, - self.user_foo['id']) - - def test_configurable_forbidden_create_existing_user(self): - driver = self.identity_api._select_identity_driver( - CONF.identity.default_domain_id) - driver.user.allow_create = False - - self.assertRaises(exception.ForbiddenAction, - self.identity_api.create_user, - self.user_foo) - - def test_user_filter(self): - user_ref = self.identity_api.get_user(self.user_foo['id']) - self.user_foo.pop('password') - self.assertDictEqual(self.user_foo, user_ref) - - driver = self.identity_api._select_identity_driver( - user_ref['domain_id']) - driver.user.ldap_filter = '(CN=DOES_NOT_MATCH)' - # invalidate the cache if the result is cached. 
- self.identity_api.get_user.invalidate(self.identity_api, - self.user_foo['id']) - self.assertRaises(exception.UserNotFound, - self.identity_api.get_user, - self.user_foo['id']) - - def test_list_users_by_name_and_with_filter(self): - # confirm that the user is not exposed when it does not match the - # filter setting in conf even if it is requested by name in user list - hints = driver_hints.Hints() - hints.add_filter('name', self.user_foo['name']) - domain_id = self.user_foo['domain_id'] - driver = self.identity_api._select_identity_driver(domain_id) - driver.user.ldap_filter = ('(|(cn=%s)(cn=%s))' % - (self.user_sna['id'], self.user_two['id'])) - users = self.identity_api.list_users( - domain_scope=self._set_domain_scope(domain_id), - hints=hints) - self.assertEqual(0, len(users)) - - def test_remove_role_grant_from_user_and_project(self): - self.assignment_api.create_grant(user_id=self.user_foo['id'], - project_id=self.tenant_baz['id'], - role_id='member') - roles_ref = self.assignment_api.list_grants( - user_id=self.user_foo['id'], - project_id=self.tenant_baz['id']) - self.assertDictEqual(self.role_member, roles_ref[0]) - - self.assignment_api.delete_grant(user_id=self.user_foo['id'], - project_id=self.tenant_baz['id'], - role_id='member') - roles_ref = self.assignment_api.list_grants( - user_id=self.user_foo['id'], - project_id=self.tenant_baz['id']) - self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.delete_grant, - user_id=self.user_foo['id'], - project_id=self.tenant_baz['id'], - role_id='member') - - def test_get_and_remove_role_grant_by_group_and_project(self): - new_domain = self._get_domain_fixture() - new_group = unit.new_group_ref(domain_id=new_domain['id']) - new_group = self.identity_api.create_group(new_group) - new_user = self.new_user_ref(domain_id=new_domain['id']) - new_user = self.identity_api.create_user(new_user) - self.identity_api.add_user_to_group(new_user['id'], - 
new_group['id']) - - roles_ref = self.assignment_api.list_grants( - group_id=new_group['id'], - project_id=self.tenant_bar['id']) - self.assertEqual([], roles_ref) - self.assertEqual(0, len(roles_ref)) - - self.assignment_api.create_grant(group_id=new_group['id'], - project_id=self.tenant_bar['id'], - role_id='member') - roles_ref = self.assignment_api.list_grants( - group_id=new_group['id'], - project_id=self.tenant_bar['id']) - self.assertNotEmpty(roles_ref) - self.assertDictEqual(self.role_member, roles_ref[0]) - - self.assignment_api.delete_grant(group_id=new_group['id'], - project_id=self.tenant_bar['id'], - role_id='member') - roles_ref = self.assignment_api.list_grants( - group_id=new_group['id'], - project_id=self.tenant_bar['id']) - self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.RoleAssignmentNotFound, - self.assignment_api.delete_grant, - group_id=new_group['id'], - project_id=self.tenant_bar['id'], - role_id='member') - - def test_get_and_remove_role_grant_by_group_and_domain(self): - # TODO(henry-nash): We should really rewrite the tests in - # unit.resource.test_backends to be more flexible as to where the - # domains are sourced from, so that we would not need to override such - # tests here. This is raised as bug 1373865. 
- new_domain = self._get_domain_fixture() - new_group = unit.new_group_ref(domain_id=new_domain['id'],) - new_group = self.identity_api.create_group(new_group) - new_user = self.new_user_ref(domain_id=new_domain['id']) - new_user = self.identity_api.create_user(new_user) - self.identity_api.add_user_to_group(new_user['id'], - new_group['id']) - - roles_ref = self.assignment_api.list_grants( - group_id=new_group['id'], - domain_id=new_domain['id']) - self.assertEqual(0, len(roles_ref)) - - self.assignment_api.create_grant(group_id=new_group['id'], - domain_id=new_domain['id'], - role_id='member') - - roles_ref = self.assignment_api.list_grants( - group_id=new_group['id'], - domain_id=new_domain['id']) - self.assertDictEqual(self.role_member, roles_ref[0]) - - self.assignment_api.delete_grant(group_id=new_group['id'], - domain_id=new_domain['id'], - role_id='member') - roles_ref = self.assignment_api.list_grants( - group_id=new_group['id'], - domain_id=new_domain['id']) - self.assertEqual(0, len(roles_ref)) - self.assertRaises(exception.NotFound, - self.assignment_api.delete_grant, - group_id=new_group['id'], - domain_id=new_domain['id'], - role_id='member') - - def test_get_role_assignment_by_domain_not_found(self): - self.skipTest('N/A: LDAP does not support multiple domains') - - def test_del_role_assignment_by_domain_not_found(self): - self.skipTest('N/A: LDAP does not support multiple domains') - - def test_get_and_remove_role_grant_by_user_and_domain(self): - self.skipTest('N/A: LDAP does not support multiple domains') - - def test_get_and_remove_correct_role_grant_from_a_mix(self): - self.skipTest('Blocked by bug 1101287') - - def test_get_and_remove_role_grant_by_group_and_cross_domain(self): - self.skipTest('N/A: LDAP does not support multiple domains') - - def test_get_and_remove_role_grant_by_user_and_cross_domain(self): - self.skipTest('N/A: LDAP does not support multiple domains') - - def test_role_grant_by_group_and_cross_domain_project(self): - 
self.skipTest('N/A: LDAP does not support multiple domains') - - def test_role_grant_by_user_and_cross_domain_project(self): - self.skipTest('N/A: LDAP does not support multiple domains') - - def test_multi_role_grant_by_user_group_on_project_domain(self): - self.skipTest('N/A: LDAP does not support multiple domains') - - def test_delete_role_with_user_and_group_grants(self): - self.skipTest('Blocked by bug 1101287') - - def test_delete_user_with_group_project_domain_links(self): - self.skipTest('N/A: LDAP does not support multiple domains') - - def test_delete_group_with_user_project_domain_links(self): - self.skipTest('N/A: LDAP does not support multiple domains') - - def test_list_role_assignment_containing_names(self): - self.skipTest('N/A: LDAP does not support multiple domains') - - def test_list_projects_for_user(self): - domain = self._get_domain_fixture() - user1 = self.new_user_ref(domain_id=domain['id']) - user1 = self.identity_api.create_user(user1) - user_projects = self.assignment_api.list_projects_for_user(user1['id']) - self.assertThat(user_projects, matchers.HasLength(0)) - - # new grant(user1, role_member, tenant_bar) - self.assignment_api.create_grant(user_id=user1['id'], - project_id=self.tenant_bar['id'], - role_id=self.role_member['id']) - # new grant(user1, role_member, tenant_baz) - self.assignment_api.create_grant(user_id=user1['id'], - project_id=self.tenant_baz['id'], - role_id=self.role_member['id']) - user_projects = self.assignment_api.list_projects_for_user(user1['id']) - self.assertThat(user_projects, matchers.HasLength(2)) - - # Now, check number of projects through groups - user2 = self.new_user_ref(domain_id=domain['id']) - user2 = self.identity_api.create_user(user2) - - group1 = unit.new_group_ref(domain_id=domain['id']) - group1 = self.identity_api.create_group(group1) - - self.identity_api.add_user_to_group(user2['id'], group1['id']) - - # new grant(group1(user2), role_member, tenant_bar) - 
self.assignment_api.create_grant(group_id=group1['id'], - project_id=self.tenant_bar['id'], - role_id=self.role_member['id']) - # new grant(group1(user2), role_member, tenant_baz) - self.assignment_api.create_grant(group_id=group1['id'], - project_id=self.tenant_baz['id'], - role_id=self.role_member['id']) - user_projects = self.assignment_api.list_projects_for_user(user2['id']) - self.assertThat(user_projects, matchers.HasLength(2)) - - # new grant(group1(user2), role_other, tenant_bar) - self.assignment_api.create_grant(group_id=group1['id'], - project_id=self.tenant_bar['id'], - role_id=self.role_other['id']) - user_projects = self.assignment_api.list_projects_for_user(user2['id']) - self.assertThat(user_projects, matchers.HasLength(2)) - - def test_list_projects_for_user_and_groups(self): - domain = self._get_domain_fixture() - # Create user1 - user1 = self.new_user_ref(domain_id=domain['id']) - user1 = self.identity_api.create_user(user1) - - # Create new group for user1 - group1 = unit.new_group_ref(domain_id=domain['id']) - group1 = self.identity_api.create_group(group1) - - # Add user1 to group1 - self.identity_api.add_user_to_group(user1['id'], group1['id']) - - # Now, add grant to user1 and group1 in tenant_bar - self.assignment_api.create_grant(user_id=user1['id'], - project_id=self.tenant_bar['id'], - role_id=self.role_member['id']) - self.assignment_api.create_grant(group_id=group1['id'], - project_id=self.tenant_bar['id'], - role_id=self.role_member['id']) - - # The result is user1 has only one project granted - user_projects = self.assignment_api.list_projects_for_user(user1['id']) - self.assertThat(user_projects, matchers.HasLength(1)) - - # Now, delete user1 grant into tenant_bar and check - self.assignment_api.delete_grant(user_id=user1['id'], - project_id=self.tenant_bar['id'], - role_id=self.role_member['id']) - - # The result is user1 has only one project granted. - # Granted through group1. 
- user_projects = self.assignment_api.list_projects_for_user(user1['id']) - self.assertThat(user_projects, matchers.HasLength(1)) - - def test_list_projects_for_user_with_grants(self): - domain = self._get_domain_fixture() - new_user = self.new_user_ref(domain_id=domain['id']) - new_user = self.identity_api.create_user(new_user) - - group1 = unit.new_group_ref(domain_id=domain['id']) - group1 = self.identity_api.create_group(group1) - group2 = unit.new_group_ref(domain_id=domain['id']) - group2 = self.identity_api.create_group(group2) - - project1 = unit.new_project_ref(domain_id=domain['id']) - self.resource_api.create_project(project1['id'], project1) - project2 = unit.new_project_ref(domain_id=domain['id']) - self.resource_api.create_project(project2['id'], project2) - - self.identity_api.add_user_to_group(new_user['id'], - group1['id']) - self.identity_api.add_user_to_group(new_user['id'], - group2['id']) - - self.assignment_api.create_grant(user_id=new_user['id'], - project_id=self.tenant_bar['id'], - role_id=self.role_member['id']) - self.assignment_api.create_grant(user_id=new_user['id'], - project_id=project1['id'], - role_id=self.role_admin['id']) - self.assignment_api.create_grant(group_id=group2['id'], - project_id=project2['id'], - role_id=self.role_admin['id']) - - user_projects = self.assignment_api.list_projects_for_user( - new_user['id']) - self.assertEqual(3, len(user_projects)) - - def test_create_duplicate_user_name_in_different_domains(self): - self.skipTest('Domains are read-only against LDAP') - - def test_create_duplicate_project_name_in_different_domains(self): - self.skipTest('Domains are read-only against LDAP') - - def test_create_duplicate_group_name_in_different_domains(self): - self.skipTest( - 'N/A: LDAP does not support multiple domains') - - def test_move_user_between_domains(self): - self.skipTest('Domains are read-only against LDAP') - - def test_move_user_between_domains_with_clashing_names_fails(self): - self.skipTest('Domains 
are read-only against LDAP') - - def test_move_group_between_domains(self): - self.skipTest( - 'N/A: LDAP does not support multiple domains') - - def test_move_group_between_domains_with_clashing_names_fails(self): - self.skipTest('Domains are read-only against LDAP') - - def test_move_project_between_domains(self): - self.skipTest('Domains are read-only against LDAP') - - def test_move_project_between_domains_with_clashing_names_fails(self): - self.skipTest('Domains are read-only against LDAP') - - def test_get_roles_for_user_and_domain(self): - self.skipTest('N/A: LDAP does not support multiple domains') - - def test_get_roles_for_groups_on_domain(self): - self.skipTest('Blocked by bug: 1390125') - - def test_get_roles_for_groups_on_project(self): - self.skipTest('Blocked by bug: 1390125') - - def test_list_domains_for_groups(self): - self.skipTest('N/A: LDAP does not support multiple domains') - - def test_list_projects_for_groups(self): - self.skipTest('Blocked by bug: 1390125') - - def test_domain_delete_hierarchy(self): - self.skipTest('Domains are read-only against LDAP') - - def test_list_role_assignments_unfiltered(self): - new_domain = self._get_domain_fixture() - new_user = self.new_user_ref(domain_id=new_domain['id']) - new_user = self.identity_api.create_user(new_user) - new_group = unit.new_group_ref(domain_id=new_domain['id']) - new_group = self.identity_api.create_group(new_group) - new_project = unit.new_project_ref(domain_id=new_domain['id']) - self.resource_api.create_project(new_project['id'], new_project) - - # First check how many role grant already exist - existing_assignments = len(self.assignment_api.list_role_assignments()) - - self.assignment_api.create_grant(user_id=new_user['id'], - project_id=new_project['id'], - role_id='other') - self.assignment_api.create_grant(group_id=new_group['id'], - project_id=new_project['id'], - role_id='admin') - - # Read back the list of assignments - check it is gone up by 2 - after_assignments = 
len(self.assignment_api.list_role_assignments()) - self.assertEqual(existing_assignments + 2, after_assignments) - - def test_list_role_assignments_dumb_member(self): - self.config_fixture.config(group='ldap', use_dumb_member=True) - self.ldapdb.clear() - self.load_backends() - self.load_fixtures(default_fixtures) - - new_domain = self._get_domain_fixture() - new_user = self.new_user_ref(domain_id=new_domain['id']) - new_user = self.identity_api.create_user(new_user) - new_project = unit.new_project_ref(domain_id=new_domain['id']) - self.resource_api.create_project(new_project['id'], new_project) - self.assignment_api.create_grant(user_id=new_user['id'], - project_id=new_project['id'], - role_id='other') - - # Read back the list of assignments and ensure - # that the LDAP dumb member isn't listed. - assignment_ids = [a['user_id'] for a in - self.assignment_api.list_role_assignments()] - dumb_id = common_ldap.BaseLdap._dn_to_id(CONF.ldap.dumb_member) - self.assertNotIn(dumb_id, assignment_ids) - - def test_list_user_ids_for_project_dumb_member(self): - self.config_fixture.config(group='ldap', use_dumb_member=True) - self.ldapdb.clear() - self.load_backends() - self.load_fixtures(default_fixtures) - - user = self.new_user_ref(domain_id=CONF.identity.default_domain_id) - - user = self.identity_api.create_user(user) - self.assignment_api.add_user_to_project(self.tenant_baz['id'], - user['id']) - user_ids = self.assignment_api.list_user_ids_for_project( - self.tenant_baz['id']) - - self.assertIn(user['id'], user_ids) - - dumb_id = common_ldap.BaseLdap._dn_to_id(CONF.ldap.dumb_member) - self.assertNotIn(dumb_id, user_ids) - - def test_multi_group_grants_on_project_domain(self): - self.skipTest('Blocked by bug 1101287') - - def test_list_group_members_missing_entry(self): - """List group members with deleted user. - - If a group has a deleted entry for a member, the non-deleted members - are returned. 
- - """ - # Create a group - group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - group_id = self.identity_api.create_group(group)['id'] - - # Create a couple of users and add them to the group. - user = dict(name=uuid.uuid4().hex, - domain_id=CONF.identity.default_domain_id) - user_1_id = self.identity_api.create_user(user)['id'] - - self.identity_api.add_user_to_group(user_1_id, group_id) - - user = dict(name=uuid.uuid4().hex, - domain_id=CONF.identity.default_domain_id) - user_2_id = self.identity_api.create_user(user)['id'] - - self.identity_api.add_user_to_group(user_2_id, group_id) - - # Delete user 2 - # NOTE(blk-u): need to go directly to user interface to keep from - # updating the group. - unused, driver, entity_id = ( - self.identity_api._get_domain_driver_and_entity_id(user_2_id)) - driver.user.delete(entity_id) - - # List group users and verify only user 1. - res = self.identity_api.list_users_in_group(group_id) - - self.assertEqual(1, len(res), "Expected 1 entry (user_1)") - self.assertEqual(user_1_id, res[0]['id'], "Expected user 1 id") - - def test_list_group_members_when_no_members(self): - # List group members when there is no member in the group. - # No exception should be raised. - group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - group = self.identity_api.create_group(group) - - # If this doesn't raise, then the test is successful. 
- self.identity_api.list_users_in_group(group['id']) - - def test_list_group_members_dumb_member(self): - self.config_fixture.config(group='ldap', use_dumb_member=True) - self.ldapdb.clear() - self.load_backends() - self.load_fixtures(default_fixtures) - - # Create a group - group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - group_id = self.identity_api.create_group(group)['id'] - - # Create a user - user = dict(name=uuid.uuid4().hex, - domain_id=CONF.identity.default_domain_id) - user_id = self.identity_api.create_user(user)['id'] - - # Add user to the group - self.identity_api.add_user_to_group(user_id, group_id) - - user_ids = self.identity_api.list_users_in_group(group_id) - dumb_id = common_ldap.BaseLdap._dn_to_id(CONF.ldap.dumb_member) - - self.assertNotIn(dumb_id, user_ids) - - def test_list_domains(self): - # We have more domains here than the parent class, check for the - # correct number of domains for the multildap backend configs - domain1 = unit.new_domain_ref() - domain2 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - self.resource_api.create_domain(domain2['id'], domain2) - domains = self.resource_api.list_domains() - self.assertEqual(7, len(domains)) - domain_ids = [] - for domain in domains: - domain_ids.append(domain.get('id')) - self.assertIn(CONF.identity.default_domain_id, domain_ids) - self.assertIn(domain1['id'], domain_ids) - self.assertIn(domain2['id'], domain_ids) - - def test_authenticate_requires_simple_bind(self): - user = self.new_user_ref(domain_id=CONF.identity.default_domain_id) - user = self.identity_api.create_user(user) - self.assignment_api.add_user_to_project(self.tenant_baz['id'], - user['id']) - driver = self.identity_api._select_identity_driver( - user['domain_id']) - driver.user.LDAP_USER = None - driver.user.LDAP_PASSWORD = None - - self.assertRaises(AssertionError, - self.identity_api.authenticate, - context={}, - user_id=user['id'], - password=None) - - # The group 
and domain CRUD tests below override the standard ones in - # unit.identity.test_backends.py so that we can exclude the update name - # test, since we do not (and will not) support the update of either group - # or domain names with LDAP. In the tests below, the update is tested by - # updating description. - @mock.patch.object(versionutils, 'report_deprecated_feature') - def test_group_crud(self, mock_deprecator): - # NOTE(stevemar): As of the Mitaka release, we now check for calls that - # the LDAP write functionality has been deprecated. - group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - group = self.identity_api.create_group(group) - args, _kwargs = mock_deprecator.call_args - self.assertIn("create_group for the LDAP identity backend", args[1]) - - group_ref = self.identity_api.get_group(group['id']) - self.assertDictEqual(group, group_ref) - group['description'] = uuid.uuid4().hex - self.identity_api.update_group(group['id'], group) - args, _kwargs = mock_deprecator.call_args - self.assertIn("update_group for the LDAP identity backend", args[1]) - - group_ref = self.identity_api.get_group(group['id']) - self.assertDictEqual(group, group_ref) - - self.identity_api.delete_group(group['id']) - args, _kwargs = mock_deprecator.call_args - self.assertIn("delete_group for the LDAP identity backend", args[1]) - self.assertRaises(exception.GroupNotFound, - self.identity_api.get_group, - group['id']) - - @mock.patch.object(versionutils, 'report_deprecated_feature') - def test_add_remove_user_group_deprecated(self, mock_deprecator): - group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - group = self.identity_api.create_group(group) - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - user = self.identity_api.create_user(user) - self.identity_api.add_user_to_group(user['id'], group['id']) - args, _kwargs = mock_deprecator.call_args - self.assertIn("add_user_to_group for the LDAP identity", args[1]) - - 
self.identity_api.remove_user_from_group(user['id'], group['id']) - args, _kwargs = mock_deprecator.call_args - self.assertIn("remove_user_from_group for the LDAP identity", args[1]) - - @unit.skip_if_cache_disabled('identity') - def test_cache_layer_group_crud(self): - group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - group = self.identity_api.create_group(group) - # cache the result - group_ref = self.identity_api.get_group(group['id']) - # delete the group bypassing identity api. - domain_id, driver, entity_id = ( - self.identity_api._get_domain_driver_and_entity_id(group['id'])) - driver.delete_group(entity_id) - - self.assertEqual(group_ref, - self.identity_api.get_group(group['id'])) - self.identity_api.get_group.invalidate(self.identity_api, group['id']) - self.assertRaises(exception.GroupNotFound, - self.identity_api.get_group, group['id']) - - group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - group = self.identity_api.create_group(group) - # cache the result - self.identity_api.get_group(group['id']) - group['description'] = uuid.uuid4().hex - group_ref = self.identity_api.update_group(group['id'], group) - self.assertDictContainsSubset(self.identity_api.get_group(group['id']), - group_ref) - - def test_create_user_none_mapping(self): - # When create a user where an attribute maps to None, the entry is - # created without that attribute and it doesn't fail with a TypeError. - driver = self.identity_api._select_identity_driver( - CONF.identity.default_domain_id) - driver.user.attribute_ignore = ['enabled', 'email', - 'tenants', 'tenantId'] - user = self.new_user_ref(domain_id=CONF.identity.default_domain_id, - project_id='maps_to_none') - - # If this doesn't raise, then the test is successful. 
- user = self.identity_api.create_user(user) - - def test_create_user_with_boolean_string_names(self): - # Ensure that any attribute that is equal to the string 'TRUE' - # or 'FALSE' will not be converted to a boolean value, it - # should be returned as is. - boolean_strings = ['TRUE', 'FALSE', 'true', 'false', 'True', 'False', - 'TrUe' 'FaLse'] - for name in boolean_strings: - user = self.new_user_ref(name=name, - domain_id=CONF.identity.default_domain_id) - user_ref = self.identity_api.create_user(user) - user_info = self.identity_api.get_user(user_ref['id']) - self.assertEqual(name, user_info['name']) - # Delete the user to ensure that the Keystone uniqueness - # requirements combined with the case-insensitive nature of a - # typical LDAP schema does not cause subsequent names in - # boolean_strings to clash. - self.identity_api.delete_user(user_ref['id']) - - def test_unignored_user_none_mapping(self): - # Ensure that an attribute that maps to None that is not explicitly - # ignored in configuration is implicitly ignored without triggering - # an error. - driver = self.identity_api._select_identity_driver( - CONF.identity.default_domain_id) - driver.user.attribute_ignore = ['enabled', 'email', - 'tenants', 'tenantId'] - - user = self.new_user_ref(domain_id=CONF.identity.default_domain_id) - - user_ref = self.identity_api.create_user(user) - - # If this doesn't raise, then the test is successful. 
- self.identity_api.get_user(user_ref['id']) - - def test_update_user_name(self): - """A user's name cannot be changed through the LDAP driver.""" - self.assertRaises(exception.Conflict, - super(BaseLDAPIdentity, self).test_update_user_name) - - def test_arbitrary_attributes_are_returned_from_get_user(self): - self.skipTest("Using arbitrary attributes doesn't work under LDAP") - - def test_new_arbitrary_attributes_are_returned_from_update_user(self): - self.skipTest("Using arbitrary attributes doesn't work under LDAP") - - def test_updated_arbitrary_attributes_are_returned_from_update_user(self): - self.skipTest("Using arbitrary attributes doesn't work under LDAP") - - def test_cache_layer_domain_crud(self): - # TODO(morganfainberg): This also needs to be removed when full LDAP - # implementation is submitted. No need to duplicate the above test, - # just skip this time. - self.skipTest('Domains are read-only against LDAP') - - def test_user_id_comma(self): - """Even if the user has a , in their ID, groups can be listed.""" - # Create a user with a , in their ID - # NOTE(blk-u): the DN for this user is hard-coded in fakeldap! - - # Since we want to fake up this special ID, we'll squirt this - # direct into the driver and bypass the manager layer. - user_id = u'Doe, John' - user = self.new_user_ref(id=user_id, - domain_id=CONF.identity.default_domain_id) - user = self.identity_api.driver.create_user(user_id, user) - - # Now we'll use the manager to discover it, which will create a - # Public ID for it. - ref_list = self.identity_api.list_users() - public_user_id = None - for ref in ref_list: - if ref['name'] == user['name']: - public_user_id = ref['id'] - break - - # Create a group - group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - group_id = group['id'] - group = self.identity_api.driver.create_group(group_id, group) - # Now we'll use the manager to discover it, which will create a - # Public ID for it. 
- ref_list = self.identity_api.list_groups() - public_group_id = None - for ref in ref_list: - if ref['name'] == group['name']: - public_group_id = ref['id'] - break - - # Put the user in the group - self.identity_api.add_user_to_group(public_user_id, public_group_id) - - # List groups for user. - ref_list = self.identity_api.list_groups_for_user(public_user_id) - - group['id'] = public_group_id - self.assertThat(ref_list, matchers.Equals([group])) - - def test_user_id_comma_grants(self): - """List user and group grants, even with a comma in the user's ID.""" - # Create a user with a , in their ID - # NOTE(blk-u): the DN for this user is hard-coded in fakeldap! - - # Since we want to fake up this special ID, we'll squirt this - # direct into the driver and bypass the manager layer - user_id = u'Doe, John' - user = self.new_user_ref(id=user_id, - domain_id=CONF.identity.default_domain_id) - self.identity_api.driver.create_user(user_id, user) - - # Now we'll use the manager to discover it, which will create a - # Public ID for it. - ref_list = self.identity_api.list_users() - public_user_id = None - for ref in ref_list: - if ref['name'] == user['name']: - public_user_id = ref['id'] - break - - # Grant the user a role on a project. - - role_id = 'member' - project_id = self.tenant_baz['id'] - - self.assignment_api.create_grant(role_id, user_id=public_user_id, - project_id=project_id) - - role_ref = self.assignment_api.get_grant(role_id, - user_id=public_user_id, - project_id=project_id) - - self.assertEqual(role_id, role_ref['id']) - - def test_user_enabled_ignored_disable_error(self): - # When the server is configured so that the enabled attribute is - # ignored for users, users cannot be disabled. - - self.config_fixture.config(group='ldap', - user_attribute_ignore=['enabled']) - - # Need to re-load backends for the config change to take effect. - self.load_backends() - - # Attempt to disable the user. 
- self.assertRaises(exception.ForbiddenAction, - self.identity_api.update_user, self.user_foo['id'], - {'enabled': False}) - - user_info = self.identity_api.get_user(self.user_foo['id']) - - # If 'enabled' is ignored then 'enabled' isn't returned as part of the - # ref. - self.assertNotIn('enabled', user_info) - - def test_group_enabled_ignored_disable_error(self): - # When the server is configured so that the enabled attribute is - # ignored for groups, groups cannot be disabled. - - self.config_fixture.config(group='ldap', - group_attribute_ignore=['enabled']) - - # Need to re-load backends for the config change to take effect. - self.load_backends() - - # There's no group fixture so create a group. - new_domain = self._get_domain_fixture() - new_group = unit.new_group_ref(domain_id=new_domain['id']) - new_group = self.identity_api.create_group(new_group) - - # Attempt to disable the group. - self.assertRaises(exception.ForbiddenAction, - self.identity_api.update_group, new_group['id'], - {'enabled': False}) - - group_info = self.identity_api.get_group(new_group['id']) - - # If 'enabled' is ignored then 'enabled' isn't returned as part of the - # ref. - self.assertNotIn('enabled', group_info) - - def test_project_enabled_ignored_disable_error(self): - self.skipTest('Resource LDAP has been removed') - - def test_list_role_assignment_by_domain(self): - """Multiple domain assignments are not supported.""" - self.assertRaises( - (exception.Forbidden, exception.DomainNotFound, - exception.ValidationError), - super(BaseLDAPIdentity, self).test_list_role_assignment_by_domain) - - def test_list_role_assignment_by_user_with_domain_group_roles(self): - """Multiple domain assignments are not supported.""" - self.assertRaises( - (exception.Forbidden, exception.DomainNotFound, - exception.ValidationError), - super(BaseLDAPIdentity, self). 
- test_list_role_assignment_by_user_with_domain_group_roles) - - def test_domain_crud(self): - self.skipTest('Resource LDAP has been removed') - - def test_list_role_assignment_using_sourced_groups_with_domains(self): - """Multiple domain assignments are not supported.""" - self.assertRaises( - (exception.Forbidden, exception.ValidationError, - exception.DomainNotFound), - super(BaseLDAPIdentity, self). - test_list_role_assignment_using_sourced_groups_with_domains) - - def test_create_project_with_domain_id_and_without_parent_id(self): - """Multiple domains are not supported.""" - self.assertRaises( - exception.ValidationError, - super(BaseLDAPIdentity, self). - test_create_project_with_domain_id_and_without_parent_id) - - def test_create_project_with_domain_id_mismatch_to_parent_domain(self): - """Multiple domains are not supported.""" - self.assertRaises( - exception.ValidationError, - super(BaseLDAPIdentity, self). - test_create_project_with_domain_id_mismatch_to_parent_domain) - - def test_remove_foreign_assignments_when_deleting_a_domain(self): - """Multiple domains are not supported.""" - self.assertRaises( - (exception.ValidationError, exception.DomainNotFound), - super(BaseLDAPIdentity, - self).test_remove_foreign_assignments_when_deleting_a_domain) - - -class LDAPIdentity(BaseLDAPIdentity, unit.TestCase): - - def setUp(self): - # NOTE(dstanek): The database must be setup prior to calling the - # parent's setUp. The parent's setUp uses services (like - # credentials) that require a database. - self.useFixture(database.Database()) - super(LDAPIdentity, self).setUp() - _assert_backends(self, - assignment='sql', - identity='ldap', - resource='sql') - - def load_fixtures(self, fixtures): - # Override super impl since need to create group container. 
- create_group_container(self.identity_api) - super(LDAPIdentity, self).load_fixtures(fixtures) - - def test_list_domains(self): - domains = self.resource_api.list_domains() - self.assertEqual([resource.calc_default_domain()], domains) - - def test_configurable_allowed_project_actions(self): - domain = self._get_domain_fixture() - project = unit.new_project_ref(domain_id=domain['id']) - project = self.resource_api.create_project(project['id'], project) - project_ref = self.resource_api.get_project(project['id']) - self.assertEqual(project['id'], project_ref['id']) - - project['enabled'] = False - self.resource_api.update_project(project['id'], project) - - self.resource_api.delete_project(project['id']) - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - project['id']) - - def test_configurable_subtree_delete(self): - self.config_fixture.config(group='ldap', allow_subtree_delete=True) - self.load_backends() - - project1 = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - self.resource_api.create_project(project1['id'], project1) - - role1 = unit.new_role_ref() - self.role_api.create_role(role1['id'], role1) - - user1 = self.new_user_ref(domain_id=CONF.identity.default_domain_id) - user1 = self.identity_api.create_user(user1) - - self.assignment_api.add_role_to_user_and_project( - user_id=user1['id'], - tenant_id=project1['id'], - role_id=role1['id']) - - self.resource_api.delete_project(project1['id']) - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - project1['id']) - - self.resource_api.create_project(project1['id'], project1) - - list = self.assignment_api.get_roles_for_user_and_project( - user1['id'], - project1['id']) - self.assertEqual(0, len(list)) - - def test_configurable_forbidden_project_actions(self): - self.skipTest('Resource LDAP has been removed') - - def test_project_filter(self): - self.skipTest('Resource LDAP has been removed') - - def test_dumb_member(self): - 
self.config_fixture.config(group='ldap', use_dumb_member=True) - self.ldapdb.clear() - self.load_backends() - self.load_fixtures(default_fixtures) - dumb_id = common_ldap.BaseLdap._dn_to_id(CONF.ldap.dumb_member) - self.assertRaises(exception.UserNotFound, - self.identity_api.get_user, - dumb_id) - - def test_project_attribute_mapping(self): - self.skipTest('Resource LDAP has been removed') - - def test_project_attribute_ignore(self): - self.skipTest('Resource LDAP has been removed') - - def test_user_enable_attribute_mask(self): - self.config_fixture.config(group='ldap', user_enabled_mask=2, - user_enabled_default='512') - self.ldapdb.clear() - self.load_backends() - self.load_fixtures(default_fixtures) - - user = self.new_user_ref(domain_id=CONF.identity.default_domain_id) - - user_ref = self.identity_api.create_user(user) - - # Use assertIs rather than assertTrue because assertIs will assert the - # value is a Boolean as expected. - self.assertIs(user_ref['enabled'], True) - self.assertNotIn('enabled_nomask', user_ref) - - enabled_vals = self.get_user_enabled_vals(user_ref) - self.assertEqual([512], enabled_vals) - - user_ref = self.identity_api.get_user(user_ref['id']) - self.assertIs(user_ref['enabled'], True) - self.assertNotIn('enabled_nomask', user_ref) - - user['enabled'] = False - user_ref = self.identity_api.update_user(user_ref['id'], user) - self.assertIs(user_ref['enabled'], False) - self.assertNotIn('enabled_nomask', user_ref) - - enabled_vals = self.get_user_enabled_vals(user_ref) - self.assertEqual([514], enabled_vals) - - user_ref = self.identity_api.get_user(user_ref['id']) - self.assertIs(user_ref['enabled'], False) - self.assertNotIn('enabled_nomask', user_ref) - - user['enabled'] = True - user_ref = self.identity_api.update_user(user_ref['id'], user) - self.assertIs(user_ref['enabled'], True) - self.assertNotIn('enabled_nomask', user_ref) - - enabled_vals = self.get_user_enabled_vals(user_ref) - self.assertEqual([512], enabled_vals) - - 
user_ref = self.identity_api.get_user(user_ref['id']) - self.assertIs(user_ref['enabled'], True) - self.assertNotIn('enabled_nomask', user_ref) - - def test_user_enabled_invert(self): - self.config_fixture.config(group='ldap', user_enabled_invert=True, - user_enabled_default=False) - self.ldapdb.clear() - self.load_backends() - self.load_fixtures(default_fixtures) - - user1 = self.new_user_ref(domain_id=CONF.identity.default_domain_id) - - user2 = self.new_user_ref(enabled=False, - domain_id=CONF.identity.default_domain_id) - - user3 = self.new_user_ref(domain_id=CONF.identity.default_domain_id) - - # Ensure that the LDAP attribute is False for a newly created - # enabled user. - user_ref = self.identity_api.create_user(user1) - self.assertIs(True, user_ref['enabled']) - enabled_vals = self.get_user_enabled_vals(user_ref) - self.assertEqual([False], enabled_vals) - user_ref = self.identity_api.get_user(user_ref['id']) - self.assertIs(True, user_ref['enabled']) - - # Ensure that the LDAP attribute is True for a disabled user. - user1['enabled'] = False - user_ref = self.identity_api.update_user(user_ref['id'], user1) - self.assertIs(False, user_ref['enabled']) - enabled_vals = self.get_user_enabled_vals(user_ref) - self.assertEqual([True], enabled_vals) - - # Enable the user and ensure that the LDAP attribute is True again. - user1['enabled'] = True - user_ref = self.identity_api.update_user(user_ref['id'], user1) - self.assertIs(True, user_ref['enabled']) - enabled_vals = self.get_user_enabled_vals(user_ref) - self.assertEqual([False], enabled_vals) - - # Ensure that the LDAP attribute is True for a newly created - # disabled user. 
- user_ref = self.identity_api.create_user(user2) - self.assertIs(False, user_ref['enabled']) - enabled_vals = self.get_user_enabled_vals(user_ref) - self.assertEqual([True], enabled_vals) - user_ref = self.identity_api.get_user(user_ref['id']) - self.assertIs(False, user_ref['enabled']) - - # Ensure that the LDAP attribute is inverted for a newly created - # user when the user_enabled_default setting is used. - user_ref = self.identity_api.create_user(user3) - self.assertIs(True, user_ref['enabled']) - enabled_vals = self.get_user_enabled_vals(user_ref) - self.assertEqual([False], enabled_vals) - user_ref = self.identity_api.get_user(user_ref['id']) - self.assertIs(True, user_ref['enabled']) - - @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get') - def test_user_enabled_invert_no_enabled_value(self, mock_ldap_get): - self.config_fixture.config(group='ldap', user_enabled_invert=True, - user_enabled_default=False) - # Mock the search results to return an entry with - # no enabled value. - mock_ldap_get.return_value = ( - 'cn=junk,dc=example,dc=com', - { - 'sn': [uuid.uuid4().hex], - 'email': [uuid.uuid4().hex], - 'cn': ['junk'] - } - ) - - user_api = identity.backends.ldap.UserApi(CONF) - user_ref = user_api.get('junk') - # Ensure that the model enabled attribute is inverted - # from the resource default. - self.assertIs(not CONF.ldap.user_enabled_default, user_ref['enabled']) - - @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get') - def test_user_enabled_invert_default_str_value(self, mock_ldap_get): - self.config_fixture.config(group='ldap', user_enabled_invert=True, - user_enabled_default='False') - # Mock the search results to return an entry with - # no enabled value. 
- mock_ldap_get.return_value = ( - 'cn=junk,dc=example,dc=com', - { - 'sn': [uuid.uuid4().hex], - 'email': [uuid.uuid4().hex], - 'cn': ['junk'] - } - ) - - user_api = identity.backends.ldap.UserApi(CONF) - user_ref = user_api.get('junk') - # Ensure that the model enabled attribute is inverted - # from the resource default. - self.assertIs(True, user_ref['enabled']) - - @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get') - def test_user_enabled_attribute_handles_expired(self, mock_ldap_get): - # If using 'passwordisexpired' as enabled attribute, and inverting it, - # Then an unauthorized user (expired password) should not be enabled. - self.config_fixture.config(group='ldap', user_enabled_invert=True, - user_enabled_attribute='passwordisexpired') - mock_ldap_get.return_value = ( - u'uid=123456789,c=us,ou=our_ldap,o=acme.com', - { - 'uid': [123456789], - 'mail': ['shaun@acme.com'], - 'passwordisexpired': ['TRUE'], - 'cn': ['uid=123456789,c=us,ou=our_ldap,o=acme.com'] - } - ) - - user_api = identity.backends.ldap.UserApi(CONF) - user_ref = user_api.get('123456789') - self.assertIs(False, user_ref['enabled']) - - @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get') - def test_user_enabled_attribute_handles_utf8(self, mock_ldap_get): - # If using 'passwordisexpired' as enabled attribute, and inverting it, - # and the result is utf8 encoded, then the an authorized user should - # be enabled. 
- self.config_fixture.config(group='ldap', user_enabled_invert=True, - user_enabled_attribute='passwordisexpired') - mock_ldap_get.return_value = ( - u'uid=123456789,c=us,ou=our_ldap,o=acme.com', - { - 'uid': [123456789], - 'mail': [u'shaun@acme.com'], - 'passwordisexpired': [u'false'], - 'cn': [u'uid=123456789,c=us,ou=our_ldap,o=acme.com'] - } - ) - - user_api = identity.backends.ldap.UserApi(CONF) - user_ref = user_api.get('123456789') - self.assertIs(True, user_ref['enabled']) - - @mock.patch.object(common_ldap_core.KeystoneLDAPHandler, 'simple_bind_s') - def test_user_api_get_connection_no_user_password(self, mocked_method): - """Don't bind in case the user and password are blank.""" - # Ensure the username/password are in-fact blank - self.config_fixture.config(group='ldap', user=None, password=None) - user_api = identity.backends.ldap.UserApi(CONF) - user_api.get_connection(user=None, password=None) - self.assertFalse(mocked_method.called, - msg='`simple_bind_s` method was unexpectedly called') - - @mock.patch.object(common_ldap_core.KeystoneLDAPHandler, 'connect') - def test_chase_referrals_off(self, mocked_fakeldap): - self.config_fixture.config( - group='ldap', - url='fake://memory', - chase_referrals=False) - user_api = identity.backends.ldap.UserApi(CONF) - user_api.get_connection(user=None, password=None) - - # The last call_arg should be a dictionary and should contain - # chase_referrals. Check to make sure the value of chase_referrals - # is as expected. - self.assertFalse(mocked_fakeldap.call_args[-1]['chase_referrals']) - - @mock.patch.object(common_ldap_core.KeystoneLDAPHandler, 'connect') - def test_chase_referrals_on(self, mocked_fakeldap): - self.config_fixture.config( - group='ldap', - url='fake://memory', - chase_referrals=True) - user_api = identity.backends.ldap.UserApi(CONF) - user_api.get_connection(user=None, password=None) - - # The last call_arg should be a dictionary and should contain - # chase_referrals. 
Check to make sure the value of chase_referrals - # is as expected. - self.assertTrue(mocked_fakeldap.call_args[-1]['chase_referrals']) - - @mock.patch.object(common_ldap_core.KeystoneLDAPHandler, 'connect') - def test_debug_level_set(self, mocked_fakeldap): - level = 12345 - self.config_fixture.config( - group='ldap', - url='fake://memory', - debug_level=level) - user_api = identity.backends.ldap.UserApi(CONF) - user_api.get_connection(user=None, password=None) - - # The last call_arg should be a dictionary and should contain - # debug_level. Check to make sure the value of debug_level - # is as expected. - self.assertEqual(level, mocked_fakeldap.call_args[-1]['debug_level']) - - def test_wrong_ldap_scope(self): - self.config_fixture.config(group='ldap', query_scope=uuid.uuid4().hex) - self.assertRaisesRegexp( - ValueError, - 'Invalid LDAP scope: %s. *' % CONF.ldap.query_scope, - identity.backends.ldap.Identity) - - def test_wrong_alias_dereferencing(self): - self.config_fixture.config(group='ldap', - alias_dereferencing=uuid.uuid4().hex) - self.assertRaisesRegexp( - ValueError, - 'Invalid LDAP deref option: %s\.' 
% CONF.ldap.alias_dereferencing, - identity.backends.ldap.Identity) - - def test_is_dumb_member(self): - self.config_fixture.config(group='ldap', - use_dumb_member=True) - self.load_backends() - - dn = 'cn=dumb,dc=nonexistent' - self.assertTrue(self.identity_api.driver.user._is_dumb_member(dn)) - - def test_is_dumb_member_upper_case_keys(self): - self.config_fixture.config(group='ldap', - use_dumb_member=True) - self.load_backends() - - dn = 'CN=dumb,DC=nonexistent' - self.assertTrue(self.identity_api.driver.user._is_dumb_member(dn)) - - def test_is_dumb_member_with_false_use_dumb_member(self): - self.config_fixture.config(group='ldap', - use_dumb_member=False) - self.load_backends() - dn = 'cn=dumb,dc=nonexistent' - self.assertFalse(self.identity_api.driver.user._is_dumb_member(dn)) - - def test_is_dumb_member_not_dumb(self): - self.config_fixture.config(group='ldap', - use_dumb_member=True) - self.load_backends() - dn = 'ou=some,dc=example.com' - self.assertFalse(self.identity_api.driver.user._is_dumb_member(dn)) - - def test_user_extra_attribute_mapping(self): - self.config_fixture.config( - group='ldap', - user_additional_attribute_mapping=['description:name']) - self.load_backends() - user = self.new_user_ref(name='EXTRA_ATTRIBUTES', - password='extra', - domain_id=CONF.identity.default_domain_id) - user = self.identity_api.create_user(user) - dn, attrs = self.identity_api.driver.user._ldap_get(user['id']) - self.assertThat([user['name']], matchers.Equals(attrs['description'])) - - def test_user_description_attribute_mapping(self): - self.config_fixture.config( - group='ldap', - user_description_attribute='displayName') - self.load_backends() - - user = self.new_user_ref(domain_id=CONF.identity.default_domain_id, - displayName=uuid.uuid4().hex) - description = user['displayName'] - user = self.identity_api.create_user(user) - res = self.identity_api.driver.user.get_all() - - new_user = [u for u in res if u['id'] == user['id']][0] - 
self.assertThat(new_user['description'], matchers.Equals(description)) - - def test_user_extra_attribute_mapping_description_is_returned(self): - # Given a mapping like description:description, the description is - # returned. - - self.config_fixture.config( - group='ldap', - user_additional_attribute_mapping=['description:description']) - self.load_backends() - - user = self.new_user_ref(domain_id=CONF.identity.default_domain_id, - description=uuid.uuid4().hex) - description = user['description'] - user = self.identity_api.create_user(user) - res = self.identity_api.driver.user.get_all() - - new_user = [u for u in res if u['id'] == user['id']][0] - self.assertThat(new_user['description'], matchers.Equals(description)) - - def test_user_with_missing_id(self): - # create a user that doesn't have the id attribute - ldap_ = self.identity_api.driver.user.get_connection() - # `sn` is used for the attribute in the DN because it's allowed by - # the entry's objectclasses so that this test could conceivably run in - # the live tests. - ldap_id_field = 'sn' - ldap_id_value = uuid.uuid4().hex - dn = '%s=%s,ou=Users,cn=example,cn=com' % (ldap_id_field, - ldap_id_value) - modlist = [('objectClass', ['person', 'inetOrgPerson']), - (ldap_id_field, [ldap_id_value]), - ('mail', ['email@example.com']), - ('userPassword', [uuid.uuid4().hex])] - ldap_.add_s(dn, modlist) - - # make sure the user doesn't break other users - users = self.identity_api.driver.user.get_all() - self.assertThat(users, matchers.HasLength(len(default_fixtures.USERS))) - - @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get') - def test_user_mixed_case_attribute(self, mock_ldap_get): - # Mock the search results to return attribute names - # with unexpected case. 
- mock_ldap_get.return_value = ( - 'cn=junk,dc=example,dc=com', - { - 'sN': [uuid.uuid4().hex], - 'MaIl': [uuid.uuid4().hex], - 'cn': ['junk'] - } - ) - user = self.identity_api.get_user('junk') - self.assertEqual(mock_ldap_get.return_value[1]['sN'][0], - user['name']) - self.assertEqual(mock_ldap_get.return_value[1]['MaIl'][0], - user['email']) - - def test_parse_extra_attribute_mapping(self): - option_list = ['description:name', 'gecos:password', - 'fake:invalid', 'invalid1', 'invalid2:', - 'description:name:something'] - mapping = self.identity_api.driver.user._parse_extra_attrs(option_list) - expected_dict = {'description': 'name', 'gecos': 'password', - 'fake': 'invalid', 'invalid2': ''} - self.assertDictEqual(expected_dict, mapping) - - def test_create_domain(self): - domain = unit.new_domain_ref() - self.assertRaises(exception.ValidationError, - self.resource_api.create_domain, - domain['id'], - domain) - - @unit.skip_if_no_multiple_domains_support - def test_create_domain_case_sensitivity(self): - # domains are read-only, so case sensitivity isn't an issue - ref = unit.new_domain_ref() - self.assertRaises(exception.Forbidden, - self.resource_api.create_domain, - ref['id'], - ref) - - def test_cache_layer_domain_crud(self): - # TODO(morganfainberg): This also needs to be removed when full LDAP - # implementation is submitted. No need to duplicate the above test, - # just skip this time. 
- self.skipTest('Domains are read-only against LDAP') - - def test_domain_rename_invalidates_get_domain_by_name_cache(self): - parent = super(LDAPIdentity, self) - self.assertRaises( - exception.Forbidden, - parent.test_domain_rename_invalidates_get_domain_by_name_cache) - - def test_project_rename_invalidates_get_project_by_name_cache(self): - parent = super(LDAPIdentity, self) - self.assertRaises( - exception.Forbidden, - parent.test_project_rename_invalidates_get_project_by_name_cache) - - def test_project_crud(self): - # NOTE(topol): LDAP implementation does not currently support the - # updating of a project name so this method override - # provides a different update test - project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - - project = self.resource_api.create_project(project['id'], project) - project_ref = self.resource_api.get_project(project['id']) - - self.assertDictEqual(project, project_ref) - - project['description'] = uuid.uuid4().hex - self.resource_api.update_project(project['id'], project) - project_ref = self.resource_api.get_project(project['id']) - self.assertDictEqual(project, project_ref) - - self.resource_api.delete_project(project['id']) - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - project['id']) - - @unit.skip_if_cache_disabled('assignment') - def test_cache_layer_project_crud(self): - # NOTE(morganfainberg): LDAP implementation does not currently support - # updating project names. This method override provides a different - # update test. 
- project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - project_id = project['id'] - # Create a project - project = self.resource_api.create_project(project_id, project) - self.resource_api.get_project(project_id) - updated_project = copy.deepcopy(project) - updated_project['description'] = uuid.uuid4().hex - # Update project, bypassing resource manager - self.resource_api.driver.update_project(project_id, - updated_project) - # Verify get_project still returns the original project_ref - self.assertDictContainsSubset( - project, self.resource_api.get_project(project_id)) - # Invalidate cache - self.resource_api.get_project.invalidate(self.resource_api, - project_id) - # Verify get_project now returns the new project - self.assertDictContainsSubset( - updated_project, - self.resource_api.get_project(project_id)) - # Update project using the resource_api manager back to original - self.resource_api.update_project(project['id'], project) - # Verify get_project returns the original project_ref - self.assertDictContainsSubset( - project, self.resource_api.get_project(project_id)) - # Delete project bypassing resource_api - self.resource_api.driver.delete_project(project_id) - # Verify get_project still returns the project_ref - self.assertDictContainsSubset( - project, self.resource_api.get_project(project_id)) - # Invalidate cache - self.resource_api.get_project.invalidate(self.resource_api, - project_id) - # Verify ProjectNotFound now raised - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - project_id) - # recreate project - self.resource_api.create_project(project_id, project) - self.resource_api.get_project(project_id) - # delete project - self.resource_api.delete_project(project_id) - # Verify ProjectNotFound is raised - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - project_id) - - def test_update_is_domain_field(self): - domain = self._get_domain_fixture() - project = 
unit.new_project_ref(domain_id=domain['id']) - project = self.resource_api.create_project(project['id'], project) - - # Try to update the is_domain field to True - project['is_domain'] = True - self.assertRaises(exception.ValidationError, - self.resource_api.update_project, - project['id'], project) - - def test_delete_is_domain_project(self): - self.skipTest('Resource LDAP has been removed') - - def test_create_domain_under_regular_project_hierarchy_fails(self): - self.skipTest('Resource LDAP has been removed') - - def test_create_not_is_domain_project_under_is_domain_hierarchy(self): - self.skipTest('Resource LDAP has been removed') - - def test_create_project_passing_is_domain_flag_true(self): - self.skipTest('Resource LDAP has been removed') - - def test_create_project_with_parent_id_and_without_domain_id(self): - self.skipTest('Resource LDAP has been removed') - - def test_check_leaf_projects(self): - self.skipTest('Resource LDAP has been removed') - - def test_list_projects_in_subtree(self): - self.skipTest('Resource LDAP has been removed') - - def test_list_projects_in_subtree_with_circular_reference(self): - self.skipTest('Resource LDAP has been removed') - - def test_list_project_parents(self): - self.skipTest('Resource LDAP has been removed') - - def test_update_project_enabled_cascade(self): - self.skipTest('Resource LDAP has been removed') - - def test_cannot_enable_cascade_with_parent_disabled(self): - self.skipTest('Resource LDAP has been removed') - - def test_hierarchical_projects_crud(self): - self.skipTest('Resource LDAP has been removed') - - def test_create_project_under_disabled_one(self): - self.skipTest('Resource LDAP has been removed') - - def test_create_project_with_invalid_parent(self): - self.skipTest('Resource LDAP has been removed') - - def test_create_leaf_project_with_invalid_domain(self): - self.skipTest('Resource LDAP has been removed') - - def test_update_project_parent(self): - self.skipTest('Resource LDAP has been removed') - - 
def test_enable_project_with_disabled_parent(self): - self.skipTest('Resource LDAP has been removed') - - def test_disable_hierarchical_leaf_project(self): - self.skipTest('Resource LDAP has been removed') - - def test_disable_hierarchical_not_leaf_project(self): - self.skipTest('Resource LDAP has been removed') - - def test_delete_hierarchical_leaf_project(self): - self.skipTest('Resource LDAP has been removed') - - def test_delete_hierarchical_not_leaf_project(self): - self.skipTest('Resource LDAP has been removed') - - def test_check_hierarchy_depth(self): - self.skipTest('Resource LDAP has been removed') - - def test_multi_role_grant_by_user_group_on_project_domain(self): - # This is a partial implementation of the standard test that - # is defined in unit.assignment.test_backends.py. It omits - # both domain and group grants. since neither of these are - # yet supported by the ldap backend. - - role_list = [] - for _ in range(2): - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - role_list.append(role) - - user1 = self.new_user_ref(domain_id=CONF.identity.default_domain_id) - user1 = self.identity_api.create_user(user1) - project1 = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - self.resource_api.create_project(project1['id'], project1) - - self.assignment_api.add_role_to_user_and_project( - user_id=user1['id'], - tenant_id=project1['id'], - role_id=role_list[0]['id']) - self.assignment_api.add_role_to_user_and_project( - user_id=user1['id'], - tenant_id=project1['id'], - role_id=role_list[1]['id']) - - # Although list_grants are not yet supported, we can test the - # alternate way of getting back lists of grants, where user - # and group roles are combined. 
Only directly assigned user - # roles are available, since group grants are not yet supported - - combined_list = self.assignment_api.get_roles_for_user_and_project( - user1['id'], - project1['id']) - self.assertEqual(2, len(combined_list)) - self.assertIn(role_list[0]['id'], combined_list) - self.assertIn(role_list[1]['id'], combined_list) - - # Finally, although domain roles are not implemented, check we can - # issue the combined get roles call with benign results, since thus is - # used in token generation - - combined_role_list = self.assignment_api.get_roles_for_user_and_domain( - user1['id'], CONF.identity.default_domain_id) - self.assertEqual(0, len(combined_role_list)) - - def test_list_projects_for_alternate_domain(self): - self.skipTest( - 'N/A: LDAP does not support multiple domains') - - def test_get_default_domain_by_name(self): - domain = self._get_domain_fixture() - - domain_ref = self.resource_api.get_domain_by_name(domain['name']) - self.assertEqual(domain_ref, domain) - - def test_base_ldap_connection_deref_option(self): - def get_conn(deref_name): - self.config_fixture.config(group='ldap', - alias_dereferencing=deref_name) - base_ldap = common_ldap.BaseLdap(CONF) - return base_ldap.get_connection() - - conn = get_conn('default') - self.assertEqual(ldap.get_option(ldap.OPT_DEREF), - conn.get_option(ldap.OPT_DEREF)) - - conn = get_conn('always') - self.assertEqual(ldap.DEREF_ALWAYS, - conn.get_option(ldap.OPT_DEREF)) - - conn = get_conn('finding') - self.assertEqual(ldap.DEREF_FINDING, - conn.get_option(ldap.OPT_DEREF)) - - conn = get_conn('never') - self.assertEqual(ldap.DEREF_NEVER, - conn.get_option(ldap.OPT_DEREF)) - - conn = get_conn('searching') - self.assertEqual(ldap.DEREF_SEARCHING, - conn.get_option(ldap.OPT_DEREF)) - - def test_list_users_no_dn(self): - users = self.identity_api.list_users() - self.assertEqual(len(default_fixtures.USERS), len(users)) - user_ids = set(user['id'] for user in users) - expected_user_ids = set(getattr(self, 
'user_%s' % user['id'])['id'] - for user in default_fixtures.USERS) - for user_ref in users: - self.assertNotIn('dn', user_ref) - self.assertEqual(expected_user_ids, user_ids) - - def test_list_groups_no_dn(self): - # Create some test groups. - domain = self._get_domain_fixture() - expected_group_ids = [] - numgroups = 3 - for _ in range(numgroups): - group = unit.new_group_ref(domain_id=domain['id']) - group = self.identity_api.create_group(group) - expected_group_ids.append(group['id']) - # Fetch the test groups and ensure that they don't contain a dn. - groups = self.identity_api.list_groups() - self.assertEqual(numgroups, len(groups)) - group_ids = set(group['id'] for group in groups) - for group_ref in groups: - self.assertNotIn('dn', group_ref) - self.assertEqual(set(expected_group_ids), group_ids) - - def test_list_groups_for_user_no_dn(self): - # Create a test user. - user = self.new_user_ref(domain_id=CONF.identity.default_domain_id) - user = self.identity_api.create_user(user) - # Create some test groups and add the test user as a member. - domain = self._get_domain_fixture() - expected_group_ids = [] - numgroups = 3 - for _ in range(numgroups): - group = unit.new_group_ref(domain_id=domain['id']) - group = self.identity_api.create_group(group) - expected_group_ids.append(group['id']) - self.identity_api.add_user_to_group(user['id'], group['id']) - # Fetch the groups for the test user - # and ensure they don't contain a dn. 
- groups = self.identity_api.list_groups_for_user(user['id']) - self.assertEqual(numgroups, len(groups)) - group_ids = set(group['id'] for group in groups) - for group_ref in groups: - self.assertNotIn('dn', group_ref) - self.assertEqual(set(expected_group_ids), group_ids) - - def test_user_id_attribute_in_create(self): - driver = self.identity_api._select_identity_driver( - CONF.identity.default_domain_id) - driver.user.id_attr = 'mail' - - user = self.new_user_ref(domain_id=CONF.identity.default_domain_id) - user = self.identity_api.create_user(user) - user_ref = self.identity_api.get_user(user['id']) - # 'email' attribute should've created because it is also being used - # as user_id - self.assertEqual(user_ref['id'], user_ref['email']) - - def test_user_id_attribute_map(self): - driver = self.identity_api._select_identity_driver( - CONF.identity.default_domain_id) - driver.user.id_attr = 'mail' - - user_ref = self.identity_api.get_user(self.user_foo['email']) - # the user_id_attribute map should be honored, which means - # user_ref['id'] should contains the email attribute - self.assertEqual(self.user_foo['email'], user_ref['id']) - - @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get') - def test_get_id_from_dn_for_multivalued_attribute_id(self, mock_ldap_get): - driver = self.identity_api._select_identity_driver( - CONF.identity.default_domain_id) - driver.user.id_attr = 'mail' - - # make 'email' multivalued so we can test the error condition - email1 = uuid.uuid4().hex - email2 = uuid.uuid4().hex - mock_ldap_get.return_value = ( - 'cn=nobodycares,dc=example,dc=com', - { - 'sn': [uuid.uuid4().hex], - 'mail': [email1, email2], - 'cn': 'nobodycares' - } - ) - - user_ref = self.identity_api.get_user(email1) - # make sure we get the ID from DN (old behavior) if the ID attribute - # has multiple values - self.assertEqual('nobodycares', user_ref['id']) - - @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get') - def test_id_attribute_not_found(self, 
mock_ldap_get): - mock_ldap_get.return_value = ( - 'cn=nobodycares,dc=example,dc=com', - { - 'sn': [uuid.uuid4().hex], - } - ) - - user_api = identity.backends.ldap.UserApi(CONF) - self.assertRaises(exception.NotFound, - user_api.get, - 'nobodycares') - - @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get') - def test_user_id_not_in_dn(self, mock_ldap_get): - driver = self.identity_api._select_identity_driver( - CONF.identity.default_domain_id) - driver.user.id_attr = 'uid' - driver.user.attribute_mapping['name'] = 'cn' - - mock_ldap_get.return_value = ( - 'foo=bar,dc=example,dc=com', - { - 'sn': [uuid.uuid4().hex], - 'foo': ['bar'], - 'cn': ['junk'], - 'uid': ['crap'] - } - ) - user_ref = self.identity_api.get_user('crap') - self.assertEqual('crap', user_ref['id']) - self.assertEqual('junk', user_ref['name']) - - @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get') - def test_user_name_in_dn(self, mock_ldap_get): - driver = self.identity_api._select_identity_driver( - CONF.identity.default_domain_id) - driver.user.id_attr = 'SAMAccountName' - driver.user.attribute_mapping['name'] = 'cn' - - mock_ldap_get.return_value = ( - 'cn=Foo Bar,dc=example,dc=com', - { - 'sn': [uuid.uuid4().hex], - 'cn': ['Foo Bar'], - 'SAMAccountName': ['crap'] - } - ) - user_ref = self.identity_api.get_user('crap') - self.assertEqual('crap', user_ref['id']) - self.assertEqual('Foo Bar', user_ref['name']) - - -class LDAPLimitTests(unit.TestCase, identity_tests.LimitTests): - def setUp(self): - super(LDAPLimitTests, self).setUp() - - self.useFixture(ldapdb.LDAPDatabase()) - self.useFixture(database.Database(self.sql_driver_version_overrides)) - self.load_backends() - self.load_fixtures(default_fixtures) - identity_tests.LimitTests.setUp(self) - _assert_backends(self, - assignment='sql', - identity='ldap', - resource='sql') - - def config_overrides(self): - super(LDAPLimitTests, self).config_overrides() - self.config_fixture.config(group='identity', driver='ldap') - 
self.config_fixture.config(group='identity', - list_limit=len(default_fixtures.USERS) - 1) - - def config_files(self): - config_files = super(LDAPLimitTests, self).config_files() - config_files.append(unit.dirs.tests_conf('backend_ldap.conf')) - return config_files - - def test_list_projects_filtered_and_limited(self): - self.skipTest("ldap for storing projects is deprecated") - - -class LDAPIdentityEnabledEmulation(LDAPIdentity): - def setUp(self): - super(LDAPIdentityEnabledEmulation, self).setUp() - self.ldapdb.clear() - self.load_backends() - self.load_fixtures(default_fixtures) - for obj in [self.tenant_bar, self.tenant_baz, self.user_foo, - self.user_two, self.user_badguy]: - obj.setdefault('enabled', True) - _assert_backends(self, identity='ldap') - - def load_fixtures(self, fixtures): - # Override super impl since need to create group container. - create_group_container(self.identity_api) - super(LDAPIdentity, self).load_fixtures(fixtures) - - def config_files(self): - config_files = super(LDAPIdentityEnabledEmulation, self).config_files() - config_files.append(unit.dirs.tests_conf('backend_ldap.conf')) - return config_files - - def config_overrides(self): - super(LDAPIdentityEnabledEmulation, self).config_overrides() - self.config_fixture.config(group='ldap', - user_enabled_emulation=True) - - def test_project_crud(self): - # NOTE(topol): LDAPIdentityEnabledEmulation will create an - # enabled key in the project dictionary so this - # method override handles this side-effect - project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - - project = self.resource_api.create_project(project['id'], project) - project_ref = self.resource_api.get_project(project['id']) - - # self.resource_api.create_project adds an enabled - # key with a value of True when LDAPIdentityEnabledEmulation - # is used so we now add this expected key to the project dictionary - project['enabled'] = True - self.assertDictEqual(project, project_ref) - - 
project['description'] = uuid.uuid4().hex - self.resource_api.update_project(project['id'], project) - project_ref = self.resource_api.get_project(project['id']) - self.assertDictEqual(project, project_ref) - - self.resource_api.delete_project(project['id']) - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - project['id']) - - @mock.patch.object(versionutils, 'report_deprecated_feature') - def test_user_crud(self, mock_deprecator): - # NOTE(stevemar): As of the Mitaka release, we now check for calls that - # the LDAP write functionality has been deprecated. - user_dict = self.new_user_ref( - domain_id=CONF.identity.default_domain_id) - user = self.identity_api.create_user(user_dict) - args, _kwargs = mock_deprecator.call_args - self.assertIn("create_user for the LDAP identity backend", args[1]) - - del user_dict['password'] - user_ref = self.identity_api.get_user(user['id']) - user_ref_dict = {x: user_ref[x] for x in user_ref} - self.assertDictContainsSubset(user_dict, user_ref_dict) - - user_dict['password'] = uuid.uuid4().hex - self.identity_api.update_user(user['id'], user_dict) - args, _kwargs = mock_deprecator.call_args - self.assertIn("update_user for the LDAP identity backend", args[1]) - - del user_dict['password'] - user_ref = self.identity_api.get_user(user['id']) - user_ref_dict = {x: user_ref[x] for x in user_ref} - self.assertDictContainsSubset(user_dict, user_ref_dict) - - self.identity_api.delete_user(user['id']) - args, _kwargs = mock_deprecator.call_args - self.assertIn("delete_user for the LDAP identity backend", args[1]) - self.assertRaises(exception.UserNotFound, - self.identity_api.get_user, - user['id']) - - def test_user_auth_emulated(self): - driver = self.identity_api._select_identity_driver( - CONF.identity.default_domain_id) - driver.user.enabled_emulation_dn = 'cn=test,dc=test' - self.identity_api.authenticate( - context={}, - user_id=self.user_foo['id'], - password=self.user_foo['password']) - - def 
test_user_enable_attribute_mask(self): - self.skipTest( - "Enabled emulation conflicts with enabled mask") - - def test_user_enabled_use_group_config(self): - self.config_fixture.config( - group='ldap', - user_enabled_emulation_use_group_config=True, - group_member_attribute='uniqueMember', - group_objectclass='groupOfUniqueNames') - self.ldapdb.clear() - self.load_backends() - self.load_fixtures(default_fixtures) - - # Create a user and ensure they are enabled. - user1 = unit.new_user_ref(enabled=True, - domain_id=CONF.identity.default_domain_id) - user_ref = self.identity_api.create_user(user1) - self.assertIs(True, user_ref['enabled']) - - # Get a user and ensure they are enabled. - user_ref = self.identity_api.get_user(user_ref['id']) - self.assertIs(True, user_ref['enabled']) - - def test_user_enabled_invert(self): - self.config_fixture.config(group='ldap', user_enabled_invert=True, - user_enabled_default=False) - self.ldapdb.clear() - self.load_backends() - self.load_fixtures(default_fixtures) - - user1 = self.new_user_ref(domain_id=CONF.identity.default_domain_id) - - user2 = self.new_user_ref(enabled=False, - domain_id=CONF.identity.default_domain_id) - - user3 = self.new_user_ref(domain_id=CONF.identity.default_domain_id) - - # Ensure that the enabled LDAP attribute is not set for a - # newly created enabled user. - user_ref = self.identity_api.create_user(user1) - self.assertIs(True, user_ref['enabled']) - self.assertIsNone(self.get_user_enabled_vals(user_ref)) - user_ref = self.identity_api.get_user(user_ref['id']) - self.assertIs(True, user_ref['enabled']) - - # Ensure that an enabled LDAP attribute is not set for a disabled user. - user1['enabled'] = False - user_ref = self.identity_api.update_user(user_ref['id'], user1) - self.assertIs(False, user_ref['enabled']) - self.assertIsNone(self.get_user_enabled_vals(user_ref)) - - # Enable the user and ensure that the LDAP enabled - # attribute is not set. 
- user1['enabled'] = True - user_ref = self.identity_api.update_user(user_ref['id'], user1) - self.assertIs(True, user_ref['enabled']) - self.assertIsNone(self.get_user_enabled_vals(user_ref)) - - # Ensure that the LDAP enabled attribute is not set for a - # newly created disabled user. - user_ref = self.identity_api.create_user(user2) - self.assertIs(False, user_ref['enabled']) - self.assertIsNone(self.get_user_enabled_vals(user_ref)) - user_ref = self.identity_api.get_user(user_ref['id']) - self.assertIs(False, user_ref['enabled']) - - # Ensure that the LDAP enabled attribute is not set for a newly created - # user when the user_enabled_default setting is used. - user_ref = self.identity_api.create_user(user3) - self.assertIs(True, user_ref['enabled']) - self.assertIsNone(self.get_user_enabled_vals(user_ref)) - user_ref = self.identity_api.get_user(user_ref['id']) - self.assertIs(True, user_ref['enabled']) - - def test_user_enabled_invert_no_enabled_value(self): - self.skipTest( - "N/A: Covered by test_user_enabled_invert") - - def test_user_enabled_invert_default_str_value(self): - self.skipTest( - "N/A: Covered by test_user_enabled_invert") - - @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get') - def test_user_enabled_attribute_handles_utf8(self, mock_ldap_get): - # Since user_enabled_emulation is enabled in this test, this test will - # fail since it's using user_enabled_invert. 
- self.config_fixture.config(group='ldap', user_enabled_invert=True, - user_enabled_attribute='passwordisexpired') - mock_ldap_get.return_value = ( - u'uid=123456789,c=us,ou=our_ldap,o=acme.com', - { - 'uid': [123456789], - 'mail': [u'shaun@acme.com'], - 'passwordisexpired': [u'false'], - 'cn': [u'uid=123456789,c=us,ou=our_ldap,o=acme.com'] - } - ) - - user_api = identity.backends.ldap.UserApi(CONF) - user_ref = user_api.get('123456789') - self.assertIs(False, user_ref['enabled']) - - def test_escape_member_dn(self): - # The enabled member DN is properly escaped when querying for enabled - # user. - - object_id = uuid.uuid4().hex - driver = self.identity_api._select_identity_driver( - CONF.identity.default_domain_id) - - # driver.user is the EnabledEmuMixIn implementation used for this test. - mixin_impl = driver.user - - # ) is a special char in a filter and must be escaped. - sample_dn = 'cn=foo)bar' - # LDAP requires ) is escaped by being replaced with "\29" - sample_dn_filter_esc = r'cn=foo\29bar' - - # Override the tree_dn, it's used to build the enabled member filter - mixin_impl.tree_dn = sample_dn - - # The filter that _get_enabled is going to build contains the - # tree_dn, which better be escaped in this case. - exp_filter = '(%s=%s=%s,%s)' % ( - mixin_impl.member_attribute, mixin_impl.id_attr, object_id, - sample_dn_filter_esc) - - with mixin_impl.get_connection() as conn: - m = self.useFixture(mockpatch.PatchObject(conn, 'search_s')).mock - mixin_impl._get_enabled(object_id, conn) - # The 3rd argument is the DN. 
- self.assertEqual(exp_filter, m.call_args[0][2]) - - -class LDAPPosixGroupsTest(unit.TestCase): - - def setUp(self): - - super(LDAPPosixGroupsTest, self).setUp() - - self.useFixture(ldapdb.LDAPDatabase()) - self.useFixture(database.Database()) - - self.load_backends() - self.load_fixtures(default_fixtures) - - _assert_backends(self, identity='ldap') - - def load_fixtures(self, fixtures): - # Override super impl since need to create group container. - create_group_container(self.identity_api) - super(LDAPPosixGroupsTest, self).load_fixtures(fixtures) - - def config_overrides(self): - super(LDAPPosixGroupsTest, self).config_overrides() - self.config_fixture.config(group='identity', driver='ldap') - self.config_fixture.config(group='ldap', group_members_are_ids=True, - group_member_attribute='memberUID') - - def config_files(self): - config_files = super(LDAPPosixGroupsTest, self).config_files() - config_files.append(unit.dirs.tests_conf('backend_ldap.conf')) - return config_files - - def _get_domain_fixture(self): - """Domains in LDAP are read-only, so just return the static one.""" - return self.resource_api.get_domain(CONF.identity.default_domain_id) - - def test_posix_member_id(self): - domain = self._get_domain_fixture() - new_group = unit.new_group_ref(domain_id=domain['id']) - new_group = self.identity_api.create_group(new_group) - # Make sure we get an empty list back on a new group, not an error. - user_refs = self.identity_api.list_users_in_group(new_group['id']) - self.assertEqual([], user_refs) - # Make sure we get the correct users back once they have been added - # to the group. - new_user = unit.new_user_ref(domain_id=domain['id']) - new_user = self.identity_api.create_user(new_user) - - # NOTE(amakarov): Create the group directly using LDAP operations - # rather than going through the manager. 
- group_api = self.identity_api.driver.group - group_ref = group_api.get(new_group['id']) - mod = (ldap.MOD_ADD, group_api.member_attribute, new_user['id']) - conn = group_api.get_connection() - conn.modify_s(group_ref['dn'], [mod]) - - # Testing the case "the group contains a user" - user_refs = self.identity_api.list_users_in_group(new_group['id']) - self.assertIn(new_user['id'], (x['id'] for x in user_refs)) - - # Testing the case "the user is a member of a group" - group_refs = self.identity_api.list_groups_for_user(new_user['id']) - self.assertIn(new_group['id'], (x['id'] for x in group_refs)) - - -class LdapIdentityWithMapping( - BaseLDAPIdentity, unit.SQLDriverOverrides, unit.TestCase): - """Class to test mapping of default LDAP backend. - - The default configuration is not to enable mapping when using a single - backend LDAP driver. However, a cloud provider might want to enable - the mapping, hence hiding the LDAP IDs from any clients of keystone. - Setting backward_compatible_ids to False will enable this mapping. - - """ - - def config_files(self): - config_files = super(LdapIdentityWithMapping, self).config_files() - config_files.append(unit.dirs.tests_conf('backend_ldap_sql.conf')) - return config_files - - def setUp(self): - sqldb = self.useFixture(database.Database()) - super(LdapIdentityWithMapping, self).setUp() - self.ldapdb.clear() - self.load_backends() - cache.configure_cache() - - sqldb.recreate() - self.load_fixtures(default_fixtures) - # defaulted by the data load - self.user_foo['enabled'] = True - _assert_backends(self, identity='ldap') - - def config_overrides(self): - super(LdapIdentityWithMapping, self).config_overrides() - self.config_fixture.config(group='identity', driver='ldap') - self.config_fixture.config(group='identity_mapping', - backward_compatible_ids=False) - - def test_dynamic_mapping_build(self): - """Test to ensure entities not create via controller are mapped. - - Many LDAP backends will, essentially, by Read Only. 
In these cases - the mapping is not built by creating objects, rather from enumerating - the entries. We test this here my manually deleting the mapping and - then trying to re-read the entries. - - """ - initial_mappings = len(mapping_sql.list_id_mappings()) - user1 = self.new_user_ref(domain_id=CONF.identity.default_domain_id) - user1 = self.identity_api.create_user(user1) - user2 = self.new_user_ref(domain_id=CONF.identity.default_domain_id) - user2 = self.identity_api.create_user(user2) - mappings = mapping_sql.list_id_mappings() - self.assertEqual(initial_mappings + 2, len(mappings)) - - # Now delete the mappings for the two users above - self.id_mapping_api.purge_mappings({'public_id': user1['id']}) - self.id_mapping_api.purge_mappings({'public_id': user2['id']}) - - # We should no longer be able to get these users via their old IDs - self.assertRaises(exception.UserNotFound, - self.identity_api.get_user, - user1['id']) - self.assertRaises(exception.UserNotFound, - self.identity_api.get_user, - user2['id']) - - # Now enumerate all users...this should re-build the mapping, and - # we should be able to find the users via their original public IDs. 
- self.identity_api.list_users() - self.identity_api.get_user(user1['id']) - self.identity_api.get_user(user2['id']) - - def test_get_roles_for_user_and_project_user_group_same_id(self): - self.skipTest('N/A: We never generate the same ID for a user and ' - 'group in our mapping table') - - def test_list_domains(self): - domains = self.resource_api.list_domains() - self.assertEqual([resource.calc_default_domain()], domains) - - -class BaseMultiLDAPandSQLIdentity(object): - """Mixin class with support methods for domain-specific config testing.""" - - def create_users_across_domains(self): - """Create a set of users, each with a role on their own domain.""" - # We also will check that the right number of id mappings get created - initial_mappings = len(mapping_sql.list_id_mappings()) - - self.users['user0'] = unit.create_user( - self.identity_api, - self.domains['domain_default']['id']) - self.assignment_api.create_grant( - user_id=self.users['user0']['id'], - domain_id=self.domains['domain_default']['id'], - role_id=self.role_member['id']) - for x in range(1, self.domain_count): - self.users['user%s' % x] = unit.create_user( - self.identity_api, - self.domains['domain%s' % x]['id']) - self.assignment_api.create_grant( - user_id=self.users['user%s' % x]['id'], - domain_id=self.domains['domain%s' % x]['id'], - role_id=self.role_member['id']) - - # So how many new id mappings should have been created? One for each - # user created in a domain that is using the non default driver.. - self.assertEqual(initial_mappings + self.domain_specific_count, - len(mapping_sql.list_id_mappings())) - - def check_user(self, user, domain_id, expected_status): - """Check user is in correct backend. - - As part of the tests, we want to force ourselves to manually - select the driver for a given domain, to make sure the entity - ended up in the correct backend. 
- - """ - driver = self.identity_api._select_identity_driver(domain_id) - unused, unused, entity_id = ( - self.identity_api._get_domain_driver_and_entity_id( - user['id'])) - - if expected_status == http_client.OK: - ref = driver.get_user(entity_id) - ref = self.identity_api._set_domain_id_and_mapping( - ref, domain_id, driver, map.EntityType.USER) - user = user.copy() - del user['password'] - self.assertDictEqual(user, ref) - else: - # TODO(henry-nash): Use AssertRaises here, although - # there appears to be an issue with using driver.get_user - # inside that construct - try: - driver.get_user(entity_id) - except expected_status: - pass - - def setup_initial_domains(self): - - def create_domain(domain): - try: - ref = self.resource_api.create_domain( - domain['id'], domain) - except exception.Conflict: - ref = ( - self.resource_api.get_domain_by_name(domain['name'])) - return ref - - self.domains = {} - for x in range(1, self.domain_count): - domain = 'domain%s' % x - self.domains[domain] = create_domain( - {'id': uuid.uuid4().hex, 'name': domain}) - self.domains['domain_default'] = create_domain( - resource.calc_default_domain()) - - def test_authenticate_to_each_domain(self): - """Test that a user in each domain can authenticate.""" - for user_num in range(self.domain_count): - user = 'user%s' % user_num - self.identity_api.authenticate( - context={}, - user_id=self.users[user]['id'], - password=self.users[user]['password']) - - -class MultiLDAPandSQLIdentity(BaseLDAPIdentity, unit.SQLDriverOverrides, - unit.TestCase, BaseMultiLDAPandSQLIdentity): - """Class to test common SQL plus individual LDAP backends. 
- - We define a set of domains and domain-specific backends: - - - A separate LDAP backend for the default domain - - A separate LDAP backend for domain1 - - domain2 shares the same LDAP as domain1, but uses a different - tree attach point - - An SQL backend for all other domains (which will include domain3 - and domain4) - - Normally one would expect that the default domain would be handled as - part of the "other domains" - however the above provides better - test coverage since most of the existing backend tests use the default - domain. - - """ - - def setUp(self): - sqldb = self.useFixture(database.Database()) - super(MultiLDAPandSQLIdentity, self).setUp() - - self.load_backends() - sqldb.recreate() - - self.domain_count = 5 - self.domain_specific_count = 3 - self.setup_initial_domains() - self._setup_initial_users() - - # All initial test data setup complete, time to switch on support - # for separate backends per domain. - self.enable_multi_domain() - - self.ldapdb.clear() - self.load_fixtures(default_fixtures) - self.create_users_across_domains() - self.assert_backends() - - def assert_backends(self): - _assert_backends(self, - assignment='sql', - identity={ - None: 'sql', - self.domains['domain_default']['id']: 'ldap', - self.domains['domain1']['id']: 'ldap', - self.domains['domain2']['id']: 'ldap', - }, - resource='sql') - - def config_overrides(self): - super(MultiLDAPandSQLIdentity, self).config_overrides() - # Make sure identity and assignment are actually SQL drivers, - # BaseLDAPIdentity sets these options to use LDAP. 
- self.config_fixture.config(group='identity', driver='sql') - self.config_fixture.config(group='resource', driver='sql') - self.config_fixture.config(group='assignment', driver='sql') - - def _setup_initial_users(self): - # Create some identity entities BEFORE we switch to multi-backend, so - # we can test that these are still accessible - self.users = {} - self.users['userA'] = unit.create_user( - self.identity_api, - self.domains['domain_default']['id']) - self.users['userB'] = unit.create_user( - self.identity_api, - self.domains['domain1']['id']) - self.users['userC'] = unit.create_user( - self.identity_api, - self.domains['domain3']['id']) - - def enable_multi_domain(self): - """Enable the chosen form of multi domain configuration support. - - This method enables the file-based configuration support. Child classes - that wish to use the database domain configuration support should - override this method and set the appropriate config_fixture option. - - """ - self.config_fixture.config( - group='identity', domain_specific_drivers_enabled=True, - domain_config_dir=unit.TESTCONF + '/domain_configs_multi_ldap', - list_limit=1000) - self.config_fixture.config(group='identity_mapping', - backward_compatible_ids=False) - - def get_config(self, domain_id): - # Get the config for this domain, will return CONF - # if no specific config defined for this domain - return self.identity_api.domain_configs.get_domain_conf(domain_id) - - def test_list_users(self): - # Override the standard list users, since we have added an extra user - # to the default domain, so the number of expected users is one more - # than in the standard test. 
- users = self.identity_api.list_users( - domain_scope=self._set_domain_scope( - CONF.identity.default_domain_id)) - self.assertEqual(len(default_fixtures.USERS) + 1, len(users)) - user_ids = set(user['id'] for user in users) - expected_user_ids = set(getattr(self, 'user_%s' % user['id'])['id'] - for user in default_fixtures.USERS) - expected_user_ids.add(self.users['user0']['id']) - for user_ref in users: - self.assertNotIn('password', user_ref) - self.assertEqual(expected_user_ids, user_ids) - - @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get_all') - def test_list_limit_domain_specific_inheritance(self, ldap_get_all): - # passiging hints is important, because if it's not passed, limiting - # is considered be disabled - hints = driver_hints.Hints() - self.identity_api.list_users( - domain_scope=self.domains['domain2']['id'], - hints=hints) - # since list_limit is not specified in keystone.domain2.conf, it should - # take the default, which is 1000 - self.assertTrue(ldap_get_all.called) - args, kwargs = ldap_get_all.call_args - hints = args[0] - self.assertEqual(1000, hints.limit['limit']) - - @mock.patch.object(common_ldap_core.BaseLdap, '_ldap_get_all') - def test_list_limit_domain_specific_override(self, ldap_get_all): - # passiging hints is important, because if it's not passed, limiting - # is considered to be disabled - hints = driver_hints.Hints() - self.identity_api.list_users( - domain_scope=self.domains['domain1']['id'], - hints=hints) - # this should have the list_limit set in Keystone.domain1.conf, which - # is 101 - self.assertTrue(ldap_get_all.called) - args, kwargs = ldap_get_all.call_args - hints = args[0] - self.assertEqual(101, hints.limit['limit']) - - def test_domain_segregation(self): - """Test that separate configs have segregated the domain. 
- - Test Plan: - - - Users were created in each domain as part of setup, now make sure - you can only find a given user in its relevant domain/backend - - Make sure that for a backend that supports multiple domains - you can get the users via any of its domains - - """ - # Check that I can read a user with the appropriate domain-selected - # driver, but won't find it via any other domain driver - - check_user = self.check_user - check_user(self.users['user0'], - self.domains['domain_default']['id'], http_client.OK) - for domain in [self.domains['domain1']['id'], - self.domains['domain2']['id'], - self.domains['domain3']['id'], - self.domains['domain4']['id']]: - check_user(self.users['user0'], domain, exception.UserNotFound) - - check_user(self.users['user1'], self.domains['domain1']['id'], - http_client.OK) - for domain in [self.domains['domain_default']['id'], - self.domains['domain2']['id'], - self.domains['domain3']['id'], - self.domains['domain4']['id']]: - check_user(self.users['user1'], domain, exception.UserNotFound) - - check_user(self.users['user2'], self.domains['domain2']['id'], - http_client.OK) - for domain in [self.domains['domain_default']['id'], - self.domains['domain1']['id'], - self.domains['domain3']['id'], - self.domains['domain4']['id']]: - check_user(self.users['user2'], domain, exception.UserNotFound) - - # domain3 and domain4 share the same backend, so you should be - # able to see user3 and user4 from either. 
- - check_user(self.users['user3'], self.domains['domain3']['id'], - http_client.OK) - check_user(self.users['user3'], self.domains['domain4']['id'], - http_client.OK) - check_user(self.users['user4'], self.domains['domain3']['id'], - http_client.OK) - check_user(self.users['user4'], self.domains['domain4']['id'], - http_client.OK) - - for domain in [self.domains['domain_default']['id'], - self.domains['domain1']['id'], - self.domains['domain2']['id']]: - check_user(self.users['user3'], domain, exception.UserNotFound) - check_user(self.users['user4'], domain, exception.UserNotFound) - - # Finally, going through the regular manager layer, make sure we - # only see the right number of users in each of the non-default - # domains. One might have expected two users in domain1 (since we - # created one before we switched to multi-backend), however since - # that domain changed backends in the switch we don't find it anymore. - # This is as designed - we don't support moving domains between - # backends. - # - # The listing of the default domain is already handled in the - # test_lists_users() method. - for domain in [self.domains['domain1']['id'], - self.domains['domain2']['id'], - self.domains['domain4']['id']]: - self.assertThat( - self.identity_api.list_users(domain_scope=domain), - matchers.HasLength(1)) - - # domain3 had a user created before we switched on - # multiple backends, plus one created afterwards - and its - # backend has not changed - so we should find two. - self.assertThat( - self.identity_api.list_users( - domain_scope=self.domains['domain3']['id']), - matchers.HasLength(2)) - - def test_existing_uuids_work(self): - """Test that 'uni-domain' created IDs still work. - - Throwing the switch to domain-specific backends should not cause - existing identities to be inaccessible via ID. 
- - """ - self.identity_api.get_user(self.users['userA']['id']) - self.identity_api.get_user(self.users['userB']['id']) - self.identity_api.get_user(self.users['userC']['id']) - - def test_scanning_of_config_dir(self): - """Test the Manager class scans the config directory. - - The setup for the main tests above load the domain configs directly - so that the test overrides can be included. This test just makes sure - that the standard config directory scanning does pick up the relevant - domain config files. - - """ - # Confirm that config has drivers_enabled as True, which we will - # check has been set to False later in this test - self.assertTrue(CONF.identity.domain_specific_drivers_enabled) - self.load_backends() - # Execute any command to trigger the lazy loading of domain configs - self.identity_api.list_users( - domain_scope=self.domains['domain1']['id']) - # ...and now check the domain configs have been set up - self.assertIn('default', self.identity_api.domain_configs) - self.assertIn(self.domains['domain1']['id'], - self.identity_api.domain_configs) - self.assertIn(self.domains['domain2']['id'], - self.identity_api.domain_configs) - self.assertNotIn(self.domains['domain3']['id'], - self.identity_api.domain_configs) - self.assertNotIn(self.domains['domain4']['id'], - self.identity_api.domain_configs) - - # Finally check that a domain specific config contains items from both - # the primary config and the domain specific config - conf = self.identity_api.domain_configs.get_domain_conf( - self.domains['domain1']['id']) - # This should now be false, as is the default, since this is not - # set in the standard primary config file - self.assertFalse(conf.identity.domain_specific_drivers_enabled) - # ..and make sure a domain-specific options is also set - self.assertEqual('fake://memory1', conf.ldap.url) - - def test_delete_domain_with_user_added(self): - domain = unit.new_domain_ref() - project = unit.new_project_ref(domain_id=domain['id']) - 
self.resource_api.create_domain(domain['id'], domain) - project = self.resource_api.create_project(project['id'], project) - project_ref = self.resource_api.get_project(project['id']) - self.assertDictEqual(project, project_ref) - - self.assignment_api.create_grant(user_id=self.user_foo['id'], - project_id=project['id'], - role_id=self.role_member['id']) - self.assignment_api.delete_grant(user_id=self.user_foo['id'], - project_id=project['id'], - role_id=self.role_member['id']) - domain['enabled'] = False - self.resource_api.update_domain(domain['id'], domain) - self.resource_api.delete_domain(domain['id']) - self.assertRaises(exception.DomainNotFound, - self.resource_api.get_domain, - domain['id']) - - def test_user_enabled_ignored_disable_error(self): - # Override. - self.skipTest("Doesn't apply since LDAP config has no affect on the " - "SQL identity backend.") - - def test_group_enabled_ignored_disable_error(self): - # Override. - self.skipTest("Doesn't apply since LDAP config has no affect on the " - "SQL identity backend.") - - def test_project_enabled_ignored_disable_error(self): - # Override - self.skipTest("Doesn't apply since LDAP configuration is ignored for " - "SQL assignment backend.") - - def test_list_role_assignments_filtered_by_role(self): - # Domain roles are supported by the SQL Assignment backend - base = super(BaseLDAPIdentity, self) - base.test_list_role_assignments_filtered_by_role() - - def test_list_role_assignment_by_domain(self): - # With multi LDAP this method should work, so override the override - # from BaseLDAPIdentity - super(BaseLDAPIdentity, self).test_list_role_assignment_by_domain() - - def test_list_role_assignment_by_user_with_domain_group_roles(self): - # With multi LDAP this method should work, so override the override - # from BaseLDAPIdentity - super(BaseLDAPIdentity, self).\ - test_list_role_assignment_by_user_with_domain_group_roles() - - def test_list_role_assignment_using_sourced_groups_with_domains(self): - # With 
SQL Assignment this method should work, so override the override - # from BaseLDAPIdentity - base = super(BaseLDAPIdentity, self) - base.test_list_role_assignment_using_sourced_groups_with_domains() - - def test_create_project_with_domain_id_and_without_parent_id(self): - # With multi LDAP this method should work, so override the override - # from BaseLDAPIdentity - super(BaseLDAPIdentity, self).\ - test_create_project_with_domain_id_and_without_parent_id() - - def test_create_project_with_domain_id_mismatch_to_parent_domain(self): - # With multi LDAP this method should work, so override the override - # from BaseLDAPIdentity - super(BaseLDAPIdentity, self).\ - test_create_project_with_domain_id_mismatch_to_parent_domain() - - def test_remove_foreign_assignments_when_deleting_a_domain(self): - # With multi LDAP this method should work, so override the override - # from BaseLDAPIdentity - base = super(BaseLDAPIdentity, self) - base.test_remove_foreign_assignments_when_deleting_a_domain() - - -class MultiLDAPandSQLIdentityDomainConfigsInSQL(MultiLDAPandSQLIdentity): - """Class to test the use of domain configs stored in the database. - - Repeat the same tests as MultiLDAPandSQLIdentity, but instead of using the - domain specific config files, store the domain specific values in the - database. - - """ - - def assert_backends(self): - _assert_backends(self, - assignment='sql', - identity={ - None: 'sql', - self.domains['domain_default']['id']: 'ldap', - self.domains['domain1']['id']: 'ldap', - self.domains['domain2']['id']: 'ldap', - }, - resource='sql') - - def enable_multi_domain(self): - # The values below are the same as in the domain_configs_multi_ldap - # directory of test config_files. 
- default_config = { - 'ldap': {'url': 'fake://memory', - 'user': 'cn=Admin', - 'password': 'password', - 'suffix': 'cn=example,cn=com'}, - 'identity': {'driver': 'ldap'} - } - domain1_config = { - 'ldap': {'url': 'fake://memory1', - 'user': 'cn=Admin', - 'password': 'password', - 'suffix': 'cn=example,cn=com'}, - 'identity': {'driver': 'ldap', - 'list_limit': '101'} - } - domain2_config = { - 'ldap': {'url': 'fake://memory', - 'user': 'cn=Admin', - 'password': 'password', - 'suffix': 'cn=myroot,cn=com', - 'group_tree_dn': 'ou=UserGroups,dc=myroot,dc=org', - 'user_tree_dn': 'ou=Users,dc=myroot,dc=org'}, - 'identity': {'driver': 'ldap'} - } - - self.domain_config_api.create_config(CONF.identity.default_domain_id, - default_config) - self.domain_config_api.create_config(self.domains['domain1']['id'], - domain1_config) - self.domain_config_api.create_config(self.domains['domain2']['id'], - domain2_config) - - self.config_fixture.config( - group='identity', domain_specific_drivers_enabled=True, - domain_configurations_from_database=True, - list_limit=1000) - self.config_fixture.config(group='identity_mapping', - backward_compatible_ids=False) - - def test_domain_config_has_no_impact_if_database_support_disabled(self): - """Ensure database domain configs have no effect if disabled. - - Set reading from database configs to false, restart the backends - and then try and set and use database configs. - - """ - self.config_fixture.config( - group='identity', domain_configurations_from_database=False) - self.load_backends() - new_config = {'ldap': {'url': uuid.uuid4().hex}} - self.domain_config_api.create_config( - CONF.identity.default_domain_id, new_config) - # Trigger the identity backend to initialise any domain specific - # configurations - self.identity_api.list_users() - # Check that the new config has not been passed to the driver for - # the default domain. 
- default_config = ( - self.identity_api.domain_configs.get_domain_conf( - CONF.identity.default_domain_id)) - self.assertEqual(CONF.ldap.url, default_config.ldap.url) - - def test_reloading_domain_config(self): - """Ensure domain drivers are reloaded on a config modification.""" - domain_cfgs = self.identity_api.domain_configs - - # Create a new config for the default domain, hence overwriting the - # current settings. - new_config = { - 'ldap': {'url': uuid.uuid4().hex}, - 'identity': {'driver': 'ldap'}} - self.domain_config_api.create_config( - CONF.identity.default_domain_id, new_config) - default_config = ( - domain_cfgs.get_domain_conf(CONF.identity.default_domain_id)) - self.assertEqual(new_config['ldap']['url'], default_config.ldap.url) - - # Ensure updating is also honored - updated_config = {'url': uuid.uuid4().hex} - self.domain_config_api.update_config( - CONF.identity.default_domain_id, updated_config, - group='ldap', option='url') - default_config = ( - domain_cfgs.get_domain_conf(CONF.identity.default_domain_id)) - self.assertEqual(updated_config['url'], default_config.ldap.url) - - # ...and finally ensure delete causes the driver to get the standard - # config again. 
- self.domain_config_api.delete_config(CONF.identity.default_domain_id) - default_config = ( - domain_cfgs.get_domain_conf(CONF.identity.default_domain_id)) - self.assertEqual(CONF.ldap.url, default_config.ldap.url) - - def test_setting_multiple_sql_driver_raises_exception(self): - """Ensure setting multiple domain specific sql drivers is prevented.""" - new_config = {'identity': {'driver': 'sql'}} - self.domain_config_api.create_config( - CONF.identity.default_domain_id, new_config) - self.identity_api.domain_configs.get_domain_conf( - CONF.identity.default_domain_id) - self.domain_config_api.create_config(self.domains['domain1']['id'], - new_config) - self.assertRaises(exception.MultipleSQLDriversInConfig, - self.identity_api.domain_configs.get_domain_conf, - self.domains['domain1']['id']) - - def test_same_domain_gets_sql_driver(self): - """Ensure we can set an SQL driver if we have had it before.""" - new_config = {'identity': {'driver': 'sql'}} - self.domain_config_api.create_config( - CONF.identity.default_domain_id, new_config) - self.identity_api.domain_configs.get_domain_conf( - CONF.identity.default_domain_id) - - # By using a slightly different config, we cause the driver to be - # reloaded...and hence check if we can reuse the sql driver - new_config = {'identity': {'driver': 'sql'}, - 'ldap': {'url': 'fake://memory1'}} - self.domain_config_api.create_config( - CONF.identity.default_domain_id, new_config) - self.identity_api.domain_configs.get_domain_conf( - CONF.identity.default_domain_id) - - def test_delete_domain_clears_sql_registration(self): - """Ensure registration is deleted when a domain is deleted.""" - domain = unit.new_domain_ref() - domain = self.resource_api.create_domain(domain['id'], domain) - new_config = {'identity': {'driver': 'sql'}} - self.domain_config_api.create_config(domain['id'], new_config) - self.identity_api.domain_configs.get_domain_conf(domain['id']) - - # First show that trying to set SQL for another driver fails - 
self.domain_config_api.create_config(self.domains['domain1']['id'], - new_config) - self.assertRaises(exception.MultipleSQLDriversInConfig, - self.identity_api.domain_configs.get_domain_conf, - self.domains['domain1']['id']) - self.domain_config_api.delete_config(self.domains['domain1']['id']) - - # Now we delete the domain - domain['enabled'] = False - self.resource_api.update_domain(domain['id'], domain) - self.resource_api.delete_domain(domain['id']) - - # The registration should now be available - self.domain_config_api.create_config(self.domains['domain1']['id'], - new_config) - self.identity_api.domain_configs.get_domain_conf( - self.domains['domain1']['id']) - - def test_orphaned_registration_does_not_prevent_getting_sql_driver(self): - """Ensure we self heal an orphaned sql registration.""" - domain = unit.new_domain_ref() - domain = self.resource_api.create_domain(domain['id'], domain) - new_config = {'identity': {'driver': 'sql'}} - self.domain_config_api.create_config(domain['id'], new_config) - self.identity_api.domain_configs.get_domain_conf(domain['id']) - - # First show that trying to set SQL for another driver fails - self.domain_config_api.create_config(self.domains['domain1']['id'], - new_config) - self.assertRaises(exception.MultipleSQLDriversInConfig, - self.identity_api.domain_configs.get_domain_conf, - self.domains['domain1']['id']) - - # Now we delete the domain by using the backend driver directly, - # which causes the domain to be deleted without any of the cleanup - # that is in the manager (this is simulating a server process crashing - # in the middle of a delete domain operation, and somehow leaving the - # domain config settings in place, but the domain is deleted). We - # should still be able to set another domain to SQL, since we should - # self heal this issue. 
- - self.resource_api.driver.delete_project(domain['id']) - # Invalidate cache (so we will see the domain has gone) - self.resource_api.get_domain.invalidate( - self.resource_api, domain['id']) - - # The registration should now be available - self.domain_config_api.create_config(self.domains['domain1']['id'], - new_config) - self.identity_api.domain_configs.get_domain_conf( - self.domains['domain1']['id']) - - -class DomainSpecificLDAPandSQLIdentity( - BaseLDAPIdentity, unit.SQLDriverOverrides, unit.TestCase, - BaseMultiLDAPandSQLIdentity): - """Class to test when all domains use specific configs, including SQL. - - We define a set of domains and domain-specific backends: - - - A separate LDAP backend for the default domain - - A separate SQL backend for domain1 - - Although the default driver still exists, we don't use it. - - """ - - def setUp(self): - sqldb = self.useFixture(database.Database()) - super(DomainSpecificLDAPandSQLIdentity, self).setUp() - self.initial_setup(sqldb) - - def initial_setup(self, sqldb): - # We aren't setting up any initial data ahead of switching to - # domain-specific operation, so make the switch straight away. 
- self.config_fixture.config( - group='identity', domain_specific_drivers_enabled=True, - domain_config_dir=( - unit.TESTCONF + '/domain_configs_one_sql_one_ldap')) - self.config_fixture.config(group='identity_mapping', - backward_compatible_ids=False) - - self.load_backends() - sqldb.recreate() - - self.domain_count = 2 - self.domain_specific_count = 2 - self.setup_initial_domains() - self.users = {} - - self.ldapdb.clear() - self.load_fixtures(default_fixtures) - self.create_users_across_domains() - - _assert_backends( - self, - assignment='sql', - identity={ - None: 'ldap', - 'default': 'ldap', - self.domains['domain1']['id']: 'sql', - }, - resource='sql') - - def config_overrides(self): - super(DomainSpecificLDAPandSQLIdentity, self).config_overrides() - # Make sure resource & assignment are actually SQL drivers, - # BaseLDAPIdentity causes this option to use LDAP. - self.config_fixture.config(group='resource', driver='sql') - self.config_fixture.config(group='assignment', driver='sql') - - def get_config(self, domain_id): - # Get the config for this domain, will return CONF - # if no specific config defined for this domain - return self.identity_api.domain_configs.get_domain_conf(domain_id) - - def test_list_domains(self): - self.skipTest( - 'N/A: Not relevant for multi ldap testing') - - def test_list_domains_non_default_domain_id(self): - self.skipTest( - 'N/A: Not relevant for multi ldap testing') - - def test_domain_crud(self): - self.skipTest( - 'N/A: Not relevant for multi ldap testing') - - def test_not_delete_domain_with_enabled_subdomains(self): - self.skipTest( - 'N/A: Not relevant for multi ldap testing') - - def test_delete_domain(self): - # With this restricted multi LDAP class, tests that use multiple - # domains and identity, are still not supported - self.assertRaises( - exception.DomainNotFound, - super(BaseLDAPIdentity, self).test_delete_domain_with_project_api) - - def test_list_users(self): - # Override the standard list users, since we 
have added an extra user - # to the default domain, so the number of expected users is one more - # than in the standard test. - users = self.identity_api.list_users( - domain_scope=self._set_domain_scope( - CONF.identity.default_domain_id)) - self.assertEqual(len(default_fixtures.USERS) + 1, len(users)) - user_ids = set(user['id'] for user in users) - expected_user_ids = set(getattr(self, 'user_%s' % user['id'])['id'] - for user in default_fixtures.USERS) - expected_user_ids.add(self.users['user0']['id']) - for user_ref in users: - self.assertNotIn('password', user_ref) - self.assertEqual(expected_user_ids, user_ids) - - def test_domain_segregation(self): - """Test that separate configs have segregated the domain. - - Test Plan: - - - Users were created in each domain as part of setup, now make sure - you can only find a given user in its relevant domain/backend - - Make sure that for a backend that supports multiple domains - you can get the users via any of its domains - - """ - # Check that I can read a user with the appropriate domain-selected - # driver, but won't find it via any other domain driver - - self.check_user(self.users['user0'], - self.domains['domain_default']['id'], http_client.OK) - self.check_user(self.users['user0'], - self.domains['domain1']['id'], exception.UserNotFound) - - self.check_user(self.users['user1'], - self.domains['domain1']['id'], http_client.OK) - self.check_user(self.users['user1'], - self.domains['domain_default']['id'], - exception.UserNotFound) - - # Finally, going through the regular manager layer, make sure we - # only see the right number of users in the non-default domain. 
- - self.assertThat( - self.identity_api.list_users( - domain_scope=self.domains['domain1']['id']), - matchers.HasLength(1)) - - def test_add_role_grant_to_user_and_project_returns_not_found(self): - self.skipTest('Blocked by bug 1101287') - - def test_get_role_grants_for_user_and_project_returns_not_found(self): - self.skipTest('Blocked by bug 1101287') - - def test_list_projects_for_user_with_grants(self): - self.skipTest('Blocked by bug 1221805') - - def test_get_roles_for_user_and_project_user_group_same_id(self): - self.skipTest('N/A: We never generate the same ID for a user and ' - 'group in our mapping table') - - def test_user_id_comma(self): - self.skipTest('Only valid if it is guaranteed to be talking to ' - 'the fakeldap backend') - - def test_user_id_comma_grants(self): - self.skipTest('Only valid if it is guaranteed to be talking to ' - 'the fakeldap backend') - - def test_user_enabled_ignored_disable_error(self): - # Override. - self.skipTest("Doesn't apply since LDAP config has no affect on the " - "SQL identity backend.") - - def test_group_enabled_ignored_disable_error(self): - # Override. 
- self.skipTest("Doesn't apply since LDAP config has no affect on the " - "SQL identity backend.") - - def test_project_enabled_ignored_disable_error(self): - # Override - self.skipTest("Doesn't apply since LDAP configuration is ignored for " - "SQL assignment backend.") - - def test_list_role_assignments_filtered_by_role(self): - # Domain roles are supported by the SQL Assignment backend - base = super(BaseLDAPIdentity, self) - base.test_list_role_assignments_filtered_by_role() - - def test_delete_domain_with_project_api(self): - # With this restricted multi LDAP class, tests that use multiple - # domains and identity, are still not supported - self.assertRaises( - exception.DomainNotFound, - super(BaseLDAPIdentity, self).test_delete_domain_with_project_api) - - def test_create_project_with_domain_id_and_without_parent_id(self): - # With restricted multi LDAP, tests that don't use identity, but do - # required aditional domains will work - base = super(BaseLDAPIdentity, self) - base.test_create_project_with_domain_id_and_without_parent_id() - - def test_create_project_with_domain_id_mismatch_to_parent_domain(self): - # With restricted multi LDAP, tests that don't use identity, but do - # required aditional domains will work - base = super(BaseLDAPIdentity, self) - base.test_create_project_with_domain_id_mismatch_to_parent_domain() - - -class DomainSpecificSQLIdentity(DomainSpecificLDAPandSQLIdentity): - """Class to test simplest use of domain-specific SQL driver. - - The simplest use of an SQL domain-specific backend is when it is used to - augment the standard case when LDAP is the default driver defined in the - main config file. This would allow, for example, service users to be - stored in SQL while LDAP handles the rest. 
Hence we define: - - - The default driver uses the LDAP backend for the default domain - - A separate SQL backend for domain1 - - """ - - def initial_setup(self, sqldb): - # We aren't setting up any initial data ahead of switching to - # domain-specific operation, so make the switch straight away. - self.config_fixture.config( - group='identity', domain_specific_drivers_enabled=True, - domain_config_dir=( - unit.TESTCONF + '/domain_configs_default_ldap_one_sql')) - # Part of the testing counts how many new mappings get created as - # we create users, so ensure we are NOT using mapping for the default - # LDAP domain so this doesn't confuse the calculation. - self.config_fixture.config(group='identity_mapping', - backward_compatible_ids=True) - - self.load_backends() - sqldb.recreate() - - self.domain_count = 2 - self.domain_specific_count = 1 - self.setup_initial_domains() - self.users = {} - - self.load_fixtures(default_fixtures) - self.create_users_across_domains() - - _assert_backends(self, - assignment='sql', - identity='ldap', - resource='sql') - - def config_overrides(self): - super(DomainSpecificSQLIdentity, self).config_overrides() - self.config_fixture.config(group='identity', driver='ldap') - self.config_fixture.config(group='resource', driver='sql') - self.config_fixture.config(group='assignment', driver='sql') - - def get_config(self, domain_id): - if domain_id == CONF.identity.default_domain_id: - return CONF - else: - return self.identity_api.domain_configs.get_domain_conf(domain_id) - - def test_default_sql_plus_sql_specific_driver_fails(self): - # First confirm that if ldap is default driver, domain1 can be - # loaded as sql - self.config_fixture.config(group='identity', driver='ldap') - self.config_fixture.config(group='assignment', driver='sql') - self.load_backends() - # Make any identity call to initiate the lazy loading of configs - self.identity_api.list_users( - domain_scope=CONF.identity.default_domain_id) - 
self.assertIsNotNone(self.get_config(self.domains['domain1']['id'])) - - # Now re-initialize, but with sql as the identity driver - self.config_fixture.config(group='identity', driver='sql') - self.config_fixture.config(group='assignment', driver='sql') - self.load_backends() - # Make any identity call to initiate the lazy loading of configs, which - # should fail since we would now have two sql drivers. - self.assertRaises(exception.MultipleSQLDriversInConfig, - self.identity_api.list_users, - domain_scope=CONF.identity.default_domain_id) - - def test_multiple_sql_specific_drivers_fails(self): - self.config_fixture.config(group='identity', driver='ldap') - self.config_fixture.config(group='assignment', driver='sql') - self.load_backends() - # Ensure default, domain1 and domain2 exist - self.domain_count = 3 - self.setup_initial_domains() - # Make any identity call to initiate the lazy loading of configs - self.identity_api.list_users( - domain_scope=CONF.identity.default_domain_id) - # This will only load domain1, since the domain2 config file is - # not stored in the same location - self.assertIsNotNone(self.get_config(self.domains['domain1']['id'])) - - # Now try and manually load a 2nd sql specific driver, for domain2, - # which should fail. 
- self.assertRaises( - exception.MultipleSQLDriversInConfig, - self.identity_api.domain_configs._load_config_from_file, - self.resource_api, - [unit.TESTCONF + '/domain_configs_one_extra_sql/' + - 'keystone.domain2.conf'], - 'domain2') - - -class LdapFilterTests(identity_tests.FilterTests, unit.TestCase): - - def setUp(self): - super(LdapFilterTests, self).setUp() - sqldb = self.useFixture(database.Database()) - self.useFixture(ldapdb.LDAPDatabase()) - - self.load_backends() - self.load_fixtures(default_fixtures) - sqldb.recreate() - _assert_backends(self, identity='ldap') - - def config_overrides(self): - super(LdapFilterTests, self).config_overrides() - self.config_fixture.config(group='identity', driver='ldap') - - def config_files(self): - config_files = super(LdapFilterTests, self).config_files() - config_files.append(unit.dirs.tests_conf('backend_ldap.conf')) - return config_files - - @wip('Not supported by LDAP identity driver') - def test_list_users_in_group_inexact_filtered(self): - # The LDAP identity driver currently does not support filtering on the - # listing users for a given group, so will fail this test. - super(LdapFilterTests, - self).test_list_users_in_group_inexact_filtered() - - @wip('Not supported by LDAP identity driver') - def test_list_users_in_group_exact_filtered(self): - # The LDAP identity driver currently does not support filtering on the - # listing users for a given group, so will fail this test. - super(LdapFilterTests, self).test_list_users_in_group_exact_filtered() diff --git a/keystone-moon/keystone/tests/unit/test_backend_ldap_pool.py b/keystone-moon/keystone/tests/unit/test_backend_ldap_pool.py deleted file mode 100644 index ec789d04..00000000 --- a/keystone-moon/keystone/tests/unit/test_backend_ldap_pool.py +++ /dev/null @@ -1,243 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2012 OpenStack Foundation -# Copyright 2013 IBM Corp. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import ldappool -import mock -from oslo_config import cfg -from oslotest import mockpatch - -from keystone.common.ldap import core as ldap_core -from keystone.identity.backends import ldap -from keystone.tests import unit -from keystone.tests.unit import fakeldap -from keystone.tests.unit import test_backend_ldap - -CONF = cfg.CONF - - -class LdapPoolCommonTestMixin(object): - """LDAP pool specific common tests used here and in live tests.""" - - def cleanup_pools(self): - ldap_core.PooledLDAPHandler.connection_pools.clear() - - def test_handler_with_use_pool_enabled(self): - # by default use_pool and use_auth_pool is enabled in test pool config - user_ref = self.identity_api.get_user(self.user_foo['id']) - self.user_foo.pop('password') - self.assertDictEqual(self.user_foo, user_ref) - - handler = ldap_core._get_connection(CONF.ldap.url, use_pool=True) - self.assertIsInstance(handler, ldap_core.PooledLDAPHandler) - - @mock.patch.object(ldap_core.KeystoneLDAPHandler, 'connect') - @mock.patch.object(ldap_core.KeystoneLDAPHandler, 'simple_bind_s') - def test_handler_with_use_pool_not_enabled(self, bind_method, - connect_method): - self.config_fixture.config(group='ldap', use_pool=False) - self.config_fixture.config(group='ldap', use_auth_pool=True) - self.cleanup_pools() - - user_api = ldap.UserApi(CONF) - handler = user_api.get_connection(user=None, password=None, - end_user_auth=True) - # use_auth_pool flag does not 
matter when use_pool is False - # still handler is non pool version - self.assertIsInstance(handler.conn, ldap_core.PythonLDAPHandler) - - @mock.patch.object(ldap_core.KeystoneLDAPHandler, 'connect') - @mock.patch.object(ldap_core.KeystoneLDAPHandler, 'simple_bind_s') - def test_handler_with_end_user_auth_use_pool_not_enabled(self, bind_method, - connect_method): - # by default use_pool is enabled in test pool config - # now disabling use_auth_pool flag to test handler instance - self.config_fixture.config(group='ldap', use_auth_pool=False) - self.cleanup_pools() - - user_api = ldap.UserApi(CONF) - handler = user_api.get_connection(user=None, password=None, - end_user_auth=True) - self.assertIsInstance(handler.conn, ldap_core.PythonLDAPHandler) - - # For end_user_auth case, flag should not be false otherwise - # it will use, admin connections ldap pool - handler = user_api.get_connection(user=None, password=None, - end_user_auth=False) - self.assertIsInstance(handler.conn, ldap_core.PooledLDAPHandler) - - def test_pool_size_set(self): - # get related connection manager instance - ldappool_cm = self.conn_pools[CONF.ldap.url] - self.assertEqual(CONF.ldap.pool_size, ldappool_cm.size) - - def test_pool_retry_max_set(self): - # get related connection manager instance - ldappool_cm = self.conn_pools[CONF.ldap.url] - self.assertEqual(CONF.ldap.pool_retry_max, ldappool_cm.retry_max) - - def test_pool_retry_delay_set(self): - # just make one identity call to initiate ldap connection if not there - self.identity_api.get_user(self.user_foo['id']) - - # get related connection manager instance - ldappool_cm = self.conn_pools[CONF.ldap.url] - self.assertEqual(CONF.ldap.pool_retry_delay, ldappool_cm.retry_delay) - - def test_pool_use_tls_set(self): - # get related connection manager instance - ldappool_cm = self.conn_pools[CONF.ldap.url] - self.assertEqual(CONF.ldap.use_tls, ldappool_cm.use_tls) - - def test_pool_timeout_set(self): - # get related connection manager instance - 
ldappool_cm = self.conn_pools[CONF.ldap.url] - self.assertEqual(CONF.ldap.pool_connection_timeout, - ldappool_cm.timeout) - - def test_pool_use_pool_set(self): - # get related connection manager instance - ldappool_cm = self.conn_pools[CONF.ldap.url] - self.assertEqual(CONF.ldap.use_pool, ldappool_cm.use_pool) - - def test_pool_connection_lifetime_set(self): - # get related connection manager instance - ldappool_cm = self.conn_pools[CONF.ldap.url] - self.assertEqual(CONF.ldap.pool_connection_lifetime, - ldappool_cm.max_lifetime) - - def test_max_connection_error_raised(self): - - who = CONF.ldap.user - cred = CONF.ldap.password - # get related connection manager instance - ldappool_cm = self.conn_pools[CONF.ldap.url] - ldappool_cm.size = 2 - - # 3rd connection attempt should raise Max connection error - with ldappool_cm.connection(who, cred) as _: # conn1 - with ldappool_cm.connection(who, cred) as _: # conn2 - try: - with ldappool_cm.connection(who, cred) as _: # conn3 - _.unbind_s() - self.fail() - except Exception as ex: - self.assertIsInstance(ex, - ldappool.MaxConnectionReachedError) - ldappool_cm.size = CONF.ldap.pool_size - - def test_pool_size_expands_correctly(self): - - who = CONF.ldap.user - cred = CONF.ldap.password - # get related connection manager instance - ldappool_cm = self.conn_pools[CONF.ldap.url] - ldappool_cm.size = 3 - - def _get_conn(): - return ldappool_cm.connection(who, cred) - - # Open 3 connections first - with _get_conn() as _: # conn1 - self.assertEqual(1, len(ldappool_cm)) - with _get_conn() as _: # conn2 - self.assertEqual(2, len(ldappool_cm)) - with _get_conn() as _: # conn2 - _.unbind_ext_s() - self.assertEqual(3, len(ldappool_cm)) - - # Then open 3 connections again and make sure size does not grow - # over 3 - with _get_conn() as _: # conn1 - self.assertEqual(1, len(ldappool_cm)) - with _get_conn() as _: # conn2 - self.assertEqual(2, len(ldappool_cm)) - with _get_conn() as _: # conn3 - _.unbind_ext_s() - self.assertEqual(3, 
len(ldappool_cm)) - - def test_password_change_with_pool(self): - old_password = self.user_sna['password'] - self.cleanup_pools() - - # authenticate so that connection is added to pool before password - # change - user_ref = self.identity_api.authenticate( - context={}, - user_id=self.user_sna['id'], - password=self.user_sna['password']) - - self.user_sna.pop('password') - self.user_sna['enabled'] = True - self.assertDictEqual(self.user_sna, user_ref) - - new_password = 'new_password' - user_ref['password'] = new_password - self.identity_api.update_user(user_ref['id'], user_ref) - - # now authenticate again to make sure new password works with - # connection pool - user_ref2 = self.identity_api.authenticate( - context={}, - user_id=self.user_sna['id'], - password=new_password) - - user_ref.pop('password') - self.assertDictEqual(user_ref, user_ref2) - - # Authentication with old password would not work here as there - # is only one connection in pool which get bind again with updated - # password..so no old bind is maintained in this case. 
- self.assertRaises(AssertionError, - self.identity_api.authenticate, - context={}, - user_id=self.user_sna['id'], - password=old_password) - - -class LDAPIdentity(LdapPoolCommonTestMixin, - test_backend_ldap.LDAPIdentity, - unit.TestCase): - """Executes tests in existing base class with pooled LDAP handler.""" - - def setUp(self): - self.useFixture(mockpatch.PatchObject( - ldap_core.PooledLDAPHandler, 'Connector', fakeldap.FakeLdapPool)) - super(LDAPIdentity, self).setUp() - - self.addCleanup(self.cleanup_pools) - # storing to local variable to avoid long references - self.conn_pools = ldap_core.PooledLDAPHandler.connection_pools - # super class loads db fixtures which establishes ldap connection - # so adding dummy call to highlight connection pool initialization - # as its not that obvious though its not needed here - self.identity_api.get_user(self.user_foo['id']) - - def config_files(self): - config_files = super(LDAPIdentity, self).config_files() - config_files.append(unit.dirs.tests_conf('backend_ldap_pool.conf')) - return config_files - - @mock.patch.object(ldap_core, 'utf8_encode') - def test_utf8_encoded_is_used_in_pool(self, mocked_method): - def side_effect(arg): - return arg - mocked_method.side_effect = side_effect - # invalidate the cache to get utf8_encode function called. - self.identity_api.get_user.invalidate(self.identity_api, - self.user_foo['id']) - self.identity_api.get_user(self.user_foo['id']) - mocked_method.assert_any_call(CONF.ldap.user) - mocked_method.assert_any_call(CONF.ldap.password) diff --git a/keystone-moon/keystone/tests/unit/test_backend_rules.py b/keystone-moon/keystone/tests/unit/test_backend_rules.py deleted file mode 100644 index c32c3307..00000000 --- a/keystone-moon/keystone/tests/unit/test_backend_rules.py +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -from keystone import exception -from keystone.tests import unit -from keystone.tests.unit.policy import test_backends as policy_tests - - -class RulesPolicy(unit.TestCase, policy_tests.PolicyTests): - def setUp(self): - super(RulesPolicy, self).setUp() - self.load_backends() - - def config_overrides(self): - super(RulesPolicy, self).config_overrides() - self.config_fixture.config(group='policy', driver='rules') - - def test_create(self): - self.assertRaises(exception.NotImplemented, - super(RulesPolicy, self).test_create) - - def test_get(self): - self.assertRaises(exception.NotImplemented, - super(RulesPolicy, self).test_get) - - def test_list(self): - self.assertRaises(exception.NotImplemented, - super(RulesPolicy, self).test_list) - - def test_update(self): - self.assertRaises(exception.NotImplemented, - super(RulesPolicy, self).test_update) - - def test_delete(self): - self.assertRaises(exception.NotImplemented, - super(RulesPolicy, self).test_delete) - - def test_get_policy_returns_not_found(self): - self.assertRaises(exception.NotImplemented, - super(RulesPolicy, - self).test_get_policy_returns_not_found) - - def test_update_policy_returns_not_found(self): - self.assertRaises(exception.NotImplemented, - super(RulesPolicy, - self).test_update_policy_returns_not_found) - - def test_delete_policy_returns_not_found(self): - self.assertRaises(exception.NotImplemented, - super(RulesPolicy, - self).test_delete_policy_returns_not_found) diff --git a/keystone-moon/keystone/tests/unit/test_backend_sql.py 
b/keystone-moon/keystone/tests/unit/test_backend_sql.py deleted file mode 100644 index 2e703fff..00000000 --- a/keystone-moon/keystone/tests/unit/test_backend_sql.py +++ /dev/null @@ -1,1025 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import functools -import uuid - -import mock -from oslo_config import cfg -from oslo_db import exception as db_exception -from oslo_db import options -from six.moves import range -import sqlalchemy -from sqlalchemy import exc -from testtools import matchers - -from keystone.common import driver_hints -from keystone.common import sql -from keystone import exception -from keystone.identity.backends import sql as identity_sql -from keystone import resource -from keystone.tests import unit -from keystone.tests.unit.assignment import test_backends as assignment_tests -from keystone.tests.unit.catalog import test_backends as catalog_tests -from keystone.tests.unit import default_fixtures -from keystone.tests.unit.identity import test_backends as identity_tests -from keystone.tests.unit.ksfixtures import database -from keystone.tests.unit.policy import test_backends as policy_tests -from keystone.tests.unit.resource import test_backends as resource_tests -from keystone.tests.unit.token import test_backends as token_tests -from keystone.tests.unit.trust import test_backends as trust_tests -from keystone.token.persistence.backends import sql as token_sql - - -CONF = 
cfg.CONF - - -class SqlTests(unit.SQLDriverOverrides, unit.TestCase): - - def setUp(self): - super(SqlTests, self).setUp() - self.useFixture(database.Database(self.sql_driver_version_overrides)) - self.load_backends() - - # populate the engine with tables & fixtures - self.load_fixtures(default_fixtures) - # defaulted by the data load - self.user_foo['enabled'] = True - - def config_files(self): - config_files = super(SqlTests, self).config_files() - config_files.append(unit.dirs.tests_conf('backend_sql.conf')) - return config_files - - -class SqlModels(SqlTests): - - def select_table(self, name): - table = sqlalchemy.Table(name, - sql.ModelBase.metadata, - autoload=True) - s = sqlalchemy.select([table]) - return s - - def assertExpectedSchema(self, table, expected_schema): - """Assert that a table's schema is what we expect. - - :param string table: the name of the table to inspect - :param tuple expected_schema: a tuple of tuples containing the - expected schema - :raises AssertionError: when the database schema doesn't match the - expected schema - - The expected_schema format is simply:: - - ( - ('column name', sql type, qualifying detail), - ... 
- ) - - The qualifying detail varies based on the type of the column:: - - - sql.Boolean columns must indicate the column's default value or - None if there is no default - - Columns with a length, like sql.String, must indicate the - column's length - - All other column types should use None - - Example:: - - cols = (('id', sql.String, 64), - ('enabled', sql.Boolean, True), - ('extra', sql.JsonBlob, None)) - self.assertExpectedSchema('table_name', cols) - - """ - table = self.select_table(table) - - actual_schema = [] - for column in table.c: - if isinstance(column.type, sql.Boolean): - default = None - if column._proxies[0].default: - default = column._proxies[0].default.arg - actual_schema.append((column.name, type(column.type), default)) - elif (hasattr(column.type, 'length') and - not isinstance(column.type, sql.Enum)): - # NOTE(dstanek): Even though sql.Enum columns have a length - # set we don't want to catch them here. Maybe in the future - # we'll check to see that they contain a list of the correct - # possible values. 
- actual_schema.append((column.name, - type(column.type), - column.type.length)) - else: - actual_schema.append((column.name, type(column.type), None)) - - self.assertItemsEqual(expected_schema, actual_schema) - - def test_user_model(self): - cols = (('id', sql.String, 64), - ('default_project_id', sql.String, 64), - ('enabled', sql.Boolean, None), - ('extra', sql.JsonBlob, None)) - self.assertExpectedSchema('user', cols) - - def test_local_user_model(self): - cols = (('id', sql.Integer, None), - ('user_id', sql.String, 64), - ('name', sql.String, 255), - ('domain_id', sql.String, 64)) - self.assertExpectedSchema('local_user', cols) - - def test_password_model(self): - cols = (('id', sql.Integer, None), - ('local_user_id', sql.Integer, None), - ('password', sql.String, 128)) - self.assertExpectedSchema('password', cols) - - def test_federated_user_model(self): - cols = (('id', sql.Integer, None), - ('user_id', sql.String, 64), - ('idp_id', sql.String, 64), - ('protocol_id', sql.String, 64), - ('unique_id', sql.String, 255), - ('display_name', sql.String, 255)) - self.assertExpectedSchema('federated_user', cols) - - def test_group_model(self): - cols = (('id', sql.String, 64), - ('name', sql.String, 64), - ('description', sql.Text, None), - ('domain_id', sql.String, 64), - ('extra', sql.JsonBlob, None)) - self.assertExpectedSchema('group', cols) - - def test_domain_model(self): - cols = (('id', sql.String, 64), - ('name', sql.String, 64), - ('enabled', sql.Boolean, True), - ('extra', sql.JsonBlob, None)) - self.assertExpectedSchema('domain', cols) - - def test_project_model(self): - cols = (('id', sql.String, 64), - ('name', sql.String, 64), - ('description', sql.Text, None), - ('domain_id', sql.String, 64), - ('enabled', sql.Boolean, None), - ('extra', sql.JsonBlob, None), - ('parent_id', sql.String, 64), - ('is_domain', sql.Boolean, False)) - self.assertExpectedSchema('project', cols) - - def test_role_assignment_model(self): - cols = (('type', sql.Enum, None), - 
('actor_id', sql.String, 64), - ('target_id', sql.String, 64), - ('role_id', sql.String, 64), - ('inherited', sql.Boolean, False)) - self.assertExpectedSchema('assignment', cols) - - def test_user_group_membership(self): - cols = (('group_id', sql.String, 64), - ('user_id', sql.String, 64)) - self.assertExpectedSchema('user_group_membership', cols) - - def test_revocation_event_model(self): - cols = (('id', sql.Integer, None), - ('domain_id', sql.String, 64), - ('project_id', sql.String, 64), - ('user_id', sql.String, 64), - ('role_id', sql.String, 64), - ('trust_id', sql.String, 64), - ('consumer_id', sql.String, 64), - ('access_token_id', sql.String, 64), - ('issued_before', sql.DateTime, None), - ('expires_at', sql.DateTime, None), - ('revoked_at', sql.DateTime, None), - ('audit_id', sql.String, 32), - ('audit_chain_id', sql.String, 32)) - self.assertExpectedSchema('revocation_event', cols) - - -class SqlIdentity(SqlTests, identity_tests.IdentityTests, - assignment_tests.AssignmentTests, - resource_tests.ResourceTests): - def test_password_hashed(self): - with sql.session_for_read() as session: - user_ref = self.identity_api._get_user(session, - self.user_foo['id']) - self.assertNotEqual(self.user_foo['password'], - user_ref['password']) - - def test_create_user_with_null_password(self): - user_dict = unit.new_user_ref( - domain_id=CONF.identity.default_domain_id) - user_dict["password"] = None - new_user_dict = self.identity_api.create_user(user_dict) - with sql.session_for_read() as session: - new_user_ref = self.identity_api._get_user(session, - new_user_dict['id']) - self.assertFalse(new_user_ref.local_user.passwords) - - def test_update_user_with_null_password(self): - user_dict = unit.new_user_ref( - domain_id=CONF.identity.default_domain_id) - self.assertTrue(user_dict['password']) - new_user_dict = self.identity_api.create_user(user_dict) - new_user_dict["password"] = None - new_user_dict = self.identity_api.update_user(new_user_dict['id'], - 
new_user_dict) - with sql.session_for_read() as session: - new_user_ref = self.identity_api._get_user(session, - new_user_dict['id']) - self.assertFalse(new_user_ref.local_user.passwords) - - def test_delete_user_with_project_association(self): - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - user = self.identity_api.create_user(user) - self.assignment_api.add_user_to_project(self.tenant_bar['id'], - user['id']) - self.identity_api.delete_user(user['id']) - self.assertRaises(exception.UserNotFound, - self.assignment_api.list_projects_for_user, - user['id']) - - def test_create_null_user_name(self): - user = unit.new_user_ref(name=None, - domain_id=CONF.identity.default_domain_id) - self.assertRaises(exception.ValidationError, - self.identity_api.create_user, - user) - self.assertRaises(exception.UserNotFound, - self.identity_api.get_user_by_name, - user['name'], - CONF.identity.default_domain_id) - - def test_create_user_case_sensitivity(self): - # user name case sensitivity is down to the fact that it is marked as - # an SQL UNIQUE column, which may not be valid for other backends, like - # LDAP. 
- - # create a ref with a lowercase name - ref = unit.new_user_ref(name=uuid.uuid4().hex.lower(), - domain_id=CONF.identity.default_domain_id) - ref = self.identity_api.create_user(ref) - - # assign a new ID with the same name, but this time in uppercase - ref['name'] = ref['name'].upper() - self.identity_api.create_user(ref) - - def test_create_federated_user_unique_constraint(self): - federated_dict = unit.new_federated_user_ref() - user_dict = self.shadow_users_api.create_federated_user(federated_dict) - user_dict = self.identity_api.get_user(user_dict["id"]) - self.assertIsNotNone(user_dict["id"]) - self.assertRaises(exception.Conflict, - self.shadow_users_api.create_federated_user, - federated_dict) - - def test_get_federated_user(self): - federated_dict = unit.new_federated_user_ref() - user_dict_create = self.shadow_users_api.create_federated_user( - federated_dict) - user_dict_get = self.shadow_users_api.get_federated_user( - federated_dict["idp_id"], - federated_dict["protocol_id"], - federated_dict["unique_id"]) - self.assertItemsEqual(user_dict_create, user_dict_get) - self.assertEqual(user_dict_create["id"], user_dict_get["id"]) - - def test_update_federated_user_display_name(self): - federated_dict = unit.new_federated_user_ref() - user_dict_create = self.shadow_users_api.create_federated_user( - federated_dict) - new_display_name = uuid.uuid4().hex - self.shadow_users_api.update_federated_user_display_name( - federated_dict["idp_id"], - federated_dict["protocol_id"], - federated_dict["unique_id"], - new_display_name) - user_ref = self.shadow_users_api._get_federated_user( - federated_dict["idp_id"], - federated_dict["protocol_id"], - federated_dict["unique_id"]) - self.assertEqual(user_ref.federated_users[0].display_name, - new_display_name) - self.assertEqual(user_dict_create["id"], user_ref.id) - - def test_create_project_case_sensitivity(self): - # project name case sensitivity is down to the fact that it is marked - # as an SQL UNIQUE column, 
which may not be valid for other backends, - # like LDAP. - - # create a ref with a lowercase name - ref = unit.new_project_ref(domain_id=CONF.identity.default_domain_id) - self.resource_api.create_project(ref['id'], ref) - - # assign a new ID with the same name, but this time in uppercase - ref['id'] = uuid.uuid4().hex - ref['name'] = ref['name'].upper() - self.resource_api.create_project(ref['id'], ref) - - def test_create_null_project_name(self): - project = unit.new_project_ref( - name=None, domain_id=CONF.identity.default_domain_id) - self.assertRaises(exception.ValidationError, - self.resource_api.create_project, - project['id'], - project) - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - project['id']) - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project_by_name, - project['name'], - CONF.identity.default_domain_id) - - def test_delete_project_with_user_association(self): - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - user = self.identity_api.create_user(user) - self.assignment_api.add_user_to_project(self.tenant_bar['id'], - user['id']) - self.resource_api.delete_project(self.tenant_bar['id']) - tenants = self.assignment_api.list_projects_for_user(user['id']) - self.assertEqual([], tenants) - - def test_update_project_returns_extra(self): - """This tests for backwards-compatibility with an essex/folsom bug. - - Non-indexed attributes were returned in an 'extra' attribute, instead - of on the entity itself; for consistency and backwards compatibility, - those attributes should be included twice. - - This behavior is specific to the SQL driver. 
- - """ - arbitrary_key = uuid.uuid4().hex - arbitrary_value = uuid.uuid4().hex - project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - project[arbitrary_key] = arbitrary_value - ref = self.resource_api.create_project(project['id'], project) - self.assertEqual(arbitrary_value, ref[arbitrary_key]) - self.assertIsNone(ref.get('extra')) - - ref['name'] = uuid.uuid4().hex - ref = self.resource_api.update_project(ref['id'], ref) - self.assertEqual(arbitrary_value, ref[arbitrary_key]) - self.assertEqual(arbitrary_value, ref['extra'][arbitrary_key]) - - def test_update_user_returns_extra(self): - """This tests for backwards-compatibility with an essex/folsom bug. - - Non-indexed attributes were returned in an 'extra' attribute, instead - of on the entity itself; for consistency and backwards compatibility, - those attributes should be included twice. - - This behavior is specific to the SQL driver. - - """ - arbitrary_key = uuid.uuid4().hex - arbitrary_value = uuid.uuid4().hex - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - user[arbitrary_key] = arbitrary_value - del user["id"] - ref = self.identity_api.create_user(user) - self.assertEqual(arbitrary_value, ref[arbitrary_key]) - self.assertIsNone(ref.get('password')) - self.assertIsNone(ref.get('extra')) - - user['name'] = uuid.uuid4().hex - user['password'] = uuid.uuid4().hex - ref = self.identity_api.update_user(ref['id'], user) - self.assertIsNone(ref.get('password')) - self.assertIsNone(ref['extra'].get('password')) - self.assertEqual(arbitrary_value, ref[arbitrary_key]) - self.assertEqual(arbitrary_value, ref['extra'][arbitrary_key]) - - def test_sql_user_to_dict_null_default_project_id(self): - user = unit.new_user_ref(domain_id=CONF.identity.default_domain_id) - user = self.identity_api.create_user(user) - with sql.session_for_read() as session: - query = session.query(identity_sql.User) - query = query.filter_by(id=user['id']) - raw_user_ref = query.one() - 
self.assertIsNone(raw_user_ref.default_project_id) - user_ref = raw_user_ref.to_dict() - self.assertNotIn('default_project_id', user_ref) - session.close() - - def test_list_domains_for_user(self): - domain = unit.new_domain_ref() - self.resource_api.create_domain(domain['id'], domain) - user = unit.new_user_ref(domain_id=domain['id']) - - test_domain1 = unit.new_domain_ref() - self.resource_api.create_domain(test_domain1['id'], test_domain1) - test_domain2 = unit.new_domain_ref() - self.resource_api.create_domain(test_domain2['id'], test_domain2) - - user = self.identity_api.create_user(user) - user_domains = self.assignment_api.list_domains_for_user(user['id']) - self.assertEqual(0, len(user_domains)) - self.assignment_api.create_grant(user_id=user['id'], - domain_id=test_domain1['id'], - role_id=self.role_member['id']) - self.assignment_api.create_grant(user_id=user['id'], - domain_id=test_domain2['id'], - role_id=self.role_member['id']) - user_domains = self.assignment_api.list_domains_for_user(user['id']) - self.assertThat(user_domains, matchers.HasLength(2)) - - def test_list_domains_for_user_with_grants(self): - # Create two groups each with a role on a different domain, and - # make user1 a member of both groups. Both these new domains - # should now be included, along with any direct user grants. 
- domain = unit.new_domain_ref() - self.resource_api.create_domain(domain['id'], domain) - user = unit.new_user_ref(domain_id=domain['id']) - user = self.identity_api.create_user(user) - group1 = unit.new_group_ref(domain_id=domain['id']) - group1 = self.identity_api.create_group(group1) - group2 = unit.new_group_ref(domain_id=domain['id']) - group2 = self.identity_api.create_group(group2) - - test_domain1 = unit.new_domain_ref() - self.resource_api.create_domain(test_domain1['id'], test_domain1) - test_domain2 = unit.new_domain_ref() - self.resource_api.create_domain(test_domain2['id'], test_domain2) - test_domain3 = unit.new_domain_ref() - self.resource_api.create_domain(test_domain3['id'], test_domain3) - - self.identity_api.add_user_to_group(user['id'], group1['id']) - self.identity_api.add_user_to_group(user['id'], group2['id']) - - # Create 3 grants, one user grant, the other two as group grants - self.assignment_api.create_grant(user_id=user['id'], - domain_id=test_domain1['id'], - role_id=self.role_member['id']) - self.assignment_api.create_grant(group_id=group1['id'], - domain_id=test_domain2['id'], - role_id=self.role_admin['id']) - self.assignment_api.create_grant(group_id=group2['id'], - domain_id=test_domain3['id'], - role_id=self.role_admin['id']) - user_domains = self.assignment_api.list_domains_for_user(user['id']) - self.assertThat(user_domains, matchers.HasLength(3)) - - def test_list_domains_for_user_with_inherited_grants(self): - """Test that inherited roles on the domain are excluded. 
- - Test Plan: - - - Create two domains, one user, group and role - - Domain1 is given an inherited user role, Domain2 an inherited - group role (for a group of which the user is a member) - - When listing domains for user, neither domain should be returned - - """ - domain1 = unit.new_domain_ref() - domain1 = self.resource_api.create_domain(domain1['id'], domain1) - domain2 = unit.new_domain_ref() - domain2 = self.resource_api.create_domain(domain2['id'], domain2) - user = unit.new_user_ref(domain_id=domain1['id']) - user = self.identity_api.create_user(user) - group = unit.new_group_ref(domain_id=domain1['id']) - group = self.identity_api.create_group(group) - self.identity_api.add_user_to_group(user['id'], group['id']) - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - - # Create a grant on each domain, one user grant, one group grant, - # both inherited. - self.assignment_api.create_grant(user_id=user['id'], - domain_id=domain1['id'], - role_id=role['id'], - inherited_to_projects=True) - self.assignment_api.create_grant(group_id=group['id'], - domain_id=domain2['id'], - role_id=role['id'], - inherited_to_projects=True) - - user_domains = self.assignment_api.list_domains_for_user(user['id']) - # No domains should be returned since both domains have only inherited - # roles assignments. - self.assertThat(user_domains, matchers.HasLength(0)) - - def test_storing_null_domain_id_in_project_ref(self): - """Test the special storage of domain_id=None in sql resource driver. - - The resource driver uses a special value in place of None for domain_id - in the project record. This shouldn't escape the driver. Hence we test - the interface to ensure that you can store a domain_id of None, and - that any special value used inside the driver does not escape through - the interface. 
- - """ - spoiler_project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - self.resource_api.create_project(spoiler_project['id'], - spoiler_project) - - # First let's create a project with a None domain_id and make sure we - # can read it back. - project = unit.new_project_ref(domain_id=None, is_domain=True) - project = self.resource_api.create_project(project['id'], project) - ref = self.resource_api.get_project(project['id']) - self.assertDictEqual(project, ref) - - # Can we get it by name? - ref = self.resource_api.get_project_by_name(project['name'], None) - self.assertDictEqual(project, ref) - - # Can we filter for them - create a second domain to ensure we are - # testing the receipt of more than one. - project2 = unit.new_project_ref(domain_id=None, is_domain=True) - project2 = self.resource_api.create_project(project2['id'], project2) - hints = driver_hints.Hints() - hints.add_filter('domain_id', None) - refs = self.resource_api.list_projects(hints) - self.assertThat(refs, matchers.HasLength(2 + self.domain_count)) - self.assertIn(project, refs) - self.assertIn(project2, refs) - - # Can we update it? - project['name'] = uuid.uuid4().hex - self.resource_api.update_project(project['id'], project) - ref = self.resource_api.get_project(project['id']) - self.assertDictEqual(project, ref) - - # Finally, make sure we can delete it - project['enabled'] = False - self.resource_api.update_project(project['id'], project) - self.resource_api.delete_project(project['id']) - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - project['id']) - - def test_hidden_project_domain_root_is_really_hidden(self): - """Ensure we cannot access the hidden root of all project domains. - - Calling any of the driver methods should result in the same as - would be returned if we passed a project that does not exist. We don't - test create_project, since we do not allow a caller of our API to - specify their own ID for a new entity. 
- - """ - def _exercise_project_api(ref_id): - driver = self.resource_api.driver - self.assertRaises(exception.ProjectNotFound, - driver.get_project, - ref_id) - - self.assertRaises(exception.ProjectNotFound, - driver.get_project_by_name, - resource.NULL_DOMAIN_ID, - ref_id) - - project_ids = [x['id'] for x in - driver.list_projects(driver_hints.Hints())] - self.assertNotIn(ref_id, project_ids) - - projects = driver.list_projects_from_ids([ref_id]) - self.assertThat(projects, matchers.HasLength(0)) - - project_ids = [x for x in - driver.list_project_ids_from_domain_ids([ref_id])] - self.assertNotIn(ref_id, project_ids) - - self.assertRaises(exception.DomainNotFound, - driver.list_projects_in_domain, - ref_id) - - project_ids = [ - x['id'] for x in - driver.list_projects_acting_as_domain(driver_hints.Hints())] - self.assertNotIn(ref_id, project_ids) - - projects = driver.list_projects_in_subtree(ref_id) - self.assertThat(projects, matchers.HasLength(0)) - - self.assertRaises(exception.ProjectNotFound, - driver.list_project_parents, - ref_id) - - # A non-existing project just returns True from the driver - self.assertTrue(driver.is_leaf_project(ref_id)) - - self.assertRaises(exception.ProjectNotFound, - driver.update_project, - ref_id, - {}) - - self.assertRaises(exception.ProjectNotFound, - driver.delete_project, - ref_id) - - # Deleting list of projects that includes a non-existing project - # should be silent - driver.delete_projects_from_ids([ref_id]) - - _exercise_project_api(uuid.uuid4().hex) - _exercise_project_api(resource.NULL_DOMAIN_ID) - - -class SqlTrust(SqlTests, trust_tests.TrustTests): - pass - - -class SqlToken(SqlTests, token_tests.TokenTests): - def test_token_revocation_list_uses_right_columns(self): - # This query used to be heavy with too many columns. We want - # to make sure it is only running with the minimum columns - # necessary. 
- - expected_query_args = (token_sql.TokenModel.id, - token_sql.TokenModel.expires, - token_sql.TokenModel.extra,) - - with mock.patch.object(token_sql, 'sql') as mock_sql: - tok = token_sql.Token() - tok.list_revoked_tokens() - - mock_query = mock_sql.session_for_read().__enter__().query - mock_query.assert_called_with(*expected_query_args) - - def test_flush_expired_tokens_batch(self): - # TODO(dstanek): This test should be rewritten to be less - # brittle. The code will likely need to be changed first. I - # just copied the spirit of the existing test when I rewrote - # mox -> mock. These tests are brittle because they have the - # call structure for SQLAlchemy encoded in them. - - # test sqlite dialect - with mock.patch.object(token_sql, 'sql') as mock_sql: - mock_sql.get_session().bind.dialect.name = 'sqlite' - tok = token_sql.Token() - tok.flush_expired_tokens() - - filter_mock = mock_sql.get_session().query().filter() - self.assertFalse(filter_mock.limit.called) - self.assertTrue(filter_mock.delete.called_once) - - def test_flush_expired_tokens_batch_mysql(self): - # test mysql dialect, we don't need to test IBM DB SA separately, since - # other tests below test the differences between how they use the batch - # strategy - with mock.patch.object(token_sql, 'sql') as mock_sql: - mock_sql.session_for_write().__enter__( - ).query().filter().delete.return_value = 0 - - mock_sql.session_for_write().__enter__( - ).bind.dialect.name = 'mysql' - - tok = token_sql.Token() - expiry_mock = mock.Mock() - ITERS = [1, 2, 3] - expiry_mock.return_value = iter(ITERS) - token_sql._expiry_range_batched = expiry_mock - tok.flush_expired_tokens() - - # The expiry strategy is only invoked once, the other calls are via - # the yield return. 
- self.assertEqual(1, expiry_mock.call_count) - - mock_delete = mock_sql.session_for_write().__enter__( - ).query().filter().delete - - self.assertThat(mock_delete.call_args_list, - matchers.HasLength(len(ITERS))) - - def test_expiry_range_batched(self): - upper_bound_mock = mock.Mock(side_effect=[1, "final value"]) - sess_mock = mock.Mock() - query_mock = sess_mock.query().filter().order_by().offset().limit() - query_mock.one.side_effect = [['test'], sql.NotFound()] - for i, x in enumerate(token_sql._expiry_range_batched(sess_mock, - upper_bound_mock, - batch_size=50)): - if i == 0: - # The first time the batch iterator returns, it should return - # the first result that comes back from the database. - self.assertEqual('test', x) - elif i == 1: - # The second time, the database range function should return - # nothing, so the batch iterator returns the result of the - # upper_bound function - self.assertEqual("final value", x) - else: - self.fail("range batch function returned more than twice") - - def test_expiry_range_strategy_sqlite(self): - tok = token_sql.Token() - sqlite_strategy = tok._expiry_range_strategy('sqlite') - self.assertEqual(token_sql._expiry_range_all, sqlite_strategy) - - def test_expiry_range_strategy_ibm_db_sa(self): - tok = token_sql.Token() - db2_strategy = tok._expiry_range_strategy('ibm_db_sa') - self.assertIsInstance(db2_strategy, functools.partial) - self.assertEqual(token_sql._expiry_range_batched, db2_strategy.func) - self.assertEqual({'batch_size': 100}, db2_strategy.keywords) - - def test_expiry_range_strategy_mysql(self): - tok = token_sql.Token() - mysql_strategy = tok._expiry_range_strategy('mysql') - self.assertIsInstance(mysql_strategy, functools.partial) - self.assertEqual(token_sql._expiry_range_batched, mysql_strategy.func) - self.assertEqual({'batch_size': 1000}, mysql_strategy.keywords) - - -class SqlCatalog(SqlTests, catalog_tests.CatalogTests): - - _legacy_endpoint_id_in_endpoint = True - 
_enabled_default_to_true_when_creating_endpoint = True - - def test_catalog_ignored_malformed_urls(self): - service = unit.new_service_ref() - self.catalog_api.create_service(service['id'], service) - - malformed_url = "http://192.168.1.104:8774/v2/$(tenant)s" - endpoint = unit.new_endpoint_ref(service_id=service['id'], - url=malformed_url, - region_id=None) - self.catalog_api.create_endpoint(endpoint['id'], endpoint.copy()) - - # NOTE(dstanek): there are no valid URLs, so nothing is in the catalog - catalog = self.catalog_api.get_catalog('fake-user', 'fake-tenant') - self.assertEqual({}, catalog) - - def test_get_catalog_with_empty_public_url(self): - service = unit.new_service_ref() - self.catalog_api.create_service(service['id'], service) - - endpoint = unit.new_endpoint_ref(url='', service_id=service['id'], - region_id=None) - self.catalog_api.create_endpoint(endpoint['id'], endpoint.copy()) - - catalog = self.catalog_api.get_catalog('user', 'tenant') - catalog_endpoint = catalog[endpoint['region_id']][service['type']] - self.assertEqual(service['name'], catalog_endpoint['name']) - self.assertEqual(endpoint['id'], catalog_endpoint['id']) - self.assertEqual('', catalog_endpoint['publicURL']) - self.assertIsNone(catalog_endpoint.get('adminURL')) - self.assertIsNone(catalog_endpoint.get('internalURL')) - - def test_create_endpoint_region_returns_not_found(self): - service = unit.new_service_ref() - self.catalog_api.create_service(service['id'], service) - - endpoint = unit.new_endpoint_ref(region_id=uuid.uuid4().hex, - service_id=service['id']) - - self.assertRaises(exception.ValidationError, - self.catalog_api.create_endpoint, - endpoint['id'], - endpoint.copy()) - - def test_create_region_invalid_id(self): - region = unit.new_region_ref(id='0' * 256) - - self.assertRaises(exception.StringLengthExceeded, - self.catalog_api.create_region, - region) - - def test_create_region_invalid_parent_id(self): - region = unit.new_region_ref(parent_region_id='0' * 256) - - 
self.assertRaises(exception.RegionNotFound, - self.catalog_api.create_region, - region) - - def test_delete_region_with_endpoint(self): - # create a region - region = unit.new_region_ref() - self.catalog_api.create_region(region) - - # create a child region - child_region = unit.new_region_ref(parent_region_id=region['id']) - self.catalog_api.create_region(child_region) - # create a service - service = unit.new_service_ref() - self.catalog_api.create_service(service['id'], service) - - # create an endpoint attached to the service and child region - child_endpoint = unit.new_endpoint_ref(region_id=child_region['id'], - service_id=service['id']) - - self.catalog_api.create_endpoint(child_endpoint['id'], child_endpoint) - self.assertRaises(exception.RegionDeletionError, - self.catalog_api.delete_region, - child_region['id']) - - # create an endpoint attached to the service and parent region - endpoint = unit.new_endpoint_ref(region_id=region['id'], - service_id=service['id']) - - self.catalog_api.create_endpoint(endpoint['id'], endpoint) - self.assertRaises(exception.RegionDeletionError, - self.catalog_api.delete_region, - region['id']) - - -class SqlPolicy(SqlTests, policy_tests.PolicyTests): - pass - - -class SqlInheritance(SqlTests, assignment_tests.InheritanceTests): - pass - - -class SqlImpliedRoles(SqlTests, assignment_tests.ImpliedRoleTests): - pass - - -class SqlTokenCacheInvalidation(SqlTests, token_tests.TokenCacheInvalidation): - def setUp(self): - super(SqlTokenCacheInvalidation, self).setUp() - self._create_test_data() - - -class SqlFilterTests(SqlTests, identity_tests.FilterTests): - - def clean_up_entities(self): - """Clean up entity test data from Filter Test Cases.""" - for entity in ['user', 'group', 'project']: - self._delete_test_data(entity, self.entity_list[entity]) - self._delete_test_data(entity, self.domain1_entity_list[entity]) - del self.entity_list - del self.domain1_entity_list - self.domain1['enabled'] = False - 
self.resource_api.update_domain(self.domain1['id'], self.domain1) - self.resource_api.delete_domain(self.domain1['id']) - del self.domain1 - - def test_list_entities_filtered_by_domain(self): - # NOTE(henry-nash): This method is here rather than in - # unit.identity.test_backends since any domain filtering with LDAP is - # handled by the manager layer (and is already tested elsewhere) not at - # the driver level. - self.addCleanup(self.clean_up_entities) - self.domain1 = unit.new_domain_ref() - self.resource_api.create_domain(self.domain1['id'], self.domain1) - - self.entity_list = {} - self.domain1_entity_list = {} - for entity in ['user', 'group', 'project']: - # Create 5 entities, 3 of which are in domain1 - DOMAIN1_ENTITIES = 3 - self.entity_list[entity] = self._create_test_data(entity, 2) - self.domain1_entity_list[entity] = self._create_test_data( - entity, DOMAIN1_ENTITIES, self.domain1['id']) - - # Should get back the DOMAIN1_ENTITIES in domain1 - hints = driver_hints.Hints() - hints.add_filter('domain_id', self.domain1['id']) - entities = self._list_entities(entity)(hints=hints) - self.assertEqual(DOMAIN1_ENTITIES, len(entities)) - self._match_with_list(entities, self.domain1_entity_list[entity]) - # Check the driver has removed the filter from the list hints - self.assertFalse(hints.get_exact_filter_by_name('domain_id')) - - def test_filter_sql_injection_attack(self): - """Test against sql injection attack on filters - - Test Plan: - - Attempt to get all entities back by passing a two-term attribute - - Attempt to piggyback filter to damage DB (e.g. 
drop table) - - """ - # Check we have some users - users = self.identity_api.list_users() - self.assertTrue(len(users) > 0) - - hints = driver_hints.Hints() - hints.add_filter('name', "anything' or 'x'='x") - users = self.identity_api.list_users(hints=hints) - self.assertEqual(0, len(users)) - - # See if we can add a SQL command...use the group table instead of the - # user table since 'user' is reserved word for SQLAlchemy. - group = unit.new_group_ref(domain_id=CONF.identity.default_domain_id) - group = self.identity_api.create_group(group) - - hints = driver_hints.Hints() - hints.add_filter('name', "x'; drop table group") - groups = self.identity_api.list_groups(hints=hints) - self.assertEqual(0, len(groups)) - - groups = self.identity_api.list_groups() - self.assertTrue(len(groups) > 0) - - -class SqlLimitTests(SqlTests, identity_tests.LimitTests): - def setUp(self): - super(SqlLimitTests, self).setUp() - identity_tests.LimitTests.setUp(self) - - -class FakeTable(sql.ModelBase): - __tablename__ = 'test_table' - col = sql.Column(sql.String(32), primary_key=True) - - @sql.handle_conflicts('keystone') - def insert(self): - raise db_exception.DBDuplicateEntry - - @sql.handle_conflicts('keystone') - def update(self): - raise db_exception.DBError( - inner_exception=exc.IntegrityError('a', 'a', 'a')) - - @sql.handle_conflicts('keystone') - def lookup(self): - raise KeyError - - -class SqlDecorators(unit.TestCase): - - def test_initialization_fail(self): - self.assertRaises(exception.StringLengthExceeded, - FakeTable, col='a' * 64) - - def test_initialization(self): - tt = FakeTable(col='a') - self.assertEqual('a', tt.col) - - def test_conflict_happend(self): - self.assertRaises(exception.Conflict, FakeTable().insert) - self.assertRaises(exception.UnexpectedError, FakeTable().update) - - def test_not_conflict_error(self): - self.assertRaises(KeyError, FakeTable().lookup) - - -class SqlModuleInitialization(unit.TestCase): - - @mock.patch.object(sql.core, 'CONF') - 
@mock.patch.object(options, 'set_defaults') - def test_initialize_module(self, set_defaults, CONF): - sql.initialize() - set_defaults.assert_called_with(CONF, - connection='sqlite:///keystone.db') - - -class SqlCredential(SqlTests): - - def _create_credential_with_user_id(self, user_id=uuid.uuid4().hex): - credential = unit.new_credential_ref(user_id=user_id, - extra=uuid.uuid4().hex, - type=uuid.uuid4().hex) - self.credential_api.create_credential(credential['id'], credential) - return credential - - def _validateCredentialList(self, retrieved_credentials, - expected_credentials): - self.assertEqual(len(expected_credentials), len(retrieved_credentials)) - retrived_ids = [c['id'] for c in retrieved_credentials] - for cred in expected_credentials: - self.assertIn(cred['id'], retrived_ids) - - def setUp(self): - super(SqlCredential, self).setUp() - self.credentials = [] - for _ in range(3): - self.credentials.append( - self._create_credential_with_user_id()) - self.user_credentials = [] - for _ in range(3): - cred = self._create_credential_with_user_id(self.user_foo['id']) - self.user_credentials.append(cred) - self.credentials.append(cred) - - def test_list_credentials(self): - credentials = self.credential_api.list_credentials() - self._validateCredentialList(credentials, self.credentials) - # test filtering using hints - hints = driver_hints.Hints() - hints.add_filter('user_id', self.user_foo['id']) - credentials = self.credential_api.list_credentials(hints) - self._validateCredentialList(credentials, self.user_credentials) - - def test_list_credentials_for_user(self): - credentials = self.credential_api.list_credentials_for_user( - self.user_foo['id']) - self._validateCredentialList(credentials, self.user_credentials) - - def test_list_credentials_for_user_and_type(self): - cred = self.user_credentials[0] - credentials = self.credential_api.list_credentials_for_user( - self.user_foo['id'], type=cred['type']) - self._validateCredentialList(credentials, [cred]) 
diff --git a/keystone-moon/keystone/tests/unit/test_backend_templated.py b/keystone-moon/keystone/tests/unit/test_backend_templated.py deleted file mode 100644 index ca957e78..00000000 --- a/keystone-moon/keystone/tests/unit/test_backend_templated.py +++ /dev/null @@ -1,261 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import uuid - -import mock -from six.moves import zip - -from keystone import catalog -from keystone.tests import unit -from keystone.tests.unit.catalog import test_backends as catalog_tests -from keystone.tests.unit import default_fixtures -from keystone.tests.unit.ksfixtures import database - - -BROKEN_WRITE_FUNCTIONALITY_MSG = ("Templated backend doesn't correctly " - "implement write operations") - - -class TestTemplatedCatalog(unit.TestCase, catalog_tests.CatalogTests): - - DEFAULT_FIXTURE = { - 'RegionOne': { - 'compute': { - 'adminURL': 'http://localhost:8774/v1.1/bar', - 'publicURL': 'http://localhost:8774/v1.1/bar', - 'internalURL': 'http://localhost:8774/v1.1/bar', - 'name': "'Compute Service'", - 'id': '2' - }, - 'identity': { - 'adminURL': 'http://localhost:35357/v2.0', - 'publicURL': 'http://localhost:5000/v2.0', - 'internalURL': 'http://localhost:35357/v2.0', - 'name': "'Identity Service'", - 'id': '1' - } - } - } - - def setUp(self): - super(TestTemplatedCatalog, self).setUp() - self.useFixture(database.Database()) - self.load_backends() - self.load_fixtures(default_fixtures) - - def 
config_overrides(self): - super(TestTemplatedCatalog, self).config_overrides() - self.config_fixture.config( - group='catalog', - driver='templated', - template_file=unit.dirs.tests('default_catalog.templates')) - - def test_get_catalog(self): - catalog_ref = self.catalog_api.get_catalog('foo', 'bar') - self.assertDictEqual(self.DEFAULT_FIXTURE, catalog_ref) - - # NOTE(lbragstad): This test is skipped because the catalog is being - # modified within the test and not through the API. - @unit.skip_if_cache_is_enabled('catalog') - def test_catalog_ignored_malformed_urls(self): - # both endpoints are in the catalog - catalog_ref = self.catalog_api.get_catalog('foo', 'bar') - self.assertEqual(2, len(catalog_ref['RegionOne'])) - - region = self.catalog_api.driver.templates['RegionOne'] - region['compute']['adminURL'] = 'http://localhost:8774/v1.1/$(tenant)s' - - # the malformed one has been removed - catalog_ref = self.catalog_api.get_catalog('foo', 'bar') - self.assertEqual(1, len(catalog_ref['RegionOne'])) - - def test_get_catalog_endpoint_disabled(self): - self.skipTest("Templated backend doesn't have disabled endpoints") - - def test_get_v3_catalog_endpoint_disabled(self): - self.skipTest("Templated backend doesn't have disabled endpoints") - - def assert_catalogs_equal(self, expected, observed): - sort_key = lambda d: d['id'] - for e, o in zip(sorted(expected, key=sort_key), - sorted(observed, key=sort_key)): - expected_endpoints = e.pop('endpoints') - observed_endpoints = o.pop('endpoints') - self.assertDictEqual(e, o) - self.assertItemsEqual(expected_endpoints, observed_endpoints) - - def test_get_v3_catalog(self): - user_id = uuid.uuid4().hex - project_id = uuid.uuid4().hex - catalog_ref = self.catalog_api.get_v3_catalog(user_id, project_id) - exp_catalog = [ - {'endpoints': [ - {'interface': 'admin', - 'region': 'RegionOne', - 'url': 'http://localhost:8774/v1.1/%s' % project_id}, - {'interface': 'public', - 'region': 'RegionOne', - 'url': 
'http://localhost:8774/v1.1/%s' % project_id}, - {'interface': 'internal', - 'region': 'RegionOne', - 'url': 'http://localhost:8774/v1.1/%s' % project_id}], - 'type': 'compute', - 'name': "'Compute Service'", - 'id': '2'}, - {'endpoints': [ - {'interface': 'admin', - 'region': 'RegionOne', - 'url': 'http://localhost:35357/v2.0'}, - {'interface': 'public', - 'region': 'RegionOne', - 'url': 'http://localhost:5000/v2.0'}, - {'interface': 'internal', - 'region': 'RegionOne', - 'url': 'http://localhost:35357/v2.0'}], - 'type': 'identity', - 'name': "'Identity Service'", - 'id': '1'}] - self.assert_catalogs_equal(exp_catalog, catalog_ref) - - def test_get_catalog_ignores_endpoints_with_invalid_urls(self): - user_id = uuid.uuid4().hex - tenant_id = None - # If the URL has no 'tenant_id' to substitute, we will skip the - # endpoint which contains this kind of URL. - catalog_ref = self.catalog_api.get_v3_catalog(user_id, tenant_id) - exp_catalog = [ - {'endpoints': [], - 'type': 'compute', - 'name': "'Compute Service'", - 'id': '2'}, - {'endpoints': [ - {'interface': 'admin', - 'region': 'RegionOne', - 'url': 'http://localhost:35357/v2.0'}, - {'interface': 'public', - 'region': 'RegionOne', - 'url': 'http://localhost:5000/v2.0'}, - {'interface': 'internal', - 'region': 'RegionOne', - 'url': 'http://localhost:35357/v2.0'}], - 'type': 'identity', - 'name': "'Identity Service'", - 'id': '1'}] - self.assert_catalogs_equal(exp_catalog, catalog_ref) - - def test_list_regions_filtered_by_parent_region_id(self): - self.skipTest('Templated backend does not support hints') - - def test_service_filtering(self): - self.skipTest("Templated backend doesn't support filtering") - - def test_list_services_with_hints(self): - hints = {} - services = self.catalog_api.list_services(hints=hints) - exp_services = [ - {'type': 'compute', - 'description': '', - 'enabled': True, - 'name': "'Compute Service'", - 'id': 'compute'}, - {'type': 'identity', - 'description': '', - 'enabled': True, - 
'name': "'Identity Service'", - 'id': 'identity'}] - self.assertItemsEqual(exp_services, services) - - # NOTE(dstanek): the following methods have been overridden - # from unit.catalog.test_backends.CatalogTests. - - def test_region_crud(self): - self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG) - - @unit.skip_if_cache_disabled('catalog') - def test_cache_layer_region_crud(self): - self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG) - - @unit.skip_if_cache_disabled('catalog') - def test_invalidate_cache_when_updating_region(self): - self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG) - - def test_create_region_with_duplicate_id(self): - self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG) - - def test_delete_region_returns_not_found(self): - self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG) - - def test_create_region_invalid_parent_region_returns_not_found(self): - self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG) - - def test_avoid_creating_circular_references_in_regions_update(self): - self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG) - - @mock.patch.object(catalog.Driver, - "_ensure_no_circle_in_hierarchical_regions") - def test_circular_regions_can_be_deleted(self, mock_ensure_on_circle): - self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG) - - def test_service_crud(self): - self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG) - - @unit.skip_if_cache_disabled('catalog') - def test_cache_layer_service_crud(self): - self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG) - - @unit.skip_if_cache_disabled('catalog') - def test_invalidate_cache_when_updating_service(self): - self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG) - - def test_delete_service_with_endpoint(self): - self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG) - - def test_cache_layer_delete_service_with_endpoint(self): - self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG) - - def test_delete_service_returns_not_found(self): - self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG) - - def test_update_endpoint_nonexistent_service(self): - 
self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG) - - def test_create_endpoint_nonexistent_region(self): - self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG) - - def test_update_endpoint_nonexistent_region(self): - self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG) - - def test_get_endpoint_returns_not_found(self): - self.skipTest("Templated backend doesn't use IDs for endpoints.") - - def test_delete_endpoint_returns_not_found(self): - self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG) - - def test_create_endpoint(self): - self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG) - - def test_update_endpoint(self): - self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG) - - def test_list_endpoints(self): - expected_urls = set(['http://localhost:$(public_port)s/v2.0', - 'http://localhost:$(admin_port)s/v2.0', - 'http://localhost:8774/v1.1/$(tenant_id)s']) - endpoints = self.catalog_api.list_endpoints() - self.assertEqual(expected_urls, set(e['url'] for e in endpoints)) - - @unit.skip_if_cache_disabled('catalog') - def test_invalidate_cache_when_updating_endpoint(self): - self.skipTest(BROKEN_WRITE_FUNCTIONALITY_MSG) diff --git a/keystone-moon/keystone/tests/unit/test_cache.py b/keystone-moon/keystone/tests/unit/test_cache.py deleted file mode 100644 index 3c2afe66..00000000 --- a/keystone-moon/keystone/tests/unit/test_cache.py +++ /dev/null @@ -1,324 +0,0 @@ -# Copyright 2013 Metacloud -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import copy -import time -import uuid - -from dogpile.cache import api -from dogpile.cache import proxy -import mock -from oslo_config import cfg - -from keystone.common import cache -from keystone import exception -from keystone.tests import unit - - -CONF = cfg.CONF -NO_VALUE = api.NO_VALUE - - -def _copy_value(value): - if value is not NO_VALUE: - value = copy.deepcopy(value) - return value - - -# NOTE(morganfainberg): WARNING - It is not recommended to use the Memory -# backend for dogpile.cache in a real deployment under any circumstances. The -# backend does no cleanup of expired values and therefore will leak memory. The -# backend is not implemented in a way to share data across processes (e.g. -# Keystone in HTTPD. This proxy is a hack to get around the lack of isolation -# of values in memory. Currently it blindly stores and retrieves the values -# from the cache, and modifications to dicts/lists/etc returned can result in -# changes to the cached values. In short, do not use the dogpile.cache.memory -# backend unless you are running tests or expecting odd/strange results. -class CacheIsolatingProxy(proxy.ProxyBackend): - """Proxy that forces a memory copy of stored values. - - The default in-memory cache-region does not perform a copy on values it is - meant to cache. Therefore if the value is modified after set or after get, - the cached value also is modified. This proxy does a copy as the last - thing before storing data. 
- - """ - def get(self, key): - return _copy_value(self.proxied.get(key)) - - def set(self, key, value): - self.proxied.set(key, _copy_value(value)) - - -class TestProxy(proxy.ProxyBackend): - def get(self, key): - value = _copy_value(self.proxied.get(key)) - if value is not NO_VALUE: - if isinstance(value[0], TestProxyValue): - value[0].cached = True - return value - - -class TestProxyValue(object): - def __init__(self, value): - self.value = value - self.cached = False - - -class CacheRegionTest(unit.TestCase): - - def setUp(self): - super(CacheRegionTest, self).setUp() - self.region = cache.make_region() - cache.configure_cache_region(self.region) - self.region.wrap(TestProxy) - self.test_value = TestProxyValue('Decorator Test') - - def _add_test_caching_option(self): - self.config_fixture.register_opt( - cfg.BoolOpt('caching', default=True), group='cache') - - def _get_cacheable_function(self): - with mock.patch.object(cache.REGION, 'cache_on_arguments', - self.region.cache_on_arguments): - memoize = cache.get_memoization_decorator(section='cache') - - @memoize - def cacheable_function(value): - return value - - return cacheable_function - - def test_region_built_with_proxy_direct_cache_test(self): - # Verify cache regions are properly built with proxies. - test_value = TestProxyValue('Direct Cache Test') - self.region.set('cache_test', test_value) - cached_value = self.region.get('cache_test') - self.assertTrue(cached_value.cached) - - def test_cache_region_no_error_multiple_config(self): - # Verify configuring the CacheRegion again doesn't error. 
- cache.configure_cache_region(self.region) - cache.configure_cache_region(self.region) - - def _get_cache_fallthrough_fn(self, cache_time): - with mock.patch.object(cache.REGION, 'cache_on_arguments', - self.region.cache_on_arguments): - memoize = cache.get_memoization_decorator( - section='cache', - expiration_section='assignment') - - class _test_obj(object): - def __init__(self, value): - self.test_value = value - - @memoize - def get_test_value(self): - return self.test_value - - def _do_test(value): - - test_obj = _test_obj(value) - - # Ensure the value has been cached - test_obj.get_test_value() - # Get the now cached value - cached_value = test_obj.get_test_value() - self.assertTrue(cached_value.cached) - self.assertEqual(value.value, cached_value.value) - self.assertEqual(cached_value.value, test_obj.test_value.value) - # Change the underlying value on the test object. - test_obj.test_value = TestProxyValue(uuid.uuid4().hex) - self.assertEqual(cached_value.value, - test_obj.get_test_value().value) - # override the system time to ensure the non-cached new value - # is returned - new_time = time.time() + (cache_time * 2) - with mock.patch.object(time, 'time', - return_value=new_time): - overriden_cache_value = test_obj.get_test_value() - self.assertNotEqual(cached_value.value, - overriden_cache_value.value) - self.assertEqual(test_obj.test_value.value, - overriden_cache_value.value) - - return _do_test - - def test_cache_no_fallthrough_expiration_time_fn(self): - # Since we do not re-configure the cache region, for ease of testing - # this value is set the same as the expiration_time default in the - # [cache] section - cache_time = 600 - expiration_time = cache.get_expiration_time_fn('role') - do_test = self._get_cache_fallthrough_fn(cache_time) - # Run the test with the assignment cache_time value - self.config_fixture.config(cache_time=cache_time, - group='role') - test_value = TestProxyValue(uuid.uuid4().hex) - self.assertEqual(cache_time, 
expiration_time()) - do_test(value=test_value) - - def test_cache_fallthrough_expiration_time_fn(self): - # Since we do not re-configure the cache region, for ease of testing - # this value is set the same as the expiration_time default in the - # [cache] section - cache_time = 599 - expiration_time = cache.get_expiration_time_fn('role') - do_test = self._get_cache_fallthrough_fn(cache_time) - # Run the test with the assignment cache_time value set to None and - # the global value set. - self.config_fixture.config(cache_time=None, group='role') - test_value = TestProxyValue(uuid.uuid4().hex) - self.assertIsNone(expiration_time()) - do_test(value=test_value) - - def test_should_cache_fn_global_cache_enabled(self): - # Verify should_cache_fn generates a sane function for subsystem and - # functions as expected with caching globally enabled. - cacheable_function = self._get_cacheable_function() - - self.config_fixture.config(group='cache', enabled=True) - cacheable_function(self.test_value) - cached_value = cacheable_function(self.test_value) - self.assertTrue(cached_value.cached) - - def test_should_cache_fn_global_cache_disabled(self): - # Verify should_cache_fn generates a sane function for subsystem and - # functions as expected with caching globally disabled. - cacheable_function = self._get_cacheable_function() - - self.config_fixture.config(group='cache', enabled=False) - cacheable_function(self.test_value) - cached_value = cacheable_function(self.test_value) - self.assertFalse(cached_value.cached) - - def test_should_cache_fn_global_cache_disabled_section_cache_enabled(self): - # Verify should_cache_fn generates a sane function for subsystem and - # functions as expected with caching globally disabled and the specific - # section caching enabled. 
- cacheable_function = self._get_cacheable_function() - - self._add_test_caching_option() - self.config_fixture.config(group='cache', enabled=False) - self.config_fixture.config(group='cache', caching=True) - - cacheable_function(self.test_value) - cached_value = cacheable_function(self.test_value) - self.assertFalse(cached_value.cached) - - def test_should_cache_fn_global_cache_enabled_section_cache_disabled(self): - # Verify should_cache_fn generates a sane function for subsystem and - # functions as expected with caching globally enabled and the specific - # section caching disabled. - cacheable_function = self._get_cacheable_function() - - self._add_test_caching_option() - self.config_fixture.config(group='cache', enabled=True) - self.config_fixture.config(group='cache', caching=False) - - cacheable_function(self.test_value) - cached_value = cacheable_function(self.test_value) - self.assertFalse(cached_value.cached) - - def test_should_cache_fn_global_cache_enabled_section_cache_enabled(self): - # Verify should_cache_fn generates a sane function for subsystem and - # functions as expected with caching globally enabled and the specific - # section caching enabled. 
- cacheable_function = self._get_cacheable_function() - - self._add_test_caching_option() - self.config_fixture.config(group='cache', enabled=True) - self.config_fixture.config(group='cache', caching=True) - - cacheable_function(self.test_value) - cached_value = cacheable_function(self.test_value) - self.assertTrue(cached_value.cached) - - def test_cache_dictionary_config_builder(self): - """Validate we build a sane dogpile.cache dictionary config.""" - self.config_fixture.config(group='cache', - config_prefix='test_prefix', - backend='some_test_backend', - expiration_time=86400, - backend_argument=['arg1:test', - 'arg2:test:test', - 'arg3.invalid']) - - config_dict = cache.build_cache_config() - self.assertEqual( - CONF.cache.backend, config_dict['test_prefix.backend']) - self.assertEqual( - CONF.cache.expiration_time, - config_dict['test_prefix.expiration_time']) - self.assertEqual('test', config_dict['test_prefix.arguments.arg1']) - self.assertEqual('test:test', - config_dict['test_prefix.arguments.arg2']) - self.assertNotIn('test_prefix.arguments.arg3', config_dict) - - def test_cache_debug_proxy(self): - single_value = 'Test Value' - single_key = 'testkey' - multi_values = {'key1': 1, 'key2': 2, 'key3': 3} - - self.region.set(single_key, single_value) - self.assertEqual(single_value, self.region.get(single_key)) - - self.region.delete(single_key) - self.assertEqual(NO_VALUE, self.region.get(single_key)) - - self.region.set_multi(multi_values) - cached_values = self.region.get_multi(multi_values.keys()) - for value in multi_values.values(): - self.assertIn(value, cached_values) - self.assertEqual(len(multi_values.values()), len(cached_values)) - - self.region.delete_multi(multi_values.keys()) - for value in self.region.get_multi(multi_values.keys()): - self.assertEqual(NO_VALUE, value) - - def test_configure_non_region_object_raises_error(self): - self.assertRaises(exception.ValidationError, - cache.configure_cache_region, - "bogus") - - -class 
CacheNoopBackendTest(unit.TestCase): - - def setUp(self): - super(CacheNoopBackendTest, self).setUp() - self.region = cache.make_region() - cache.configure_cache_region(self.region) - - def config_overrides(self): - super(CacheNoopBackendTest, self).config_overrides() - self.config_fixture.config(group='cache', - backend='keystone.common.cache.noop') - - def test_noop_backend(self): - single_value = 'Test Value' - single_key = 'testkey' - multi_values = {'key1': 1, 'key2': 2, 'key3': 3} - - self.region.set(single_key, single_value) - self.assertEqual(NO_VALUE, self.region.get(single_key)) - - self.region.set_multi(multi_values) - cached_values = self.region.get_multi(multi_values.keys()) - self.assertEqual(len(cached_values), len(multi_values.values())) - for value in cached_values: - self.assertEqual(NO_VALUE, value) - - # Delete should not raise exceptions - self.region.delete(single_key) - self.region.delete_multi(multi_values.keys()) diff --git a/keystone-moon/keystone/tests/unit/test_cache_backend_mongo.py b/keystone-moon/keystone/tests/unit/test_cache_backend_mongo.py deleted file mode 100644 index 66f80c21..00000000 --- a/keystone-moon/keystone/tests/unit/test_cache_backend_mongo.py +++ /dev/null @@ -1,728 +0,0 @@ -# Copyright 2014 Hewlett-Packard Development Company, L.P. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import collections -import copy -import functools -import uuid - -from dogpile.cache import api -from dogpile.cache import region as dp_region -import six -from six.moves import range - -from keystone.common.cache.backends import mongo -from keystone import exception -from keystone.tests import unit - - -# Mock database structure sample where 'ks_cache' is database and -# 'cache' is collection. Dogpile CachedValue data is divided in two -# fields `value` (CachedValue.payload) and `meta` (CachedValue.metadata) -ks_cache = { - "cache": [ - { - "value": { - "serviceType": "identity", - "allVersionsUrl": "https://dummyUrl", - "dateLastModified": "ISODDate(2014-02-08T18:39:13.237Z)", - "serviceName": "Identity", - "enabled": "True" - }, - "meta": { - "v": 1, - "ct": 1392371422.015121 - }, - "doc_date": "ISODate('2014-02-14T09:50:22.015Z')", - "_id": "8251dc95f63842719c077072f1047ddf" - }, - { - "value": "dummyValueX", - "meta": { - "v": 1, - "ct": 1392371422.014058 - }, - "doc_date": "ISODate('2014-02-14T09:50:22.014Z')", - "_id": "66730b9534d146f0804d23729ad35436" - } - ] -} - - -COLLECTIONS = {} -SON_MANIPULATOR = None - - -class MockCursor(object): - - def __init__(self, collection, dataset_factory): - super(MockCursor, self).__init__() - self.collection = collection - self._factory = dataset_factory - self._dataset = self._factory() - self._limit = None - self._skip = None - - def __iter__(self): - return self - - def __next__(self): - if self._skip: - for _ in range(self._skip): - next(self._dataset) - self._skip = None - if self._limit is not None and self._limit <= 0: - raise StopIteration() - if self._limit is not None: - self._limit -= 1 - return next(self._dataset) - - next = __next__ - - def __getitem__(self, index): - arr = [x for x in self._dataset] - self._dataset = iter(arr) - return arr[index] - - -class MockCollection(object): - - def __init__(self, db, name): - super(MockCollection, self).__init__() - self.name = name - self._collection_database = 
db - self._documents = {} - self.write_concern = {} - - def __getattr__(self, name): - if name == 'database': - return self._collection_database - - def ensure_index(self, key_or_list, *args, **kwargs): - pass - - def index_information(self): - return {} - - def find_one(self, spec_or_id=None, *args, **kwargs): - if spec_or_id is None: - spec_or_id = {} - if not isinstance(spec_or_id, collections.Mapping): - spec_or_id = {'_id': spec_or_id} - - try: - return next(self.find(spec_or_id, *args, **kwargs)) - except StopIteration: - return None - - def find(self, spec=None, *args, **kwargs): - return MockCursor(self, functools.partial(self._get_dataset, spec)) - - def _get_dataset(self, spec): - dataset = (self._copy_doc(document, dict) for document in - self._iter_documents(spec)) - return dataset - - def _iter_documents(self, spec=None): - return (SON_MANIPULATOR.transform_outgoing(document, self) for - document in six.itervalues(self._documents) - if self._apply_filter(document, spec)) - - def _apply_filter(self, document, query): - for key, search in query.items(): - doc_val = document.get(key) - if isinstance(search, dict): - op_dict = {'$in': lambda dv, sv: dv in sv} - is_match = all( - op_str in op_dict and op_dict[op_str](doc_val, search_val) - for op_str, search_val in search.items() - ) - else: - is_match = doc_val == search - - return is_match - - def _copy_doc(self, obj, container): - if isinstance(obj, list): - new = [] - for item in obj: - new.append(self._copy_doc(item, container)) - return new - if isinstance(obj, dict): - new = container() - for key, value in list(obj.items()): - new[key] = self._copy_doc(value, container) - return new - else: - return copy.copy(obj) - - def insert(self, data, manipulate=True, **kwargs): - if isinstance(data, list): - return [self._insert(element) for element in data] - return self._insert(data) - - def save(self, data, manipulate=True, **kwargs): - return self._insert(data) - - def _insert(self, data): - if '_id' not 
in data: - data['_id'] = uuid.uuid4().hex - object_id = data['_id'] - self._documents[object_id] = self._internalize_dict(data) - return object_id - - def find_and_modify(self, spec, document, upsert=False, **kwargs): - self.update(spec, document, upsert, **kwargs) - - def update(self, spec, document, upsert=False, **kwargs): - - existing_docs = [doc for doc in six.itervalues(self._documents) - if self._apply_filter(doc, spec)] - if existing_docs: - existing_doc = existing_docs[0] # should find only 1 match - _id = existing_doc['_id'] - existing_doc.clear() - existing_doc['_id'] = _id - existing_doc.update(self._internalize_dict(document)) - elif upsert: - existing_doc = self._documents[self._insert(document)] - - def _internalize_dict(self, d): - return {k: copy.deepcopy(v) for k, v in d.items()} - - def remove(self, spec_or_id=None, search_filter=None): - """Remove objects matching spec_or_id from the collection.""" - if spec_or_id is None: - spec_or_id = search_filter if search_filter else {} - if not isinstance(spec_or_id, dict): - spec_or_id = {'_id': spec_or_id} - to_delete = list(self.find(spec=spec_or_id)) - for doc in to_delete: - doc_id = doc['_id'] - del self._documents[doc_id] - - return { - "connectionId": uuid.uuid4().hex, - "n": len(to_delete), - "ok": 1.0, - "err": None, - } - - -class MockMongoDB(object): - def __init__(self, dbname): - self._dbname = dbname - self.mainpulator = None - - def authenticate(self, username, password): - pass - - def add_son_manipulator(self, manipulator): - global SON_MANIPULATOR - SON_MANIPULATOR = manipulator - - def __getattr__(self, name): - if name == 'authenticate': - return self.authenticate - elif name == 'name': - return self._dbname - elif name == 'add_son_manipulator': - return self.add_son_manipulator - else: - return get_collection(self._dbname, name) - - def __getitem__(self, name): - return get_collection(self._dbname, name) - - -class MockMongoClient(object): - def __init__(self, *args, **kwargs): - 
pass - - def __getattr__(self, dbname): - return MockMongoDB(dbname) - - -def get_collection(db_name, collection_name): - mongo_collection = MockCollection(MockMongoDB(db_name), collection_name) - return mongo_collection - - -def pymongo_override(): - global pymongo - import pymongo - if pymongo.MongoClient is not MockMongoClient: - pymongo.MongoClient = MockMongoClient - if pymongo.MongoReplicaSetClient is not MockMongoClient: - pymongo.MongoClient = MockMongoClient - - -class MyTransformer(mongo.BaseTransform): - """Added here just to check manipulator logic is used correctly.""" - - def transform_incoming(self, son, collection): - return super(MyTransformer, self).transform_incoming(son, collection) - - def transform_outgoing(self, son, collection): - return super(MyTransformer, self).transform_outgoing(son, collection) - - -class MongoCache(unit.BaseTestCase): - def setUp(self): - super(MongoCache, self).setUp() - global COLLECTIONS - COLLECTIONS = {} - mongo.MongoApi._DB = {} - mongo.MongoApi._MONGO_COLLS = {} - pymongo_override() - # using typical configuration - self.arguments = { - 'db_hosts': 'localhost:27017', - 'db_name': 'ks_cache', - 'cache_collection': 'cache', - 'username': 'test_user', - 'password': 'test_password' - } - - def test_missing_db_hosts(self): - self.arguments.pop('db_hosts') - region = dp_region.make_region() - self.assertRaises(exception.ValidationError, region.configure, - 'keystone.cache.mongo', - arguments=self.arguments) - - def test_missing_db_name(self): - self.arguments.pop('db_name') - region = dp_region.make_region() - self.assertRaises(exception.ValidationError, region.configure, - 'keystone.cache.mongo', - arguments=self.arguments) - - def test_missing_cache_collection_name(self): - self.arguments.pop('cache_collection') - region = dp_region.make_region() - self.assertRaises(exception.ValidationError, region.configure, - 'keystone.cache.mongo', - arguments=self.arguments) - - def test_incorrect_write_concern(self): - 
self.arguments['w'] = 'one value' - region = dp_region.make_region() - self.assertRaises(exception.ValidationError, region.configure, - 'keystone.cache.mongo', - arguments=self.arguments) - - def test_correct_write_concern(self): - self.arguments['w'] = 1 - region = dp_region.make_region().configure('keystone.cache.mongo', - arguments=self.arguments) - - random_key = uuid.uuid4().hex - region.set(random_key, "dummyValue10") - # There is no proxy so can access MongoCacheBackend directly - self.assertEqual(1, region.backend.api.w) - - def test_incorrect_read_preference(self): - self.arguments['read_preference'] = 'inValidValue' - region = dp_region.make_region().configure('keystone.cache.mongo', - arguments=self.arguments) - # As per delayed loading of pymongo, read_preference value should - # still be string and NOT enum - self.assertEqual('inValidValue', region.backend.api.read_preference) - - random_key = uuid.uuid4().hex - self.assertRaises(ValueError, region.set, - random_key, "dummyValue10") - - def test_correct_read_preference(self): - self.arguments['read_preference'] = 'secondaryPreferred' - region = dp_region.make_region().configure('keystone.cache.mongo', - arguments=self.arguments) - # As per delayed loading of pymongo, read_preference value should - # still be string and NOT enum - self.assertEqual('secondaryPreferred', - region.backend.api.read_preference) - - random_key = uuid.uuid4().hex - region.set(random_key, "dummyValue10") - - # Now as pymongo is loaded so expected read_preference value is enum. 
- # There is no proxy so can access MongoCacheBackend directly - self.assertEqual(3, region.backend.api.read_preference) - - def test_missing_replica_set_name(self): - self.arguments['use_replica'] = True - region = dp_region.make_region() - self.assertRaises(exception.ValidationError, region.configure, - 'keystone.cache.mongo', - arguments=self.arguments) - - def test_provided_replica_set_name(self): - self.arguments['use_replica'] = True - self.arguments['replicaset_name'] = 'my_replica' - dp_region.make_region().configure('keystone.cache.mongo', - arguments=self.arguments) - self.assertTrue(True) # reached here means no initialization error - - def test_incorrect_mongo_ttl_seconds(self): - self.arguments['mongo_ttl_seconds'] = 'sixty' - region = dp_region.make_region() - self.assertRaises(exception.ValidationError, region.configure, - 'keystone.cache.mongo', - arguments=self.arguments) - - def test_cache_configuration_values_assertion(self): - self.arguments['use_replica'] = True - self.arguments['replicaset_name'] = 'my_replica' - self.arguments['mongo_ttl_seconds'] = 60 - self.arguments['ssl'] = False - region = dp_region.make_region().configure('keystone.cache.mongo', - arguments=self.arguments) - # There is no proxy so can access MongoCacheBackend directly - self.assertEqual('localhost:27017', region.backend.api.hosts) - self.assertEqual('ks_cache', region.backend.api.db_name) - self.assertEqual('cache', region.backend.api.cache_collection) - self.assertEqual('test_user', region.backend.api.username) - self.assertEqual('test_password', region.backend.api.password) - self.assertEqual(True, region.backend.api.use_replica) - self.assertEqual('my_replica', region.backend.api.replicaset_name) - self.assertEqual(False, region.backend.api.conn_kwargs['ssl']) - self.assertEqual(60, region.backend.api.ttl_seconds) - - def test_multiple_region_cache_configuration(self): - arguments1 = copy.copy(self.arguments) - arguments1['cache_collection'] = 'cache_region1' - - 
region1 = dp_region.make_region().configure('keystone.cache.mongo', - arguments=arguments1) - # There is no proxy so can access MongoCacheBackend directly - self.assertEqual('localhost:27017', region1.backend.api.hosts) - self.assertEqual('ks_cache', region1.backend.api.db_name) - self.assertEqual('cache_region1', region1.backend.api.cache_collection) - self.assertEqual('test_user', region1.backend.api.username) - self.assertEqual('test_password', region1.backend.api.password) - # Should be None because of delayed initialization - self.assertIsNone(region1.backend.api._data_manipulator) - - random_key1 = uuid.uuid4().hex - region1.set(random_key1, "dummyValue10") - self.assertEqual("dummyValue10", region1.get(random_key1)) - # Now should have initialized - self.assertIsInstance(region1.backend.api._data_manipulator, - mongo.BaseTransform) - - class_name = '%s.%s' % (MyTransformer.__module__, "MyTransformer") - - arguments2 = copy.copy(self.arguments) - arguments2['cache_collection'] = 'cache_region2' - arguments2['son_manipulator'] = class_name - - region2 = dp_region.make_region().configure('keystone.cache.mongo', - arguments=arguments2) - # There is no proxy so can access MongoCacheBackend directly - self.assertEqual('localhost:27017', region2.backend.api.hosts) - self.assertEqual('ks_cache', region2.backend.api.db_name) - self.assertEqual('cache_region2', region2.backend.api.cache_collection) - - # Should be None because of delayed initialization - self.assertIsNone(region2.backend.api._data_manipulator) - - random_key = uuid.uuid4().hex - region2.set(random_key, "dummyValue20") - self.assertEqual("dummyValue20", region2.get(random_key)) - # Now should have initialized - self.assertIsInstance(region2.backend.api._data_manipulator, - MyTransformer) - - region1.set(random_key1, "dummyValue22") - self.assertEqual("dummyValue22", region1.get(random_key1)) - - def test_typical_configuration(self): - - dp_region.make_region().configure( - 'keystone.cache.mongo', - 
arguments=self.arguments - ) - self.assertTrue(True) # reached here means no initialization error - - def test_backend_get_missing_data(self): - - region = dp_region.make_region().configure( - 'keystone.cache.mongo', - arguments=self.arguments - ) - - random_key = uuid.uuid4().hex - # should return NO_VALUE as key does not exist in cache - self.assertEqual(api.NO_VALUE, region.get(random_key)) - - def test_backend_set_data(self): - - region = dp_region.make_region().configure( - 'keystone.cache.mongo', - arguments=self.arguments - ) - - random_key = uuid.uuid4().hex - region.set(random_key, "dummyValue") - self.assertEqual("dummyValue", region.get(random_key)) - - def test_backend_set_data_with_string_as_valid_ttl(self): - - self.arguments['mongo_ttl_seconds'] = '3600' - region = dp_region.make_region().configure('keystone.cache.mongo', - arguments=self.arguments) - self.assertEqual(3600, region.backend.api.ttl_seconds) - random_key = uuid.uuid4().hex - region.set(random_key, "dummyValue") - self.assertEqual("dummyValue", region.get(random_key)) - - def test_backend_set_data_with_int_as_valid_ttl(self): - - self.arguments['mongo_ttl_seconds'] = 1800 - region = dp_region.make_region().configure('keystone.cache.mongo', - arguments=self.arguments) - self.assertEqual(1800, region.backend.api.ttl_seconds) - random_key = uuid.uuid4().hex - region.set(random_key, "dummyValue") - self.assertEqual("dummyValue", region.get(random_key)) - - def test_backend_set_none_as_data(self): - - region = dp_region.make_region().configure( - 'keystone.cache.mongo', - arguments=self.arguments - ) - - random_key = uuid.uuid4().hex - region.set(random_key, None) - self.assertIsNone(region.get(random_key)) - - def test_backend_set_blank_as_data(self): - - region = dp_region.make_region().configure( - 'keystone.cache.mongo', - arguments=self.arguments - ) - - random_key = uuid.uuid4().hex - region.set(random_key, "") - self.assertEqual("", region.get(random_key)) - - def 
test_backend_set_same_key_multiple_times(self): - - region = dp_region.make_region().configure( - 'keystone.cache.mongo', - arguments=self.arguments - ) - - random_key = uuid.uuid4().hex - region.set(random_key, "dummyValue") - self.assertEqual("dummyValue", region.get(random_key)) - - dict_value = {'key1': 'value1'} - region.set(random_key, dict_value) - self.assertEqual(dict_value, region.get(random_key)) - - region.set(random_key, "dummyValue2") - self.assertEqual("dummyValue2", region.get(random_key)) - - def test_backend_multi_set_data(self): - - region = dp_region.make_region().configure( - 'keystone.cache.mongo', - arguments=self.arguments - ) - random_key = uuid.uuid4().hex - random_key1 = uuid.uuid4().hex - random_key2 = uuid.uuid4().hex - random_key3 = uuid.uuid4().hex - mapping = {random_key1: 'dummyValue1', - random_key2: 'dummyValue2', - random_key3: 'dummyValue3'} - region.set_multi(mapping) - # should return NO_VALUE as key does not exist in cache - self.assertEqual(api.NO_VALUE, region.get(random_key)) - self.assertFalse(region.get(random_key)) - self.assertEqual("dummyValue1", region.get(random_key1)) - self.assertEqual("dummyValue2", region.get(random_key2)) - self.assertEqual("dummyValue3", region.get(random_key3)) - - def test_backend_multi_get_data(self): - - region = dp_region.make_region().configure( - 'keystone.cache.mongo', - arguments=self.arguments - ) - random_key = uuid.uuid4().hex - random_key1 = uuid.uuid4().hex - random_key2 = uuid.uuid4().hex - random_key3 = uuid.uuid4().hex - mapping = {random_key1: 'dummyValue1', - random_key2: '', - random_key3: 'dummyValue3'} - region.set_multi(mapping) - - keys = [random_key, random_key1, random_key2, random_key3] - results = region.get_multi(keys) - # should return NO_VALUE as key does not exist in cache - self.assertEqual(api.NO_VALUE, results[0]) - self.assertEqual("dummyValue1", results[1]) - self.assertEqual("", results[2]) - self.assertEqual("dummyValue3", results[3]) - - def 
test_backend_multi_set_should_update_existing(self): - - region = dp_region.make_region().configure( - 'keystone.cache.mongo', - arguments=self.arguments - ) - random_key = uuid.uuid4().hex - random_key1 = uuid.uuid4().hex - random_key2 = uuid.uuid4().hex - random_key3 = uuid.uuid4().hex - mapping = {random_key1: 'dummyValue1', - random_key2: 'dummyValue2', - random_key3: 'dummyValue3'} - region.set_multi(mapping) - # should return NO_VALUE as key does not exist in cache - self.assertEqual(api.NO_VALUE, region.get(random_key)) - self.assertEqual("dummyValue1", region.get(random_key1)) - self.assertEqual("dummyValue2", region.get(random_key2)) - self.assertEqual("dummyValue3", region.get(random_key3)) - - mapping = {random_key1: 'dummyValue4', - random_key2: 'dummyValue5'} - region.set_multi(mapping) - self.assertEqual(api.NO_VALUE, region.get(random_key)) - self.assertEqual("dummyValue4", region.get(random_key1)) - self.assertEqual("dummyValue5", region.get(random_key2)) - self.assertEqual("dummyValue3", region.get(random_key3)) - - def test_backend_multi_set_get_with_blanks_none(self): - - region = dp_region.make_region().configure( - 'keystone.cache.mongo', - arguments=self.arguments - ) - random_key = uuid.uuid4().hex - random_key1 = uuid.uuid4().hex - random_key2 = uuid.uuid4().hex - random_key3 = uuid.uuid4().hex - random_key4 = uuid.uuid4().hex - mapping = {random_key1: 'dummyValue1', - random_key2: None, - random_key3: '', - random_key4: 'dummyValue4'} - region.set_multi(mapping) - # should return NO_VALUE as key does not exist in cache - self.assertEqual(api.NO_VALUE, region.get(random_key)) - self.assertEqual("dummyValue1", region.get(random_key1)) - self.assertIsNone(region.get(random_key2)) - self.assertEqual("", region.get(random_key3)) - self.assertEqual("dummyValue4", region.get(random_key4)) - - keys = [random_key, random_key1, random_key2, random_key3, random_key4] - results = region.get_multi(keys) - - # should return NO_VALUE as key does not exist 
in cache - self.assertEqual(api.NO_VALUE, results[0]) - self.assertEqual("dummyValue1", results[1]) - self.assertIsNone(results[2]) - self.assertEqual("", results[3]) - self.assertEqual("dummyValue4", results[4]) - - mapping = {random_key1: 'dummyValue5', - random_key2: 'dummyValue6'} - region.set_multi(mapping) - self.assertEqual(api.NO_VALUE, region.get(random_key)) - self.assertEqual("dummyValue5", region.get(random_key1)) - self.assertEqual("dummyValue6", region.get(random_key2)) - self.assertEqual("", region.get(random_key3)) - - def test_backend_delete_data(self): - - region = dp_region.make_region().configure( - 'keystone.cache.mongo', - arguments=self.arguments - ) - - random_key = uuid.uuid4().hex - region.set(random_key, "dummyValue") - self.assertEqual("dummyValue", region.get(random_key)) - - region.delete(random_key) - # should return NO_VALUE as key no longer exists in cache - self.assertEqual(api.NO_VALUE, region.get(random_key)) - - def test_backend_multi_delete_data(self): - - region = dp_region.make_region().configure( - 'keystone.cache.mongo', - arguments=self.arguments - ) - random_key = uuid.uuid4().hex - random_key1 = uuid.uuid4().hex - random_key2 = uuid.uuid4().hex - random_key3 = uuid.uuid4().hex - mapping = {random_key1: 'dummyValue1', - random_key2: 'dummyValue2', - random_key3: 'dummyValue3'} - region.set_multi(mapping) - # should return NO_VALUE as key does not exist in cache - self.assertEqual(api.NO_VALUE, region.get(random_key)) - self.assertEqual("dummyValue1", region.get(random_key1)) - self.assertEqual("dummyValue2", region.get(random_key2)) - self.assertEqual("dummyValue3", region.get(random_key3)) - self.assertEqual(api.NO_VALUE, region.get("InvalidKey")) - - keys = mapping.keys() - - region.delete_multi(keys) - - self.assertEqual(api.NO_VALUE, region.get("InvalidKey")) - # should return NO_VALUE as keys no longer exist in cache - self.assertEqual(api.NO_VALUE, region.get(random_key1)) - self.assertEqual(api.NO_VALUE, 
region.get(random_key2)) - self.assertEqual(api.NO_VALUE, region.get(random_key3)) - - def test_additional_crud_method_arguments_support(self): - """Additional arguments should works across find/insert/update.""" - - self.arguments['wtimeout'] = 30000 - self.arguments['j'] = True - self.arguments['continue_on_error'] = True - self.arguments['secondary_acceptable_latency_ms'] = 60 - region = dp_region.make_region().configure( - 'keystone.cache.mongo', - arguments=self.arguments - ) - - # There is no proxy so can access MongoCacheBackend directly - api_methargs = region.backend.api.meth_kwargs - self.assertEqual(30000, api_methargs['wtimeout']) - self.assertEqual(True, api_methargs['j']) - self.assertEqual(True, api_methargs['continue_on_error']) - self.assertEqual(60, api_methargs['secondary_acceptable_latency_ms']) - - random_key = uuid.uuid4().hex - region.set(random_key, "dummyValue1") - self.assertEqual("dummyValue1", region.get(random_key)) - - region.set(random_key, "dummyValue2") - self.assertEqual("dummyValue2", region.get(random_key)) - - random_key = uuid.uuid4().hex - region.set(random_key, "dummyValue3") - self.assertEqual("dummyValue3", region.get(random_key)) diff --git a/keystone-moon/keystone/tests/unit/test_catalog.py b/keystone-moon/keystone/tests/unit/test_catalog.py deleted file mode 100644 index 76e3055a..00000000 --- a/keystone-moon/keystone/tests/unit/test_catalog.py +++ /dev/null @@ -1,355 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -import uuid - -from six.moves import http_client - -from keystone import catalog -from keystone.tests import unit -from keystone.tests.unit.ksfixtures import database -from keystone.tests.unit import rest - - -BASE_URL = 'http://127.0.0.1:35357/v2' -SERVICE_FIXTURE = object() - - -class V2CatalogTestCase(rest.RestfulTestCase): - def setUp(self): - super(V2CatalogTestCase, self).setUp() - self.useFixture(database.Database()) - - self.service = unit.new_service_ref() - self.service_id = self.service['id'] - self.catalog_api.create_service(self.service_id, self.service) - - # TODO(termie): add an admin user to the fixtures and use that user - # override the fixtures, for now - self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], - self.tenant_bar['id'], - self.role_admin['id']) - - def config_overrides(self): - super(V2CatalogTestCase, self).config_overrides() - self.config_fixture.config(group='catalog', driver='sql') - - def _get_token_id(self, r): - """Applicable only to JSON.""" - return r.result['access']['token']['id'] - - def _endpoint_create(self, expected_status=http_client.OK, - service_id=SERVICE_FIXTURE, - publicurl='http://localhost:8080', - internalurl='http://localhost:8080', - adminurl='http://localhost:8080'): - if service_id is SERVICE_FIXTURE: - service_id = self.service_id - - path = '/v2.0/endpoints' - body = { - 'endpoint': { - 'adminurl': adminurl, - 'service_id': service_id, - 'region': 'RegionOne', - 'internalurl': internalurl, - 'publicurl': publicurl - } - } - - r = self.admin_request(method='POST', token=self.get_scoped_token(), - path=path, expected_status=expected_status, - body=body) - return body, r - - def _region_create(self): - region = unit.new_region_ref() - region_id = region['id'] - self.catalog_api.create_region(region) - return region_id - - def test_endpoint_create(self): - req_body, response = 
self._endpoint_create() - self.assertIn('endpoint', response.result) - self.assertIn('id', response.result['endpoint']) - for field, value in req_body['endpoint'].items(): - self.assertEqual(value, response.result['endpoint'][field]) - - def test_pure_v3_endpoint_with_publicurl_visible_from_v2(self): - """Test pure v3 endpoint can be fetched via v2.0 API. - - For those who are using v2.0 APIs, endpoints created by v3 API should - also be visible as there are no differences about the endpoints - except the format or the internal implementation. Since publicURL is - required for v2.0 API, so only v3 endpoints of the service which have - the public interface endpoint will be converted into v2.0 endpoints. - """ - region_id = self._region_create() - - # create v3 endpoints with three interfaces - body = { - 'endpoint': unit.new_endpoint_ref(self.service_id, - region_id=region_id) - } - for interface in catalog.controllers.INTERFACES: - body['endpoint']['interface'] = interface - self.admin_request(method='POST', - token=self.get_scoped_token(), - path='/v3/endpoints', - expected_status=http_client.CREATED, - body=body) - - r = self.admin_request(token=self.get_scoped_token(), - path='/v2.0/endpoints') - # Endpoints of the service which have a public interface endpoint - # will be returned via v2.0 API - self.assertEqual(1, len(r.result['endpoints'])) - v2_endpoint = r.result['endpoints'][0] - self.assertEqual(self.service_id, v2_endpoint['service_id']) - # This is not the focus of this test, so no different urls are used. 
- self.assertEqual(body['endpoint']['url'], v2_endpoint['publicurl']) - self.assertEqual(body['endpoint']['url'], v2_endpoint['adminurl']) - self.assertEqual(body['endpoint']['url'], v2_endpoint['internalurl']) - self.assertNotIn('name', v2_endpoint) - - v3_endpoint = self.catalog_api.get_endpoint(v2_endpoint['id']) - # Checks the v3 public endpoint's id is the generated v2.0 endpoint - self.assertEqual('public', v3_endpoint['interface']) - self.assertEqual(self.service_id, v3_endpoint['service_id']) - - def test_pure_v3_endpoint_without_publicurl_invisible_from_v2(self): - """Test that the v2.0 API can't fetch v3 endpoints without publicURLs. - - v2.0 API will return endpoints created by v3 API, but publicURL is - required for the service in the v2.0 API, therefore v3 endpoints of - a service which don't have publicURL will be ignored. - """ - region_id = self._region_create() - - # create a v3 endpoint without public interface - body = { - 'endpoint': unit.new_endpoint_ref(self.service_id, - region_id=region_id) - } - for interface in catalog.controllers.INTERFACES: - if interface == 'public': - continue - body['endpoint']['interface'] = interface - self.admin_request(method='POST', - token=self.get_scoped_token(), - path='/v3/endpoints', - expected_status=http_client.CREATED, - body=body) - - r = self.admin_request(token=self.get_scoped_token(), - path='/v2.0/endpoints') - # v3 endpoints of a service which don't have publicURL can't be - # fetched via v2.0 API - self.assertEqual(0, len(r.result['endpoints'])) - - def test_endpoint_create_with_null_adminurl(self): - req_body, response = self._endpoint_create(adminurl=None) - self.assertIsNone(req_body['endpoint']['adminurl']) - self.assertNotIn('adminurl', response.result['endpoint']) - - def test_endpoint_create_with_empty_adminurl(self): - req_body, response = self._endpoint_create(adminurl='') - self.assertEqual('', req_body['endpoint']['adminurl']) - self.assertNotIn("adminurl", response.result['endpoint']) - 
- def test_endpoint_create_with_null_internalurl(self): - req_body, response = self._endpoint_create(internalurl=None) - self.assertIsNone(req_body['endpoint']['internalurl']) - self.assertNotIn('internalurl', response.result['endpoint']) - - def test_endpoint_create_with_empty_internalurl(self): - req_body, response = self._endpoint_create(internalurl='') - self.assertEqual('', req_body['endpoint']['internalurl']) - self.assertNotIn("internalurl", response.result['endpoint']) - - def test_endpoint_create_with_null_publicurl(self): - self._endpoint_create(expected_status=http_client.BAD_REQUEST, - publicurl=None) - - def test_endpoint_create_with_empty_publicurl(self): - self._endpoint_create(expected_status=http_client.BAD_REQUEST, - publicurl='') - - def test_endpoint_create_with_null_service_id(self): - self._endpoint_create(expected_status=http_client.BAD_REQUEST, - service_id=None) - - def test_endpoint_create_with_empty_service_id(self): - self._endpoint_create(expected_status=http_client.BAD_REQUEST, - service_id='') - - def test_endpoint_create_with_valid_url(self): - """Create endpoint with valid URL should be tested, too.""" - # list one valid url is enough, no need to list too much - valid_url = 'http://127.0.0.1:8774/v1.1/$(tenant_id)s' - - # baseline tests that all valid URLs works - self._endpoint_create(expected_status=http_client.OK, - publicurl=valid_url, - internalurl=valid_url, - adminurl=valid_url) - - def test_endpoint_create_with_invalid_url(self): - """Test the invalid cases: substitutions is not exactly right.""" - invalid_urls = [ - # using a substitution that is not whitelisted - KeyError - 'http://127.0.0.1:8774/v1.1/$(nonexistent)s', - - # invalid formatting - ValueError - 'http://127.0.0.1:8774/v1.1/$(tenant_id)', - 'http://127.0.0.1:8774/v1.1/$(tenant_id)t', - 'http://127.0.0.1:8774/v1.1/$(tenant_id', - - # invalid type specifier - TypeError - # admin_url is a string not an int - 'http://127.0.0.1:8774/v1.1/$(admin_url)d', - ] - - # 
list one valid url is enough, no need to list too much - valid_url = 'http://127.0.0.1:8774/v1.1/$(tenant_id)s' - - # Case one: publicurl, internalurl and adminurl are - # all invalid - for invalid_url in invalid_urls: - self._endpoint_create(expected_status=http_client.BAD_REQUEST, - publicurl=invalid_url, - internalurl=invalid_url, - adminurl=invalid_url) - - # Case two: publicurl, internalurl are invalid - # and adminurl is valid - for invalid_url in invalid_urls: - self._endpoint_create(expected_status=http_client.BAD_REQUEST, - publicurl=invalid_url, - internalurl=invalid_url, - adminurl=valid_url) - - # Case three: publicurl, adminurl are invalid - # and internalurl is valid - for invalid_url in invalid_urls: - self._endpoint_create(expected_status=http_client.BAD_REQUEST, - publicurl=invalid_url, - internalurl=valid_url, - adminurl=invalid_url) - - # Case four: internalurl, adminurl are invalid - # and publicurl is valid - for invalid_url in invalid_urls: - self._endpoint_create(expected_status=http_client.BAD_REQUEST, - publicurl=valid_url, - internalurl=invalid_url, - adminurl=invalid_url) - - # Case five: publicurl is invalid, internalurl - # and adminurl are valid - for invalid_url in invalid_urls: - self._endpoint_create(expected_status=http_client.BAD_REQUEST, - publicurl=invalid_url, - internalurl=valid_url, - adminurl=valid_url) - - # Case six: internalurl is invalid, publicurl - # and adminurl are valid - for invalid_url in invalid_urls: - self._endpoint_create(expected_status=http_client.BAD_REQUEST, - publicurl=valid_url, - internalurl=invalid_url, - adminurl=valid_url) - - # Case seven: adminurl is invalid, publicurl - # and internalurl are valid - for invalid_url in invalid_urls: - self._endpoint_create(expected_status=http_client.BAD_REQUEST, - publicurl=valid_url, - internalurl=valid_url, - adminurl=invalid_url) - - -class TestV2CatalogAPISQL(unit.TestCase): - - def setUp(self): - super(TestV2CatalogAPISQL, self).setUp() - 
self.useFixture(database.Database()) - self.catalog_api = catalog.Manager() - - service = unit.new_service_ref() - self.service_id = service['id'] - self.catalog_api.create_service(self.service_id, service) - - self.create_endpoint(service_id=self.service_id) - - def create_endpoint(self, service_id, **kwargs): - endpoint = unit.new_endpoint_ref(service_id=service_id, - region_id=None, - **kwargs) - self.catalog_api.create_endpoint(endpoint['id'], endpoint) - return endpoint - - def config_overrides(self): - super(TestV2CatalogAPISQL, self).config_overrides() - self.config_fixture.config(group='catalog', driver='sql') - - def test_get_catalog_ignores_endpoints_with_invalid_urls(self): - user_id = uuid.uuid4().hex - tenant_id = uuid.uuid4().hex - - # the only endpoint in the catalog is the one created in setUp - catalog = self.catalog_api.get_catalog(user_id, tenant_id) - self.assertEqual(1, len(catalog)) - # it's also the only endpoint in the backend - self.assertEqual(1, len(self.catalog_api.list_endpoints())) - - # create a new, invalid endpoint - malformed type declaration - self.create_endpoint(self.service_id, - url='http://keystone/%(tenant_id)') - - # create a new, invalid endpoint - nonexistent key - self.create_endpoint(self.service_id, - url='http://keystone/%(you_wont_find_me)s') - - # verify that the invalid endpoints don't appear in the catalog - catalog = self.catalog_api.get_catalog(user_id, tenant_id) - self.assertEqual(1, len(catalog)) - # all three endpoints appear in the backend - self.assertEqual(3, len(self.catalog_api.list_endpoints())) - - def test_get_catalog_always_returns_service_name(self): - user_id = uuid.uuid4().hex - tenant_id = uuid.uuid4().hex - - # new_service_ref() returns a ref with a `name`. - named_svc = unit.new_service_ref() - self.catalog_api.create_service(named_svc['id'], named_svc) - self.create_endpoint(service_id=named_svc['id']) - - # This time manually delete the generated `name`. 
- unnamed_svc = unit.new_service_ref() - del unnamed_svc['name'] - self.catalog_api.create_service(unnamed_svc['id'], unnamed_svc) - self.create_endpoint(service_id=unnamed_svc['id']) - - region = None - catalog = self.catalog_api.get_catalog(user_id, tenant_id) - - self.assertEqual(named_svc['name'], - catalog[region][named_svc['type']]['name']) - - # verify a name is not generated when the service is passed to the API - self.assertEqual('', catalog[region][unnamed_svc['type']]['name']) diff --git a/keystone-moon/keystone/tests/unit/test_cert_setup.py b/keystone-moon/keystone/tests/unit/test_cert_setup.py deleted file mode 100644 index debf87f5..00000000 --- a/keystone-moon/keystone/tests/unit/test_cert_setup.py +++ /dev/null @@ -1,243 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import os -import shutil - -import mock -from six.moves import http_client -from testtools import matchers - -from keystone.common import environment -from keystone.common import openssl -from keystone import exception -from keystone.tests import unit -from keystone.tests.unit import rest -from keystone import token - - -SSLDIR = unit.dirs.tmp('ssl') -CONF = unit.CONF - - -CERTDIR = os.path.join(SSLDIR, 'certs') -KEYDIR = os.path.join(SSLDIR, 'private') - - -class CertSetupTestCase(rest.RestfulTestCase): - - def setUp(self): - super(CertSetupTestCase, self).setUp() - - def cleanup_ssldir(): - try: - shutil.rmtree(SSLDIR) - except OSError: - pass - - self.addCleanup(cleanup_ssldir) - - def config_overrides(self): - super(CertSetupTestCase, self).config_overrides() - ca_certs = os.path.join(CERTDIR, 'ca.pem') - ca_key = os.path.join(CERTDIR, 'cakey.pem') - - self.config_fixture.config( - group='signing', - certfile=os.path.join(CERTDIR, 'signing_cert.pem'), - ca_certs=ca_certs, - ca_key=ca_key, - keyfile=os.path.join(KEYDIR, 'signing_key.pem')) - self.config_fixture.config( - group='ssl', - ca_key=ca_key) - self.config_fixture.config( - group='eventlet_server_ssl', - ca_certs=ca_certs, - certfile=os.path.join(CERTDIR, 'keystone.pem'), - keyfile=os.path.join(KEYDIR, 'keystonekey.pem')) - self.config_fixture.config(group='token', provider='pkiz') - - def test_can_handle_missing_certs(self): - controller = token.controllers.Auth() - - self.config_fixture.config(group='signing', certfile='invalid') - user = unit.create_user(self.identity_api, - domain_id=CONF.identity.default_domain_id) - body_dict = { - 'passwordCredentials': { - 'userId': user['id'], - 'password': user['password'], - }, - } - self.assertRaises(exception.UnexpectedError, - controller.authenticate, - {}, body_dict) - - def test_create_pki_certs(self, rebuild=False): - pki = openssl.ConfigurePKI(None, None, rebuild=rebuild) - pki.run() - self.assertTrue(os.path.exists(CONF.signing.certfile)) - 
self.assertTrue(os.path.exists(CONF.signing.ca_certs)) - self.assertTrue(os.path.exists(CONF.signing.keyfile)) - - def test_create_ssl_certs(self, rebuild=False): - ssl = openssl.ConfigureSSL(None, None, rebuild=rebuild) - ssl.run() - self.assertTrue(os.path.exists(CONF.eventlet_server_ssl.ca_certs)) - self.assertTrue(os.path.exists(CONF.eventlet_server_ssl.certfile)) - self.assertTrue(os.path.exists(CONF.eventlet_server_ssl.keyfile)) - - def test_fetch_signing_cert(self, rebuild=False): - pki = openssl.ConfigurePKI(None, None, rebuild=rebuild) - pki.run() - - # NOTE(jamielennox): Use request directly because certificate - # requests don't have some of the normal information - signing_resp = self.request(self.public_app, - '/v2.0/certificates/signing', - method='GET', - expected_status=http_client.OK) - - cacert_resp = self.request(self.public_app, - '/v2.0/certificates/ca', - method='GET', - expected_status=http_client.OK) - - with open(CONF.signing.certfile) as f: - self.assertEqual(f.read(), signing_resp.text) - - with open(CONF.signing.ca_certs) as f: - self.assertEqual(f.read(), cacert_resp.text) - - # NOTE(jamielennox): This is weird behaviour that we need to enforce. - # It doesn't matter what you ask for it's always going to give text - # with a text/html content_type. 
- - for path in ['/v2.0/certificates/signing', '/v2.0/certificates/ca']: - for accept in [None, 'text/html', 'application/json', 'text/xml']: - headers = {'Accept': accept} if accept else {} - resp = self.request(self.public_app, path, method='GET', - expected_status=http_client.OK, - headers=headers) - - self.assertEqual('text/html', resp.content_type) - - def test_fetch_signing_cert_when_rebuild(self): - pki = openssl.ConfigurePKI(None, None) - pki.run() - self.test_fetch_signing_cert(rebuild=True) - - def test_failure(self): - for path in ['/v2.0/certificates/signing', '/v2.0/certificates/ca']: - self.request(self.public_app, path, method='GET', - expected_status=http_client.INTERNAL_SERVER_ERROR) - - def test_pki_certs_rebuild(self): - self.test_create_pki_certs() - with open(CONF.signing.certfile) as f: - cert_file1 = f.read() - - self.test_create_pki_certs(rebuild=True) - with open(CONF.signing.certfile) as f: - cert_file2 = f.read() - - self.assertNotEqual(cert_file1, cert_file2) - - def test_ssl_certs_rebuild(self): - self.test_create_ssl_certs() - with open(CONF.eventlet_server_ssl.certfile) as f: - cert_file1 = f.read() - - self.test_create_ssl_certs(rebuild=True) - with open(CONF.eventlet_server_ssl.certfile) as f: - cert_file2 = f.read() - - self.assertNotEqual(cert_file1, cert_file2) - - @mock.patch.object(os, 'remove') - def test_rebuild_pki_certs_remove_error(self, mock_remove): - self.test_create_pki_certs() - with open(CONF.signing.certfile) as f: - cert_file1 = f.read() - - mock_remove.side_effect = OSError() - self.test_create_pki_certs(rebuild=True) - with open(CONF.signing.certfile) as f: - cert_file2 = f.read() - - self.assertEqual(cert_file1, cert_file2) - - @mock.patch.object(os, 'remove') - def test_rebuild_ssl_certs_remove_error(self, mock_remove): - self.test_create_ssl_certs() - with open(CONF.eventlet_server_ssl.certfile) as f: - cert_file1 = f.read() - - mock_remove.side_effect = OSError() - self.test_create_ssl_certs(rebuild=True) - 
with open(CONF.eventlet_server_ssl.certfile) as f: - cert_file2 = f.read() - - self.assertEqual(cert_file1, cert_file2) - - def test_create_pki_certs_twice_without_rebuild(self): - self.test_create_pki_certs() - with open(CONF.signing.certfile) as f: - cert_file1 = f.read() - - self.test_create_pki_certs() - with open(CONF.signing.certfile) as f: - cert_file2 = f.read() - - self.assertEqual(cert_file1, cert_file2) - - def test_create_ssl_certs_twice_without_rebuild(self): - self.test_create_ssl_certs() - with open(CONF.eventlet_server_ssl.certfile) as f: - cert_file1 = f.read() - - self.test_create_ssl_certs() - with open(CONF.eventlet_server_ssl.certfile) as f: - cert_file2 = f.read() - - self.assertEqual(cert_file1, cert_file2) - - -class TestExecCommand(unit.TestCase): - - @mock.patch.object(environment.subprocess.Popen, 'poll') - def test_running_a_successful_command(self, mock_poll): - mock_poll.return_value = 0 - - ssl = openssl.ConfigureSSL('keystone_user', 'keystone_group') - ssl.exec_command(['ls']) - - @mock.patch.object(environment.subprocess, 'check_output') - def test_running_an_invalid_command(self, mock_check_output): - cmd = ['ls'] - - output = 'this is the output string' - - error = environment.subprocess.CalledProcessError(returncode=1, - cmd=cmd, - output=output) - mock_check_output.side_effect = error - - ssl = openssl.ConfigureSSL('keystone_user', 'keystone_group') - e = self.assertRaises(environment.subprocess.CalledProcessError, - ssl.exec_command, - cmd) - self.assertThat(e.output, matchers.Equals(output)) diff --git a/keystone-moon/keystone/tests/unit/test_cli.py b/keystone-moon/keystone/tests/unit/test_cli.py deleted file mode 100644 index 06f2e172..00000000 --- a/keystone-moon/keystone/tests/unit/test_cli.py +++ /dev/null @@ -1,478 +0,0 @@ -# Copyright 2014 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import os -import uuid - -import fixtures -import mock -from oslo_config import cfg -from six.moves import range -from testtools import matchers - -from keystone.cmd import cli -from keystone.common import dependency -from keystone.i18n import _ -from keystone import resource -from keystone.tests import unit -from keystone.tests.unit.ksfixtures import database - - -CONF = cfg.CONF - - -class CliTestCase(unit.SQLDriverOverrides, unit.TestCase): - def config_files(self): - config_files = super(CliTestCase, self).config_files() - config_files.append(unit.dirs.tests_conf('backend_sql.conf')) - return config_files - - def test_token_flush(self): - self.useFixture(database.Database()) - self.load_backends() - cli.TokenFlush.main() - - -class CliBootStrapTestCase(unit.SQLDriverOverrides, unit.TestCase): - - def setUp(self): - self.useFixture(database.Database()) - super(CliBootStrapTestCase, self).setUp() - - def config_files(self): - self.config_fixture.register_cli_opt(cli.command_opt) - config_files = super(CliBootStrapTestCase, self).config_files() - config_files.append(unit.dirs.tests_conf('backend_sql.conf')) - return config_files - - def config(self, config_files): - CONF(args=['bootstrap', '--bootstrap-password', uuid.uuid4().hex], - project='keystone', - default_config_files=config_files) - - def test_bootstrap(self): - bootstrap = cli.BootStrap() - self._do_test_bootstrap(bootstrap) - - def _do_test_bootstrap(self, bootstrap): - bootstrap.do_bootstrap() - project = bootstrap.resource_manager.get_project_by_name( - bootstrap.project_name, - 'default') 
- user = bootstrap.identity_manager.get_user_by_name( - bootstrap.username, - 'default') - role = bootstrap.role_manager.get_role(bootstrap.role_id) - role_list = ( - bootstrap.assignment_manager.get_roles_for_user_and_project( - user['id'], - project['id'])) - self.assertIs(len(role_list), 1) - self.assertEqual(role_list[0], role['id']) - # NOTE(morganfainberg): Pass an empty context, it isn't used by - # `authenticate` method. - bootstrap.identity_manager.authenticate( - {}, - user['id'], - bootstrap.password) - - if bootstrap.region_id: - region = bootstrap.catalog_manager.get_region(bootstrap.region_id) - self.assertEqual(self.region_id, region['id']) - - if bootstrap.service_id: - svc = bootstrap.catalog_manager.get_service(bootstrap.service_id) - self.assertEqual(self.service_name, svc['name']) - - self.assertEqual(set(['admin', 'public', 'internal']), - set(bootstrap.endpoints)) - - urls = {'public': self.public_url, - 'internal': self.internal_url, - 'admin': self.admin_url} - - for interface, url in urls.items(): - endpoint_id = bootstrap.endpoints[interface] - endpoint = bootstrap.catalog_manager.get_endpoint(endpoint_id) - - self.assertEqual(self.region_id, endpoint['region_id']) - self.assertEqual(url, endpoint['url']) - self.assertEqual(svc['id'], endpoint['service_id']) - self.assertEqual(interface, endpoint['interface']) - - def test_bootstrap_is_idempotent(self): - # NOTE(morganfainberg): Ensure we can run bootstrap multiple times - # without erroring. 
- bootstrap = cli.BootStrap() - self._do_test_bootstrap(bootstrap) - self._do_test_bootstrap(bootstrap) - - -class CliBootStrapTestCaseWithEnvironment(CliBootStrapTestCase): - - def config(self, config_files): - CONF(args=['bootstrap'], project='keystone', - default_config_files=config_files) - - def setUp(self): - super(CliBootStrapTestCaseWithEnvironment, self).setUp() - self.password = uuid.uuid4().hex - self.username = uuid.uuid4().hex - self.project_name = uuid.uuid4().hex - self.role_name = uuid.uuid4().hex - self.service_name = uuid.uuid4().hex - self.public_url = uuid.uuid4().hex - self.internal_url = uuid.uuid4().hex - self.admin_url = uuid.uuid4().hex - self.region_id = uuid.uuid4().hex - self.default_domain = { - 'id': CONF.identity.default_domain_id, - 'name': 'Default', - } - self.useFixture( - fixtures.EnvironmentVariable('OS_BOOTSTRAP_PASSWORD', - newvalue=self.password)) - self.useFixture( - fixtures.EnvironmentVariable('OS_BOOTSTRAP_USERNAME', - newvalue=self.username)) - self.useFixture( - fixtures.EnvironmentVariable('OS_BOOTSTRAP_PROJECT_NAME', - newvalue=self.project_name)) - self.useFixture( - fixtures.EnvironmentVariable('OS_BOOTSTRAP_ROLE_NAME', - newvalue=self.role_name)) - self.useFixture( - fixtures.EnvironmentVariable('OS_BOOTSTRAP_SERVICE_NAME', - newvalue=self.service_name)) - self.useFixture( - fixtures.EnvironmentVariable('OS_BOOTSTRAP_PUBLIC_URL', - newvalue=self.public_url)) - self.useFixture( - fixtures.EnvironmentVariable('OS_BOOTSTRAP_INTERNAL_URL', - newvalue=self.internal_url)) - self.useFixture( - fixtures.EnvironmentVariable('OS_BOOTSTRAP_ADMIN_URL', - newvalue=self.admin_url)) - self.useFixture( - fixtures.EnvironmentVariable('OS_BOOTSTRAP_REGION_ID', - newvalue=self.region_id)) - - def test_assignment_created_with_user_exists(self): - # test assignment can be created if user already exists. 
- bootstrap = cli.BootStrap() - bootstrap.resource_manager.create_domain(self.default_domain['id'], - self.default_domain) - user_ref = unit.new_user_ref(self.default_domain['id'], - name=self.username, - password=self.password) - bootstrap.identity_manager.create_user(user_ref) - self._do_test_bootstrap(bootstrap) - - def test_assignment_created_with_project_exists(self): - # test assignment can be created if project already exists. - bootstrap = cli.BootStrap() - bootstrap.resource_manager.create_domain(self.default_domain['id'], - self.default_domain) - project_ref = unit.new_project_ref(self.default_domain['id'], - name=self.project_name) - bootstrap.resource_manager.create_project(project_ref['id'], - project_ref) - self._do_test_bootstrap(bootstrap) - - def test_assignment_created_with_role_exists(self): - # test assignment can be created if role already exists. - bootstrap = cli.BootStrap() - bootstrap.resource_manager.create_domain(self.default_domain['id'], - self.default_domain) - role = unit.new_role_ref(name=self.role_name) - bootstrap.role_manager.create_role(role['id'], role) - self._do_test_bootstrap(bootstrap) - - def test_assignment_created_with_region_exists(self): - # test assignment can be created if role already exists. - bootstrap = cli.BootStrap() - bootstrap.resource_manager.create_domain(self.default_domain['id'], - self.default_domain) - region = unit.new_region_ref(id=self.region_id) - bootstrap.catalog_manager.create_region(region) - self._do_test_bootstrap(bootstrap) - - def test_endpoints_created_with_service_exists(self): - # test assignment can be created if role already exists. 
- bootstrap = cli.BootStrap() - bootstrap.resource_manager.create_domain(self.default_domain['id'], - self.default_domain) - service = unit.new_service_ref(name=self.service_name) - bootstrap.catalog_manager.create_service(service['id'], service) - self._do_test_bootstrap(bootstrap) - - def test_endpoints_created_with_endpoint_exists(self): - # test assignment can be created if role already exists. - bootstrap = cli.BootStrap() - bootstrap.resource_manager.create_domain(self.default_domain['id'], - self.default_domain) - service = unit.new_service_ref(name=self.service_name) - bootstrap.catalog_manager.create_service(service['id'], service) - - region = unit.new_region_ref(id=self.region_id) - bootstrap.catalog_manager.create_region(region) - - endpoint = unit.new_endpoint_ref(interface='public', - service_id=service['id'], - url=self.public_url, - region_id=self.region_id) - bootstrap.catalog_manager.create_endpoint(endpoint['id'], endpoint) - - self._do_test_bootstrap(bootstrap) - - -class CliDomainConfigAllTestCase(unit.SQLDriverOverrides, unit.TestCase): - - def setUp(self): - self.useFixture(database.Database()) - super(CliDomainConfigAllTestCase, self).setUp() - self.load_backends() - self.config_fixture.config( - group='identity', - domain_config_dir=unit.TESTCONF + '/domain_configs_multi_ldap') - self.domain_count = 3 - self.setup_initial_domains() - - def config_files(self): - self.config_fixture.register_cli_opt(cli.command_opt) - self.addCleanup(self.cleanup) - config_files = super(CliDomainConfigAllTestCase, self).config_files() - config_files.append(unit.dirs.tests_conf('backend_sql.conf')) - return config_files - - def cleanup(self): - CONF.reset() - CONF.unregister_opt(cli.command_opt) - - def cleanup_domains(self): - for domain in self.domains: - if domain == 'domain_default': - # Not allowed to delete the default domain, but should at least - # delete any domain-specific config for it. 
- self.domain_config_api.delete_config( - CONF.identity.default_domain_id) - continue - this_domain = self.domains[domain] - this_domain['enabled'] = False - self.resource_api.update_domain(this_domain['id'], this_domain) - self.resource_api.delete_domain(this_domain['id']) - self.domains = {} - - def config(self, config_files): - CONF(args=['domain_config_upload', '--all'], project='keystone', - default_config_files=config_files) - - def setup_initial_domains(self): - - def create_domain(domain): - return self.resource_api.create_domain(domain['id'], domain) - - self.domains = {} - self.addCleanup(self.cleanup_domains) - for x in range(1, self.domain_count): - domain = 'domain%s' % x - self.domains[domain] = create_domain( - {'id': uuid.uuid4().hex, 'name': domain}) - self.domains['domain_default'] = create_domain( - resource.calc_default_domain()) - - def test_config_upload(self): - # The values below are the same as in the domain_configs_multi_ldap - # directory of test config_files. 
- default_config = { - 'ldap': {'url': 'fake://memory', - 'user': 'cn=Admin', - 'password': 'password', - 'suffix': 'cn=example,cn=com'}, - 'identity': {'driver': 'ldap'} - } - domain1_config = { - 'ldap': {'url': 'fake://memory1', - 'user': 'cn=Admin', - 'password': 'password', - 'suffix': 'cn=example,cn=com'}, - 'identity': {'driver': 'ldap', - 'list_limit': '101'} - } - domain2_config = { - 'ldap': {'url': 'fake://memory', - 'user': 'cn=Admin', - 'password': 'password', - 'suffix': 'cn=myroot,cn=com', - 'group_tree_dn': 'ou=UserGroups,dc=myroot,dc=org', - 'user_tree_dn': 'ou=Users,dc=myroot,dc=org'}, - 'identity': {'driver': 'ldap'} - } - - # Clear backend dependencies, since cli loads these manually - dependency.reset() - cli.DomainConfigUpload.main() - - res = self.domain_config_api.get_config_with_sensitive_info( - CONF.identity.default_domain_id) - self.assertEqual(default_config, res) - res = self.domain_config_api.get_config_with_sensitive_info( - self.domains['domain1']['id']) - self.assertEqual(domain1_config, res) - res = self.domain_config_api.get_config_with_sensitive_info( - self.domains['domain2']['id']) - self.assertEqual(domain2_config, res) - - -class CliDomainConfigSingleDomainTestCase(CliDomainConfigAllTestCase): - - def config(self, config_files): - CONF(args=['domain_config_upload', '--domain-name', 'Default'], - project='keystone', default_config_files=config_files) - - def test_config_upload(self): - # The values below are the same as in the domain_configs_multi_ldap - # directory of test config_files. 
- default_config = { - 'ldap': {'url': 'fake://memory', - 'user': 'cn=Admin', - 'password': 'password', - 'suffix': 'cn=example,cn=com'}, - 'identity': {'driver': 'ldap'} - } - - # Clear backend dependencies, since cli loads these manually - dependency.reset() - cli.DomainConfigUpload.main() - - res = self.domain_config_api.get_config_with_sensitive_info( - CONF.identity.default_domain_id) - self.assertEqual(default_config, res) - res = self.domain_config_api.get_config_with_sensitive_info( - self.domains['domain1']['id']) - self.assertEqual({}, res) - res = self.domain_config_api.get_config_with_sensitive_info( - self.domains['domain2']['id']) - self.assertEqual({}, res) - - def test_no_overwrite_config(self): - # Create a config for the default domain - default_config = { - 'ldap': {'url': uuid.uuid4().hex}, - 'identity': {'driver': 'ldap'} - } - self.domain_config_api.create_config( - CONF.identity.default_domain_id, default_config) - - # Now try and upload the settings in the configuration file for the - # default domain - dependency.reset() - with mock.patch('six.moves.builtins.print') as mock_print: - self.assertRaises(unit.UnexpectedExit, cli.DomainConfigUpload.main) - file_name = ('keystone.%s.conf' % - resource.calc_default_domain()['name']) - error_msg = _( - 'Domain: %(domain)s already has a configuration defined - ' - 'ignoring file: %(file)s.') % { - 'domain': resource.calc_default_domain()['name'], - 'file': os.path.join(CONF.identity.domain_config_dir, - file_name)} - mock_print.assert_has_calls([mock.call(error_msg)]) - - res = self.domain_config_api.get_config( - CONF.identity.default_domain_id) - # The initial config should not have been overwritten - self.assertEqual(default_config, res) - - -class CliDomainConfigNoOptionsTestCase(CliDomainConfigAllTestCase): - - def config(self, config_files): - CONF(args=['domain_config_upload'], - project='keystone', default_config_files=config_files) - - def test_config_upload(self): - dependency.reset() - 
with mock.patch('six.moves.builtins.print') as mock_print: - self.assertRaises(unit.UnexpectedExit, cli.DomainConfigUpload.main) - mock_print.assert_has_calls( - [mock.call( - _('At least one option must be provided, use either ' - '--all or --domain-name'))]) - - -class CliDomainConfigTooManyOptionsTestCase(CliDomainConfigAllTestCase): - - def config(self, config_files): - CONF(args=['domain_config_upload', '--all', '--domain-name', - 'Default'], - project='keystone', default_config_files=config_files) - - def test_config_upload(self): - dependency.reset() - with mock.patch('six.moves.builtins.print') as mock_print: - self.assertRaises(unit.UnexpectedExit, cli.DomainConfigUpload.main) - mock_print.assert_has_calls( - [mock.call(_('The --all option cannot be used with ' - 'the --domain-name option'))]) - - -class CliDomainConfigInvalidDomainTestCase(CliDomainConfigAllTestCase): - - def config(self, config_files): - self.invalid_domain_name = uuid.uuid4().hex - CONF(args=['domain_config_upload', '--domain-name', - self.invalid_domain_name], - project='keystone', default_config_files=config_files) - - def test_config_upload(self): - dependency.reset() - with mock.patch('six.moves.builtins.print') as mock_print: - self.assertRaises(unit.UnexpectedExit, cli.DomainConfigUpload.main) - file_name = 'keystone.%s.conf' % self.invalid_domain_name - error_msg = (_( - 'Invalid domain name: %(domain)s found in config file name: ' - '%(file)s - ignoring this file.') % { - 'domain': self.invalid_domain_name, - 'file': os.path.join(CONF.identity.domain_config_dir, - file_name)}) - mock_print.assert_has_calls([mock.call(error_msg)]) - - -class TestDomainConfigFinder(unit.BaseTestCase): - - def setUp(self): - super(TestDomainConfigFinder, self).setUp() - self.logging = self.useFixture(fixtures.LoggerFixture()) - - @mock.patch('os.walk') - def test_finder_ignores_files(self, mock_walk): - mock_walk.return_value = [ - ['.', [], ['file.txt', 'keystone.conf', 'keystone.domain0.conf']], 
- ] - - domain_configs = list(cli._domain_config_finder('.')) - - expected_domain_configs = [('./keystone.domain0.conf', 'domain0')] - self.assertThat(domain_configs, - matchers.Equals(expected_domain_configs)) - - expected_msg_template = ('Ignoring file (%s) while scanning ' - 'domain config directory') - self.assertThat( - self.logging.output, - matchers.Contains(expected_msg_template % 'file.txt')) - self.assertThat( - self.logging.output, - matchers.Contains(expected_msg_template % 'keystone.conf')) diff --git a/keystone-moon/keystone/tests/unit/test_config.py b/keystone-moon/keystone/tests/unit/test_config.py deleted file mode 100644 index d7e7809f..00000000 --- a/keystone-moon/keystone/tests/unit/test_config.py +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import uuid - -from oslo_config import cfg - -from keystone.common import config -from keystone import exception -from keystone.tests import unit - - -CONF = cfg.CONF - - -class ConfigTestCase(unit.TestCase): - - def config_files(self): - config_files = super(ConfigTestCase, self).config_files() - # Insert the keystone sample as the first config file to be loaded - # since it is used in one of the code paths to determine the paste-ini - # location. 
- config_files.insert(0, unit.dirs.etc('keystone.conf.sample')) - return config_files - - def test_paste_config(self): - self.assertEqual(unit.dirs.etc('keystone-paste.ini'), - config.find_paste_config()) - self.config_fixture.config(group='paste_deploy', - config_file=uuid.uuid4().hex) - self.assertRaises(exception.ConfigFileNotFound, - config.find_paste_config) - self.config_fixture.config(group='paste_deploy', config_file='') - self.assertEqual(unit.dirs.etc('keystone.conf.sample'), - config.find_paste_config()) - - def test_config_default(self): - self.assertIs(None, CONF.auth.password) - self.assertIs(None, CONF.auth.token) - - -class DeprecatedTestCase(unit.TestCase): - """Test using the original (deprecated) name for renamed options.""" - - def config_files(self): - config_files = super(DeprecatedTestCase, self).config_files() - config_files.append(unit.dirs.tests_conf('deprecated.conf')) - return config_files - - def test_sql(self): - # Options in [sql] were moved to [database] in Icehouse for the change - # to use oslo-incubator's db.sqlalchemy.sessions. - - self.assertEqual('sqlite://deprecated', CONF.database.connection) - self.assertEqual(54321, CONF.database.idle_timeout) - - -class DeprecatedOverrideTestCase(unit.TestCase): - """Test using the deprecated AND new name for renamed options.""" - - def config_files(self): - config_files = super(DeprecatedOverrideTestCase, self).config_files() - config_files.append(unit.dirs.tests_conf('deprecated_override.conf')) - return config_files - - def test_sql(self): - # Options in [sql] were moved to [database] in Icehouse for the change - # to use oslo-incubator's db.sqlalchemy.sessions. 
- - self.assertEqual('sqlite://new', CONF.database.connection) - self.assertEqual(65432, CONF.database.idle_timeout) diff --git a/keystone-moon/keystone/tests/unit/test_contrib_ec2.py b/keystone-moon/keystone/tests/unit/test_contrib_ec2.py deleted file mode 100644 index 2810a47a..00000000 --- a/keystone-moon/keystone/tests/unit/test_contrib_ec2.py +++ /dev/null @@ -1,208 +0,0 @@ -# Copyright 2015 Intel Corporation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import uuid - -from keystoneclient.contrib.ec2 import utils as ec2_utils - -from keystone.contrib.ec2 import controllers -from keystone import exception -from keystone.tests import unit -from keystone.tests.unit import default_fixtures -from keystone.tests.unit.ksfixtures import database - - -class TestCredentialEc2(unit.TestCase): - # TODO(davechen): more testcases for ec2 credential are expected here and - # the file name would be renamed to "test_credential" to correspond with - # "test_v3_credential.py". 
- def setUp(self): - super(TestCredentialEc2, self).setUp() - self.useFixture(database.Database()) - self.load_backends() - self.load_fixtures(default_fixtures) - self.user_id = self.user_foo['id'] - self.project_id = self.tenant_bar['id'] - self.blob = {'access': uuid.uuid4().hex, - 'secret': uuid.uuid4().hex} - self.controller = controllers.Ec2Controller() - self.creds_ref = {'user_id': self.user_id, - 'tenant_id': self.project_id, - 'access': self.blob['access'], - 'secret': self.blob['secret'], - 'trust_id': None} - - def test_signature_validate_no_host_port(self): - """Test signature validation with the access/secret provided.""" - access = self.blob['access'] - secret = self.blob['secret'] - signer = ec2_utils.Ec2Signer(secret) - params = {'SignatureMethod': 'HmacSHA256', - 'SignatureVersion': '2', - 'AWSAccessKeyId': access} - request = {'host': 'foo', - 'verb': 'GET', - 'path': '/bar', - 'params': params} - signature = signer.generate(request) - - sig_ref = {'access': access, - 'signature': signature, - 'host': 'foo', - 'verb': 'GET', - 'path': '/bar', - 'params': params} - - # Now validate the signature based on the dummy request - self.assertTrue(self.controller.check_signature(self.creds_ref, - sig_ref)) - - def test_signature_validate_with_host_port(self): - """Test signature validation when host is bound with port. - - Host is bound with a port, generally, the port here is not the - standard port for the protocol, like '80' for HTTP and port 443 - for HTTPS, the port is not omitted by the client library. 
- """ - access = self.blob['access'] - secret = self.blob['secret'] - signer = ec2_utils.Ec2Signer(secret) - params = {'SignatureMethod': 'HmacSHA256', - 'SignatureVersion': '2', - 'AWSAccessKeyId': access} - request = {'host': 'foo:8181', - 'verb': 'GET', - 'path': '/bar', - 'params': params} - signature = signer.generate(request) - - sig_ref = {'access': access, - 'signature': signature, - 'host': 'foo:8181', - 'verb': 'GET', - 'path': '/bar', - 'params': params} - - # Now validate the signature based on the dummy request - self.assertTrue(self.controller.check_signature(self.creds_ref, - sig_ref)) - - def test_signature_validate_with_missed_host_port(self): - """Test signature validation when host is bound with well-known port. - - Host is bound with a port, but the port is well-know port like '80' - for HTTP and port 443 for HTTPS, sometimes, client library omit - the port but then make the request with the port. - see (How to create the string to sign): 'http://docs.aws.amazon.com/ - general/latest/gr/signature-version-2.html'. - - Since "credentials['host']" is not set by client library but is - taken from "req.host", so caused the differences. - """ - access = self.blob['access'] - secret = self.blob['secret'] - signer = ec2_utils.Ec2Signer(secret) - params = {'SignatureMethod': 'HmacSHA256', - 'SignatureVersion': '2', - 'AWSAccessKeyId': access} - # Omit the port to generate the signature. - cnt_req = {'host': 'foo', - 'verb': 'GET', - 'path': '/bar', - 'params': params} - signature = signer.generate(cnt_req) - - sig_ref = {'access': access, - 'signature': signature, - 'host': 'foo:8080', - 'verb': 'GET', - 'path': '/bar', - 'params': params} - - # Now validate the signature based on the dummy request - # Check the signature again after omitting the port. 
- self.assertTrue(self.controller.check_signature(self.creds_ref, - sig_ref)) - - def test_signature_validate_no_signature(self): - """Signature is not presented in signature reference data.""" - access = self.blob['access'] - params = {'SignatureMethod': 'HmacSHA256', - 'SignatureVersion': '2', - 'AWSAccessKeyId': access} - - sig_ref = {'access': access, - 'signature': None, - 'host': 'foo:8080', - 'verb': 'GET', - 'path': '/bar', - 'params': params} - - creds_ref = {'user_id': self.user_id, - 'tenant_id': self.project_id, - 'access': self.blob['access'], - 'secret': self.blob['secret'], - 'trust_id': None - } - - # Now validate the signature based on the dummy request - self.assertRaises(exception.Unauthorized, - self.controller.check_signature, - creds_ref, sig_ref) - - def test_signature_validate_invalid_signature(self): - """Signature is not signed on the correct data.""" - access = self.blob['access'] - secret = self.blob['secret'] - signer = ec2_utils.Ec2Signer(secret) - params = {'SignatureMethod': 'HmacSHA256', - 'SignatureVersion': '2', - 'AWSAccessKeyId': access} - request = {'host': 'bar', - 'verb': 'GET', - 'path': '/bar', - 'params': params} - signature = signer.generate(request) - - sig_ref = {'access': access, - 'signature': signature, - 'host': 'foo:8080', - 'verb': 'GET', - 'path': '/bar', - 'params': params} - - creds_ref = {'user_id': self.user_id, - 'tenant_id': self.project_id, - 'access': self.blob['access'], - 'secret': self.blob['secret'], - 'trust_id': None - } - - # Now validate the signature based on the dummy request - self.assertRaises(exception.Unauthorized, - self.controller.check_signature, - creds_ref, sig_ref) - - def test_check_non_admin_user(self): - """Checking if user is admin causes uncaught error. - - When checking if a user is an admin, keystone.exception.Unauthorized - is raised but not caught if the user is not an admin. 
- """ - # make a non-admin user - context = {'is_admin': False, 'token_id': uuid.uuid4().hex} - - # check if user is admin - # no exceptions should be raised - self.controller._is_admin(context) diff --git a/keystone-moon/keystone/tests/unit/test_contrib_s3_core.py b/keystone-moon/keystone/tests/unit/test_contrib_s3_core.py deleted file mode 100644 index c9706da7..00000000 --- a/keystone-moon/keystone/tests/unit/test_contrib_s3_core.py +++ /dev/null @@ -1,103 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import uuid - -from keystone.contrib import s3 -from keystone import exception -from keystone.tests import unit - - -class S3ContribCore(unit.TestCase): - def setUp(self): - super(S3ContribCore, self).setUp() - - self.load_backends() - - self.controller = s3.S3Controller() - - def test_good_signature_v1(self): - creds_ref = {'secret': - u'b121dd41cdcc42fe9f70e572e84295aa'} - credentials = {'token': - 'UFVUCjFCMk0yWThBc2dUcGdBbVk3UGhDZmc9PQphcHB' - 'saWNhdGlvbi9vY3RldC1zdHJlYW0KVHVlLCAxMSBEZWMgMjAxM' - 'iAyMTo0MTo0MSBHTVQKL2NvbnRfczMvdXBsb2FkZWRfZnJ' - 'vbV9zMy50eHQ=', - 'signature': 'IL4QLcLVaYgylF9iHj6Wb8BGZsw='} - - self.assertIsNone(self.controller.check_signature(creds_ref, - credentials)) - - def test_bad_signature_v1(self): - creds_ref = {'secret': - u'b121dd41cdcc42fe9f70e572e84295aa'} - credentials = {'token': - 'UFVUCjFCMk0yWThBc2dUcGdBbVk3UGhDZmc9PQphcHB' - 'saWNhdGlvbi9vY3RldC1zdHJlYW0KVHVlLCAxMSBEZWMgMjAxM' - 'iAyMTo0MTo0MSBHTVQKL2NvbnRfczMvdXBsb2FkZWRfZnJ' - 'vbV9zMy50eHQ=', - 'signature': uuid.uuid4().hex} - - self.assertRaises(exception.Unauthorized, - self.controller.check_signature, - creds_ref, credentials) - - def test_good_signature_v4(self): - creds_ref = {'secret': - u'e7a7a2240136494986991a6598d9fb9f'} - credentials = {'token': - 'QVdTNC1ITUFDLVNIQTI1NgoyMDE1MDgyNFQxMTIwNDFaCjIw' - 'MTUwODI0L1JlZ2lvbk9uZS9zMy9hd3M0X3JlcXVlc3QKZjIy' - 'MTU1ODBlZWI5YTE2NzM1MWJkOTNlODZjM2I2ZjA0YTkyOGY1' - 'YzU1MjBhMzkzNWE0NTM1NDBhMDk1NjRiNQ==', - 'signature': - '730ba8f58df6ffeadd78f402e990b2910d60' - 'bc5c2aec63619734f096a4dd77be'} - - self.assertIsNone(self.controller.check_signature(creds_ref, - credentials)) - - def test_bad_signature_v4(self): - creds_ref = {'secret': - u'e7a7a2240136494986991a6598d9fb9f'} - credentials = {'token': - 'QVdTNC1ITUFDLVNIQTI1NgoyMDE1MDgyNFQxMTIwNDFaCjIw' - 'MTUwODI0L1JlZ2lvbk9uZS9zMy9hd3M0X3JlcXVlc3QKZjIy' - 'MTU1ODBlZWI5YTE2NzM1MWJkOTNlODZjM2I2ZjA0YTkyOGY1' - 'YzU1MjBhMzkzNWE0NTM1NDBhMDk1NjRiNQ==', - 'signature': 
uuid.uuid4().hex} - - self.assertRaises(exception.Unauthorized, - self.controller.check_signature, - creds_ref, credentials) - - def test_bad_token_v4(self): - creds_ref = {'secret': - u'e7a7a2240136494986991a6598d9fb9f'} - # token has invalid format of first part - credentials = {'token': - 'QVdTNC1BQUEKWApYClg=', - 'signature': ''} - self.assertRaises(exception.Unauthorized, - self.controller.check_signature, - creds_ref, credentials) - - # token has invalid format of scope - credentials = {'token': - 'QVdTNC1ITUFDLVNIQTI1NgpYCi8vczMvYXdzTl9yZXF1ZXN0Clg=', - 'signature': ''} - self.assertRaises(exception.Unauthorized, - self.controller.check_signature, - creds_ref, credentials) diff --git a/keystone-moon/keystone/tests/unit/test_contrib_simple_cert.py b/keystone-moon/keystone/tests/unit/test_contrib_simple_cert.py deleted file mode 100644 index 111aa5c6..00000000 --- a/keystone-moon/keystone/tests/unit/test_contrib_simple_cert.py +++ /dev/null @@ -1,57 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import uuid - -from six.moves import http_client - -from keystone.tests.unit import test_v3 - - -class BaseTestCase(test_v3.RestfulTestCase): - - CA_PATH = '/v3/OS-SIMPLE-CERT/ca' - CERT_PATH = '/v3/OS-SIMPLE-CERT/certificates' - - -class TestSimpleCert(BaseTestCase): - - def request_cert(self, path): - content_type = 'application/x-pem-file' - response = self.request(app=self.public_app, - method='GET', - path=path, - headers={'Accept': content_type}, - expected_status=http_client.OK) - - self.assertEqual(content_type, response.content_type.lower()) - self.assertIn(b'---BEGIN', response.body) - - return response - - def test_ca_cert(self): - self.request_cert(self.CA_PATH) - - def test_signing_cert(self): - self.request_cert(self.CERT_PATH) - - def test_missing_file(self): - # these files do not exist - self.config_fixture.config(group='signing', - ca_certs=uuid.uuid4().hex, - certfile=uuid.uuid4().hex) - - for path in [self.CA_PATH, self.CERT_PATH]: - self.request(app=self.public_app, - method='GET', - path=path, - expected_status=http_client.INTERNAL_SERVER_ERROR) diff --git a/keystone-moon/keystone/tests/unit/test_credential.py b/keystone-moon/keystone/tests/unit/test_credential.py deleted file mode 100644 index e917ef71..00000000 --- a/keystone-moon/keystone/tests/unit/test_credential.py +++ /dev/null @@ -1,265 +0,0 @@ -# Copyright 2015 UnitedStack, Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import uuid - -from keystoneclient.contrib.ec2 import utils as ec2_utils -from six.moves import http_client - -from keystone.common import utils -from keystone.contrib.ec2 import controllers -from keystone import exception -from keystone.tests import unit -from keystone.tests.unit import default_fixtures -from keystone.tests.unit.ksfixtures import database -from keystone.tests.unit import rest - -CRED_TYPE_EC2 = controllers.CRED_TYPE_EC2 - - -class V2CredentialEc2TestCase(rest.RestfulTestCase): - def setUp(self): - super(V2CredentialEc2TestCase, self).setUp() - self.user_id = self.user_foo['id'] - self.project_id = self.tenant_bar['id'] - - def _get_token_id(self, r): - return r.result['access']['token']['id'] - - def _get_ec2_cred(self): - uri = self._get_ec2_cred_uri() - r = self.public_request(method='POST', token=self.get_scoped_token(), - path=uri, body={'tenant_id': self.project_id}) - return r.result['credential'] - - def _get_ec2_cred_uri(self): - return '/v2.0/users/%s/credentials/OS-EC2' % self.user_id - - def test_ec2_cannot_get_non_ec2_credential(self): - access_key = uuid.uuid4().hex - cred_id = utils.hash_access_key(access_key) - non_ec2_cred = unit.new_credential_ref( - user_id=self.user_id, - project_id=self.project_id) - non_ec2_cred['id'] = cred_id - self.credential_api.create_credential(cred_id, non_ec2_cred) - - # if access_key is not found, ec2 controller raises Unauthorized - # exception - path = '/'.join([self._get_ec2_cred_uri(), access_key]) - self.public_request(method='GET', token=self.get_scoped_token(), - path=path, - expected_status=http_client.UNAUTHORIZED) - - def assertValidErrorResponse(self, r): - # FIXME(wwwjfy): it's copied from test_v3.py. The logic of this method - # in test_v2.py and test_v3.py (both are inherited from rest.py) has no - # difference, so they should be refactored into one place. Also, the - # function signatures in both files don't match the one in the parent - # class in rest.py. 
- resp = r.result - self.assertIsNotNone(resp.get('error')) - self.assertIsNotNone(resp['error'].get('code')) - self.assertIsNotNone(resp['error'].get('title')) - self.assertIsNotNone(resp['error'].get('message')) - self.assertEqual(int(resp['error']['code']), r.status_code) - - def test_ec2_list_credentials(self): - self._get_ec2_cred() - uri = self._get_ec2_cred_uri() - r = self.public_request(method='GET', token=self.get_scoped_token(), - path=uri) - cred_list = r.result['credentials'] - self.assertEqual(1, len(cred_list)) - - # non-EC2 credentials won't be fetched - non_ec2_cred = unit.new_credential_ref( - user_id=self.user_id, - project_id=self.project_id) - non_ec2_cred['type'] = uuid.uuid4().hex - self.credential_api.create_credential(non_ec2_cred['id'], - non_ec2_cred) - r = self.public_request(method='GET', token=self.get_scoped_token(), - path=uri) - cred_list_2 = r.result['credentials'] - # still one element because non-EC2 credentials are not returned. - self.assertEqual(1, len(cred_list_2)) - self.assertEqual(cred_list[0], cred_list_2[0]) - - -class V2CredentialEc2Controller(unit.TestCase): - def setUp(self): - super(V2CredentialEc2Controller, self).setUp() - self.useFixture(database.Database()) - self.load_backends() - self.load_fixtures(default_fixtures) - self.user_id = self.user_foo['id'] - self.project_id = self.tenant_bar['id'] - self.controller = controllers.Ec2Controller() - self.blob, tmp_ref = unit.new_ec2_credential( - user_id=self.user_id, - project_id=self.project_id) - - self.creds_ref = (controllers.Ec2Controller - ._convert_v3_to_ec2_credential(tmp_ref)) - - def test_signature_validate_no_host_port(self): - """Test signature validation with the access/secret provided.""" - access = self.blob['access'] - secret = self.blob['secret'] - signer = ec2_utils.Ec2Signer(secret) - params = {'SignatureMethod': 'HmacSHA256', - 'SignatureVersion': '2', - 'AWSAccessKeyId': access} - request = {'host': 'foo', - 'verb': 'GET', - 'path': '/bar', - 
'params': params} - signature = signer.generate(request) - - sig_ref = {'access': access, - 'signature': signature, - 'host': 'foo', - 'verb': 'GET', - 'path': '/bar', - 'params': params} - - # Now validate the signature based on the dummy request - self.assertTrue(self.controller.check_signature(self.creds_ref, - sig_ref)) - - def test_signature_validate_with_host_port(self): - """Test signature validation when host is bound with port. - - Host is bound with a port, generally, the port here is not the - standard port for the protocol, like '80' for HTTP and port 443 - for HTTPS, the port is not omitted by the client library. - """ - access = self.blob['access'] - secret = self.blob['secret'] - signer = ec2_utils.Ec2Signer(secret) - params = {'SignatureMethod': 'HmacSHA256', - 'SignatureVersion': '2', - 'AWSAccessKeyId': access} - request = {'host': 'foo:8181', - 'verb': 'GET', - 'path': '/bar', - 'params': params} - signature = signer.generate(request) - - sig_ref = {'access': access, - 'signature': signature, - 'host': 'foo:8181', - 'verb': 'GET', - 'path': '/bar', - 'params': params} - - # Now validate the signature based on the dummy request - self.assertTrue(self.controller.check_signature(self.creds_ref, - sig_ref)) - - def test_signature_validate_with_missed_host_port(self): - """Test signature validation when host is bound with well-known port. - - Host is bound with a port, but the port is well-know port like '80' - for HTTP and port 443 for HTTPS, sometimes, client library omit - the port but then make the request with the port. - see (How to create the string to sign): 'http://docs.aws.amazon.com/ - general/latest/gr/signature-version-2.html'. - - Since "credentials['host']" is not set by client library but is - taken from "req.host", so caused the differences. 
- """ - access = self.blob['access'] - secret = self.blob['secret'] - signer = ec2_utils.Ec2Signer(secret) - params = {'SignatureMethod': 'HmacSHA256', - 'SignatureVersion': '2', - 'AWSAccessKeyId': access} - # Omit the port to generate the signature. - cnt_req = {'host': 'foo', - 'verb': 'GET', - 'path': '/bar', - 'params': params} - signature = signer.generate(cnt_req) - - sig_ref = {'access': access, - 'signature': signature, - 'host': 'foo:8080', - 'verb': 'GET', - 'path': '/bar', - 'params': params} - - # Now validate the signature based on the dummy request - # Check the signature again after omitting the port. - self.assertTrue(self.controller.check_signature(self.creds_ref, - sig_ref)) - - def test_signature_validate_no_signature(self): - """Signature is not presented in signature reference data.""" - access = self.blob['access'] - params = {'SignatureMethod': 'HmacSHA256', - 'SignatureVersion': '2', - 'AWSAccessKeyId': access} - - sig_ref = {'access': access, - 'signature': None, - 'host': 'foo:8080', - 'verb': 'GET', - 'path': '/bar', - 'params': params} - - # Now validate the signature based on the dummy request - self.assertRaises(exception.Unauthorized, - self.controller.check_signature, - self.creds_ref, sig_ref) - - def test_signature_validate_invalid_signature(self): - """Signature is not signed on the correct data.""" - access = self.blob['access'] - secret = self.blob['secret'] - signer = ec2_utils.Ec2Signer(secret) - params = {'SignatureMethod': 'HmacSHA256', - 'SignatureVersion': '2', - 'AWSAccessKeyId': access} - request = {'host': 'bar', - 'verb': 'GET', - 'path': '/bar', - 'params': params} - signature = signer.generate(request) - - sig_ref = {'access': access, - 'signature': signature, - 'host': 'foo:8080', - 'verb': 'GET', - 'path': '/bar', - 'params': params} - - # Now validate the signature based on the dummy request - self.assertRaises(exception.Unauthorized, - self.controller.check_signature, - self.creds_ref, sig_ref) - - def 
test_check_non_admin_user(self): - """Checking if user is admin causes uncaught error. - - When checking if a user is an admin, keystone.exception.Unauthorized - is raised but not caught if the user is not an admin. - """ - # make a non-admin user - context = {'is_admin': False, 'token_id': uuid.uuid4().hex} - - # check if user is admin - # no exceptions should be raised - self.controller._is_admin(context) diff --git a/keystone-moon/keystone/tests/unit/test_driver_hints.py b/keystone-moon/keystone/tests/unit/test_driver_hints.py deleted file mode 100644 index 75d76194..00000000 --- a/keystone-moon/keystone/tests/unit/test_driver_hints.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone.common import driver_hints -from keystone.tests.unit import core as test - - -class ListHintsTests(test.TestCase): - - def test_create_iterate_satisfy(self): - hints = driver_hints.Hints() - hints.add_filter('t1', 'data1') - hints.add_filter('t2', 'data2') - self.assertEqual(2, len(hints.filters)) - filter = hints.get_exact_filter_by_name('t1') - self.assertEqual('t1', filter['name']) - self.assertEqual('data1', filter['value']) - self.assertEqual('equals', filter['comparator']) - self.assertFalse(filter['case_sensitive']) - - hints.filters.remove(filter) - filter_count = 0 - for filter in hints.filters: - filter_count += 1 - self.assertEqual('t2', filter['name']) - self.assertEqual(1, filter_count) - - def test_multiple_creates(self): - hints = driver_hints.Hints() - hints.add_filter('t1', 'data1') - hints.add_filter('t2', 'data2') - self.assertEqual(2, len(hints.filters)) - hints2 = driver_hints.Hints() - hints2.add_filter('t4', 'data1') - hints2.add_filter('t5', 'data2') - self.assertEqual(2, len(hints.filters)) - - def test_limits(self): - hints = driver_hints.Hints() - self.assertIsNone(hints.limit) - hints.set_limit(10) - self.assertEqual(10, hints.limit['limit']) - self.assertFalse(hints.limit['truncated']) - hints.set_limit(11) - self.assertEqual(11, hints.limit['limit']) - self.assertFalse(hints.limit['truncated']) - hints.set_limit(10, truncated=True) - self.assertEqual(10, hints.limit['limit']) - self.assertTrue(hints.limit['truncated']) diff --git a/keystone-moon/keystone/tests/unit/test_ec2_token_middleware.py b/keystone-moon/keystone/tests/unit/test_ec2_token_middleware.py deleted file mode 100644 index 03c95e27..00000000 --- a/keystone-moon/keystone/tests/unit/test_ec2_token_middleware.py +++ /dev/null @@ -1,34 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystonemiddleware import ec2_token as ksm_ec2_token - -from keystone.middleware import ec2_token -from keystone.tests import unit as tests - - -class EC2TokenMiddlewareTestBase(tests.BaseTestCase): - def test_symbols(self): - """Verify ec2 middleware symbols. - - Verify that the keystone version of ec2_token middleware forwards the - public symbols from the keystonemiddleware version of the ec2_token - middleware for backwards compatibility. - - """ - - self.assertIs(ksm_ec2_token.app_factory, ec2_token.app_factory) - self.assertIs(ksm_ec2_token.filter_factory, ec2_token.filter_factory) - self.assertTrue( - issubclass(ec2_token.EC2Token, ksm_ec2_token.EC2Token), - 'ec2_token.EC2Token is not subclass of ' - 'keystonemiddleware.ec2_token.EC2Token') diff --git a/keystone-moon/keystone/tests/unit/test_entry_points.py b/keystone-moon/keystone/tests/unit/test_entry_points.py deleted file mode 100644 index e973e942..00000000 --- a/keystone-moon/keystone/tests/unit/test_entry_points.py +++ /dev/null @@ -1,48 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import stevedore -from testtools import matchers - -from keystone.tests.unit import core as test - - -class TestPasteDeploymentEntryPoints(test.TestCase): - def test_entry_point_middleware(self): - """Assert that our list of expected middleware is present.""" - expected_names = [ - 'admin_token_auth', - 'build_auth_context', - 'crud_extension', - 'cors', - 'debug', - 'endpoint_filter_extension', - 'ec2_extension', - 'ec2_extension_v3', - 'federation_extension', - 'json_body', - 'oauth1_extension', - 'request_id', - 'revoke_extension', - 's3_extension', - 'simple_cert_extension', - 'sizelimit', - 'token_auth', - 'url_normalize', - 'user_crud_extension', - ] - - em = stevedore.ExtensionManager('paste.filter_factory') - - actual_names = [extension.name for extension in em] - - self.assertThat(actual_names, matchers.ContainsAll(expected_names)) diff --git a/keystone-moon/keystone/tests/unit/test_exception.py b/keystone-moon/keystone/tests/unit/test_exception.py deleted file mode 100644 index 25ca2c09..00000000 --- a/keystone-moon/keystone/tests/unit/test_exception.py +++ /dev/null @@ -1,273 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import uuid - -from oslo_config import cfg -from oslo_config import fixture as config_fixture -from oslo_serialization import jsonutils -import six - -from keystone.common import wsgi -from keystone import exception -from keystone.tests import unit - - -class ExceptionTestCase(unit.BaseTestCase): - def assertValidJsonRendering(self, e): - resp = wsgi.render_exception(e) - self.assertEqual(e.code, resp.status_int) - self.assertEqual('%s %s' % (e.code, e.title), resp.status) - - j = jsonutils.loads(resp.body) - self.assertIsNotNone(j.get('error')) - self.assertIsNotNone(j['error'].get('code')) - self.assertIsNotNone(j['error'].get('title')) - self.assertIsNotNone(j['error'].get('message')) - self.assertNotIn('\n', j['error']['message']) - self.assertNotIn(' ', j['error']['message']) - self.assertTrue(type(j['error']['code']) is int) - - def test_all_json_renderings(self): - """Everything callable in the exception module should be renderable. - - ... except for the base error class (exception.Error), which is not - user-facing. - - This test provides a custom message to bypass docstring parsing, which - should be tested separately. 
- - """ - for cls in [x for x in exception.__dict__.values() if callable(x)]: - if cls is not exception.Error and isinstance(cls, exception.Error): - self.assertValidJsonRendering(cls(message='Overridden.')) - - def test_validation_error(self): - target = uuid.uuid4().hex - attribute = uuid.uuid4().hex - e = exception.ValidationError(target=target, attribute=attribute) - self.assertValidJsonRendering(e) - self.assertIn(target, six.text_type(e)) - self.assertIn(attribute, six.text_type(e)) - - def test_not_found(self): - target = uuid.uuid4().hex - e = exception.NotFound(target=target) - self.assertValidJsonRendering(e) - self.assertIn(target, six.text_type(e)) - - def test_forbidden_title(self): - e = exception.Forbidden() - resp = wsgi.render_exception(e) - j = jsonutils.loads(resp.body) - self.assertEqual('Forbidden', e.title) - self.assertEqual('Forbidden', j['error'].get('title')) - - def test_unicode_message(self): - message = u'Comment \xe7a va' - e = exception.Error(message) - - try: - self.assertEqual(message, six.text_type(e)) - except UnicodeEncodeError: - self.fail("unicode error message not supported") - - def test_unicode_string(self): - e = exception.ValidationError(attribute='xx', - target='Long \xe2\x80\x93 Dash') - - if six.PY2: - self.assertIn(u'\u2013', six.text_type(e)) - else: - self.assertIn('Long \xe2\x80\x93 Dash', six.text_type(e)) - - def test_invalid_unicode_string(self): - # NOTE(jamielennox): This is a complete failure case so what is - # returned in the exception message is not that important so long - # as there is an error with a message - e = exception.ValidationError(attribute='xx', - target='\xe7a va') - - if six.PY2: - self.assertIn('%(attribute)', six.text_type(e)) - else: - # There's no UnicodeDecodeError on python 3. 
- self.assertIn('\xe7a va', six.text_type(e)) - - -class UnexpectedExceptionTestCase(ExceptionTestCase): - """Tests if internal info is exposed to the API user on UnexpectedError.""" - - class SubClassExc(exception.UnexpectedError): - debug_message_format = 'Debug Message: %(debug_info)s' - - def setUp(self): - super(UnexpectedExceptionTestCase, self).setUp() - self.exc_str = uuid.uuid4().hex - self.config_fixture = self.useFixture(config_fixture.Config(cfg.CONF)) - - def test_unexpected_error_no_debug(self): - self.config_fixture.config(debug=False) - e = exception.UnexpectedError(exception=self.exc_str) - self.assertNotIn(self.exc_str, six.text_type(e)) - - def test_unexpected_error_debug(self): - self.config_fixture.config(debug=True, insecure_debug=True) - e = exception.UnexpectedError(exception=self.exc_str) - self.assertIn(self.exc_str, six.text_type(e)) - - def test_unexpected_error_subclass_no_debug(self): - self.config_fixture.config(debug=False) - e = UnexpectedExceptionTestCase.SubClassExc( - debug_info=self.exc_str) - self.assertEqual(exception.UnexpectedError.message_format, - six.text_type(e)) - - def test_unexpected_error_subclass_debug(self): - self.config_fixture.config(debug=True, insecure_debug=True) - subclass = self.SubClassExc - - e = subclass(debug_info=self.exc_str) - expected = subclass.debug_message_format % {'debug_info': self.exc_str} - self.assertEqual( - '%s %s' % (expected, exception.SecurityError.amendment), - six.text_type(e)) - - def test_unexpected_error_custom_message_no_debug(self): - self.config_fixture.config(debug=False) - e = exception.UnexpectedError(self.exc_str) - self.assertEqual(exception.UnexpectedError.message_format, - six.text_type(e)) - - def test_unexpected_error_custom_message_debug(self): - self.config_fixture.config(debug=True, insecure_debug=True) - e = exception.UnexpectedError(self.exc_str) - self.assertEqual( - '%s %s' % (self.exc_str, exception.SecurityError.amendment), - six.text_type(e)) - - def 
test_unexpected_error_custom_message_exception_debug(self): - self.config_fixture.config(debug=True, insecure_debug=True) - orig_e = exception.NotFound(target=uuid.uuid4().hex) - e = exception.UnexpectedError(orig_e) - self.assertEqual( - '%s %s' % (six.text_type(orig_e), - exception.SecurityError.amendment), - six.text_type(e)) - - def test_unexpected_error_custom_message_binary_debug(self): - self.config_fixture.config(debug=True, insecure_debug=True) - binary_msg = b'something' - e = exception.UnexpectedError(binary_msg) - self.assertEqual( - '%s %s' % (six.text_type(binary_msg), - exception.SecurityError.amendment), - six.text_type(e)) - - -class SecurityErrorTestCase(ExceptionTestCase): - """Tests whether security-related info is exposed to the API user.""" - - def setUp(self): - super(SecurityErrorTestCase, self).setUp() - self.config_fixture = self.useFixture(config_fixture.Config(cfg.CONF)) - - def test_unauthorized_exposure(self): - self.config_fixture.config(debug=False) - - risky_info = uuid.uuid4().hex - e = exception.Unauthorized(message=risky_info) - self.assertValidJsonRendering(e) - self.assertNotIn(risky_info, six.text_type(e)) - - def test_unauthorized_exposure_in_debug(self): - self.config_fixture.config(debug=True, insecure_debug=True) - - risky_info = uuid.uuid4().hex - e = exception.Unauthorized(message=risky_info) - self.assertValidJsonRendering(e) - self.assertIn(risky_info, six.text_type(e)) - - def test_forbidden_exposure(self): - self.config_fixture.config(debug=False) - - risky_info = uuid.uuid4().hex - e = exception.Forbidden(message=risky_info) - self.assertValidJsonRendering(e) - self.assertNotIn(risky_info, six.text_type(e)) - - def test_forbidden_exposure_in_debug(self): - self.config_fixture.config(debug=True, insecure_debug=True) - - risky_info = uuid.uuid4().hex - e = exception.Forbidden(message=risky_info) - self.assertValidJsonRendering(e) - self.assertIn(risky_info, six.text_type(e)) - - def 
test_forbidden_action_exposure(self): - self.config_fixture.config(debug=False) - - risky_info = uuid.uuid4().hex - action = uuid.uuid4().hex - e = exception.ForbiddenAction(message=risky_info, action=action) - self.assertValidJsonRendering(e) - self.assertNotIn(risky_info, six.text_type(e)) - self.assertIn(action, six.text_type(e)) - self.assertNotIn(exception.SecurityError.amendment, six.text_type(e)) - - e = exception.ForbiddenAction(action=action) - self.assertValidJsonRendering(e) - self.assertIn(action, six.text_type(e)) - self.assertNotIn(exception.SecurityError.amendment, six.text_type(e)) - - def test_forbidden_action_exposure_in_debug(self): - self.config_fixture.config(debug=True, insecure_debug=True) - - risky_info = uuid.uuid4().hex - action = uuid.uuid4().hex - - e = exception.ForbiddenAction(message=risky_info, action=action) - self.assertValidJsonRendering(e) - self.assertIn(risky_info, six.text_type(e)) - self.assertIn(exception.SecurityError.amendment, six.text_type(e)) - - e = exception.ForbiddenAction(action=action) - self.assertValidJsonRendering(e) - self.assertIn(action, six.text_type(e)) - self.assertNotIn(exception.SecurityError.amendment, six.text_type(e)) - - def test_forbidden_action_no_message(self): - # When no custom message is given when the ForbiddenAction (or other - # SecurityError subclass) is created the exposed message is the same - # whether debug is enabled or not. 
- - action = uuid.uuid4().hex - - self.config_fixture.config(debug=False) - e = exception.ForbiddenAction(action=action) - exposed_message = six.text_type(e) - self.assertIn(action, exposed_message) - self.assertNotIn(exception.SecurityError.amendment, six.text_type(e)) - - self.config_fixture.config(debug=True) - e = exception.ForbiddenAction(action=action) - self.assertEqual(exposed_message, six.text_type(e)) - - def test_unicode_argument_message(self): - self.config_fixture.config(debug=False) - - risky_info = u'\u7ee7\u7eed\u884c\u7f29\u8fdb\u6216' - e = exception.Forbidden(message=risky_info) - self.assertValidJsonRendering(e) - self.assertNotIn(risky_info, six.text_type(e)) diff --git a/keystone-moon/keystone/tests/unit/test_hacking_checks.py b/keystone-moon/keystone/tests/unit/test_hacking_checks.py deleted file mode 100644 index e279cc7f..00000000 --- a/keystone-moon/keystone/tests/unit/test_hacking_checks.py +++ /dev/null @@ -1,143 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import textwrap - -import mock -import pep8 - -from keystone.tests.hacking import checks -from keystone.tests import unit -from keystone.tests.unit.ksfixtures import hacking as hacking_fixtures - - -class BaseStyleCheck(unit.BaseTestCase): - - def setUp(self): - super(BaseStyleCheck, self).setUp() - self.code_ex = self.useFixture(self.get_fixture()) - self.addCleanup(delattr, self, 'code_ex') - - def get_checker(self): - """Returns the checker to be used for tests in this class.""" - raise NotImplemented('subclasses must provide a real implementation') - - def get_fixture(self): - return hacking_fixtures.HackingCode() - - # We are patching pep8 so that only the check under test is actually - # installed. - @mock.patch('pep8._checks', - {'physical_line': {}, 'logical_line': {}, 'tree': {}}) - def run_check(self, code): - pep8.register_check(self.get_checker()) - - lines = textwrap.dedent(code).strip().splitlines(True) - - checker = pep8.Checker(lines=lines) - checker.check_all() - checker.report._deferred_print.sort() - return checker.report._deferred_print - - def assert_has_errors(self, code, expected_errors=None): - actual_errors = [e[:3] for e in self.run_check(code)] - self.assertEqual(expected_errors or [], actual_errors) - - -class TestCheckForMutableDefaultArgs(BaseStyleCheck): - - def get_checker(self): - return checks.CheckForMutableDefaultArgs - - def test(self): - code = self.code_ex.mutable_default_args['code'] - errors = self.code_ex.mutable_default_args['expected_errors'] - self.assert_has_errors(code, expected_errors=errors) - - -class TestBlockCommentsBeginWithASpace(BaseStyleCheck): - - def get_checker(self): - return checks.block_comments_begin_with_a_space - - def test(self): - code = self.code_ex.comments_begin_with_space['code'] - errors = self.code_ex.comments_begin_with_space['expected_errors'] - self.assert_has_errors(code, expected_errors=errors) - - -class TestAssertingNoneEquality(BaseStyleCheck): - - def get_checker(self): - return 
checks.CheckForAssertingNoneEquality - - def test(self): - code = self.code_ex.asserting_none_equality['code'] - errors = self.code_ex.asserting_none_equality['expected_errors'] - self.assert_has_errors(code, expected_errors=errors) - - -class BaseLoggingCheck(BaseStyleCheck): - - def get_checker(self): - return checks.CheckForLoggingIssues - - def get_fixture(self): - return hacking_fixtures.HackingLogging() - - def assert_has_errors(self, code, expected_errors=None): - - # pull out the parts of the error that we'll match against - actual_errors = (e[:3] for e in self.run_check(code)) - # adjust line numbers to make the fixture data more readable. - import_lines = len(self.code_ex.shared_imports.split('\n')) - 1 - actual_errors = [(e[0] - import_lines, e[1], e[2]) - for e in actual_errors] - self.assertEqual(expected_errors or [], actual_errors) - - -class TestCheckForDebugLoggingIssues(BaseLoggingCheck): - - def test_for_translations(self): - fixture = self.code_ex.assert_no_translations_for_debug_logging - code = self.code_ex.shared_imports + fixture['code'] - errors = fixture['expected_errors'] - self.assert_has_errors(code, expected_errors=errors) - - -class TestLoggingWithWarn(BaseLoggingCheck): - - def test(self): - data = self.code_ex.assert_not_using_deprecated_warn - code = self.code_ex.shared_imports + data['code'] - errors = data['expected_errors'] - self.assert_has_errors(code, expected_errors=errors) - - -class TestCheckForNonDebugLoggingIssues(BaseLoggingCheck): - - def test_for_translations(self): - for example in self.code_ex.examples: - code = self.code_ex.shared_imports + example['code'] - errors = example['expected_errors'] - self.assert_has_errors(code, expected_errors=errors) - - -class TestDictConstructorWithSequenceCopy(BaseStyleCheck): - - def get_checker(self): - return checks.dict_constructor_with_sequence_copy - - def test(self): - code = self.code_ex.dict_constructor['code'] - errors = self.code_ex.dict_constructor['expected_errors'] - 
self.assert_has_errors(code, expected_errors=errors) diff --git a/keystone-moon/keystone/tests/unit/test_ipv6.py b/keystone-moon/keystone/tests/unit/test_ipv6.py deleted file mode 100644 index df59429e..00000000 --- a/keystone-moon/keystone/tests/unit/test_ipv6.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from oslo_config import cfg - -from keystone.common import environment -from keystone.tests import unit -from keystone.tests.unit.ksfixtures import appserver - - -CONF = cfg.CONF - - -class IPv6TestCase(unit.TestCase): - - def setUp(self): - self.skip_if_no_ipv6() - super(IPv6TestCase, self).setUp() - self.load_backends() - - def test_ipv6_ok(self): - """Make sure both public and admin API work with ipv6.""" - paste_conf = self._paste_config('keystone') - - # Verify Admin - with appserver.AppServer(paste_conf, appserver.ADMIN, host="::1"): - conn = environment.httplib.HTTPConnection( - '::1', CONF.eventlet_server.admin_port) - conn.request('GET', '/') - resp = conn.getresponse() - self.assertEqual(300, resp.status) - - # Verify Public - with appserver.AppServer(paste_conf, appserver.MAIN, host="::1"): - conn = environment.httplib.HTTPConnection( - '::1', CONF.eventlet_server.public_port) - conn.request('GET', '/') - resp = conn.getresponse() - self.assertEqual(300, resp.status) diff --git a/keystone-moon/keystone/tests/unit/test_kvs.py b/keystone-moon/keystone/tests/unit/test_kvs.py 
deleted file mode 100644 index a88ee1ac..00000000 --- a/keystone-moon/keystone/tests/unit/test_kvs.py +++ /dev/null @@ -1,586 +0,0 @@ -# Copyright 2013 Metacloud, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import time -import uuid - -from dogpile.cache import api -from dogpile.cache import proxy -import mock -import six -from testtools import matchers - -from keystone.common.kvs.backends import inmemdb -from keystone.common.kvs.backends import memcached -from keystone.common.kvs import core -from keystone import exception -from keystone.tests import unit - - -NO_VALUE = api.NO_VALUE - - -class MutexFixture(object): - def __init__(self, storage_dict, key, timeout): - self.database = storage_dict - self.key = '_lock' + key - - def acquire(self, wait=True): - while True: - try: - self.database[self.key] = 1 - return True - except KeyError: - return False - - def release(self): - self.database.pop(self.key, None) - - -class KVSBackendFixture(inmemdb.MemoryBackend): - def __init__(self, arguments): - class InmemTestDB(dict): - def __setitem__(self, key, value): - if key in self: - raise KeyError('Key %s already exists' % key) - super(InmemTestDB, self).__setitem__(key, value) - - self._db = InmemTestDB() - self.lock_timeout = arguments.pop('lock_timeout', 5) - self.test_arg = arguments.pop('test_arg', None) - - def get_mutex(self, key): - return MutexFixture(self._db, key, self.lock_timeout) - - @classmethod - def key_mangler(cls, key): - return 'KVSBackend_' + 
key - - -class KVSBackendForcedKeyMangleFixture(KVSBackendFixture): - use_backend_key_mangler = True - - @classmethod - def key_mangler(cls, key): - return 'KVSBackendForcedKeyMangle_' + key - - -class RegionProxyFixture(proxy.ProxyBackend): - """A test dogpile.cache proxy that does nothing.""" - - -class RegionProxy2Fixture(proxy.ProxyBackend): - """A test dogpile.cache proxy that does nothing.""" - - -class TestMemcacheDriver(api.CacheBackend): - """A test dogpile.cache backend. - - This test backend conforms to the mixin-mechanism for - overriding set and set_multi methods on dogpile memcached drivers. - """ - - class test_client(object): - # FIXME(morganfainberg): Convert this test client over to using mock - # and/or mock.MagicMock as appropriate - - def __init__(self): - self.__name__ = 'TestingMemcacheDriverClientObject' - self.set_arguments_passed = None - self.keys_values = {} - self.lock_set_time = None - self.lock_expiry = None - - def set(self, key, value, **set_arguments): - self.keys_values.clear() - self.keys_values[key] = value - self.set_arguments_passed = set_arguments - - def set_multi(self, mapping, **set_arguments): - self.keys_values.clear() - self.keys_values = mapping - self.set_arguments_passed = set_arguments - - def add(self, key, value, expiry_time): - # NOTE(morganfainberg): `add` is used in this case for the - # memcache lock testing. If further testing is required around the - # actual memcache `add` interface, this method should be - # expanded to work more like the actual memcache `add` function - if self.lock_expiry is not None and self.lock_set_time is not None: - if time.time() - self.lock_set_time < self.lock_expiry: - return False - self.lock_expiry = expiry_time - self.lock_set_time = time.time() - return True - - def delete(self, key): - # NOTE(morganfainberg): `delete` is used in this case for the - # memcache lock testing. 
If further testing is required around the - # actual memcache `delete` interface, this method should be - # expanded to work more like the actual memcache `delete` function. - self.lock_expiry = None - self.lock_set_time = None - return True - - def __init__(self, arguments): - self.client = self.test_client() - self.set_arguments = {} - # NOTE(morganfainberg): This is the same logic as the dogpile backend - # since we need to mirror that functionality for the `set_argument` - # values to appear on the actual backend. - if 'memcached_expire_time' in arguments: - self.set_arguments['time'] = arguments['memcached_expire_time'] - - def set(self, key, value): - self.client.set(key, value, **self.set_arguments) - - def set_multi(self, mapping): - self.client.set_multi(mapping, **self.set_arguments) - - -class KVSTest(unit.TestCase): - def setUp(self): - super(KVSTest, self).setUp() - self.key_foo = 'foo_' + uuid.uuid4().hex - self.value_foo = uuid.uuid4().hex - self.key_bar = 'bar_' + uuid.uuid4().hex - self.value_bar = {'complex_data_structure': uuid.uuid4().hex} - self.addCleanup(memcached.VALID_DOGPILE_BACKENDS.pop, - 'TestDriver', - None) - memcached.VALID_DOGPILE_BACKENDS['TestDriver'] = TestMemcacheDriver - - def _get_kvs_region(self, name=None): - if name is None: - name = uuid.uuid4().hex - return core.get_key_value_store(name) - - def test_kvs_basic_configuration(self): - # Test that the most basic configuration options pass through to the - # backend. 
- region_one = uuid.uuid4().hex - region_two = uuid.uuid4().hex - test_arg = 100 - kvs = self._get_kvs_region(region_one) - kvs.configure('openstack.kvs.Memory') - - self.assertIsInstance(kvs._region.backend, inmemdb.MemoryBackend) - self.assertEqual(region_one, kvs._region.name) - - kvs = self._get_kvs_region(region_two) - kvs.configure('openstack.kvs.KVSBackendFixture', - test_arg=test_arg) - - self.assertEqual(region_two, kvs._region.name) - self.assertEqual(test_arg, kvs._region.backend.test_arg) - - def test_kvs_proxy_configuration(self): - # Test that proxies are applied correctly and in the correct (reverse) - # order to the kvs region. - kvs = self._get_kvs_region() - kvs.configure( - 'openstack.kvs.Memory', - proxy_list=['keystone.tests.unit.test_kvs.RegionProxyFixture', - 'keystone.tests.unit.test_kvs.RegionProxy2Fixture']) - - self.assertIsInstance(kvs._region.backend, RegionProxyFixture) - self.assertIsInstance(kvs._region.backend.proxied, RegionProxy2Fixture) - self.assertIsInstance(kvs._region.backend.proxied.proxied, - inmemdb.MemoryBackend) - - def test_kvs_key_mangler_fallthrough_default(self): - # Test to make sure we default to the standard dogpile sha1 hashing - # key_mangler - kvs = self._get_kvs_region() - kvs.configure('openstack.kvs.Memory') - - self.assertIs(kvs._region.key_mangler, core.sha1_mangle_key) - # The backend should also have the keymangler set the same as the - # region now. 
- self.assertIs(kvs._region.backend.key_mangler, core.sha1_mangle_key) - - def test_kvs_key_mangler_configuration_backend(self): - kvs = self._get_kvs_region() - kvs.configure('openstack.kvs.KVSBackendFixture') - expected = KVSBackendFixture.key_mangler(self.key_foo) - self.assertEqual(expected, kvs._region.key_mangler(self.key_foo)) - - def test_kvs_key_mangler_configuration_forced_backend(self): - kvs = self._get_kvs_region() - kvs.configure('openstack.kvs.KVSBackendForcedKeyMangleFixture', - key_mangler=core.sha1_mangle_key) - expected = KVSBackendForcedKeyMangleFixture.key_mangler(self.key_foo) - self.assertEqual(expected, kvs._region.key_mangler(self.key_foo)) - - def test_kvs_key_mangler_configuration_disabled(self): - # Test that no key_mangler is set if enable_key_mangler is false - self.config_fixture.config(group='kvs', enable_key_mangler=False) - kvs = self._get_kvs_region() - kvs.configure('openstack.kvs.Memory') - - self.assertIsNone(kvs._region.key_mangler) - self.assertIsNone(kvs._region.backend.key_mangler) - - def test_kvs_key_mangler_set_on_backend(self): - def test_key_mangler(key): - return key - - kvs = self._get_kvs_region() - kvs.configure('openstack.kvs.Memory') - self.assertIs(kvs._region.backend.key_mangler, core.sha1_mangle_key) - kvs._set_key_mangler(test_key_mangler) - self.assertIs(kvs._region.backend.key_mangler, test_key_mangler) - - def test_kvs_basic_get_set_delete(self): - # Test the basic get/set/delete actions on the KVS region - kvs = self._get_kvs_region() - kvs.configure('openstack.kvs.Memory') - - # Not found should be raised if the key doesn't exist - self.assertRaises(exception.NotFound, kvs.get, key=self.key_bar) - kvs.set(self.key_bar, self.value_bar) - returned_value = kvs.get(self.key_bar) - # The returned value should be the same value as the value in .set - self.assertEqual(self.value_bar, returned_value) - # The value should not be the exact object used in .set - self.assertIsNot(returned_value, self.value_bar) - 
kvs.delete(self.key_bar) - # Second delete should raise NotFound - self.assertRaises(exception.NotFound, kvs.delete, key=self.key_bar) - - def _kvs_multi_get_set_delete(self, kvs): - keys = [self.key_foo, self.key_bar] - expected = [self.value_foo, self.value_bar] - - kvs.set_multi({self.key_foo: self.value_foo, - self.key_bar: self.value_bar}) - # Returned value from get_multi should be a list of the values of the - # keys - self.assertEqual(expected, kvs.get_multi(keys)) - # Delete both keys - kvs.delete_multi(keys) - # make sure that NotFound is properly raised when trying to get the now - # deleted keys - self.assertRaises(exception.NotFound, kvs.get_multi, keys=keys) - self.assertRaises(exception.NotFound, kvs.get, key=self.key_foo) - self.assertRaises(exception.NotFound, kvs.get, key=self.key_bar) - # Make sure get_multi raises NotFound if one of the keys isn't found - kvs.set(self.key_foo, self.value_foo) - self.assertRaises(exception.NotFound, kvs.get_multi, keys=keys) - - def test_kvs_multi_get_set_delete(self): - kvs = self._get_kvs_region() - kvs.configure('openstack.kvs.Memory') - - self._kvs_multi_get_set_delete(kvs) - - def test_kvs_locking_context_handler(self): - # Make sure we're creating the correct key/value pairs for the backend - # distributed locking mutex. - self.config_fixture.config(group='kvs', enable_key_mangler=False) - kvs = self._get_kvs_region() - kvs.configure('openstack.kvs.KVSBackendFixture') - - lock_key = '_lock' + self.key_foo - self.assertNotIn(lock_key, kvs._region.backend._db) - with core.KeyValueStoreLock(kvs._mutex(self.key_foo), self.key_foo): - self.assertIn(lock_key, kvs._region.backend._db) - self.assertIs(kvs._region.backend._db[lock_key], 1) - - self.assertNotIn(lock_key, kvs._region.backend._db) - - def test_kvs_locking_context_handler_locking_disabled(self): - # Make sure no creation of key/value pairs for the backend - # distributed locking mutex occurs if locking is disabled. 
- self.config_fixture.config(group='kvs', enable_key_mangler=False) - kvs = self._get_kvs_region() - kvs.configure('openstack.kvs.KVSBackendFixture', locking=False) - lock_key = '_lock' + self.key_foo - self.assertNotIn(lock_key, kvs._region.backend._db) - with core.KeyValueStoreLock(kvs._mutex(self.key_foo), self.key_foo, - False): - self.assertNotIn(lock_key, kvs._region.backend._db) - - self.assertNotIn(lock_key, kvs._region.backend._db) - - def test_kvs_with_lock_action_context_manager_timeout(self): - kvs = self._get_kvs_region() - lock_timeout = 5 - kvs.configure('openstack.kvs.Memory', lock_timeout=lock_timeout) - - def do_with_lock_action_timeout(kvs_region, key, offset): - with kvs_region.get_lock(key) as lock_in_use: - self.assertTrue(lock_in_use.active) - # Subtract the offset from the acquire_time. If this puts the - # acquire_time difference from time.time() at >= lock_timeout - # this should raise a LockTimeout exception. This is because - # there is a built-in 1-second overlap where the context - # manager thinks the lock is expired but the lock is still - # active. This is to help mitigate race conditions on the - # time-check itself. - lock_in_use.acquire_time -= offset - with kvs_region._action_with_lock(key, lock_in_use): - pass - - # This should succeed, we are not timed-out here. 
- do_with_lock_action_timeout(kvs, key=uuid.uuid4().hex, offset=2) - # Try it now with an offset equal to the lock_timeout - self.assertRaises(core.LockTimeout, - do_with_lock_action_timeout, - kvs_region=kvs, - key=uuid.uuid4().hex, - offset=lock_timeout) - # Final test with offset significantly greater than the lock_timeout - self.assertRaises(core.LockTimeout, - do_with_lock_action_timeout, - kvs_region=kvs, - key=uuid.uuid4().hex, - offset=100) - - def test_kvs_with_lock_action_mismatched_keys(self): - kvs = self._get_kvs_region() - kvs.configure('openstack.kvs.Memory') - - def do_with_lock_action(kvs_region, lock_key, target_key): - with kvs_region.get_lock(lock_key) as lock_in_use: - self.assertTrue(lock_in_use.active) - with kvs_region._action_with_lock(target_key, lock_in_use): - pass - - # Ensure we raise a ValueError if the lock key mismatches from the - # target key. - self.assertRaises(ValueError, - do_with_lock_action, - kvs_region=kvs, - lock_key=self.key_foo, - target_key=self.key_bar) - - def test_kvs_with_lock_action_context_manager(self): - # Make sure we're creating the correct key/value pairs for the backend - # distributed locking mutex. 
- self.config_fixture.config(group='kvs', enable_key_mangler=False) - kvs = self._get_kvs_region() - kvs.configure('openstack.kvs.KVSBackendFixture') - - lock_key = '_lock' + self.key_foo - self.assertNotIn(lock_key, kvs._region.backend._db) - with kvs.get_lock(self.key_foo) as lock: - with kvs._action_with_lock(self.key_foo, lock): - self.assertTrue(lock.active) - self.assertIn(lock_key, kvs._region.backend._db) - self.assertIs(kvs._region.backend._db[lock_key], 1) - - self.assertNotIn(lock_key, kvs._region.backend._db) - - def test_kvs_with_lock_action_context_manager_no_lock(self): - # Make sure we're not locking unless an actual lock is passed into the - # context manager - self.config_fixture.config(group='kvs', enable_key_mangler=False) - kvs = self._get_kvs_region() - kvs.configure('openstack.kvs.KVSBackendFixture') - - lock_key = '_lock' + self.key_foo - lock = None - self.assertNotIn(lock_key, kvs._region.backend._db) - with kvs._action_with_lock(self.key_foo, lock): - self.assertNotIn(lock_key, kvs._region.backend._db) - - self.assertNotIn(lock_key, kvs._region.backend._db) - - def test_kvs_backend_registration_does_not_reregister_backends(self): - # SetUp registers the test backends. Running this again would raise an - # exception if re-registration of the backends occurred. 
- kvs = self._get_kvs_region() - kvs.configure('openstack.kvs.Memory') - core._register_backends() - - def test_kvs_memcached_manager_valid_dogpile_memcached_backend(self): - kvs = self._get_kvs_region() - kvs.configure('openstack.kvs.Memcached', - memcached_backend='TestDriver') - self.assertIsInstance(kvs._region.backend.driver, - TestMemcacheDriver) - - def test_kvs_memcached_manager_invalid_dogpile_memcached_backend(self): - # Invalid dogpile memcache backend should raise ValueError - kvs = self._get_kvs_region() - self.assertRaises(ValueError, - kvs.configure, - backing_store='openstack.kvs.Memcached', - memcached_backend=uuid.uuid4().hex) - - def test_kvs_memcache_manager_no_expiry_keys(self): - # Make sure the memcache backend recalculates the no-expiry keys - # correctly when a key-mangler is set on it. - - def new_mangler(key): - return '_mangled_key_' + key - - kvs = self._get_kvs_region() - no_expiry_keys = set(['test_key']) - kvs.configure('openstack.kvs.Memcached', - memcached_backend='TestDriver', - no_expiry_keys=no_expiry_keys) - calculated_keys = set([kvs._region.key_mangler(key) - for key in no_expiry_keys]) - self.assertIs(kvs._region.backend.key_mangler, core.sha1_mangle_key) - self.assertSetEqual(calculated_keys, - kvs._region.backend.no_expiry_hashed_keys) - self.assertSetEqual(no_expiry_keys, - kvs._region.backend.raw_no_expiry_keys) - calculated_keys = set([new_mangler(key) for key in no_expiry_keys]) - kvs._region.backend.key_mangler = new_mangler - self.assertSetEqual(calculated_keys, - kvs._region.backend.no_expiry_hashed_keys) - self.assertSetEqual(no_expiry_keys, - kvs._region.backend.raw_no_expiry_keys) - - def test_kvs_memcache_key_mangler_set_to_none(self): - kvs = self._get_kvs_region() - no_expiry_keys = set(['test_key']) - kvs.configure('openstack.kvs.Memcached', - memcached_backend='TestDriver', - no_expiry_keys=no_expiry_keys) - self.assertIs(kvs._region.backend.key_mangler, core.sha1_mangle_key) - 
kvs._region.backend.key_mangler = None - self.assertSetEqual(kvs._region.backend.raw_no_expiry_keys, - kvs._region.backend.no_expiry_hashed_keys) - self.assertIsNone(kvs._region.backend.key_mangler) - - def test_noncallable_key_mangler_set_on_driver_raises_type_error(self): - kvs = self._get_kvs_region() - kvs.configure('openstack.kvs.Memcached', - memcached_backend='TestDriver') - self.assertRaises(TypeError, - setattr, - kvs._region.backend, - 'key_mangler', - 'Non-Callable') - - def test_kvs_memcache_set_arguments_and_memcache_expires_ttl(self): - # Test the "set_arguments" (arguments passed on all set calls) logic - # and the no-expiry-key modifications of set_arguments for the explicit - # memcache TTL. - self.config_fixture.config(group='kvs', enable_key_mangler=False) - kvs = self._get_kvs_region() - memcache_expire_time = 86400 - - expected_set_args = {'time': memcache_expire_time} - expected_no_expiry_args = {} - - expected_foo_keys = [self.key_foo] - expected_bar_keys = [self.key_bar] - - mapping_foo = {self.key_foo: self.value_foo} - mapping_bar = {self.key_bar: self.value_bar} - - kvs.configure(backing_store='openstack.kvs.Memcached', - memcached_backend='TestDriver', - memcached_expire_time=memcache_expire_time, - some_other_arg=uuid.uuid4().hex, - no_expiry_keys=[self.key_bar]) - kvs_driver = kvs._region.backend.driver - - # Ensure the set_arguments are correct - self.assertDictEqual( - expected_set_args, - kvs._region.backend._get_set_arguments_driver_attr()) - - # Set a key that would have an expiry and verify the correct result - # occurred and that the correct set_arguments were passed. 
- kvs.set(self.key_foo, self.value_foo) - self.assertDictEqual( - expected_set_args, - kvs._region.backend.driver.client.set_arguments_passed) - observed_foo_keys = list(kvs_driver.client.keys_values.keys()) - self.assertEqual(expected_foo_keys, observed_foo_keys) - self.assertEqual( - self.value_foo, - kvs._region.backend.driver.client.keys_values[self.key_foo][0]) - - # Set a key that would not have an expiry and verify the correct result - # occurred and that the correct set_arguments were passed. - kvs.set(self.key_bar, self.value_bar) - self.assertDictEqual( - expected_no_expiry_args, - kvs._region.backend.driver.client.set_arguments_passed) - observed_bar_keys = list(kvs_driver.client.keys_values.keys()) - self.assertEqual(expected_bar_keys, observed_bar_keys) - self.assertEqual( - self.value_bar, - kvs._region.backend.driver.client.keys_values[self.key_bar][0]) - - # set_multi a dict that would have an expiry and verify the correct - # result occurred and that the correct set_arguments were passed. - kvs.set_multi(mapping_foo) - self.assertDictEqual( - expected_set_args, - kvs._region.backend.driver.client.set_arguments_passed) - observed_foo_keys = list(kvs_driver.client.keys_values.keys()) - self.assertEqual(expected_foo_keys, observed_foo_keys) - self.assertEqual( - self.value_foo, - kvs._region.backend.driver.client.keys_values[self.key_foo][0]) - - # set_multi a dict that would not have an expiry and verify the correct - # result occurred and that the correct set_arguments were passed. 
- kvs.set_multi(mapping_bar) - self.assertDictEqual( - expected_no_expiry_args, - kvs._region.backend.driver.client.set_arguments_passed) - observed_bar_keys = list(kvs_driver.client.keys_values.keys()) - self.assertEqual(expected_bar_keys, observed_bar_keys) - self.assertEqual( - self.value_bar, - kvs._region.backend.driver.client.keys_values[self.key_bar][0]) - - def test_memcached_lock_max_lock_attempts(self): - kvs = self._get_kvs_region() - max_lock_attempts = 1 - test_key = uuid.uuid4().hex - - kvs.configure(backing_store='openstack.kvs.Memcached', - memcached_backend='TestDriver', - max_lock_attempts=max_lock_attempts) - - self.assertEqual(max_lock_attempts, - kvs._region.backend.max_lock_attempts) - # Simple Lock success test - with kvs.get_lock(test_key) as lock: - kvs.set(test_key, 'testing', lock) - - def lock_within_a_lock(key): - with kvs.get_lock(key) as first_lock: - kvs.set(test_key, 'lock', first_lock) - with kvs.get_lock(key) as second_lock: - kvs.set(key, 'lock-within-a-lock', second_lock) - - self.assertRaises(exception.UnexpectedError, - lock_within_a_lock, - key=test_key) - - -class TestMemcachedBackend(unit.TestCase): - - @mock.patch('keystone.common.kvs.backends.memcached._', six.text_type) - def test_invalid_backend_fails_initialization(self): - raises_valueerror = matchers.Raises(matchers.MatchesException( - ValueError, r'.*FakeBackend.*')) - - options = { - 'url': 'needed to get to the focus of this test (the backend)', - 'memcached_backend': 'FakeBackend', - } - self.assertThat(lambda: memcached.MemcachedBackend(options), - raises_valueerror) diff --git a/keystone-moon/keystone/tests/unit/test_ldap_livetest.py b/keystone-moon/keystone/tests/unit/test_ldap_livetest.py deleted file mode 100644 index 4bce6a73..00000000 --- a/keystone-moon/keystone/tests/unit/test_ldap_livetest.py +++ /dev/null @@ -1,217 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use 
this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import subprocess -import uuid - -import ldap.modlist -from oslo_config import cfg -from six.moves import range - -from keystone import exception -from keystone.identity.backends import ldap as identity_ldap -from keystone.tests import unit -from keystone.tests.unit import test_backend_ldap - - -CONF = cfg.CONF - - -def create_object(dn, attrs): - conn = ldap.initialize(CONF.ldap.url) - conn.simple_bind_s(CONF.ldap.user, CONF.ldap.password) - ldif = ldap.modlist.addModlist(attrs) - conn.add_s(dn, ldif) - conn.unbind_s() - - -class LiveLDAPIdentity(test_backend_ldap.LDAPIdentity): - - def setUp(self): - self._ldap_skip_live() - super(LiveLDAPIdentity, self).setUp() - - def _ldap_skip_live(self): - self.skip_if_env_not_set('ENABLE_LDAP_LIVE_TEST') - - def clear_database(self): - devnull = open('/dev/null', 'w') - subprocess.call(['ldapdelete', - '-x', - '-D', CONF.ldap.user, - '-H', CONF.ldap.url, - '-w', CONF.ldap.password, - '-r', CONF.ldap.suffix], - stderr=devnull) - - if CONF.ldap.suffix.startswith('ou='): - tree_dn_attrs = {'objectclass': 'organizationalUnit', - 'ou': 'openstack'} - else: - tree_dn_attrs = {'objectclass': ['dcObject', 'organizationalUnit'], - 'dc': 'openstack', - 'ou': 'openstack'} - create_object(CONF.ldap.suffix, tree_dn_attrs) - create_object(CONF.ldap.user_tree_dn, - {'objectclass': 'organizationalUnit', - 'ou': 'Users'}) - create_object(CONF.ldap.role_tree_dn, - {'objectclass': 'organizationalUnit', - 'ou': 'Roles'}) - create_object(CONF.ldap.group_tree_dn, - {'objectclass': 
'organizationalUnit', - 'ou': 'UserGroups'}) - - def config_files(self): - config_files = super(LiveLDAPIdentity, self).config_files() - config_files.append(unit.dirs.tests_conf('backend_liveldap.conf')) - return config_files - - def test_build_tree(self): - """Regression test for building the tree names.""" - # logic is different from the fake backend. - user_api = identity_ldap.UserApi(CONF) - self.assertTrue(user_api) - self.assertEqual(user_api.tree_dn, CONF.ldap.user_tree_dn) - - def test_ldap_dereferencing(self): - alt_users_ldif = {'objectclass': ['top', 'organizationalUnit'], - 'ou': 'alt_users'} - alt_fake_user_ldif = {'objectclass': ['person', 'inetOrgPerson'], - 'cn': 'alt_fake1', - 'sn': 'alt_fake1'} - aliased_users_ldif = {'objectclass': ['alias', 'extensibleObject'], - 'aliasedobjectname': "ou=alt_users,%s" % - CONF.ldap.suffix} - create_object("ou=alt_users,%s" % CONF.ldap.suffix, alt_users_ldif) - create_object("%s=alt_fake1,ou=alt_users,%s" % - (CONF.ldap.user_id_attribute, CONF.ldap.suffix), - alt_fake_user_ldif) - create_object("ou=alt_users,%s" % CONF.ldap.user_tree_dn, - aliased_users_ldif) - - self.config_fixture.config(group='ldap', - query_scope='sub', - alias_dereferencing='never') - self.identity_api = identity_ldap.Identity() - self.assertRaises(exception.UserNotFound, - self.identity_api.get_user, - 'alt_fake1') - - self.config_fixture.config(group='ldap', - alias_dereferencing='searching') - self.identity_api = identity_ldap.Identity() - user_ref = self.identity_api.get_user('alt_fake1') - self.assertEqual('alt_fake1', user_ref['id']) - - self.config_fixture.config(group='ldap', alias_dereferencing='always') - self.identity_api = identity_ldap.Identity() - user_ref = self.identity_api.get_user('alt_fake1') - self.assertEqual('alt_fake1', user_ref['id']) - - # FakeLDAP does not correctly process filters, so this test can only be - # run against a live LDAP server - def test_list_groups_for_user_filtered(self): - domain = 
self._get_domain_fixture() - test_groups = [] - test_users = [] - GROUP_COUNT = 3 - USER_COUNT = 2 - - for x in range(0, USER_COUNT): - # TODO(shaleh): use unit.new_user_ref() - new_user = {'name': uuid.uuid4().hex, 'password': uuid.uuid4().hex, - 'enabled': True, 'domain_id': domain['id']} - new_user = self.identity_api.create_user(new_user) - test_users.append(new_user) - positive_user = test_users[0] - negative_user = test_users[1] - - for x in range(0, USER_COUNT): - group_refs = self.identity_api.list_groups_for_user( - test_users[x]['id']) - self.assertEqual(0, len(group_refs)) - - for x in range(0, GROUP_COUNT): - new_group = unit.new_group_ref(domain_id=domain['id']) - new_group = self.identity_api.create_group(new_group) - test_groups.append(new_group) - - group_refs = self.identity_api.list_groups_for_user( - positive_user['id']) - self.assertEqual(x, len(group_refs)) - - self.identity_api.add_user_to_group( - positive_user['id'], - new_group['id']) - group_refs = self.identity_api.list_groups_for_user( - positive_user['id']) - self.assertEqual(x + 1, len(group_refs)) - - group_refs = self.identity_api.list_groups_for_user( - negative_user['id']) - self.assertEqual(0, len(group_refs)) - - driver = self.identity_api._select_identity_driver( - CONF.identity.default_domain_id) - driver.group.ldap_filter = '(dn=xx)' - - group_refs = self.identity_api.list_groups_for_user( - positive_user['id']) - self.assertEqual(0, len(group_refs)) - group_refs = self.identity_api.list_groups_for_user( - negative_user['id']) - self.assertEqual(0, len(group_refs)) - - driver.group.ldap_filter = '(objectclass=*)' - - group_refs = self.identity_api.list_groups_for_user( - positive_user['id']) - self.assertEqual(GROUP_COUNT, len(group_refs)) - group_refs = self.identity_api.list_groups_for_user( - negative_user['id']) - self.assertEqual(0, len(group_refs)) - - def test_user_enable_attribute_mask(self): - self.config_fixture.config( - group='ldap', - user_enabled_emulation=False, 
- user_enabled_attribute='employeeType') - super(LiveLDAPIdentity, self).test_user_enable_attribute_mask() - - def test_create_project_case_sensitivity(self): - # The attribute used for the live LDAP tests is case insensitive. - - def call_super(): - (super(LiveLDAPIdentity, self). - test_create_project_case_sensitivity()) - - self.assertRaises(exception.Conflict, call_super) - - def test_create_user_case_sensitivity(self): - # The attribute used for the live LDAP tests is case insensitive. - - def call_super(): - super(LiveLDAPIdentity, self).test_create_user_case_sensitivity() - - self.assertRaises(exception.Conflict, call_super) - - def test_project_update_missing_attrs_with_a_falsey_value(self): - # The description attribute doesn't allow an empty value. - - def call_super(): - (super(LiveLDAPIdentity, self). - test_project_update_missing_attrs_with_a_falsey_value()) - - self.assertRaises(ldap.INVALID_SYNTAX, call_super) diff --git a/keystone-moon/keystone/tests/unit/test_ldap_pool_livetest.py b/keystone-moon/keystone/tests/unit/test_ldap_pool_livetest.py deleted file mode 100644 index a284114a..00000000 --- a/keystone-moon/keystone/tests/unit/test_ldap_pool_livetest.py +++ /dev/null @@ -1,202 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import uuid - -import ldappool -from oslo_config import cfg - -from keystone.common.ldap import core as ldap_core -from keystone.identity.backends import ldap -from keystone.tests import unit -from keystone.tests.unit import fakeldap -from keystone.tests.unit import test_backend_ldap_pool -from keystone.tests.unit import test_ldap_livetest - - -CONF = cfg.CONF - - -class LiveLDAPPoolIdentity(test_backend_ldap_pool.LdapPoolCommonTestMixin, - test_ldap_livetest.LiveLDAPIdentity): - """Executes existing LDAP live test with pooled LDAP handler. - - Also executes common pool specific tests via Mixin class. - - """ - - def setUp(self): - super(LiveLDAPPoolIdentity, self).setUp() - self.addCleanup(self.cleanup_pools) - # storing to local variable to avoid long references - self.conn_pools = ldap_core.PooledLDAPHandler.connection_pools - - def config_files(self): - config_files = super(LiveLDAPPoolIdentity, self).config_files() - config_files.append(unit.dirs.tests_conf('backend_pool_liveldap.conf')) - return config_files - - def test_assert_connector_used_not_fake_ldap_pool(self): - handler = ldap_core._get_connection(CONF.ldap.url, use_pool=True) - self.assertNotEqual(type(handler.Connector), - type(fakeldap.FakeLdapPool)) - self.assertEqual(type(ldappool.StateConnector), - type(handler.Connector)) - - def test_async_search_and_result3(self): - self.config_fixture.config(group='ldap', page_size=1) - self.test_user_enable_attribute_mask() - - def test_pool_size_expands_correctly(self): - - who = CONF.ldap.user - cred = CONF.ldap.password - # get related connection manager instance - ldappool_cm = self.conn_pools[CONF.ldap.url] - - def _get_conn(): - return ldappool_cm.connection(who, cred) - - with _get_conn() as c1: # 1 - self.assertEqual(1, len(ldappool_cm)) - self.assertTrue(c1.connected, True) - self.assertTrue(c1.active, True) - with _get_conn() as c2: # conn2 - self.assertEqual(2, len(ldappool_cm)) - self.assertTrue(c2.connected) - self.assertTrue(c2.active) - - 
self.assertEqual(2, len(ldappool_cm)) - # c2 went out of context, its connected but not active - self.assertTrue(c2.connected) - self.assertFalse(c2.active) - with _get_conn() as c3: # conn3 - self.assertEqual(2, len(ldappool_cm)) - self.assertTrue(c3.connected) - self.assertTrue(c3.active) - self.assertTrue(c3 is c2) # same connection is reused - self.assertTrue(c2.active) - with _get_conn() as c4: # conn4 - self.assertEqual(3, len(ldappool_cm)) - self.assertTrue(c4.connected) - self.assertTrue(c4.active) - - def test_password_change_with_auth_pool_disabled(self): - self.config_fixture.config(group='ldap', use_auth_pool=False) - old_password = self.user_sna['password'] - - self.test_password_change_with_pool() - - self.assertRaises(AssertionError, - self.identity_api.authenticate, - context={}, - user_id=self.user_sna['id'], - password=old_password) - - def _create_user_and_authenticate(self, password): - # TODO(shaleh): port to new_user_ref() - user_dict = { - 'domain_id': CONF.identity.default_domain_id, - 'name': uuid.uuid4().hex, - 'password': password} - user = self.identity_api.create_user(user_dict) - - self.identity_api.authenticate( - context={}, - user_id=user['id'], - password=password) - - return self.identity_api.get_user(user['id']) - - def _get_auth_conn_pool_cm(self): - pool_url = ldap_core.PooledLDAPHandler.auth_pool_prefix + CONF.ldap.url - return self.conn_pools[pool_url] - - def _do_password_change_for_one_user(self, password, new_password): - self.config_fixture.config(group='ldap', use_auth_pool=True) - self.cleanup_pools() - self.load_backends() - - user1 = self._create_user_and_authenticate(password) - auth_cm = self._get_auth_conn_pool_cm() - self.assertEqual(1, len(auth_cm)) - user2 = self._create_user_and_authenticate(password) - self.assertEqual(1, len(auth_cm)) - user3 = self._create_user_and_authenticate(password) - self.assertEqual(1, len(auth_cm)) - user4 = self._create_user_and_authenticate(password) - self.assertEqual(1, 
len(auth_cm)) - user5 = self._create_user_and_authenticate(password) - self.assertEqual(1, len(auth_cm)) - - # connection pool size remains 1 even for different user ldap bind - # as there is only one active connection at a time - - user_api = ldap.UserApi(CONF) - u1_dn = user_api._id_to_dn_string(user1['id']) - u2_dn = user_api._id_to_dn_string(user2['id']) - u3_dn = user_api._id_to_dn_string(user3['id']) - u4_dn = user_api._id_to_dn_string(user4['id']) - u5_dn = user_api._id_to_dn_string(user5['id']) - - # now create multiple active connections for end user auth case which - # will force to keep them in pool. After that, modify one of user - # password. Need to make sure that user connection is in middle - # of pool list. - auth_cm = self._get_auth_conn_pool_cm() - with auth_cm.connection(u1_dn, password) as _: - with auth_cm.connection(u2_dn, password) as _: - with auth_cm.connection(u3_dn, password) as _: - with auth_cm.connection(u4_dn, password) as _: - with auth_cm.connection(u5_dn, password) as _: - self.assertEqual(5, len(auth_cm)) - _.unbind_s() - - user3['password'] = new_password - self.identity_api.update_user(user3['id'], user3) - - return user3 - - def test_password_change_with_auth_pool_enabled_long_lifetime(self): - self.config_fixture.config(group='ldap', - auth_pool_connection_lifetime=600) - old_password = 'my_password' - new_password = 'new_password' - user = self._do_password_change_for_one_user(old_password, - new_password) - user.pop('password') - - # with long connection lifetime auth_pool can bind to old password - # successfully which is not desired if password change is frequent - # use case in a deployment. - # This can happen in multiple concurrent connections case only. 
- user_ref = self.identity_api.authenticate( - context={}, user_id=user['id'], password=old_password) - - self.assertDictEqual(user, user_ref) - - def test_password_change_with_auth_pool_enabled_no_lifetime(self): - self.config_fixture.config(group='ldap', - auth_pool_connection_lifetime=0) - - old_password = 'my_password' - new_password = 'new_password' - user = self._do_password_change_for_one_user(old_password, - new_password) - # now as connection lifetime is zero, so authentication - # with old password will always fail. - self.assertRaises(AssertionError, - self.identity_api.authenticate, - context={}, user_id=user['id'], - password=old_password) diff --git a/keystone-moon/keystone/tests/unit/test_ldap_tls_livetest.py b/keystone-moon/keystone/tests/unit/test_ldap_tls_livetest.py deleted file mode 100644 index 98e2882d..00000000 --- a/keystone-moon/keystone/tests/unit/test_ldap_tls_livetest.py +++ /dev/null @@ -1,119 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# Copyright 2013 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import ldap.modlist -from oslo_config import cfg - -from keystone import exception -from keystone import identity -from keystone.tests import unit -from keystone.tests.unit import test_ldap_livetest - - -CONF = cfg.CONF - - -def create_object(dn, attrs): - conn = ldap.initialize(CONF.ldap.url) - conn.simple_bind_s(CONF.ldap.user, CONF.ldap.password) - ldif = ldap.modlist.addModlist(attrs) - conn.add_s(dn, ldif) - conn.unbind_s() - - -class LiveTLSLDAPIdentity(test_ldap_livetest.LiveLDAPIdentity): - - def _ldap_skip_live(self): - self.skip_if_env_not_set('ENABLE_TLS_LDAP_LIVE_TEST') - - def config_files(self): - config_files = super(LiveTLSLDAPIdentity, self).config_files() - config_files.append(unit.dirs.tests_conf('backend_tls_liveldap.conf')) - return config_files - - def test_tls_certfile_demand_option(self): - self.config_fixture.config(group='ldap', - use_tls=True, - tls_cacertdir=None, - tls_req_cert='demand') - self.identity_api = identity.backends.ldap.Identity() - - # TODO(shaleh): use new_user_ref() - user = {'name': 'fake1', - 'password': 'fakepass1', - 'tenants': ['bar']} - user = self.identity_api.create_user('user') - user_ref = self.identity_api.get_user(user['id']) - self.assertEqual(user['id'], user_ref['id']) - - user['password'] = 'fakepass2' - self.identity_api.update_user(user['id'], user) - - self.identity_api.delete_user(user['id']) - self.assertRaises(exception.UserNotFound, self.identity_api.get_user, - user['id']) - - def test_tls_certdir_demand_option(self): - self.config_fixture.config(group='ldap', - use_tls=True, - tls_cacertdir=None, - tls_req_cert='demand') - self.identity_api = identity.backends.ldap.Identity() - - # TODO(shaleh): use new_user_ref() - user = {'id': 'fake1', - 'name': 'fake1', - 'password': 'fakepass1', - 'tenants': ['bar']} - self.identity_api.create_user('fake1', user) - user_ref = self.identity_api.get_user('fake1') - self.assertEqual('fake1', user_ref['id']) - - user['password'] = 'fakepass2' - 
self.identity_api.update_user('fake1', user) - - self.identity_api.delete_user('fake1') - self.assertRaises(exception.UserNotFound, self.identity_api.get_user, - 'fake1') - - def test_tls_bad_certfile(self): - self.config_fixture.config( - group='ldap', - use_tls=True, - tls_req_cert='demand', - tls_cacertfile='/etc/keystone/ssl/certs/mythicalcert.pem', - tls_cacertdir=None) - self.identity_api = identity.backends.ldap.Identity() - - # TODO(shaleh): use new_user_ref() - user = {'name': 'fake1', - 'password': 'fakepass1', - 'tenants': ['bar']} - self.assertRaises(IOError, self.identity_api.create_user, user) - - def test_tls_bad_certdir(self): - self.config_fixture.config( - group='ldap', - use_tls=True, - tls_cacertfile=None, - tls_req_cert='demand', - tls_cacertdir='/etc/keystone/ssl/mythicalcertdir') - self.identity_api = identity.backends.ldap.Identity() - - # TODO(shaleh): use new_user_ref() - user = {'name': 'fake1', - 'password': 'fakepass1', - 'tenants': ['bar']} - self.assertRaises(IOError, self.identity_api.create_user, user) diff --git a/keystone-moon/keystone/tests/unit/test_middleware.py b/keystone-moon/keystone/tests/unit/test_middleware.py deleted file mode 100644 index d33e8c00..00000000 --- a/keystone-moon/keystone/tests/unit/test_middleware.py +++ /dev/null @@ -1,764 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import copy -import hashlib -import uuid - -from oslo_config import cfg -from six.moves import http_client -import webtest - -from keystone.common import authorization -from keystone.common import tokenless_auth -from keystone import exception -from keystone.federation import constants as federation_constants -from keystone import middleware -from keystone.tests import unit -from keystone.tests.unit import mapping_fixtures -from keystone.tests.unit import test_backend_sql - - -CONF = cfg.CONF - - -class MiddlewareRequestTestBase(unit.TestCase): - - MIDDLEWARE_CLASS = None # override this in subclasses - - def _application(self): - """A base wsgi application that returns a simple response.""" - def app(environ, start_response): - # WSGI requires the body of the response to be six.binary_type - body = uuid.uuid4().hex.encode('utf-8') - resp_headers = [('Content-Type', 'text/html; charset=utf8'), - ('Content-Length', str(len(body)))] - start_response('200 OK', resp_headers) - return [body] - - return app - - def _generate_app_response(self, app, headers=None, method='get', - path='/', **kwargs): - """Given a wsgi application wrap it in webtest and call it.""" - return getattr(webtest.TestApp(app), method)(path, - headers=headers or {}, - **kwargs) - - def _middleware_failure(self, exc, *args, **kwargs): - """Assert that an exception is being thrown from process_request.""" - # NOTE(jamielennox): This is a little ugly. We need to call the webtest - # framework so that the correct RequestClass object is created for when - # we call process_request. However because we go via webtest we only - # see the response object and not the actual exception that is thrown - # by process_request. To get around this we subclass process_request - # with something that checks for the right type of exception being - # thrown so we can test the middle of the request process. 
- # TODO(jamielennox): Change these tests to test the value of the - # response rather than the error that is raised. - - class _Failing(self.MIDDLEWARE_CLASS): - - _called = False - - def process_request(i_self, *i_args, **i_kwargs): - # i_ to distinguish it from and not clobber the outer vars - e = self.assertRaises(exc, - super(_Failing, i_self).process_request, - *i_args, **i_kwargs) - i_self._called = True - raise e - - # by default the returned status when an uncaught exception is raised - # for validation or caught errors this will likely be 400 - kwargs.setdefault('status', http_client.INTERNAL_SERVER_ERROR) # 500 - - app = _Failing(self._application()) - resp = self._generate_app_response(app, *args, **kwargs) - self.assertTrue(app._called) - return resp - - def _do_middleware_response(self, *args, **kwargs): - """Wrap a middleware around a sample application and call it.""" - app = self.MIDDLEWARE_CLASS(self._application()) - return self._generate_app_response(app, *args, **kwargs) - - def _do_middleware_request(self, *args, **kwargs): - """The request object from a successful middleware call.""" - return self._do_middleware_response(*args, **kwargs).request - - -class TokenAuthMiddlewareTest(MiddlewareRequestTestBase): - - MIDDLEWARE_CLASS = middleware.TokenAuthMiddleware - - def test_request(self): - headers = {middleware.AUTH_TOKEN_HEADER: 'MAGIC'} - req = self._do_middleware_request(headers=headers) - context = req.environ[middleware.CONTEXT_ENV] - self.assertEqual('MAGIC', context['token_id']) - - -class AdminTokenAuthMiddlewareTest(MiddlewareRequestTestBase): - - MIDDLEWARE_CLASS = middleware.AdminTokenAuthMiddleware - - def config_overrides(self): - super(AdminTokenAuthMiddlewareTest, self).config_overrides() - self.config_fixture.config( - admin_token='ADMIN') - - def test_request_admin(self): - headers = {middleware.AUTH_TOKEN_HEADER: 'ADMIN'} - req = self._do_middleware_request(headers=headers) - 
self.assertTrue(req.environ[middleware.CONTEXT_ENV]['is_admin']) - - def test_request_non_admin(self): - headers = {middleware.AUTH_TOKEN_HEADER: 'NOT-ADMIN'} - req = self._do_middleware_request(headers=headers) - self.assertFalse(req.environ[middleware.CONTEXT_ENV]['is_admin']) - - -class JsonBodyMiddlewareTest(MiddlewareRequestTestBase): - - MIDDLEWARE_CLASS = middleware.JsonBodyMiddleware - - def test_request_with_params(self): - headers = {'Content-Type': 'application/json'} - params = '{"arg1": "one", "arg2": ["a"]}' - req = self._do_middleware_request(params=params, - headers=headers, - method='post') - self.assertEqual({"arg1": "one", "arg2": ["a"]}, - req.environ[middleware.PARAMS_ENV]) - - def test_malformed_json(self): - headers = {'Content-Type': 'application/json'} - self._do_middleware_response(params='{"arg1": "on', - headers=headers, - method='post', - status=http_client.BAD_REQUEST) - - def test_not_dict_body(self): - headers = {'Content-Type': 'application/json'} - resp = self._do_middleware_response(params='42', - headers=headers, - method='post', - status=http_client.BAD_REQUEST) - - self.assertIn('valid JSON object', resp.json['error']['message']) - - def test_no_content_type(self): - headers = {'Content-Type': ''} - params = '{"arg1": "one", "arg2": ["a"]}' - req = self._do_middleware_request(params=params, - headers=headers, - method='post') - self.assertEqual({"arg1": "one", "arg2": ["a"]}, - req.environ[middleware.PARAMS_ENV]) - - def test_unrecognized_content_type(self): - headers = {'Content-Type': 'text/plain'} - self._do_middleware_response(params='{"arg1": "one", "arg2": ["a"]}', - headers=headers, - method='post', - status=http_client.BAD_REQUEST) - - def test_unrecognized_content_type_without_body(self): - headers = {'Content-Type': 'text/plain'} - req = self._do_middleware_request(headers=headers) - self.assertEqual({}, req.environ.get(middleware.PARAMS_ENV, {})) - - -class AuthContextMiddlewareTest(test_backend_sql.SqlTests, - 
MiddlewareRequestTestBase): - - MIDDLEWARE_CLASS = middleware.AuthContextMiddleware - - def setUp(self): - super(AuthContextMiddlewareTest, self).setUp() - self.client_issuer = uuid.uuid4().hex - self.untrusted_client_issuer = uuid.uuid4().hex - self.trusted_issuer = self.client_issuer - self.config_fixture.config(group='tokenless_auth', - trusted_issuer=[self.trusted_issuer]) - - # client_issuer is encoded because you can't hash - # unicode objects with hashlib. - # This idp_id is calculated based on sha256(self.client_issuer) - hashed_idp = hashlib.sha256(self.client_issuer.encode('utf-8')) - self.idp_id = hashed_idp.hexdigest() - self._load_sample_data() - - def _load_sample_data(self): - self.protocol_id = 'x509' - - # 1) Create a domain for the user. - self.domain = unit.new_domain_ref() - self.domain_id = self.domain['id'] - self.domain_name = self.domain['name'] - self.resource_api.create_domain(self.domain_id, self.domain) - - # 2) Create a project for the user. - self.project = unit.new_project_ref(domain_id=self.domain_id) - self.project_id = self.project['id'] - self.project_name = self.project['name'] - - self.resource_api.create_project(self.project_id, self.project) - - # 3) Create a user in new domain. 
- self.user = unit.new_user_ref(domain_id=self.domain_id, - project_id=self.project_id) - - self.user = self.identity_api.create_user(self.user) - - # Add IDP - self.idp = self._idp_ref(id=self.idp_id) - self.federation_api.create_idp(self.idp['id'], - self.idp) - - # Add a role - self.role = unit.new_role_ref() - self.role_id = self.role['id'] - self.role_name = self.role['name'] - self.role_api.create_role(self.role_id, self.role) - - # Add a group - self.group = unit.new_group_ref(domain_id=self.domain_id) - self.group = self.identity_api.create_group(self.group) - - # Assign a role to the user on a project - self.assignment_api.add_role_to_user_and_project( - user_id=self.user['id'], - tenant_id=self.project_id, - role_id=self.role_id) - - # Assign a role to the group on a project - self.assignment_api.create_grant( - role_id=self.role_id, - group_id=self.group['id'], - project_id=self.project_id) - - def _load_mapping_rules(self, rules): - # Add a mapping - self.mapping = self._mapping_ref(rules=rules) - self.federation_api.create_mapping(self.mapping['id'], - self.mapping) - # Add protocols - self.proto_x509 = self._proto_ref(mapping_id=self.mapping['id']) - self.proto_x509['id'] = self.protocol_id - self.federation_api.create_protocol(self.idp['id'], - self.proto_x509['id'], - self.proto_x509) - - def _idp_ref(self, id=None): - idp = { - 'id': id or uuid.uuid4().hex, - 'enabled': True, - 'description': uuid.uuid4().hex - } - return idp - - def _proto_ref(self, mapping_id=None): - proto = { - 'id': uuid.uuid4().hex, - 'mapping_id': mapping_id or uuid.uuid4().hex - } - return proto - - def _mapping_ref(self, rules=None): - if rules is None: - mapped_rules = {} - else: - mapped_rules = rules.get('rules', {}) - return { - 'id': uuid.uuid4().hex, - 'rules': mapped_rules - } - - def _assert_tokenless_auth_context(self, context, ephemeral_user=False): - self.assertIsNotNone(context) - self.assertEqual(self.project_id, context['project_id']) - 
self.assertIn(self.role_name, context['roles']) - if ephemeral_user: - self.assertEqual(self.group['id'], context['group_ids'][0]) - self.assertEqual('ephemeral', - context[federation_constants.PROTOCOL]) - self.assertEqual(self.idp_id, - context[federation_constants.IDENTITY_PROVIDER]) - else: - self.assertEqual(self.user['id'], context['user_id']) - - def _create_context(self, request, mapping_ref=None, - exception_expected=False): - """Builds the auth context from the given arguments. - - auth context will be returned from the AuthContextMiddleware based on - what is being passed in the given request and what mapping is being - setup in the backend DB. - - :param request: HTTP request - :param mapping_ref: A mapping in JSON structure will be setup in the - backend DB for mapping a user or a group. - :param exception_expected: Sets to True when an exception is expected - to raised based on the given arguments. - :returns: context an auth context contains user and role information - :rtype: dict - """ - if mapping_ref: - self._load_mapping_rules(mapping_ref) - - if not exception_expected: - (middleware.AuthContextMiddleware('Tokenless_auth_test'). 
- process_request(request)) - context = request.environ.get(authorization.AUTH_CONTEXT_ENV) - else: - context = middleware.AuthContextMiddleware('Tokenless_auth_test') - return context - - def test_context_already_exists(self): - stub_value = uuid.uuid4().hex - env = {authorization.AUTH_CONTEXT_ENV: stub_value} - req = self._do_middleware_request(extra_environ=env) - self.assertEqual(stub_value, - req.environ.get(authorization.AUTH_CONTEXT_ENV)) - - def test_not_applicable_to_token_request(self): - req = self._do_middleware_request(path='/auth/tokens', method='post') - context = req.environ.get(authorization.AUTH_CONTEXT_ENV) - self.assertIsNone(context) - - def test_no_tokenless_attributes_request(self): - req = self._do_middleware_request() - context = req.environ.get(authorization.AUTH_CONTEXT_ENV) - self.assertIsNone(context) - - def test_no_issuer_attribute_request(self): - env = {} - env['HTTP_X_PROJECT_ID'] = uuid.uuid4().hex - req = self._do_middleware_request(extra_environ=env) - context = req.environ.get(authorization.AUTH_CONTEXT_ENV) - self.assertIsNone(context) - - def test_has_only_issuer_and_project_name_request(self): - env = {} - # SSL_CLIENT_I_DN is the attribute name that wsgi env - # references to issuer of the client certificate. 
- env['SSL_CLIENT_I_DN'] = self.client_issuer - env['HTTP_X_PROJECT_NAME'] = uuid.uuid4().hex - self._middleware_failure(exception.ValidationError, - extra_environ=env, - status=400) - - def test_has_only_issuer_and_project_domain_name_request(self): - env = {} - env['SSL_CLIENT_I_DN'] = self.client_issuer - env['HTTP_X_PROJECT_DOMAIN_NAME'] = uuid.uuid4().hex - self._middleware_failure(exception.ValidationError, - extra_environ=env, - status=400) - - def test_has_only_issuer_and_project_domain_id_request(self): - env = {} - env['SSL_CLIENT_I_DN'] = self.client_issuer - env['HTTP_X_PROJECT_DOMAIN_ID'] = uuid.uuid4().hex - self._middleware_failure(exception.ValidationError, - extra_environ=env, - status=400) - - def test_missing_both_domain_and_project_request(self): - env = {} - env['SSL_CLIENT_I_DN'] = self.client_issuer - self._middleware_failure(exception.ValidationError, - extra_environ=env, - status=400) - - def test_empty_trusted_issuer_list(self): - env = {} - env['SSL_CLIENT_I_DN'] = self.client_issuer - env['HTTP_X_PROJECT_ID'] = uuid.uuid4().hex - - self.config_fixture.config(group='tokenless_auth', - trusted_issuer=[]) - - req = self._do_middleware_request(extra_environ=env) - context = req.environ.get(authorization.AUTH_CONTEXT_ENV) - self.assertIsNone(context) - - def test_client_issuer_not_trusted(self): - env = {} - env['SSL_CLIENT_I_DN'] = self.untrusted_client_issuer - env['HTTP_X_PROJECT_ID'] = uuid.uuid4().hex - req = self._do_middleware_request(extra_environ=env) - context = req.environ.get(authorization.AUTH_CONTEXT_ENV) - self.assertIsNone(context) - - def test_proj_scope_with_proj_id_and_proj_dom_id_success(self): - env = {} - env['SSL_CLIENT_I_DN'] = self.client_issuer - env['HTTP_X_PROJECT_ID'] = self.project_id - env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id - # SSL_CLIENT_USER_NAME and SSL_CLIENT_DOMAIN_NAME are the types - # defined in the mapping that will map to the user name and - # domain name - env['SSL_CLIENT_USER_NAME'] = 
self.user['name'] - env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name - - self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME) - - req = self._do_middleware_request(extra_environ=env) - context = req.environ.get(authorization.AUTH_CONTEXT_ENV) - self._assert_tokenless_auth_context(context) - - def test_proj_scope_with_proj_id_only_success(self): - env = {} - env['SSL_CLIENT_I_DN'] = self.client_issuer - env['HTTP_X_PROJECT_ID'] = self.project_id - env['SSL_CLIENT_USER_NAME'] = self.user['name'] - env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name - - self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME) - - req = self._do_middleware_request(extra_environ=env) - context = req.environ.get(authorization.AUTH_CONTEXT_ENV) - self._assert_tokenless_auth_context(context) - - def test_proj_scope_with_proj_name_and_proj_dom_id_success(self): - env = {} - env['SSL_CLIENT_I_DN'] = self.client_issuer - env['HTTP_X_PROJECT_NAME'] = self.project_name - env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id - env['SSL_CLIENT_USER_NAME'] = self.user['name'] - env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name - - self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME) - - req = self._do_middleware_request(extra_environ=env) - context = req.environ.get(authorization.AUTH_CONTEXT_ENV) - self._assert_tokenless_auth_context(context) - - def test_proj_scope_with_proj_name_and_proj_dom_name_success(self): - env = {} - env['SSL_CLIENT_I_DN'] = self.client_issuer - env['HTTP_X_PROJECT_NAME'] = self.project_name - env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name - env['SSL_CLIENT_USER_NAME'] = self.user['name'] - env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name - - self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME) - - req = self._do_middleware_request(extra_environ=env) - context = req.environ.get(authorization.AUTH_CONTEXT_ENV) - self._assert_tokenless_auth_context(context) - - 
def test_proj_scope_with_proj_name_only_fail(self): - env = {} - env['SSL_CLIENT_I_DN'] = self.client_issuer - env['HTTP_X_PROJECT_NAME'] = self.project_id - env['SSL_CLIENT_USER_NAME'] = self.user['name'] - env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name - - self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME) - - self._middleware_failure(exception.ValidationError, - extra_environ=env, - status=400) - - def test_mapping_with_userid_and_domainid_success(self): - env = {} - env['SSL_CLIENT_I_DN'] = self.client_issuer - env['HTTP_X_PROJECT_NAME'] = self.project_name - env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name - env['SSL_CLIENT_USER_ID'] = self.user['id'] - env['SSL_CLIENT_DOMAIN_ID'] = self.domain_id - - self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_USERID_AND_DOMAINID) - - req = self._do_middleware_request(extra_environ=env) - context = req.environ.get(authorization.AUTH_CONTEXT_ENV) - self._assert_tokenless_auth_context(context) - - def test_mapping_with_userid_and_domainname_success(self): - env = {} - env['SSL_CLIENT_I_DN'] = self.client_issuer - env['HTTP_X_PROJECT_NAME'] = self.project_name - env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name - env['SSL_CLIENT_USER_ID'] = self.user['id'] - env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name - - self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_USERID_AND_DOMAINNAME) - - req = self._do_middleware_request(extra_environ=env) - context = req.environ.get(authorization.AUTH_CONTEXT_ENV) - self._assert_tokenless_auth_context(context) - - def test_mapping_with_username_and_domainid_success(self): - env = {} - env['SSL_CLIENT_I_DN'] = self.client_issuer - env['HTTP_X_PROJECT_NAME'] = self.project_name - env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name - env['SSL_CLIENT_USER_NAME'] = self.user['name'] - env['SSL_CLIENT_DOMAIN_ID'] = self.domain_id - - self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINID) - - req = 
self._do_middleware_request(extra_environ=env) - context = req.environ.get(authorization.AUTH_CONTEXT_ENV) - self._assert_tokenless_auth_context(context) - - def test_only_domain_name_fail(self): - env = {} - env['SSL_CLIENT_I_DN'] = self.client_issuer - env['HTTP_X_PROJECT_ID'] = self.project_id - env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id - env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name - - self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_DOMAINNAME_ONLY) - - self._middleware_failure(exception.ValidationError, - extra_environ=env, - status=400) - - def test_only_domain_id_fail(self): - env = {} - env['SSL_CLIENT_I_DN'] = self.client_issuer - env['HTTP_X_PROJECT_ID'] = self.project_id - env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id - env['SSL_CLIENT_DOMAIN_ID'] = self.domain_id - - self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_DOMAINID_ONLY) - - self._middleware_failure(exception.ValidationError, - extra_environ=env, - status=400) - - def test_missing_domain_data_fail(self): - env = {} - env['SSL_CLIENT_I_DN'] = self.client_issuer - env['HTTP_X_PROJECT_ID'] = self.project_id - env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id - env['SSL_CLIENT_USER_NAME'] = self.user['name'] - - self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_USERNAME_ONLY) - - self._middleware_failure(exception.ValidationError, - extra_environ=env, - status=400) - - def test_userid_success(self): - env = {} - env['SSL_CLIENT_I_DN'] = self.client_issuer - env['HTTP_X_PROJECT_ID'] = self.project_id - env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id - env['SSL_CLIENT_USER_ID'] = self.user['id'] - - self._load_mapping_rules(mapping_fixtures.MAPPING_WITH_USERID_ONLY) - req = self._do_middleware_request(extra_environ=env) - context = req.environ.get(authorization.AUTH_CONTEXT_ENV) - self._assert_tokenless_auth_context(context) - - def test_domain_disable_fail(self): - env = {} - env['SSL_CLIENT_I_DN'] = self.client_issuer - env['HTTP_X_PROJECT_NAME'] = 
self.project_name - env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name - env['SSL_CLIENT_USER_NAME'] = self.user['name'] - env['SSL_CLIENT_DOMAIN_ID'] = self.domain_id - - self.domain['enabled'] = False - self.domain = self.resource_api.update_domain( - self.domain['id'], self.domain) - - self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINID) - self._middleware_failure(exception.Unauthorized, - extra_environ=env, - status=401) - - def test_user_disable_fail(self): - env = {} - env['SSL_CLIENT_I_DN'] = self.client_issuer - env['HTTP_X_PROJECT_NAME'] = self.project_name - env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name - env['SSL_CLIENT_USER_NAME'] = self.user['name'] - env['SSL_CLIENT_DOMAIN_ID'] = self.domain_id - - self.user['enabled'] = False - self.user = self.identity_api.update_user(self.user['id'], self.user) - - self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINID) - - self._middleware_failure(AssertionError, - extra_environ=env) - - def test_invalid_user_fail(self): - env = {} - env['SSL_CLIENT_I_DN'] = self.client_issuer - env['HTTP_X_PROJECT_ID'] = self.project_id - env['HTTP_X_PROJECT_DOMAIN_ID'] = self.domain_id - env['SSL_CLIENT_USER_NAME'] = uuid.uuid4().hex - env['SSL_CLIENT_DOMAIN_NAME'] = self.domain_name - - self._load_mapping_rules( - mapping_fixtures.MAPPING_WITH_USERNAME_AND_DOMAINNAME) - - self._middleware_failure(exception.UserNotFound, - extra_environ=env, - status=404) - - def test_ephemeral_success(self): - env = {} - env['SSL_CLIENT_I_DN'] = self.client_issuer - env['HTTP_X_PROJECT_NAME'] = self.project_name - env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name - env['SSL_CLIENT_USER_NAME'] = self.user['name'] - self.config_fixture.config(group='tokenless_auth', - protocol='ephemeral') - self.protocol_id = 'ephemeral' - mapping = copy.deepcopy(mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER) - mapping['rules'][0]['local'][0]['group']['id'] = self.group['id'] - 
self._load_mapping_rules(mapping) - - req = self._do_middleware_request(extra_environ=env) - context = req.environ.get(authorization.AUTH_CONTEXT_ENV) - self._assert_tokenless_auth_context(context, ephemeral_user=True) - - def test_ephemeral_with_default_user_type_success(self): - env = {} - env['SSL_CLIENT_I_DN'] = self.client_issuer - env['HTTP_X_PROJECT_NAME'] = self.project_name - env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name - env['SSL_CLIENT_USER_NAME'] = self.user['name'] - self.config_fixture.config(group='tokenless_auth', - protocol='ephemeral') - self.protocol_id = 'ephemeral' - # this mapping does not have the user type defined - # and it should defaults to 'ephemeral' which is - # the expected type for the test case. - mapping = copy.deepcopy( - mapping_fixtures.MAPPING_FOR_DEFAULT_EPHEMERAL_USER) - mapping['rules'][0]['local'][0]['group']['id'] = self.group['id'] - self._load_mapping_rules(mapping) - - req = self._do_middleware_request(extra_environ=env) - context = req.environ.get(authorization.AUTH_CONTEXT_ENV) - self._assert_tokenless_auth_context(context, ephemeral_user=True) - - def test_ephemeral_any_user_success(self): - """Verify ephemeral user does not need a specified user. - - Keystone is not looking to match the user, but a corresponding group. 
- """ - env = {} - env['SSL_CLIENT_I_DN'] = self.client_issuer - env['HTTP_X_PROJECT_NAME'] = self.project_name - env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name - env['SSL_CLIENT_USER_NAME'] = uuid.uuid4().hex - self.config_fixture.config(group='tokenless_auth', - protocol='ephemeral') - self.protocol_id = 'ephemeral' - mapping = copy.deepcopy(mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER) - mapping['rules'][0]['local'][0]['group']['id'] = self.group['id'] - self._load_mapping_rules(mapping) - - req = self._do_middleware_request(extra_environ=env) - context = req.environ.get(authorization.AUTH_CONTEXT_ENV) - self._assert_tokenless_auth_context(context, ephemeral_user=True) - - def test_ephemeral_invalid_scope_fail(self): - env = {} - env['SSL_CLIENT_I_DN'] = self.client_issuer - env['HTTP_X_PROJECT_NAME'] = uuid.uuid4().hex - env['HTTP_X_PROJECT_DOMAIN_NAME'] = uuid.uuid4().hex - env['SSL_CLIENT_USER_NAME'] = self.user['name'] - self.config_fixture.config(group='tokenless_auth', - protocol='ephemeral') - self.protocol_id = 'ephemeral' - mapping = copy.deepcopy(mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER) - mapping['rules'][0]['local'][0]['group']['id'] = self.group['id'] - self._load_mapping_rules(mapping) - - self._middleware_failure(exception.Unauthorized, - extra_environ=env, - status=401) - - def test_ephemeral_no_group_found_fail(self): - env = {} - env['SSL_CLIENT_I_DN'] = self.client_issuer - env['HTTP_X_PROJECT_NAME'] = self.project_name - env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name - env['SSL_CLIENT_USER_NAME'] = self.user['name'] - self.config_fixture.config(group='tokenless_auth', - protocol='ephemeral') - self.protocol_id = 'ephemeral' - mapping = copy.deepcopy(mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER) - mapping['rules'][0]['local'][0]['group']['id'] = uuid.uuid4().hex - self._load_mapping_rules(mapping) - - self._middleware_failure(exception.MappedGroupNotFound, - extra_environ=env) - - def test_ephemeral_incorrect_mapping_fail(self): - 
"""Test ephemeral user picking up the non-ephemeral user mapping. - - Looking up the mapping with protocol Id 'x509' will load up - the non-ephemeral user mapping, results unauthenticated. - """ - env = {} - env['SSL_CLIENT_I_DN'] = self.client_issuer - env['HTTP_X_PROJECT_NAME'] = self.project_name - env['HTTP_X_PROJECT_DOMAIN_NAME'] = self.domain_name - env['SSL_CLIENT_USER_NAME'] = self.user['name'] - # This will pick up the incorrect mapping - self.config_fixture.config(group='tokenless_auth', - protocol='x509') - self.protocol_id = 'x509' - mapping = copy.deepcopy(mapping_fixtures.MAPPING_FOR_EPHEMERAL_USER) - mapping['rules'][0]['local'][0]['group']['id'] = uuid.uuid4().hex - self._load_mapping_rules(mapping) - - self._middleware_failure(exception.MappedGroupNotFound, - extra_environ=env) - - def test_create_idp_id_success(self): - env = {} - env['SSL_CLIENT_I_DN'] = self.client_issuer - auth = tokenless_auth.TokenlessAuthHelper(env) - idp_id = auth._build_idp_id() - self.assertEqual(self.idp_id, idp_id) - - def test_create_idp_id_attri_not_found_fail(self): - env = {} - env[uuid.uuid4().hex] = self.client_issuer - auth = tokenless_auth.TokenlessAuthHelper(env) - expected_msg = ('Could not determine Identity Provider ID. The ' - 'configuration option %s was not found in the ' - 'request environment.' % - CONF.tokenless_auth.issuer_attribute) - # Check the content of the exception message as well - self.assertRaisesRegexp(exception.TokenlessAuthConfigError, - expected_msg, - auth._build_idp_id) diff --git a/keystone-moon/keystone/tests/unit/test_no_admin_token_auth.py b/keystone-moon/keystone/tests/unit/test_no_admin_token_auth.py deleted file mode 100644 index bf60cff0..00000000 --- a/keystone-moon/keystone/tests/unit/test_no_admin_token_auth.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import os - -from six.moves import http_client -import webtest - -from keystone.tests import unit - - -class TestNoAdminTokenAuth(unit.TestCase): - def setUp(self): - super(TestNoAdminTokenAuth, self).setUp() - self.load_backends() - - self._generate_paste_config() - - self.admin_app = webtest.TestApp( - self.loadapp(unit.dirs.tmp('no_admin_token_auth'), name='admin'), - extra_environ=dict(REMOTE_ADDR='127.0.0.1')) - self.addCleanup(setattr, self, 'admin_app', None) - - def _generate_paste_config(self): - # Generate a file, based on keystone-paste.ini, that doesn't include - # admin_token_auth in the pipeline - - with open(unit.dirs.etc('keystone-paste.ini'), 'r') as f: - contents = f.read() - - new_contents = contents.replace(' admin_token_auth ', ' ') - - filename = unit.dirs.tmp('no_admin_token_auth-paste.ini') - with open(filename, 'w') as f: - f.write(new_contents) - self.addCleanup(os.remove, filename) - - def test_request_no_admin_token_auth(self): - # This test verifies that if the admin_token_auth middleware isn't - # in the paste pipeline that users can still make requests. - - # Note(blk-u): Picked /v2.0/tenants because it's an operation that - # requires is_admin in the context, any operation that requires - # is_admin would work for this test. - REQ_PATH = '/v2.0/tenants' - - # If the following does not raise, then the test is successful. 
- self.admin_app.get(REQ_PATH, headers={'X-Auth-Token': 'NotAdminToken'}, - status=http_client.UNAUTHORIZED) diff --git a/keystone-moon/keystone/tests/unit/test_policy.py b/keystone-moon/keystone/tests/unit/test_policy.py deleted file mode 100644 index d6e911e9..00000000 --- a/keystone-moon/keystone/tests/unit/test_policy.py +++ /dev/null @@ -1,222 +0,0 @@ -# Copyright 2011 Piston Cloud Computing, Inc. -# All Rights Reserved. - -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import json -import os - -from oslo_policy import policy as common_policy -import six -from testtools import matchers - -from keystone import exception -from keystone.policy.backends import rules -from keystone.tests import unit -from keystone.tests.unit import ksfixtures -from keystone.tests.unit.ksfixtures import temporaryfile - - -class PolicyFileTestCase(unit.TestCase): - def setUp(self): - # self.tmpfilename should exist before setUp super is called - # this is to ensure it is available for the config_fixture in - # the config_overrides call. 
- self.tempfile = self.useFixture(temporaryfile.SecureTempFile()) - self.tmpfilename = self.tempfile.file_name - super(PolicyFileTestCase, self).setUp() - self.target = {} - - def _policy_fixture(self): - return ksfixtures.Policy(self.tmpfilename, self.config_fixture) - - def test_modified_policy_reloads(self): - action = "example:test" - empty_credentials = {} - with open(self.tmpfilename, "w") as policyfile: - policyfile.write("""{"example:test": []}""") - rules.enforce(empty_credentials, action, self.target) - with open(self.tmpfilename, "w") as policyfile: - policyfile.write("""{"example:test": ["false:false"]}""") - rules._ENFORCER.clear() - self.assertRaises(exception.ForbiddenAction, rules.enforce, - empty_credentials, action, self.target) - - -class PolicyTestCase(unit.TestCase): - def setUp(self): - super(PolicyTestCase, self).setUp() - self.rules = { - "true": [], - "example:allowed": [], - "example:denied": [["false:false"]], - "example:get_http": [["http:http://www.example.com"]], - "example:my_file": [["role:compute_admin"], - ["project_id:%(project_id)s"]], - "example:early_and_fail": [["false:false", "rule:true"]], - "example:early_or_success": [["rule:true"], ["false:false"]], - "example:lowercase_admin": [["role:admin"], ["role:sysadmin"]], - "example:uppercase_admin": [["role:ADMIN"], ["role:sysadmin"]], - } - - # NOTE(vish): then overload underlying policy engine - self._set_rules() - self.credentials = {} - self.target = {} - - def _set_rules(self): - these_rules = common_policy.Rules.from_dict(self.rules) - rules._ENFORCER.set_rules(these_rules) - - def test_enforce_nonexistent_action_throws(self): - action = "example:noexist" - self.assertRaises(exception.ForbiddenAction, rules.enforce, - self.credentials, action, self.target) - - def test_enforce_bad_action_throws(self): - action = "example:denied" - self.assertRaises(exception.ForbiddenAction, rules.enforce, - self.credentials, action, self.target) - - def test_enforce_good_action(self): - 
action = "example:allowed" - rules.enforce(self.credentials, action, self.target) - - def test_templatized_enforcement(self): - target_mine = {'project_id': 'fake'} - target_not_mine = {'project_id': 'another'} - credentials = {'project_id': 'fake', 'roles': []} - action = "example:my_file" - rules.enforce(credentials, action, target_mine) - self.assertRaises(exception.ForbiddenAction, rules.enforce, - credentials, action, target_not_mine) - - def test_early_AND_enforcement(self): - action = "example:early_and_fail" - self.assertRaises(exception.ForbiddenAction, rules.enforce, - self.credentials, action, self.target) - - def test_early_OR_enforcement(self): - action = "example:early_or_success" - rules.enforce(self.credentials, action, self.target) - - def test_ignore_case_role_check(self): - lowercase_action = "example:lowercase_admin" - uppercase_action = "example:uppercase_admin" - # NOTE(dprince): We mix case in the Admin role here to ensure - # case is ignored - admin_credentials = {'roles': ['AdMiN']} - rules.enforce(admin_credentials, lowercase_action, self.target) - rules.enforce(admin_credentials, uppercase_action, self.target) - - -class DefaultPolicyTestCase(unit.TestCase): - def setUp(self): - super(DefaultPolicyTestCase, self).setUp() - - self.rules = { - "default": [], - "example:exist": [["false:false"]] - } - self._set_rules('default') - self.credentials = {} - - # FIXME(gyee): latest Oslo policy Enforcer class reloads the rules in - # its enforce() method even though rules has been initialized via - # set_rules(). To make it easier to do our tests, we're going to - # monkeypatch load_roles() so it does nothing. This seem like a bug in - # Oslo policy as we shouldn't have to reload the rules if they have - # already been set using set_rules(). 
- self._old_load_rules = rules._ENFORCER.load_rules - self.addCleanup(setattr, rules._ENFORCER, 'load_rules', - self._old_load_rules) - rules._ENFORCER.load_rules = lambda *args, **kwargs: None - - def _set_rules(self, default_rule): - these_rules = common_policy.Rules.from_dict(self.rules, default_rule) - rules._ENFORCER.set_rules(these_rules) - - def test_policy_called(self): - self.assertRaises(exception.ForbiddenAction, rules.enforce, - self.credentials, "example:exist", {}) - - def test_not_found_policy_calls_default(self): - rules.enforce(self.credentials, "example:noexist", {}) - - def test_default_not_found(self): - new_default_rule = "default_noexist" - # FIXME(gyee): need to overwrite the Enforcer's default_rule first - # as it is recreating the rules with its own default_rule instead - # of the default_rule passed in from set_rules(). I think this is a - # bug in Oslo policy. - rules._ENFORCER.default_rule = new_default_rule - self._set_rules(new_default_rule) - self.assertRaises(exception.ForbiddenAction, rules.enforce, - self.credentials, "example:noexist", {}) - - -class PolicyJsonTestCase(unit.TestCase): - - def _load_entries(self, filename): - return set(json.load(open(filename))) - - def test_json_examples_have_matching_entries(self): - policy_keys = self._load_entries(unit.dirs.etc('policy.json')) - cloud_policy_keys = self._load_entries( - unit.dirs.etc('policy.v3cloudsample.json')) - - policy_extra_keys = ['admin_or_token_subject', - 'service_admin_or_token_subject', - 'token_subject', ] - expected_policy_keys = list(cloud_policy_keys) + policy_extra_keys - diffs = set(policy_keys).difference(set(expected_policy_keys)) - - self.assertThat(diffs, matchers.Equals(set())) - - def test_all_targets_documented(self): - # All the targets in the sample policy file must be documented in - # doc/source/policy_mapping.rst. - - policy_keys = self._load_entries(unit.dirs.etc('policy.json')) - - # These keys are in the policy.json but aren't targets. 
- policy_rule_keys = [ - 'admin_or_owner', 'admin_or_token_subject', 'admin_required', - 'default', 'owner', 'service_admin_or_token_subject', - 'service_or_admin', 'service_role', 'token_subject', ] - - def read_doc_targets(): - # Parse the doc/source/policy_mapping.rst file and return the - # targets. - - doc_path = os.path.join( - unit.ROOTDIR, 'doc', 'source', 'policy_mapping.rst') - with open(doc_path) as doc_file: - for line in doc_file: - if line.startswith('Target'): - break - for line in doc_file: - # Skip === line - if line.startswith('==='): - break - for line in doc_file: - line = line.rstrip() - if not line or line.startswith(' '): - continue - if line.startswith('=='): - break - target, dummy, dummy = line.partition(' ') - yield six.text_type(target) - - doc_targets = list(read_doc_targets()) - self.assertItemsEqual(policy_keys, doc_targets + policy_rule_keys) diff --git a/keystone-moon/keystone/tests/unit/test_revoke.py b/keystone-moon/keystone/tests/unit/test_revoke.py deleted file mode 100644 index 82c0125a..00000000 --- a/keystone-moon/keystone/tests/unit/test_revoke.py +++ /dev/null @@ -1,622 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- - -import datetime -import uuid - -import mock -from oslo_utils import timeutils -from six.moves import range -from testtools import matchers - -from keystone.common import utils -from keystone import exception -from keystone.models import revoke_model -from keystone.tests import unit -from keystone.tests.unit import test_backend_sql -from keystone.token import provider - - -def _new_id(): - return uuid.uuid4().hex - - -def _future_time(): - expire_delta = datetime.timedelta(seconds=1000) - future_time = timeutils.utcnow() + expire_delta - return future_time - - -def _past_time(): - expire_delta = datetime.timedelta(days=-1000) - past_time = timeutils.utcnow() + expire_delta - return past_time - - -def _sample_blank_token(): - issued_delta = datetime.timedelta(minutes=-2) - issued_at = timeutils.utcnow() + issued_delta - token_data = revoke_model.blank_token_data(issued_at) - return token_data - - -def _matches(event, token_values): - """See if the token matches the revocation event. - - Used as a secondary check on the logic to Check - By Tree Below: This is abrute force approach to checking. - Compare each attribute from the event with the corresponding - value from the token. If the event does not have a value for - the attribute, a match is still possible. If the event has a - value for the attribute, and it does not match the token, no match - is possible, so skip the remaining checks. 
- - :param event: one revocation event to match - :param token_values: dictionary with set of values taken from the - token - :returns: True if the token matches the revocation event, indicating the - token has been revoked - """ - # The token has three attributes that can match the user_id - if event.user_id is not None: - for attribute_name in ['user_id', 'trustor_id', 'trustee_id']: - if event.user_id == token_values[attribute_name]: - break - else: - return False - - # The token has two attributes that can match the domain_id - if event.domain_id is not None: - for attribute_name in ['identity_domain_id', 'assignment_domain_id']: - if event.domain_id == token_values[attribute_name]: - break - else: - return False - - if event.domain_scope_id is not None: - if event.domain_scope_id != token_values['assignment_domain_id']: - return False - - # If any one check does not match, the while token does - # not match the event. The numerous return False indicate - # that the token is still valid and short-circuits the - # rest of the logic. 
- attribute_names = ['project_id', - 'expires_at', 'trust_id', 'consumer_id', - 'access_token_id', 'audit_id', 'audit_chain_id'] - for attribute_name in attribute_names: - if getattr(event, attribute_name) is not None: - if (getattr(event, attribute_name) != - token_values[attribute_name]): - return False - - if event.role_id is not None: - roles = token_values['roles'] - for role in roles: - if event.role_id == role: - break - else: - return False - if token_values['issued_at'] > event.issued_before: - return False - return True - - -class RevokeTests(object): - - def test_list(self): - self.revoke_api.revoke_by_user(user_id=1) - self.assertEqual(1, len(self.revoke_api.list_events())) - - self.revoke_api.revoke_by_user(user_id=2) - self.assertEqual(2, len(self.revoke_api.list_events())) - - def test_list_since(self): - self.revoke_api.revoke_by_user(user_id=1) - self.revoke_api.revoke_by_user(user_id=2) - past = timeutils.utcnow() - datetime.timedelta(seconds=1000) - self.assertEqual(2, len(self.revoke_api.list_events(last_fetch=past))) - future = timeutils.utcnow() + datetime.timedelta(seconds=1000) - self.assertEqual(0, - len(self.revoke_api.list_events(last_fetch=future))) - - def test_past_expiry_are_removed(self): - user_id = 1 - self.revoke_api.revoke_by_expiration(user_id, _future_time()) - self.assertEqual(1, len(self.revoke_api.list_events())) - event = revoke_model.RevokeEvent() - event.revoked_at = _past_time() - self.revoke_api.revoke(event) - self.assertEqual(1, len(self.revoke_api.list_events())) - - @mock.patch.object(timeutils, 'utcnow') - def test_expired_events_removed_validate_token_success(self, mock_utcnow): - def _sample_token_values(): - token = _sample_blank_token() - token['expires_at'] = utils.isotime(_future_time(), - subsecond=True) - return token - - now = datetime.datetime.utcnow() - now_plus_2h = now + datetime.timedelta(hours=2) - mock_utcnow.return_value = now - - # Build a token and validate it. 
This will seed the cache for the - # future 'synchronize' call. - token_values = _sample_token_values() - - user_id = _new_id() - self.revoke_api.revoke_by_user(user_id) - token_values['user_id'] = user_id - self.assertRaises(exception.TokenNotFound, - self.revoke_api.check_token, - token_values) - - # Move our clock forward by 2h, build a new token and validate it. - # 'synchronize' should now be exercised and remove old expired events - mock_utcnow.return_value = now_plus_2h - self.revoke_api.revoke_by_expiration(_new_id(), now_plus_2h) - # should no longer throw an exception - self.revoke_api.check_token(token_values) - - def test_revoke_by_expiration_project_and_domain_fails(self): - user_id = _new_id() - expires_at = utils.isotime(_future_time(), subsecond=True) - domain_id = _new_id() - project_id = _new_id() - self.assertThat( - lambda: self.revoke_api.revoke_by_expiration( - user_id, expires_at, domain_id=domain_id, - project_id=project_id), - matchers.raises(exception.UnexpectedError)) - - -class SqlRevokeTests(test_backend_sql.SqlTests, RevokeTests): - def config_overrides(self): - super(SqlRevokeTests, self).config_overrides() - self.config_fixture.config( - group='token', - provider='pki', - revoke_by_id=False) - - -class RevokeTreeTests(unit.TestCase): - def setUp(self): - super(RevokeTreeTests, self).setUp() - self.events = [] - self.tree = revoke_model.RevokeTree() - self._sample_data() - - def _sample_data(self): - user_ids = [] - project_ids = [] - role_ids = [] - for i in range(0, 3): - user_ids.append(_new_id()) - project_ids.append(_new_id()) - role_ids.append(_new_id()) - - project_tokens = [] - i = len(project_tokens) - project_tokens.append(_sample_blank_token()) - project_tokens[i]['user_id'] = user_ids[0] - project_tokens[i]['project_id'] = project_ids[0] - project_tokens[i]['roles'] = [role_ids[1]] - - i = len(project_tokens) - project_tokens.append(_sample_blank_token()) - project_tokens[i]['user_id'] = user_ids[1] - 
project_tokens[i]['project_id'] = project_ids[0] - project_tokens[i]['roles'] = [role_ids[0]] - - i = len(project_tokens) - project_tokens.append(_sample_blank_token()) - project_tokens[i]['user_id'] = user_ids[0] - project_tokens[i]['project_id'] = project_ids[1] - project_tokens[i]['roles'] = [role_ids[0]] - - token_to_revoke = _sample_blank_token() - token_to_revoke['user_id'] = user_ids[0] - token_to_revoke['project_id'] = project_ids[0] - token_to_revoke['roles'] = [role_ids[0]] - - self.project_tokens = project_tokens - self.user_ids = user_ids - self.project_ids = project_ids - self.role_ids = role_ids - self.token_to_revoke = token_to_revoke - - def _assertTokenRevoked(self, token_data): - self.assertTrue(any([_matches(e, token_data) for e in self.events])) - return self.assertTrue(self.tree.is_revoked(token_data), - 'Token should be revoked') - - def _assertTokenNotRevoked(self, token_data): - self.assertFalse(any([_matches(e, token_data) for e in self.events])) - return self.assertFalse(self.tree.is_revoked(token_data), - 'Token should not be revoked') - - def _revoke_by_user(self, user_id): - return self.tree.add_event( - revoke_model.RevokeEvent(user_id=user_id)) - - def _revoke_by_audit_id(self, audit_id): - event = self.tree.add_event( - revoke_model.RevokeEvent(audit_id=audit_id)) - self.events.append(event) - return event - - def _revoke_by_audit_chain_id(self, audit_chain_id, project_id=None, - domain_id=None): - event = self.tree.add_event( - revoke_model.RevokeEvent(audit_chain_id=audit_chain_id, - project_id=project_id, - domain_id=domain_id) - ) - self.events.append(event) - return event - - def _revoke_by_expiration(self, user_id, expires_at, project_id=None, - domain_id=None): - event = self.tree.add_event( - revoke_model.RevokeEvent(user_id=user_id, - expires_at=expires_at, - project_id=project_id, - domain_id=domain_id)) - self.events.append(event) - return event - - def _revoke_by_grant(self, role_id, user_id=None, - domain_id=None, 
project_id=None): - event = self.tree.add_event( - revoke_model.RevokeEvent(user_id=user_id, - role_id=role_id, - domain_id=domain_id, - project_id=project_id)) - self.events.append(event) - return event - - def _revoke_by_user_and_project(self, user_id, project_id): - event = self.tree.add_event( - revoke_model.RevokeEvent(project_id=project_id, - user_id=user_id)) - self.events.append(event) - return event - - def _revoke_by_project_role_assignment(self, project_id, role_id): - event = self.tree.add_event( - revoke_model.RevokeEvent(project_id=project_id, - role_id=role_id)) - self.events.append(event) - return event - - def _revoke_by_domain_role_assignment(self, domain_id, role_id): - event = self.tree.add_event( - revoke_model.RevokeEvent(domain_id=domain_id, - role_id=role_id)) - self.events.append(event) - return event - - def _revoke_by_domain(self, domain_id): - event = self.tree.add_event( - revoke_model.RevokeEvent(domain_id=domain_id)) - self.events.append(event) - - def _user_field_test(self, field_name): - user_id = _new_id() - event = self._revoke_by_user(user_id) - self.events.append(event) - token_data_u1 = _sample_blank_token() - token_data_u1[field_name] = user_id - self._assertTokenRevoked(token_data_u1) - token_data_u2 = _sample_blank_token() - token_data_u2[field_name] = _new_id() - self._assertTokenNotRevoked(token_data_u2) - self.tree.remove_event(event) - self.events.remove(event) - self._assertTokenNotRevoked(token_data_u1) - - def test_revoke_by_user(self): - self._user_field_test('user_id') - - def test_revoke_by_user_matches_trustee(self): - self._user_field_test('trustee_id') - - def test_revoke_by_user_matches_trustor(self): - self._user_field_test('trustor_id') - - def test_by_user_expiration(self): - future_time = _future_time() - - user_id = 1 - event = self._revoke_by_expiration(user_id, future_time) - token_data_1 = _sample_blank_token() - token_data_1['user_id'] = user_id - token_data_1['expires_at'] = 
future_time.replace(microsecond=0) - self._assertTokenRevoked(token_data_1) - - token_data_2 = _sample_blank_token() - token_data_2['user_id'] = user_id - expire_delta = datetime.timedelta(seconds=2000) - future_time = timeutils.utcnow() + expire_delta - token_data_2['expires_at'] = future_time - self._assertTokenNotRevoked(token_data_2) - - self.remove_event(event) - self._assertTokenNotRevoked(token_data_1) - - def test_revoke_by_audit_id(self): - audit_id = provider.audit_info(parent_audit_id=None)[0] - token_data_1 = _sample_blank_token() - # Audit ID and Audit Chain ID are populated with the same value - # if the token is an original token - token_data_1['audit_id'] = audit_id - token_data_1['audit_chain_id'] = audit_id - event = self._revoke_by_audit_id(audit_id) - self._assertTokenRevoked(token_data_1) - - audit_id_2 = provider.audit_info(parent_audit_id=audit_id)[0] - token_data_2 = _sample_blank_token() - token_data_2['audit_id'] = audit_id_2 - token_data_2['audit_chain_id'] = audit_id - self._assertTokenNotRevoked(token_data_2) - - self.remove_event(event) - self._assertTokenNotRevoked(token_data_1) - - def test_revoke_by_audit_chain_id(self): - audit_id = provider.audit_info(parent_audit_id=None)[0] - token_data_1 = _sample_blank_token() - # Audit ID and Audit Chain ID are populated with the same value - # if the token is an original token - token_data_1['audit_id'] = audit_id - token_data_1['audit_chain_id'] = audit_id - event = self._revoke_by_audit_chain_id(audit_id) - self._assertTokenRevoked(token_data_1) - - audit_id_2 = provider.audit_info(parent_audit_id=audit_id)[0] - token_data_2 = _sample_blank_token() - token_data_2['audit_id'] = audit_id_2 - token_data_2['audit_chain_id'] = audit_id - self._assertTokenRevoked(token_data_2) - - self.remove_event(event) - self._assertTokenNotRevoked(token_data_1) - self._assertTokenNotRevoked(token_data_2) - - def test_by_user_project(self): - # When a user has a project-scoped token and the project-scoped 
token - # is revoked then the token is revoked. - - user_id = _new_id() - project_id = _new_id() - - future_time = _future_time() - - token_data = _sample_blank_token() - token_data['user_id'] = user_id - token_data['project_id'] = project_id - token_data['expires_at'] = future_time.replace(microsecond=0) - - self._revoke_by_expiration(user_id, future_time, project_id=project_id) - self._assertTokenRevoked(token_data) - - def test_by_user_domain(self): - # When a user has a domain-scoped token and the domain-scoped token - # is revoked then the token is revoked. - - user_id = _new_id() - domain_id = _new_id() - - future_time = _future_time() - - token_data = _sample_blank_token() - token_data['user_id'] = user_id - token_data['assignment_domain_id'] = domain_id - token_data['expires_at'] = future_time.replace(microsecond=0) - - self._revoke_by_expiration(user_id, future_time, domain_id=domain_id) - self._assertTokenRevoked(token_data) - - def remove_event(self, event): - self.events.remove(event) - self.tree.remove_event(event) - - def test_by_project_grant(self): - token_to_revoke = self.token_to_revoke - tokens = self.project_tokens - - self._assertTokenNotRevoked(token_to_revoke) - for token in tokens: - self._assertTokenNotRevoked(token) - - event = self._revoke_by_grant(role_id=self.role_ids[0], - user_id=self.user_ids[0], - project_id=self.project_ids[0]) - - self._assertTokenRevoked(token_to_revoke) - for token in tokens: - self._assertTokenNotRevoked(token) - - self.remove_event(event) - - self._assertTokenNotRevoked(token_to_revoke) - for token in tokens: - self._assertTokenNotRevoked(token) - - token_to_revoke['roles'] = [self.role_ids[0], - self.role_ids[1], - self.role_ids[2]] - - event = self._revoke_by_grant(role_id=self.role_ids[0], - user_id=self.user_ids[0], - project_id=self.project_ids[0]) - self._assertTokenRevoked(token_to_revoke) - self.remove_event(event) - self._assertTokenNotRevoked(token_to_revoke) - - event = 
self._revoke_by_grant(role_id=self.role_ids[1], - user_id=self.user_ids[0], - project_id=self.project_ids[0]) - self._assertTokenRevoked(token_to_revoke) - self.remove_event(event) - self._assertTokenNotRevoked(token_to_revoke) - - self._revoke_by_grant(role_id=self.role_ids[0], - user_id=self.user_ids[0], - project_id=self.project_ids[0]) - self._revoke_by_grant(role_id=self.role_ids[1], - user_id=self.user_ids[0], - project_id=self.project_ids[0]) - self._revoke_by_grant(role_id=self.role_ids[2], - user_id=self.user_ids[0], - project_id=self.project_ids[0]) - self._assertTokenRevoked(token_to_revoke) - - def test_by_project_and_user_and_role(self): - user_id1 = _new_id() - user_id2 = _new_id() - project_id = _new_id() - self.events.append(self._revoke_by_user(user_id1)) - self.events.append( - self._revoke_by_user_and_project(user_id2, project_id)) - token_data = _sample_blank_token() - token_data['user_id'] = user_id2 - token_data['project_id'] = project_id - self._assertTokenRevoked(token_data) - - def test_by_domain_user(self): - # If revoke a domain, then a token for a user in the domain is revoked - - user_id = _new_id() - domain_id = _new_id() - - token_data = _sample_blank_token() - token_data['user_id'] = user_id - token_data['identity_domain_id'] = domain_id - - self._revoke_by_domain(domain_id) - - self._assertTokenRevoked(token_data) - - def test_by_domain_project(self): - # If revoke a domain, then a token scoped to a project in the domain - # is revoked. 
- - user_id = _new_id() - user_domain_id = _new_id() - - project_id = _new_id() - project_domain_id = _new_id() - - token_data = _sample_blank_token() - token_data['user_id'] = user_id - token_data['identity_domain_id'] = user_domain_id - token_data['project_id'] = project_id - token_data['assignment_domain_id'] = project_domain_id - - self._revoke_by_domain(project_domain_id) - - self._assertTokenRevoked(token_data) - - def test_by_domain_domain(self): - # If revoke a domain, then a token scoped to the domain is revoked. - - user_id = _new_id() - user_domain_id = _new_id() - - domain_id = _new_id() - - token_data = _sample_blank_token() - token_data['user_id'] = user_id - token_data['identity_domain_id'] = user_domain_id - token_data['assignment_domain_id'] = domain_id - - self._revoke_by_domain(domain_id) - - self._assertTokenRevoked(token_data) - - def _assertEmpty(self, collection): - return self.assertEqual(0, len(collection), "collection not empty") - - def _assertEventsMatchIteration(self, turn): - self.assertEqual(1, len(self.tree.revoke_map)) - self.assertEqual(turn + 1, len(self.tree.revoke_map - ['trust_id=*'] - ['consumer_id=*'] - ['access_token_id=*'] - ['audit_id=*'] - ['audit_chain_id=*'])) - # two different functions add domain_ids, +1 for None - self.assertEqual(2 * turn + 1, len(self.tree.revoke_map - ['trust_id=*'] - ['consumer_id=*'] - ['access_token_id=*'] - ['audit_id=*'] - ['audit_chain_id=*'] - ['expires_at=*'])) - # two different functions add project_ids, +1 for None - self.assertEqual(2 * turn + 1, len(self.tree.revoke_map - ['trust_id=*'] - ['consumer_id=*'] - ['access_token_id=*'] - ['audit_id=*'] - ['audit_chain_id=*'] - ['expires_at=*'] - ['domain_id=*'])) - # 10 users added - self.assertEqual(turn, len(self.tree.revoke_map - ['trust_id=*'] - ['consumer_id=*'] - ['access_token_id=*'] - ['audit_id=*'] - ['audit_chain_id=*'] - ['expires_at=*'] - ['domain_id=*'] - ['project_id=*'])) - - def test_cleanup(self): - events = self.events - 
self._assertEmpty(self.tree.revoke_map) - expiry_base_time = _future_time() - for i in range(0, 10): - events.append( - self._revoke_by_user(_new_id())) - - args = (_new_id(), - expiry_base_time + datetime.timedelta(seconds=i)) - events.append( - self._revoke_by_expiration(*args)) - - self.assertEqual(i + 2, len(self.tree.revoke_map - ['trust_id=*'] - ['consumer_id=*'] - ['access_token_id=*'] - ['audit_id=*'] - ['audit_chain_id=*']), - 'adding %s to %s' % (args, - self.tree.revoke_map)) - - events.append( - self._revoke_by_project_role_assignment(_new_id(), _new_id())) - events.append( - self._revoke_by_domain_role_assignment(_new_id(), _new_id())) - events.append( - self._revoke_by_domain_role_assignment(_new_id(), _new_id())) - events.append( - self._revoke_by_user_and_project(_new_id(), _new_id())) - self._assertEventsMatchIteration(i + 1) - - for event in self.events: - self.tree.remove_event(event) - self._assertEmpty(self.tree.revoke_map) diff --git a/keystone-moon/keystone/tests/unit/test_singular_plural.py b/keystone-moon/keystone/tests/unit/test_singular_plural.py deleted file mode 100644 index b07ea8d5..00000000 --- a/keystone-moon/keystone/tests/unit/test_singular_plural.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright 2012 Red Hat, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import ast - -from keystone.contrib.admin_crud import core as admin_crud_core -from keystone.contrib.s3 import core as s3_core -from keystone.contrib.user_crud import core as user_crud_core -from keystone.identity import core as identity_core -from keystone import service - - -class TestSingularPlural(object): - def test_keyword_arg_condition_or_methods(self): - """Raise if we see a keyword arg called 'condition' or 'methods'.""" - modules = [admin_crud_core, s3_core, - user_crud_core, identity_core, service] - for module in modules: - filename = module.__file__ - if filename.endswith(".pyc"): - # In Python 2, the .py and .pyc files are in the same dir. - filename = filename[:-1] - with open(filename) as fil: - source = fil.read() - module = ast.parse(source, filename) - last_stmt_or_expr = None - for node in ast.walk(module): - if isinstance(node, ast.stmt) or isinstance(node, ast.expr): - # keyword nodes don't have line numbers, so we need to - # get that information from the parent stmt or expr. - last_stmt_or_expr = node - elif isinstance(node, ast.keyword): - for bad_word in ["condition", "methods"]: - if node.arg == bad_word: - raise AssertionError( - "Suspicious name '%s' at %s line %s" % - (bad_word, filename, last_stmt_or_expr.lineno)) diff --git a/keystone-moon/keystone/tests/unit/test_sql_livetest.py b/keystone-moon/keystone/tests/unit/test_sql_livetest.py deleted file mode 100644 index 18b8ea91..00000000 --- a/keystone-moon/keystone/tests/unit/test_sql_livetest.py +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright 2013 Red Hat, Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.tests import unit -from keystone.tests.unit import test_sql_upgrade - - -class PostgresqlMigrateTests(test_sql_upgrade.SqlUpgradeTests): - def setUp(self): - self.skip_if_env_not_set('ENABLE_LIVE_POSTGRES_TEST') - super(PostgresqlMigrateTests, self).setUp() - - def config_files(self): - files = super(PostgresqlMigrateTests, self).config_files() - files.append(unit.dirs.tests_conf("backend_postgresql.conf")) - return files - - -class MysqlMigrateTests(test_sql_upgrade.SqlUpgradeTests): - def setUp(self): - self.skip_if_env_not_set('ENABLE_LIVE_MYSQL_TEST') - super(MysqlMigrateTests, self).setUp() - - def config_files(self): - files = super(MysqlMigrateTests, self).config_files() - files.append(unit.dirs.tests_conf("backend_mysql.conf")) - return files - - -class Db2MigrateTests(test_sql_upgrade.SqlUpgradeTests): - def setUp(self): - self.skip_if_env_not_set('ENABLE_LIVE_DB2_TEST') - super(Db2MigrateTests, self).setUp() - - def config_files(self): - files = super(Db2MigrateTests, self).config_files() - files.append(unit.dirs.tests_conf("backend_db2.conf")) - return files diff --git a/keystone-moon/keystone/tests/unit/test_sql_migrate_extensions.py b/keystone-moon/keystone/tests/unit/test_sql_migrate_extensions.py deleted file mode 100644 index 0155f787..00000000 --- a/keystone-moon/keystone/tests/unit/test_sql_migrate_extensions.py +++ /dev/null @@ -1,112 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -""" -To run these tests against a live database: - -1. Modify the file `keystone/tests/unit/config_files/backend_sql.conf` to use - the connection for your live database. -2. Set up a blank, live database. -3. Run the tests using:: - - tox -e py27 -- keystone.tests.unit.test_sql_migrate_extensions - -WARNING:: - - Your database will be wiped. - - Do not do this against a Database with valuable data as - all data will be lost. -""" - -from keystone.contrib import endpoint_filter -from keystone.contrib import endpoint_policy -from keystone.contrib import federation -from keystone.contrib import oauth1 -from keystone.contrib import revoke -from keystone import exception -from keystone.tests.unit import test_sql_upgrade - - -class SqlUpgradeOAuth1Extension(test_sql_upgrade.SqlMigrateBase): - - OAUTH1_MIGRATIONS = 5 - - def repo_package(self): - return oauth1 - - def test_upgrade(self): - for version in range(self.OAUTH1_MIGRATIONS): - v = version + 1 - self.assertRaises(exception.MigrationMovedFailure, - self.upgrade, version=v, - repository=self.repo_path) - - -class EndpointFilterExtension(test_sql_upgrade.SqlMigrateBase): - - ENDPOINT_FILTER_MIGRATIONS = 2 - - def repo_package(self): - return endpoint_filter - - def test_upgrade(self): - for version in range(self.ENDPOINT_FILTER_MIGRATIONS): - v = version + 1 - self.assertRaises(exception.MigrationMovedFailure, - self.upgrade, version=v, - repository=self.repo_path) - - -class EndpointPolicyExtension(test_sql_upgrade.SqlMigrateBase): - - ENDPOINT_POLICY_MIGRATIONS = 1 - - def repo_package(self): - return 
endpoint_policy - - def test_upgrade(self): - self.assertRaises(exception.MigrationMovedFailure, - self.upgrade, - version=self.ENDPOINT_POLICY_MIGRATIONS, - repository=self.repo_path) - - -class FederationExtension(test_sql_upgrade.SqlMigrateBase): - - FEDERATION_MIGRATIONS = 8 - - def repo_package(self): - return federation - - def test_upgrade(self): - for version in range(self.FEDERATION_MIGRATIONS): - v = version + 1 - self.assertRaises(exception.MigrationMovedFailure, - self.upgrade, version=v, - repository=self.repo_path) - - -class RevokeExtension(test_sql_upgrade.SqlMigrateBase): - - REVOKE_MIGRATIONS = 2 - - def repo_package(self): - return revoke - - def test_upgrade(self): - for version in range(self.REVOKE_MIGRATIONS): - v = version + 1 - self.assertRaises(exception.MigrationMovedFailure, - self.upgrade, version=v, - repository=self.repo_path) diff --git a/keystone-moon/keystone/tests/unit/test_sql_upgrade.py b/keystone-moon/keystone/tests/unit/test_sql_upgrade.py deleted file mode 100644 index 5ca12f66..00000000 --- a/keystone-moon/keystone/tests/unit/test_sql_upgrade.py +++ /dev/null @@ -1,1195 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -""" -To run these tests against a live database: - -1. Modify the file ``keystone/tests/unit/config_files/backend_sql.conf`` to use - the connection for your live database. -2. Set up a blank, live database -3. 
Run the tests using:: - - tox -e py27 -- keystone.tests.unit.test_sql_upgrade - -WARNING:: - - Your database will be wiped. - - Do not do this against a database with valuable data as - all data will be lost. -""" - -import json -import uuid - -import migrate -from migrate.versioning import api as versioning_api -from migrate.versioning import repository -import mock -from oslo_config import cfg -from oslo_db import exception as db_exception -from oslo_db.sqlalchemy import migration -from oslo_db.sqlalchemy import session as db_session -from sqlalchemy.engine import reflection -import sqlalchemy.exc -from sqlalchemy import schema -from testtools import matchers - -from keystone.common import sql -from keystone.common.sql import migration_helpers -from keystone import exception -from keystone.tests import unit -from keystone.tests.unit import default_fixtures -from keystone.tests.unit.ksfixtures import database - - -CONF = cfg.CONF - -# NOTE(morganfainberg): This should be updated when each DB migration collapse -# is done to mirror the expected structure of the DB in the format of -# { : [, , ...], ... 
} -INITIAL_TABLE_STRUCTURE = { - 'credential': [ - 'id', 'user_id', 'project_id', 'blob', 'type', 'extra', - ], - 'domain': [ - 'id', 'name', 'enabled', 'extra', - ], - 'endpoint': [ - 'id', 'legacy_endpoint_id', 'interface', 'region_id', 'service_id', - 'url', 'enabled', 'extra', - ], - 'group': [ - 'id', 'domain_id', 'name', 'description', 'extra', - ], - 'policy': [ - 'id', 'type', 'blob', 'extra', - ], - 'project': [ - 'id', 'name', 'extra', 'description', 'enabled', 'domain_id', - 'parent_id', - ], - 'role': [ - 'id', 'name', 'extra', - ], - 'service': [ - 'id', 'type', 'extra', 'enabled', - ], - 'token': [ - 'id', 'expires', 'extra', 'valid', 'trust_id', 'user_id', - ], - 'trust': [ - 'id', 'trustor_user_id', 'trustee_user_id', 'project_id', - 'impersonation', 'deleted_at', 'expires_at', 'remaining_uses', 'extra', - ], - 'trust_role': [ - 'trust_id', 'role_id', - ], - 'user': [ - 'id', 'name', 'extra', 'password', 'enabled', 'domain_id', - 'default_project_id', - ], - 'user_group_membership': [ - 'user_id', 'group_id', - ], - 'region': [ - 'id', 'description', 'parent_region_id', 'extra', - ], - 'assignment': [ - 'type', 'actor_id', 'target_id', 'role_id', 'inherited', - ], - 'id_mapping': [ - 'public_id', 'domain_id', 'local_id', 'entity_type', - ], - 'whitelisted_config': [ - 'domain_id', 'group', 'option', 'value', - ], - 'sensitive_config': [ - 'domain_id', 'group', 'option', 'value', - ], -} - - -# Test migration_helpers.get_init_version separately to ensure it works before -# using in the SqlUpgrade tests. -class MigrationHelpersGetInitVersionTests(unit.TestCase): - @mock.patch.object(repository, 'Repository') - def test_get_init_version_no_path(self, repo): - migrate_versions = mock.MagicMock() - # make a version list starting with zero. `get_init_version` will - # return None for this value. - migrate_versions.versions.versions = list(range(0, 5)) - repo.return_value = migrate_versions - - # os.path.isdir() is called by `find_migrate_repo()`. 
Mock it to avoid - # an exception. - with mock.patch('os.path.isdir', return_value=True): - # since 0 is the smallest version expect None - version = migration_helpers.get_init_version() - self.assertIsNone(version) - - # check that the default path was used as the first argument to the - # first invocation of repo. Cannot match the full path because it is - # based on where the test is run. - param = repo.call_args_list[0][0][0] - self.assertTrue(param.endswith('/sql/migrate_repo')) - - @mock.patch.object(repository, 'Repository') - def test_get_init_version_with_path_initial_version_0(self, repo): - migrate_versions = mock.MagicMock() - # make a version list starting with zero. `get_init_version` will - # return None for this value. - migrate_versions.versions.versions = list(range(0, 5)) - repo.return_value = migrate_versions - - # os.path.isdir() is called by `find_migrate_repo()`. Mock it to avoid - # an exception. - with mock.patch('os.path.isdir', return_value=True): - path = '/keystone/migrate_repo/' - - # since 0 is the smallest version expect None - version = migration_helpers.get_init_version(abs_path=path) - self.assertIsNone(version) - - @mock.patch.object(repository, 'Repository') - def test_get_init_version_with_path(self, repo): - initial_version = 10 - - migrate_versions = mock.MagicMock() - migrate_versions.versions.versions = list(range(initial_version + 1, - initial_version + 5)) - repo.return_value = migrate_versions - - # os.path.isdir() is called by `find_migrate_repo()`. Mock it to avoid - # an exception. - with mock.patch('os.path.isdir', return_value=True): - path = '/keystone/migrate_repo/' - - version = migration_helpers.get_init_version(abs_path=path) - self.assertEqual(initial_version, version) - - -class SqlMigrateBase(unit.SQLDriverOverrides, unit.TestCase): - # override this in subclasses. The default of zero covers tests such - # as extensions upgrades. 
- _initial_db_version = 0 - - def initialize_sql(self): - self.metadata = sqlalchemy.MetaData() - self.metadata.bind = self.engine - - def config_files(self): - config_files = super(SqlMigrateBase, self).config_files() - config_files.append(unit.dirs.tests_conf('backend_sql.conf')) - return config_files - - def repo_package(self): - return sql - - def setUp(self): - super(SqlMigrateBase, self).setUp() - self.load_backends() - database.initialize_sql_session() - conn_str = CONF.database.connection - if (conn_str != unit.IN_MEM_DB_CONN_STRING and - conn_str.startswith('sqlite') and - conn_str[10:] == unit.DEFAULT_TEST_DB_FILE): - # Override the default with a DB that is specific to the migration - # tests only if the DB Connection string is the same as the global - # default. This is required so that no conflicts occur due to the - # global default DB already being under migrate control. This is - # only needed if the DB is not-in-memory - db_file = unit.dirs.tmp('keystone_migrate_test.db') - self.config_fixture.config( - group='database', - connection='sqlite:///%s' % db_file) - - # create and share a single sqlalchemy engine for testing - with sql.session_for_write() as session: - self.engine = session.get_bind() - self.addCleanup(self.cleanup_instance('engine')) - self.Session = db_session.get_maker(self.engine, autocommit=False) - self.addCleanup(sqlalchemy.orm.session.Session.close_all) - - self.initialize_sql() - self.repo_path = migration_helpers.find_migrate_repo( - self.repo_package()) - self.schema = versioning_api.ControlledSchema.create( - self.engine, - self.repo_path, - self._initial_db_version) - - # auto-detect the highest available schema version in the migrate_repo - self.max_version = self.schema.repository.version().version - - self.addCleanup(sql.cleanup) - - # drop tables and FKs. 
- self.addCleanup(self._cleanupDB) - - def _cleanupDB(self): - meta = sqlalchemy.MetaData() - meta.bind = self.engine - meta.reflect(self.engine) - - with self.engine.begin() as conn: - inspector = reflection.Inspector.from_engine(self.engine) - metadata = schema.MetaData() - tbs = [] - all_fks = [] - - for table_name in inspector.get_table_names(): - fks = [] - for fk in inspector.get_foreign_keys(table_name): - if not fk['name']: - continue - fks.append( - schema.ForeignKeyConstraint((), (), name=fk['name'])) - table = schema.Table(table_name, metadata, *fks) - tbs.append(table) - all_fks.extend(fks) - - for fkc in all_fks: - if self.engine.name != 'sqlite': - conn.execute(schema.DropConstraint(fkc)) - - for table in tbs: - conn.execute(schema.DropTable(table)) - - def select_table(self, name): - table = sqlalchemy.Table(name, - self.metadata, - autoload=True) - s = sqlalchemy.select([table]) - return s - - def assertTableExists(self, table_name): - try: - self.select_table(table_name) - except sqlalchemy.exc.NoSuchTableError: - raise AssertionError('Table "%s" does not exist' % table_name) - - def assertTableDoesNotExist(self, table_name): - """Asserts that a given table exists cannot be selected by name.""" - # Switch to a different metadata otherwise you might still - # detect renamed or dropped tables - try: - temp_metadata = sqlalchemy.MetaData() - temp_metadata.bind = self.engine - sqlalchemy.Table(table_name, temp_metadata, autoload=True) - except sqlalchemy.exc.NoSuchTableError: - pass - else: - raise AssertionError('Table "%s" already exists' % table_name) - - def assertTableCountsMatch(self, table1_name, table2_name): - try: - table1 = self.select_table(table1_name) - except sqlalchemy.exc.NoSuchTableError: - raise AssertionError('Table "%s" does not exist' % table1_name) - try: - table2 = self.select_table(table2_name) - except sqlalchemy.exc.NoSuchTableError: - raise AssertionError('Table "%s" does not exist' % table2_name) - session = self.Session() 
- table1_count = session.execute(table1.count()).scalar() - table2_count = session.execute(table2.count()).scalar() - if table1_count != table2_count: - raise AssertionError('Table counts do not match: {0} ({1}), {2} ' - '({3})'.format(table1_name, table1_count, - table2_name, table2_count)) - - def upgrade(self, *args, **kwargs): - self._migrate(*args, **kwargs) - - def _migrate(self, version, repository=None, downgrade=False, - current_schema=None): - repository = repository or self.repo_path - err = '' - version = versioning_api._migrate_version(self.schema, - version, - not downgrade, - err) - if not current_schema: - current_schema = self.schema - changeset = current_schema.changeset(version) - for ver, change in changeset: - self.schema.runchange(ver, change, changeset.step) - self.assertEqual(self.schema.version, version) - - def assertTableColumns(self, table_name, expected_cols): - """Asserts that the table contains the expected set of columns.""" - self.initialize_sql() - table = self.select_table(table_name) - actual_cols = [col.name for col in table.columns] - # Check if the columns are equal, but allow for a different order, - # which might occur after an upgrade followed by a downgrade - self.assertItemsEqual(expected_cols, actual_cols, - '%s table' % table_name) - - -class SqlUpgradeTests(SqlMigrateBase): - _initial_db_version = migration_helpers.get_init_version() - - def test_blank_db_to_start(self): - self.assertTableDoesNotExist('user') - - def test_start_version_db_init_version(self): - with sql.session_for_write() as session: - version = migration.db_version(session.get_bind(), self.repo_path, - self._initial_db_version) - self.assertEqual( - self._initial_db_version, - version, - 'DB is not at version %s' % self._initial_db_version) - - def test_upgrade_add_initial_tables(self): - self.upgrade(self._initial_db_version + 1) - self.check_initial_table_structure() - - def check_initial_table_structure(self): - for table in 
INITIAL_TABLE_STRUCTURE: - self.assertTableColumns(table, INITIAL_TABLE_STRUCTURE[table]) - - def insert_dict(self, session, table_name, d, table=None): - """Naively inserts key-value pairs into a table, given a dictionary.""" - if table is None: - this_table = sqlalchemy.Table(table_name, self.metadata, - autoload=True) - else: - this_table = table - insert = this_table.insert().values(**d) - session.execute(insert) - session.commit() - - def test_kilo_squash(self): - self.upgrade(67) - - # In 053 the size of ID and parent region ID columns were changed - table = sqlalchemy.Table('region', self.metadata, autoload=True) - self.assertEqual(255, table.c.id.type.length) - self.assertEqual(255, table.c.parent_region_id.type.length) - table = sqlalchemy.Table('endpoint', self.metadata, autoload=True) - self.assertEqual(255, table.c.region_id.type.length) - - # In 054 an index was created for the actor_id of the assignment table - table = sqlalchemy.Table('assignment', self.metadata, autoload=True) - index_data = [(idx.name, list(idx.columns.keys())) - for idx in table.indexes] - self.assertIn(('ix_actor_id', ['actor_id']), index_data) - - # In 055 indexes were created for user and trust IDs in the token table - table = sqlalchemy.Table('token', self.metadata, autoload=True) - index_data = [(idx.name, list(idx.columns.keys())) - for idx in table.indexes] - self.assertIn(('ix_token_user_id', ['user_id']), index_data) - self.assertIn(('ix_token_trust_id', ['trust_id']), index_data) - - # In 062 the role ID foreign key was removed from the assignment table - if self.engine.name == "mysql": - self.assertFalse(self.does_fk_exist('assignment', 'role_id')) - - # In 064 the domain ID FK was removed from the group and user tables - if self.engine.name != 'sqlite': - # sqlite does not support FK deletions (or enforcement) - self.assertFalse(self.does_fk_exist('group', 'domain_id')) - self.assertFalse(self.does_fk_exist('user', 'domain_id')) - - # In 067 the role ID index was 
removed from the assignment table - if self.engine.name == "mysql": - self.assertFalse(self._does_index_exist('assignment', - 'assignment_role_id_fkey')) - - def test_insert_assignment_inherited_pk(self): - ASSIGNMENT_TABLE_NAME = 'assignment' - INHERITED_COLUMN_NAME = 'inherited' - ROLE_TABLE_NAME = 'role' - - self.upgrade(72) - - # Check that the 'inherited' column is not part of the PK - self.assertFalse(self.does_pk_exist(ASSIGNMENT_TABLE_NAME, - INHERITED_COLUMN_NAME)) - - session = self.Session() - - role = {'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex} - self.insert_dict(session, ROLE_TABLE_NAME, role) - - # Create both inherited and noninherited role assignments - inherited = {'type': 'UserProject', - 'actor_id': uuid.uuid4().hex, - 'target_id': uuid.uuid4().hex, - 'role_id': role['id'], - 'inherited': True} - - noninherited = inherited.copy() - noninherited['inherited'] = False - - # Create another inherited role assignment as a spoiler - spoiler = inherited.copy() - spoiler['actor_id'] = uuid.uuid4().hex - - self.insert_dict(session, ASSIGNMENT_TABLE_NAME, inherited) - self.insert_dict(session, ASSIGNMENT_TABLE_NAME, spoiler) - - # Since 'inherited' is not part of the PK, we can't insert noninherited - self.assertRaises(db_exception.DBDuplicateEntry, - self.insert_dict, - session, - ASSIGNMENT_TABLE_NAME, - noninherited) - - session.close() - - self.upgrade(73) - - session = self.Session() - self.metadata.clear() - - # Check that the 'inherited' column is now part of the PK - self.assertTrue(self.does_pk_exist(ASSIGNMENT_TABLE_NAME, - INHERITED_COLUMN_NAME)) - - # The noninherited role assignment can now be inserted - self.insert_dict(session, ASSIGNMENT_TABLE_NAME, noninherited) - - assignment_table = sqlalchemy.Table(ASSIGNMENT_TABLE_NAME, - self.metadata, - autoload=True) - - assignments = session.query(assignment_table).all() - for assignment in (inherited, spoiler, noninherited): - self.assertIn((assignment['type'], assignment['actor_id'], - 
assignment['target_id'], assignment['role_id'], - assignment['inherited']), - assignments) - - def does_pk_exist(self, table, pk_column): - """Checks whether a column is primary key on a table.""" - inspector = reflection.Inspector.from_engine(self.engine) - pk_columns = inspector.get_pk_constraint(table)['constrained_columns'] - - return pk_column in pk_columns - - def does_fk_exist(self, table, fk_column): - inspector = reflection.Inspector.from_engine(self.engine) - for fk in inspector.get_foreign_keys(table): - if fk_column in fk['constrained_columns']: - return True - return False - - def does_index_exist(self, table_name, index_name): - meta = sqlalchemy.MetaData(bind=self.engine) - table = sqlalchemy.Table(table_name, meta, autoload=True) - return index_name in [idx.name for idx in table.indexes] - - def does_constraint_exist(self, table_name, constraint_name): - meta = sqlalchemy.MetaData(bind=self.engine) - table = sqlalchemy.Table(table_name, meta, autoload=True) - return constraint_name in [con.name for con in table.constraints] - - def test_endpoint_policy_upgrade(self): - self.assertTableDoesNotExist('policy_association') - self.upgrade(81) - self.assertTableColumns('policy_association', - ['id', 'policy_id', 'endpoint_id', - 'service_id', 'region_id']) - - @mock.patch.object(migration_helpers, 'get_db_version', return_value=1) - def test_endpoint_policy_already_migrated(self, mock_ep): - - # By setting the return value to 1, the migration has already been - # run, and there's no need to create the table again - - self.upgrade(81) - - mock_ep.assert_called_once_with(extension='endpoint_policy', - engine=mock.ANY) - - # It won't exist because we are mocking it, but we can verify - # that 081 did not create the table - self.assertTableDoesNotExist('policy_association') - - def test_create_federation_tables(self): - self.identity_provider = 'identity_provider' - self.federation_protocol = 'federation_protocol' - self.service_provider = 'service_provider' 
- self.mapping = 'mapping' - self.remote_ids = 'idp_remote_ids' - - self.assertTableDoesNotExist(self.identity_provider) - self.assertTableDoesNotExist(self.federation_protocol) - self.assertTableDoesNotExist(self.service_provider) - self.assertTableDoesNotExist(self.mapping) - self.assertTableDoesNotExist(self.remote_ids) - - self.upgrade(82) - self.assertTableColumns(self.identity_provider, - ['id', 'description', 'enabled']) - - self.assertTableColumns(self.federation_protocol, - ['id', 'idp_id', 'mapping_id']) - - self.assertTableColumns(self.mapping, - ['id', 'rules']) - - self.assertTableColumns(self.service_provider, - ['id', 'description', 'enabled', 'auth_url', - 'relay_state_prefix', 'sp_url']) - - self.assertTableColumns(self.remote_ids, ['idp_id', 'remote_id']) - - federation_protocol = sqlalchemy.Table(self.federation_protocol, - self.metadata, - autoload=True) - self.assertFalse(federation_protocol.c.mapping_id.nullable) - - sp_table = sqlalchemy.Table(self.service_provider, - self.metadata, - autoload=True) - self.assertFalse(sp_table.c.auth_url.nullable) - self.assertFalse(sp_table.c.sp_url.nullable) - - @mock.patch.object(migration_helpers, 'get_db_version', return_value=8) - def test_federation_already_migrated(self, mock_federation): - - # By setting the return value to 8, the migration has already been - # run, and there's no need to create the table again. - self.upgrade(82) - - mock_federation.assert_any_call(extension='federation', - engine=mock.ANY) - - # It won't exist because we are mocking it, but we can verify - # that 082 did not create the table. 
- self.assertTableDoesNotExist('identity_provider') - self.assertTableDoesNotExist('federation_protocol') - self.assertTableDoesNotExist('mapping') - self.assertTableDoesNotExist('service_provider') - self.assertTableDoesNotExist('idp_remote_ids') - - def test_create_oauth_tables(self): - consumer = 'consumer' - request_token = 'request_token' - access_token = 'access_token' - self.assertTableDoesNotExist(consumer) - self.assertTableDoesNotExist(request_token) - self.assertTableDoesNotExist(access_token) - self.upgrade(83) - self.assertTableColumns(consumer, - ['id', - 'description', - 'secret', - 'extra']) - self.assertTableColumns(request_token, - ['id', - 'request_secret', - 'verifier', - 'authorizing_user_id', - 'requested_project_id', - 'role_ids', - 'consumer_id', - 'expires_at']) - self.assertTableColumns(access_token, - ['id', - 'access_secret', - 'authorizing_user_id', - 'project_id', - 'role_ids', - 'consumer_id', - 'expires_at']) - - @mock.patch.object(migration_helpers, 'get_db_version', return_value=5) - def test_oauth1_already_migrated(self, mock_oauth1): - - # By setting the return value to 5, the migration has already been - # run, and there's no need to create the table again. - self.upgrade(83) - - mock_oauth1.assert_any_call(extension='oauth1', engine=mock.ANY) - - # It won't exist because we are mocking it, but we can verify - # that 083 did not create the table. 
- self.assertTableDoesNotExist('consumer') - self.assertTableDoesNotExist('request_token') - self.assertTableDoesNotExist('access_token') - - def test_create_revoke_table(self): - self.assertTableDoesNotExist('revocation_event') - self.upgrade(84) - self.assertTableColumns('revocation_event', - ['id', 'domain_id', 'project_id', 'user_id', - 'role_id', 'trust_id', 'consumer_id', - 'access_token_id', 'issued_before', - 'expires_at', 'revoked_at', - 'audit_chain_id', 'audit_id']) - - @mock.patch.object(migration_helpers, 'get_db_version', return_value=2) - def test_revoke_already_migrated(self, mock_revoke): - - # By setting the return value to 2, the migration has already been - # run, and there's no need to create the table again. - self.upgrade(84) - - mock_revoke.assert_any_call(extension='revoke', engine=mock.ANY) - - # It won't exist because we are mocking it, but we can verify - # that 084 did not create the table. - self.assertTableDoesNotExist('revocation_event') - - def test_project_is_domain_upgrade(self): - self.upgrade(74) - self.assertTableColumns('project', - ['id', 'name', 'extra', 'description', - 'enabled', 'domain_id', 'parent_id', - 'is_domain']) - - def test_implied_roles_upgrade(self): - self.upgrade(87) - self.assertTableColumns('implied_role', - ['prior_role_id', 'implied_role_id']) - self.assertTrue(self.does_fk_exist('implied_role', 'prior_role_id')) - self.assertTrue(self.does_fk_exist('implied_role', 'implied_role_id')) - - def test_add_config_registration(self): - config_registration = 'config_register' - self.upgrade(74) - self.assertTableDoesNotExist(config_registration) - self.upgrade(75) - self.assertTableColumns(config_registration, ['type', 'domain_id']) - - def test_endpoint_filter_upgrade(self): - def assert_tables_columns_exist(): - self.assertTableColumns('project_endpoint', - ['endpoint_id', 'project_id']) - self.assertTableColumns('endpoint_group', - ['id', 'name', 'description', 'filters']) - 
self.assertTableColumns('project_endpoint_group', - ['endpoint_group_id', 'project_id']) - - self.assertTableDoesNotExist('project_endpoint') - self.upgrade(85) - assert_tables_columns_exist() - - @mock.patch.object(migration_helpers, 'get_db_version', return_value=2) - def test_endpoint_filter_already_migrated(self, mock_endpoint_filter): - - # By setting the return value to 2, the migration has already been - # run, and there's no need to create the table again. - self.upgrade(85) - - mock_endpoint_filter.assert_any_call(extension='endpoint_filter', - engine=mock.ANY) - - # It won't exist because we are mocking it, but we can verify - # that 085 did not create the table. - self.assertTableDoesNotExist('project_endpoint') - self.assertTableDoesNotExist('endpoint_group') - self.assertTableDoesNotExist('project_endpoint_group') - - def test_add_trust_unique_constraint_upgrade(self): - self.upgrade(86) - inspector = reflection.Inspector.from_engine(self.engine) - constraints = inspector.get_unique_constraints('trust') - constraint_names = [constraint['name'] for constraint in constraints] - self.assertIn('duplicate_trust_constraint', constraint_names) - - def test_add_domain_specific_roles(self): - """Check database upgraded successfully for domain specific roles. 
- - The following items need to be checked: - - - The domain_id column has been added - - That it has been added to the uniqueness constraints - - Existing roles have their domain_id columns set to the specific - string of '<>' - - """ - NULL_DOMAIN_ID = '<>' - - self.upgrade(87) - session = self.Session() - role_table = sqlalchemy.Table('role', self.metadata, autoload=True) - # Add a role before we upgrade, so we can check that its new domain_id - # attribute is handled correctly - role_id = uuid.uuid4().hex - self.insert_dict(session, 'role', - {'id': role_id, 'name': uuid.uuid4().hex}) - session.close() - - self.upgrade(88) - - session = self.Session() - self.metadata.clear() - self.assertTableColumns('role', ['id', 'name', 'domain_id', 'extra']) - # Check the domain_id has been added to the uniqueness constraint - inspector = reflection.Inspector.from_engine(self.engine) - constraints = inspector.get_unique_constraints('role') - constraint_columns = [ - constraint['column_names'] for constraint in constraints - if constraint['name'] == 'ixu_role_name_domain_id'] - self.assertIn('domain_id', constraint_columns[0]) - - # Now check our role has its domain_id attribute set correctly - role_table = sqlalchemy.Table('role', self.metadata, autoload=True) - cols = [role_table.c.domain_id] - filter = role_table.c.id == role_id - statement = sqlalchemy.select(cols).where(filter) - role_entry = session.execute(statement).fetchone() - self.assertEqual(NULL_DOMAIN_ID, role_entry[0]) - - def test_add_root_of_all_domains(self): - NULL_DOMAIN_ID = '<>' - self.upgrade(89) - session = self.Session() - - domain_table = sqlalchemy.Table( - 'domain', self.metadata, autoload=True) - query = session.query(domain_table).filter_by(id=NULL_DOMAIN_ID) - domain_from_db = query.one() - self.assertIn(NULL_DOMAIN_ID, domain_from_db) - - project_table = sqlalchemy.Table( - 'project', self.metadata, autoload=True) - query = session.query(project_table).filter_by(id=NULL_DOMAIN_ID) - 
project_from_db = query.one() - self.assertIn(NULL_DOMAIN_ID, project_from_db) - - session.close() - - def test_add_local_user_and_password_tables(self): - local_user_table = 'local_user' - password_table = 'password' - self.upgrade(89) - self.assertTableDoesNotExist(local_user_table) - self.assertTableDoesNotExist(password_table) - self.upgrade(90) - self.assertTableColumns(local_user_table, - ['id', - 'user_id', - 'domain_id', - 'name']) - self.assertTableColumns(password_table, - ['id', - 'local_user_id', - 'password']) - - def test_migrate_data_to_local_user_and_password_tables(self): - def get_expected_users(): - expected_users = [] - for test_user in default_fixtures.USERS: - user = {} - user['id'] = uuid.uuid4().hex - user['name'] = test_user['name'] - user['domain_id'] = test_user['domain_id'] - user['password'] = test_user['password'] - user['enabled'] = True - user['extra'] = json.dumps(uuid.uuid4().hex) - user['default_project_id'] = uuid.uuid4().hex - expected_users.append(user) - return expected_users - - def add_users_to_db(expected_users, user_table): - for user in expected_users: - ins = user_table.insert().values( - {'id': user['id'], - 'name': user['name'], - 'domain_id': user['domain_id'], - 'password': user['password'], - 'enabled': user['enabled'], - 'extra': user['extra'], - 'default_project_id': user['default_project_id']}) - ins.execute() - - def get_users_from_db(user_table, local_user_table, password_table): - sel = ( - sqlalchemy.select([user_table.c.id, - user_table.c.enabled, - user_table.c.extra, - user_table.c.default_project_id, - local_user_table.c.name, - local_user_table.c.domain_id, - password_table.c.password]) - .select_from(user_table.join(local_user_table, - user_table.c.id == - local_user_table.c.user_id) - .join(password_table, - local_user_table.c.id == - password_table.c.local_user_id)) - ) - user_rows = sel.execute() - users = [] - for row in user_rows: - users.append( - {'id': row['id'], - 'name': row['name'], - 
'domain_id': row['domain_id'], - 'password': row['password'], - 'enabled': row['enabled'], - 'extra': row['extra'], - 'default_project_id': row['default_project_id']}) - return users - - meta = sqlalchemy.MetaData() - meta.bind = self.engine - - user_table_name = 'user' - local_user_table_name = 'local_user' - password_table_name = 'password' - - # populate current user table - self.upgrade(90) - user_table = sqlalchemy.Table(user_table_name, meta, autoload=True) - expected_users = get_expected_users() - add_users_to_db(expected_users, user_table) - - # upgrade to migration and test - self.upgrade(91) - self.assertTableCountsMatch(user_table_name, local_user_table_name) - self.assertTableCountsMatch(local_user_table_name, password_table_name) - meta.clear() - user_table = sqlalchemy.Table(user_table_name, meta, autoload=True) - local_user_table = sqlalchemy.Table(local_user_table_name, meta, - autoload=True) - password_table = sqlalchemy.Table(password_table_name, meta, - autoload=True) - actual_users = get_users_from_db(user_table, local_user_table, - password_table) - self.assertListEqual(expected_users, actual_users) - - def test_migrate_user_with_null_password_to_password_tables(self): - USER_TABLE_NAME = 'user' - LOCAL_USER_TABLE_NAME = 'local_user' - PASSWORD_TABLE_NAME = 'password' - self.upgrade(90) - user_ref = unit.new_user_ref(uuid.uuid4().hex) - user_ref.pop('password') - # pop extra attribute which doesn't recognized by SQL expression - # layer. - user_ref.pop('email') - session = self.Session() - self.insert_dict(session, USER_TABLE_NAME, user_ref) - self.metadata.clear() - self.upgrade(91) - # migration should be successful. - self.assertTableCountsMatch(USER_TABLE_NAME, LOCAL_USER_TABLE_NAME) - # no new entry was added to the password table because the - # user doesn't have a password. 
- password_table = self.select_table(PASSWORD_TABLE_NAME) - rows = session.execute(password_table.count()).scalar() - self.assertEqual(0, rows) - - def test_migrate_user_skip_user_already_exist_in_local_user(self): - USER_TABLE_NAME = 'user' - LOCAL_USER_TABLE_NAME = 'local_user' - self.upgrade(90) - user1_ref = unit.new_user_ref(uuid.uuid4().hex) - # pop extra attribute which doesn't recognized by SQL expression - # layer. - user1_ref.pop('email') - user2_ref = unit.new_user_ref(uuid.uuid4().hex) - user2_ref.pop('email') - session = self.Session() - self.insert_dict(session, USER_TABLE_NAME, user1_ref) - self.insert_dict(session, USER_TABLE_NAME, user2_ref) - user_id = user1_ref.pop('id') - user_name = user1_ref.pop('name') - domain_id = user1_ref.pop('domain_id') - local_user_ref = {'user_id': user_id, 'name': user_name, - 'domain_id': domain_id} - self.insert_dict(session, LOCAL_USER_TABLE_NAME, local_user_ref) - self.metadata.clear() - self.upgrade(91) - # migration should be successful and user2_ref has been migrated to - # `local_user` table. - self.assertTableCountsMatch(USER_TABLE_NAME, LOCAL_USER_TABLE_NAME) - - def test_implied_roles_fk_on_delete_cascade(self): - if self.engine.name == 'sqlite': - self.skipTest('sqlite backend does not support foreign keys') - - self.upgrade(92) - - def _create_three_roles(): - id_list = [] - for _ in range(3): - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - id_list.append(role['id']) - return id_list - - role_id_list = _create_three_roles() - self.role_api.create_implied_role(role_id_list[0], role_id_list[1]) - self.role_api.create_implied_role(role_id_list[0], role_id_list[2]) - - # assert that there are two roles implied by role 0. - implied_roles = self.role_api.list_implied_roles(role_id_list[0]) - self.assertThat(implied_roles, matchers.HasLength(2)) - - self.role_api.delete_role(role_id_list[0]) - # assert the cascade deletion is effective. 
- implied_roles = self.role_api.list_implied_roles(role_id_list[0]) - self.assertThat(implied_roles, matchers.HasLength(0)) - - def test_domain_as_project_upgrade(self): - - def _populate_domain_and_project_tables(session): - # Three domains, with various different attributes - self.domains = [{'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'enabled': True, - 'extra': {'description': uuid.uuid4().hex, - 'another_attribute': True}}, - {'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'enabled': True, - 'extra': {'description': uuid.uuid4().hex}}, - {'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'enabled': False}] - # Four projects, two top level, two children - self.projects = [] - self.projects.append(unit.new_project_ref( - domain_id=self.domains[0]['id'], - parent_id=None)) - self.projects.append(unit.new_project_ref( - domain_id=self.domains[0]['id'], - parent_id=self.projects[0]['id'])) - self.projects.append(unit.new_project_ref( - domain_id=self.domains[1]['id'], - parent_id=None)) - self.projects.append(unit.new_project_ref( - domain_id=self.domains[1]['id'], - parent_id=self.projects[2]['id'])) - - for domain in self.domains: - this_domain = domain.copy() - if 'extra' in this_domain: - this_domain['extra'] = json.dumps(this_domain['extra']) - self.insert_dict(session, 'domain', this_domain) - for project in self.projects: - self.insert_dict(session, 'project', project) - - def _check_projects(projects): - - def _assert_domain_matches_project(project): - for domain in self.domains: - if project.id == domain['id']: - self.assertEqual(domain['name'], project.name) - self.assertEqual(domain['enabled'], project.enabled) - if domain['id'] == self.domains[0]['id']: - self.assertEqual(domain['extra']['description'], - project.description) - self.assertEqual({'another_attribute': True}, - json.loads(project.extra)) - elif domain['id'] == self.domains[1]['id']: - self.assertEqual(domain['extra']['description'], - project.description) - 
self.assertEqual({}, json.loads(project.extra)) - - # We had domains 3 we created, which should now be projects acting - # as domains, To this we add the 4 original projects, plus the root - # of all domains row. - self.assertEqual(8, projects.count()) - - project_ids = [] - for project in projects: - if project.is_domain: - self.assertEqual(NULL_DOMAIN_ID, project.domain_id) - self.assertIsNone(project.parent_id) - else: - self.assertIsNotNone(project.domain_id) - self.assertIsNotNone(project.parent_id) - project_ids.append(project.id) - - for domain in self.domains: - self.assertIn(domain['id'], project_ids) - for project in self.projects: - self.assertIn(project['id'], project_ids) - - # Now check the attributes of the domains came across OK - for project in projects: - _assert_domain_matches_project(project) - - NULL_DOMAIN_ID = '<>' - self.upgrade(92) - - session = self.Session() - - _populate_domain_and_project_tables(session) - - self.upgrade(93) - proj_table = sqlalchemy.Table('project', self.metadata, autoload=True) - - projects = session.query(proj_table) - _check_projects(projects) - - def test_add_federated_user_table(self): - federated_user_table = 'federated_user' - self.upgrade(93) - self.assertTableDoesNotExist(federated_user_table) - self.upgrade(94) - self.assertTableColumns(federated_user_table, - ['id', - 'user_id', - 'idp_id', - 'protocol_id', - 'unique_id', - 'display_name']) - - def test_add_int_pkey_to_revocation_event_table(self): - meta = sqlalchemy.MetaData() - meta.bind = self.engine - REVOCATION_EVENT_TABLE_NAME = 'revocation_event' - self.upgrade(94) - revocation_event_table = sqlalchemy.Table(REVOCATION_EVENT_TABLE_NAME, - meta, autoload=True) - # assert id column is a string (before) - self.assertEqual('VARCHAR(64)', str(revocation_event_table.c.id.type)) - self.upgrade(95) - meta.clear() - revocation_event_table = sqlalchemy.Table(REVOCATION_EVENT_TABLE_NAME, - meta, autoload=True) - # assert id column is an integer (after) - 
self.assertEqual('INTEGER', str(revocation_event_table.c.id.type)) - - def _add_unique_constraint_to_role_name(self, - constraint_name='ixu_role_name'): - meta = sqlalchemy.MetaData() - meta.bind = self.engine - role_table = sqlalchemy.Table('role', meta, autoload=True) - migrate.UniqueConstraint(role_table.c.name, - name=constraint_name).create() - - def _drop_unique_constraint_to_role_name(self, - constraint_name='ixu_role_name'): - role_table = sqlalchemy.Table('role', self.metadata, autoload=True) - migrate.UniqueConstraint(role_table.c.name, - name=constraint_name).drop() - - def test_migration_88_drops_unique_constraint(self): - self.upgrade(87) - if self.engine.name == 'mysql': - self.assertTrue(self.does_index_exist('role', 'ixu_role_name')) - else: - self.assertTrue(self.does_constraint_exist('role', - 'ixu_role_name')) - self.upgrade(88) - if self.engine.name == 'mysql': - self.assertFalse(self.does_index_exist('role', 'ixu_role_name')) - else: - self.assertFalse(self.does_constraint_exist('role', - 'ixu_role_name')) - - def test_migration_88_inconsistent_constraint_name(self): - self.upgrade(87) - self._drop_unique_constraint_to_role_name() - - constraint_name = uuid.uuid4().hex - self._add_unique_constraint_to_role_name( - constraint_name=constraint_name) - - if self.engine.name == 'mysql': - self.assertTrue(self.does_index_exist('role', constraint_name)) - self.assertFalse(self.does_index_exist('role', 'ixu_role_name')) - else: - self.assertTrue(self.does_constraint_exist('role', - constraint_name)) - self.assertFalse(self.does_constraint_exist('role', - 'ixu_role_name')) - - self.upgrade(88) - if self.engine.name == 'mysql': - self.assertFalse(self.does_index_exist('role', constraint_name)) - self.assertFalse(self.does_index_exist('role', 'ixu_role_name')) - else: - self.assertFalse(self.does_constraint_exist('role', - constraint_name)) - self.assertFalse(self.does_constraint_exist('role', - 'ixu_role_name')) - - def test_migration_96(self): - 
self.upgrade(95) - if self.engine.name == 'mysql': - self.assertFalse(self.does_index_exist('role', 'ixu_role_name')) - else: - self.assertFalse(self.does_constraint_exist('role', - 'ixu_role_name')) - - self.upgrade(96) - if self.engine.name == 'mysql': - self.assertFalse(self.does_index_exist('role', 'ixu_role_name')) - else: - self.assertFalse(self.does_constraint_exist('role', - 'ixu_role_name')) - - def test_migration_96_constraint_exists(self): - self.upgrade(95) - self._add_unique_constraint_to_role_name() - - if self.engine.name == 'mysql': - self.assertTrue(self.does_index_exist('role', 'ixu_role_name')) - else: - self.assertTrue(self.does_constraint_exist('role', - 'ixu_role_name')) - - self.upgrade(96) - if self.engine.name == 'mysql': - self.assertFalse(self.does_index_exist('role', 'ixu_role_name')) - else: - self.assertFalse(self.does_constraint_exist('role', - 'ixu_role_name')) - - -class VersionTests(SqlMigrateBase): - - _initial_db_version = migration_helpers.get_init_version() - - def test_core_initial(self): - """Get the version before migrated, it's the initial DB version.""" - version = migration_helpers.get_db_version() - self.assertEqual(self._initial_db_version, version) - - def test_core_max(self): - """When get the version after upgrading, it's the new version.""" - self.upgrade(self.max_version) - version = migration_helpers.get_db_version() - self.assertEqual(self.max_version, version) - - def test_assert_not_schema_downgrade(self): - self.upgrade(self.max_version) - self.assertRaises( - db_exception.DbMigrationError, - migration_helpers._sync_common_repo, - self.max_version - 1) - - def test_extension_not_controlled(self): - """When get the version before controlling, raises DbMigrationError.""" - self.assertRaises(db_exception.DbMigrationError, - migration_helpers.get_db_version, - extension='federation') - - def test_unexpected_extension(self): - """The version for a non-existent extension raises ImportError.""" - extension_name = 
uuid.uuid4().hex - self.assertRaises(ImportError, - migration_helpers.get_db_version, - extension=extension_name) - - def test_unversioned_extension(self): - """The version for extensions without migrations raise an exception.""" - self.assertRaises(exception.MigrationNotProvided, - migration_helpers.get_db_version, - extension='admin_crud') diff --git a/keystone-moon/keystone/tests/unit/test_ssl.py b/keystone-moon/keystone/tests/unit/test_ssl.py deleted file mode 100644 index 6a6d9ffb..00000000 --- a/keystone-moon/keystone/tests/unit/test_ssl.py +++ /dev/null @@ -1,186 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import ssl - -from oslo_config import cfg - -from keystone.common import environment -from keystone.tests import unit -from keystone.tests.unit.ksfixtures import appserver - - -CONF = cfg.CONF - -CERTDIR = unit.dirs.root('examples', 'pki', 'certs') -KEYDIR = unit.dirs.root('examples', 'pki', 'private') -CERT = os.path.join(CERTDIR, 'ssl_cert.pem') -KEY = os.path.join(KEYDIR, 'ssl_key.pem') -CA = os.path.join(CERTDIR, 'cacert.pem') -CLIENT = os.path.join(CERTDIR, 'middleware.pem') - - -class SSLTestCase(unit.TestCase): - def setUp(self): - super(SSLTestCase, self).setUp() - raise self.skipTest('SSL Version and Ciphers cannot be configured ' - 'with eventlet, some platforms have disabled ' - 'SSLv3. 
See bug 1381365.') - # NOTE(morganfainberg): It has been determined that this - # will not be fixed. These tests should be re-enabled for the full - # functional test suite when run against an SSL terminated - # endpoint. Some distributions/environments have patched OpenSSL to - # not have SSLv3 at all due to POODLE and this causes differing - # behavior depending on platform. See bug 1381365 for more information. - - # NOTE(jamespage): - # Deal with more secure certificate chain verification - # introduced in python 2.7.9 under PEP-0476 - # https://github.com/python/peps/blob/master/pep-0476.txt - self.context = None - if hasattr(ssl, '_create_unverified_context'): - self.context = ssl._create_unverified_context() - self.load_backends() - - def get_HTTPSConnection(self, *args): - """Simple helper to configure HTTPSConnection objects.""" - if self.context: - return environment.httplib.HTTPSConnection( - *args, - context=self.context - ) - else: - return environment.httplib.HTTPSConnection(*args) - - def test_1way_ssl_ok(self): - """Make sure both public and admin API work with 1-way SSL.""" - paste_conf = self._paste_config('keystone') - ssl_kwargs = dict(cert=CERT, key=KEY, ca=CA) - - # Verify Admin - with appserver.AppServer(paste_conf, appserver.ADMIN, **ssl_kwargs): - conn = self.get_HTTPSConnection( - '127.0.0.1', CONF.eventlet_server.admin_port) - conn.request('GET', '/') - resp = conn.getresponse() - self.assertEqual(300, resp.status) - - # Verify Public - with appserver.AppServer(paste_conf, appserver.MAIN, **ssl_kwargs): - conn = self.get_HTTPSConnection( - '127.0.0.1', CONF.eventlet_server.public_port) - conn.request('GET', '/') - resp = conn.getresponse() - self.assertEqual(300, resp.status) - - def test_2way_ssl_ok(self): - """Make sure both public and admin API work with 2-way SSL. - - Requires client certificate. 
- """ - paste_conf = self._paste_config('keystone') - ssl_kwargs = dict(cert=CERT, key=KEY, ca=CA, cert_required=True) - - # Verify Admin - with appserver.AppServer(paste_conf, appserver.ADMIN, **ssl_kwargs): - conn = self.get_HTTPSConnection( - '127.0.0.1', CONF.eventlet_server.admin_port, CLIENT, CLIENT) - conn.request('GET', '/') - resp = conn.getresponse() - self.assertEqual(300, resp.status) - - # Verify Public - with appserver.AppServer(paste_conf, appserver.MAIN, **ssl_kwargs): - conn = self.get_HTTPSConnection( - '127.0.0.1', CONF.eventlet_server.public_port, CLIENT, CLIENT) - conn.request('GET', '/') - resp = conn.getresponse() - self.assertEqual(300, resp.status) - - def test_1way_ssl_with_ipv6_ok(self): - """Make sure both public and admin API work with 1-way ipv6 & SSL.""" - self.skip_if_no_ipv6() - - paste_conf = self._paste_config('keystone') - ssl_kwargs = dict(cert=CERT, key=KEY, ca=CA, host="::1") - - # Verify Admin - with appserver.AppServer(paste_conf, appserver.ADMIN, **ssl_kwargs): - conn = self.get_HTTPSConnection( - '::1', CONF.eventlet_server.admin_port) - conn.request('GET', '/') - resp = conn.getresponse() - self.assertEqual(300, resp.status) - - # Verify Public - with appserver.AppServer(paste_conf, appserver.MAIN, **ssl_kwargs): - conn = self.get_HTTPSConnection( - '::1', CONF.eventlet_server.public_port) - conn.request('GET', '/') - resp = conn.getresponse() - self.assertEqual(300, resp.status) - - def test_2way_ssl_with_ipv6_ok(self): - """Make sure both public and admin API work with 2-way ipv6 & SSL. - - Requires client certificate. 
- """ - self.skip_if_no_ipv6() - - paste_conf = self._paste_config('keystone') - ssl_kwargs = dict(cert=CERT, key=KEY, ca=CA, - cert_required=True, host="::1") - - # Verify Admin - with appserver.AppServer(paste_conf, appserver.ADMIN, **ssl_kwargs): - conn = self.get_HTTPSConnection( - '::1', CONF.eventlet_server.admin_port, CLIENT, CLIENT) - conn.request('GET', '/') - resp = conn.getresponse() - self.assertEqual(300, resp.status) - - # Verify Public - with appserver.AppServer(paste_conf, appserver.MAIN, **ssl_kwargs): - conn = self.get_HTTPSConnection( - '::1', CONF.eventlet_server.public_port, CLIENT, CLIENT) - conn.request('GET', '/') - resp = conn.getresponse() - self.assertEqual(300, resp.status) - - def test_2way_ssl_fail(self): - """Expect to fail when client does not present proper certificate.""" - paste_conf = self._paste_config('keystone') - ssl_kwargs = dict(cert=CERT, key=KEY, ca=CA, cert_required=True) - - # Verify Admin - with appserver.AppServer(paste_conf, appserver.ADMIN, **ssl_kwargs): - conn = self.get_HTTPSConnection( - '127.0.0.1', CONF.eventlet_server.admin_port) - try: - conn.request('GET', '/') - self.fail('Admin API shoulda failed with SSL handshake!') - except ssl.SSLError: - pass - - # Verify Public - with appserver.AppServer(paste_conf, appserver.MAIN, **ssl_kwargs): - conn = self.get_HTTPSConnection( - '127.0.0.1', CONF.eventlet_server.public_port) - try: - conn.request('GET', '/') - self.fail('Public API shoulda failed with SSL handshake!') - except ssl.SSLError: - pass diff --git a/keystone-moon/keystone/tests/unit/test_token_bind.py b/keystone-moon/keystone/tests/unit/test_token_bind.py deleted file mode 100644 index ee4d011a..00000000 --- a/keystone-moon/keystone/tests/unit/test_token_bind.py +++ /dev/null @@ -1,198 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import copy -import uuid - -from keystone.common import wsgi -from keystone import exception -from keystone.models import token_model -from keystone.tests import unit -from keystone.tests.unit import test_token_provider - - -KERBEROS_BIND = 'USER@REALM' -ANY = 'any' - - -class BindTest(unit.TestCase): - """Test binding tokens to a Principal. - - Even though everything in this file references kerberos the same concepts - will apply to all future binding mechanisms. - """ - - def setUp(self): - super(BindTest, self).setUp() - self.TOKEN_BIND_KERB = copy.deepcopy( - test_token_provider.SAMPLE_V3_TOKEN) - self.TOKEN_BIND_KERB['token']['bind'] = {'kerberos': KERBEROS_BIND} - self.TOKEN_BIND_UNKNOWN = copy.deepcopy( - test_token_provider.SAMPLE_V3_TOKEN) - self.TOKEN_BIND_UNKNOWN['token']['bind'] = {'FOO': 'BAR'} - self.TOKEN_BIND_NONE = copy.deepcopy( - test_token_provider.SAMPLE_V3_TOKEN) - - self.ALL_TOKENS = [self.TOKEN_BIND_KERB, self.TOKEN_BIND_UNKNOWN, - self.TOKEN_BIND_NONE] - - def assert_kerberos_bind(self, tokens, bind_level, - use_kerberos=True, success=True): - if not isinstance(tokens, dict): - for token in tokens: - self.assert_kerberos_bind(token, bind_level, - use_kerberos=use_kerberos, - success=success) - elif use_kerberos == ANY: - for val in (True, False): - self.assert_kerberos_bind(tokens, bind_level, - use_kerberos=val, success=success) - else: - context = {'environment': {}} - self.config_fixture.config(group='token', - enforce_token_bind=bind_level) - - if use_kerberos: - context['environment']['REMOTE_USER'] = KERBEROS_BIND - 
context['environment']['AUTH_TYPE'] = 'Negotiate' - - # NOTE(morganfainberg): This assumes a V3 token. - token_ref = token_model.KeystoneToken( - token_id=uuid.uuid4().hex, - token_data=tokens) - - if not success: - self.assertRaises(exception.Unauthorized, - wsgi.validate_token_bind, - context, token_ref) - else: - wsgi.validate_token_bind(context, token_ref) - - # DISABLED - - def test_bind_disabled_with_kerb_user(self): - self.assert_kerberos_bind(self.ALL_TOKENS, - bind_level='disabled', - use_kerberos=ANY, - success=True) - - # PERMISSIVE - - def test_bind_permissive_with_kerb_user(self): - self.assert_kerberos_bind(self.TOKEN_BIND_KERB, - bind_level='permissive', - use_kerberos=True, - success=True) - - def test_bind_permissive_with_regular_token(self): - self.assert_kerberos_bind(self.TOKEN_BIND_NONE, - bind_level='permissive', - use_kerberos=ANY, - success=True) - - def test_bind_permissive_without_kerb_user(self): - self.assert_kerberos_bind(self.TOKEN_BIND_KERB, - bind_level='permissive', - use_kerberos=False, - success=False) - - def test_bind_permissive_with_unknown_bind(self): - self.assert_kerberos_bind(self.TOKEN_BIND_UNKNOWN, - bind_level='permissive', - use_kerberos=ANY, - success=True) - - # STRICT - - def test_bind_strict_with_regular_token(self): - self.assert_kerberos_bind(self.TOKEN_BIND_NONE, - bind_level='strict', - use_kerberos=ANY, - success=True) - - def test_bind_strict_with_kerb_user(self): - self.assert_kerberos_bind(self.TOKEN_BIND_KERB, - bind_level='strict', - use_kerberos=True, - success=True) - - def test_bind_strict_without_kerb_user(self): - self.assert_kerberos_bind(self.TOKEN_BIND_KERB, - bind_level='strict', - use_kerberos=False, - success=False) - - def test_bind_strict_with_unknown_bind(self): - self.assert_kerberos_bind(self.TOKEN_BIND_UNKNOWN, - bind_level='strict', - use_kerberos=ANY, - success=False) - - # REQUIRED - - def test_bind_required_with_regular_token(self): - self.assert_kerberos_bind(self.TOKEN_BIND_NONE, - 
bind_level='required', - use_kerberos=ANY, - success=False) - - def test_bind_required_with_kerb_user(self): - self.assert_kerberos_bind(self.TOKEN_BIND_KERB, - bind_level='required', - use_kerberos=True, - success=True) - - def test_bind_required_without_kerb_user(self): - self.assert_kerberos_bind(self.TOKEN_BIND_KERB, - bind_level='required', - use_kerberos=False, - success=False) - - def test_bind_required_with_unknown_bind(self): - self.assert_kerberos_bind(self.TOKEN_BIND_UNKNOWN, - bind_level='required', - use_kerberos=ANY, - success=False) - - # NAMED - - def test_bind_named_with_regular_token(self): - self.assert_kerberos_bind(self.TOKEN_BIND_NONE, - bind_level='kerberos', - use_kerberos=ANY, - success=False) - - def test_bind_named_with_kerb_user(self): - self.assert_kerberos_bind(self.TOKEN_BIND_KERB, - bind_level='kerberos', - use_kerberos=True, - success=True) - - def test_bind_named_without_kerb_user(self): - self.assert_kerberos_bind(self.TOKEN_BIND_KERB, - bind_level='kerberos', - use_kerberos=False, - success=False) - - def test_bind_named_with_unknown_bind(self): - self.assert_kerberos_bind(self.TOKEN_BIND_UNKNOWN, - bind_level='kerberos', - use_kerberos=ANY, - success=False) - - def test_bind_named_with_unknown_scheme(self): - self.assert_kerberos_bind(self.ALL_TOKENS, - bind_level='unknown', - use_kerberos=ANY, - success=False) diff --git a/keystone-moon/keystone/tests/unit/test_token_provider.py b/keystone-moon/keystone/tests/unit/test_token_provider.py deleted file mode 100644 index 5c71363b..00000000 --- a/keystone-moon/keystone/tests/unit/test_token_provider.py +++ /dev/null @@ -1,845 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import datetime - -from oslo_config import cfg -from oslo_utils import timeutils -from six.moves import reload_module - -from keystone.common import dependency -from keystone.common import utils -from keystone import exception -from keystone.tests import unit -from keystone.tests.unit.ksfixtures import database -from keystone import token -from keystone.token.providers import fernet -from keystone.token.providers import pki -from keystone.token.providers import pkiz -from keystone.token.providers import uuid - - -CONF = cfg.CONF - -FUTURE_DELTA = datetime.timedelta(seconds=CONF.token.expiration) -CURRENT_DATE = timeutils.utcnow() - -SAMPLE_V2_TOKEN = { - "access": { - "trust": { - "id": "abc123", - "trustee_user_id": "123456", - "trustor_user_id": "333333", - "impersonation": False - }, - "serviceCatalog": [ - { - "endpoints": [ - { - "adminURL": "http://localhost:8774/v1.1/01257", - "id": "51934fe63a5b4ac0a32664f64eb462c3", - "internalURL": "http://localhost:8774/v1.1/01257", - "publicURL": "http://localhost:8774/v1.1/01257", - "region": "RegionOne" - } - ], - "endpoints_links": [], - "name": "nova", - "type": "compute" - }, - { - "endpoints": [ - { - "adminURL": "http://localhost:9292", - "id": "aaa17a539e364297a7845d67c7c7cc4b", - "internalURL": "http://localhost:9292", - "publicURL": "http://localhost:9292", - "region": "RegionOne" - } - ], - "endpoints_links": [], - "name": "glance", - "type": "image" - }, - { - "endpoints": [ - { - "adminURL": "http://localhost:8776/v1/01257", - "id": "077d82df25304abeac2294004441db5a", - "internalURL": 
"http://localhost:8776/v1/01257", - "publicURL": "http://localhost:8776/v1/01257", - "region": "RegionOne" - } - ], - "endpoints_links": [], - "name": "volume", - "type": "volume" - }, - { - "endpoints": [ - { - "adminURL": "http://localhost:8773/services/Admin", - "id": "b06997fd08414903ad458836efaa9067", - "internalURL": "http://localhost:8773/services/Cloud", - "publicURL": "http://localhost:8773/services/Cloud", - "region": "RegionOne" - } - ], - "endpoints_links": [], - "name": "ec2", - "type": "ec2" - }, - { - "endpoints": [ - { - "adminURL": "http://localhost:8080/v1", - "id": "7bd0c643e05a4a2ab40902b2fa0dd4e6", - "internalURL": "http://localhost:8080/v1/AUTH_01257", - "publicURL": "http://localhost:8080/v1/AUTH_01257", - "region": "RegionOne" - } - ], - "endpoints_links": [], - "name": "swift", - "type": "object-store" - }, - { - "endpoints": [ - { - "adminURL": "http://localhost:35357/v2.0", - "id": "02850c5d1d094887bdc46e81e1e15dc7", - "internalURL": "http://localhost:5000/v2.0", - "publicURL": "http://localhost:5000/v2.0", - "region": "RegionOne" - } - ], - "endpoints_links": [], - "name": "keystone", - "type": "identity" - } - ], - "token": { - "expires": "2013-05-22T00:02:43.941430Z", - "id": "ce4fc2d36eea4cc9a36e666ac2f1029a", - "issued_at": "2013-05-21T00:02:43.941473Z", - "tenant": { - "enabled": True, - "id": "01257", - "name": "service" - } - }, - "user": { - "id": "f19ddbe2c53c46f189fe66d0a7a9c9ce", - "name": "nova", - "roles": [ - { - "name": "_member_" - }, - { - "name": "admin" - } - ], - "roles_links": [], - "username": "nova" - } - } -} - -SAMPLE_V3_TOKEN = { - "token": { - "catalog": [ - { - "endpoints": [ - { - "id": "02850c5d1d094887bdc46e81e1e15dc7", - "interface": "admin", - "region": "RegionOne", - "url": "http://localhost:35357/v2.0" - }, - { - "id": "446e244b75034a9ab4b0811e82d0b7c8", - "interface": "internal", - "region": "RegionOne", - "url": "http://localhost:5000/v2.0" - }, - { - "id": "47fa3d9f499240abb5dfcf2668f168cd", - 
"interface": "public", - "region": "RegionOne", - "url": "http://localhost:5000/v2.0" - } - ], - "id": "26d7541715a44a4d9adad96f9872b633", - "type": "identity", - }, - { - "endpoints": [ - { - "id": "aaa17a539e364297a7845d67c7c7cc4b", - "interface": "admin", - "region": "RegionOne", - "url": "http://localhost:9292" - }, - { - "id": "4fa9620e42394cb1974736dce0856c71", - "interface": "internal", - "region": "RegionOne", - "url": "http://localhost:9292" - }, - { - "id": "9673687f9bc441d88dec37942bfd603b", - "interface": "public", - "region": "RegionOne", - "url": "http://localhost:9292" - } - ], - "id": "d27a41843f4e4b0e8cf6dac4082deb0d", - "type": "image", - }, - { - "endpoints": [ - { - "id": "7bd0c643e05a4a2ab40902b2fa0dd4e6", - "interface": "admin", - "region": "RegionOne", - "url": "http://localhost:8080/v1" - }, - { - "id": "43bef154594d4ccb8e49014d20624e1d", - "interface": "internal", - "region": "RegionOne", - "url": "http://localhost:8080/v1/AUTH_01257" - }, - { - "id": "e63b5f5d7aa3493690189d0ff843b9b3", - "interface": "public", - "region": "RegionOne", - "url": "http://localhost:8080/v1/AUTH_01257" - } - ], - "id": "a669e152f1104810a4b6701aade721bb", - "type": "object-store", - }, - { - "endpoints": [ - { - "id": "51934fe63a5b4ac0a32664f64eb462c3", - "interface": "admin", - "region": "RegionOne", - "url": "http://localhost:8774/v1.1/01257" - }, - { - "id": "869b535eea0d42e483ae9da0d868ebad", - "interface": "internal", - "region": "RegionOne", - "url": "http://localhost:8774/v1.1/01257" - }, - { - "id": "93583824c18f4263a2245ca432b132a6", - "interface": "public", - "region": "RegionOne", - "url": "http://localhost:8774/v1.1/01257" - } - ], - "id": "7f32cc2af6c9476e82d75f80e8b3bbb8", - "type": "compute", - }, - { - "endpoints": [ - { - "id": "b06997fd08414903ad458836efaa9067", - "interface": "admin", - "region": "RegionOne", - "url": "http://localhost:8773/services/Admin" - }, - { - "id": "411f7de7c9a8484c9b46c254fb2676e2", - "interface": "internal", - 
"region": "RegionOne", - "url": "http://localhost:8773/services/Cloud" - }, - { - "id": "f21c93f3da014785854b4126d0109c49", - "interface": "public", - "region": "RegionOne", - "url": "http://localhost:8773/services/Cloud" - } - ], - "id": "b08c9c7d4ef543eba5eeb766f72e5aa1", - "type": "ec2", - }, - { - "endpoints": [ - { - "id": "077d82df25304abeac2294004441db5a", - "interface": "admin", - "region": "RegionOne", - "url": "http://localhost:8776/v1/01257" - }, - { - "id": "875bf282362c40219665278b4fd11467", - "interface": "internal", - "region": "RegionOne", - "url": "http://localhost:8776/v1/01257" - }, - { - "id": "cd229aa6df0640dc858a8026eb7e640c", - "interface": "public", - "region": "RegionOne", - "url": "http://localhost:8776/v1/01257" - } - ], - "id": "5db21b82617f4a95816064736a7bec22", - "type": "volume", - } - ], - "expires_at": "2013-05-22T00:02:43.941430Z", - "issued_at": "2013-05-21T00:02:43.941473Z", - "methods": [ - "password" - ], - "project": { - "domain": { - "id": "default", - "name": "Default" - }, - "id": "01257", - "name": "service" - }, - "roles": [ - { - "id": "9fe2ff9ee4384b1894a90878d3e92bab", - "name": "_member_" - }, - { - "id": "53bff13443bd4450b97f978881d47b18", - "name": "admin" - } - ], - "user": { - "domain": { - "id": "default", - "name": "Default" - }, - "id": "f19ddbe2c53c46f189fe66d0a7a9c9ce", - "name": "nova" - }, - "OS-TRUST:trust": { - "id": "abc123", - "trustee_user_id": "123456", - "trustor_user_id": "333333", - "impersonation": False - } - } -} - -SAMPLE_V2_TOKEN_WITH_EMBEDED_VERSION = { - "access": { - "trust": { - "id": "abc123", - "trustee_user_id": "123456", - "trustor_user_id": "333333", - "impersonation": False - }, - "serviceCatalog": [ - { - "endpoints": [ - { - "adminURL": "http://localhost:8774/v1.1/01257", - "id": "51934fe63a5b4ac0a32664f64eb462c3", - "internalURL": "http://localhost:8774/v1.1/01257", - "publicURL": "http://localhost:8774/v1.1/01257", - "region": "RegionOne" - } - ], - "endpoints_links": [], - 
"name": "nova", - "type": "compute" - }, - { - "endpoints": [ - { - "adminURL": "http://localhost:9292", - "id": "aaa17a539e364297a7845d67c7c7cc4b", - "internalURL": "http://localhost:9292", - "publicURL": "http://localhost:9292", - "region": "RegionOne" - } - ], - "endpoints_links": [], - "name": "glance", - "type": "image" - }, - { - "endpoints": [ - { - "adminURL": "http://localhost:8776/v1/01257", - "id": "077d82df25304abeac2294004441db5a", - "internalURL": "http://localhost:8776/v1/01257", - "publicURL": "http://localhost:8776/v1/01257", - "region": "RegionOne" - } - ], - "endpoints_links": [], - "name": "volume", - "type": "volume" - }, - { - "endpoints": [ - { - "adminURL": "http://localhost:8773/services/Admin", - "id": "b06997fd08414903ad458836efaa9067", - "internalURL": "http://localhost:8773/services/Cloud", - "publicURL": "http://localhost:8773/services/Cloud", - "region": "RegionOne" - } - ], - "endpoints_links": [], - "name": "ec2", - "type": "ec2" - }, - { - "endpoints": [ - { - "adminURL": "http://localhost:8080/v1", - "id": "7bd0c643e05a4a2ab40902b2fa0dd4e6", - "internalURL": "http://localhost:8080/v1/AUTH_01257", - "publicURL": "http://localhost:8080/v1/AUTH_01257", - "region": "RegionOne" - } - ], - "endpoints_links": [], - "name": "swift", - "type": "object-store" - }, - { - "endpoints": [ - { - "adminURL": "http://localhost:35357/v2.0", - "id": "02850c5d1d094887bdc46e81e1e15dc7", - "internalURL": "http://localhost:5000/v2.0", - "publicURL": "http://localhost:5000/v2.0", - "region": "RegionOne" - } - ], - "endpoints_links": [], - "name": "keystone", - "type": "identity" - } - ], - "token": { - "expires": "2013-05-22T00:02:43.941430Z", - "id": "ce4fc2d36eea4cc9a36e666ac2f1029a", - "issued_at": "2013-05-21T00:02:43.941473Z", - "tenant": { - "enabled": True, - "id": "01257", - "name": "service" - } - }, - "user": { - "id": "f19ddbe2c53c46f189fe66d0a7a9c9ce", - "name": "nova", - "roles": [ - { - "name": "_member_" - }, - { - "name": "admin" - } - ], 
- "roles_links": [], - "username": "nova" - } - }, - 'token_version': 'v2.0' -} -SAMPLE_V3_TOKEN_WITH_EMBEDED_VERSION = { - "token": { - "catalog": [ - { - "endpoints": [ - { - "id": "02850c5d1d094887bdc46e81e1e15dc7", - "interface": "admin", - "region": "RegionOne", - "url": "http://localhost:35357/v2.0" - }, - { - "id": "446e244b75034a9ab4b0811e82d0b7c8", - "interface": "internal", - "region": "RegionOne", - "url": "http://localhost:5000/v2.0" - }, - { - "id": "47fa3d9f499240abb5dfcf2668f168cd", - "interface": "public", - "region": "RegionOne", - "url": "http://localhost:5000/v2.0" - } - ], - "id": "26d7541715a44a4d9adad96f9872b633", - "type": "identity", - }, - { - "endpoints": [ - { - "id": "aaa17a539e364297a7845d67c7c7cc4b", - "interface": "admin", - "region": "RegionOne", - "url": "http://localhost:9292" - }, - { - "id": "4fa9620e42394cb1974736dce0856c71", - "interface": "internal", - "region": "RegionOne", - "url": "http://localhost:9292" - }, - { - "id": "9673687f9bc441d88dec37942bfd603b", - "interface": "public", - "region": "RegionOne", - "url": "http://localhost:9292" - } - ], - "id": "d27a41843f4e4b0e8cf6dac4082deb0d", - "type": "image", - }, - { - "endpoints": [ - { - "id": "7bd0c643e05a4a2ab40902b2fa0dd4e6", - "interface": "admin", - "region": "RegionOne", - "url": "http://localhost:8080/v1" - }, - { - "id": "43bef154594d4ccb8e49014d20624e1d", - "interface": "internal", - "region": "RegionOne", - "url": "http://localhost:8080/v1/AUTH_01257" - }, - { - "id": "e63b5f5d7aa3493690189d0ff843b9b3", - "interface": "public", - "region": "RegionOne", - "url": "http://localhost:8080/v1/AUTH_01257" - } - ], - "id": "a669e152f1104810a4b6701aade721bb", - "type": "object-store", - }, - { - "endpoints": [ - { - "id": "51934fe63a5b4ac0a32664f64eb462c3", - "interface": "admin", - "region": "RegionOne", - "url": "http://localhost:8774/v1.1/01257" - }, - { - "id": "869b535eea0d42e483ae9da0d868ebad", - "interface": "internal", - "region": "RegionOne", - "url": 
"http://localhost:8774/v1.1/01257" - }, - { - "id": "93583824c18f4263a2245ca432b132a6", - "interface": "public", - "region": "RegionOne", - "url": "http://localhost:8774/v1.1/01257" - } - ], - "id": "7f32cc2af6c9476e82d75f80e8b3bbb8", - "type": "compute", - }, - { - "endpoints": [ - { - "id": "b06997fd08414903ad458836efaa9067", - "interface": "admin", - "region": "RegionOne", - "url": "http://localhost:8773/services/Admin" - }, - { - "id": "411f7de7c9a8484c9b46c254fb2676e2", - "interface": "internal", - "region": "RegionOne", - "url": "http://localhost:8773/services/Cloud" - }, - { - "id": "f21c93f3da014785854b4126d0109c49", - "interface": "public", - "region": "RegionOne", - "url": "http://localhost:8773/services/Cloud" - } - ], - "id": "b08c9c7d4ef543eba5eeb766f72e5aa1", - "type": "ec2", - }, - { - "endpoints": [ - { - "id": "077d82df25304abeac2294004441db5a", - "interface": "admin", - "region": "RegionOne", - "url": "http://localhost:8776/v1/01257" - }, - { - "id": "875bf282362c40219665278b4fd11467", - "interface": "internal", - "region": "RegionOne", - "url": "http://localhost:8776/v1/01257" - }, - { - "id": "cd229aa6df0640dc858a8026eb7e640c", - "interface": "public", - "region": "RegionOne", - "url": "http://localhost:8776/v1/01257" - } - ], - "id": "5db21b82617f4a95816064736a7bec22", - "type": "volume", - } - ], - "expires_at": "2013-05-22T00:02:43.941430Z", - "issued_at": "2013-05-21T00:02:43.941473Z", - "methods": [ - "password" - ], - "project": { - "domain": { - "id": "default", - "name": "Default" - }, - "id": "01257", - "name": "service" - }, - "roles": [ - { - "id": "9fe2ff9ee4384b1894a90878d3e92bab", - "name": "_member_" - }, - { - "id": "53bff13443bd4450b97f978881d47b18", - "name": "admin" - } - ], - "user": { - "domain": { - "id": "default", - "name": "Default" - }, - "id": "f19ddbe2c53c46f189fe66d0a7a9c9ce", - "name": "nova" - }, - "OS-TRUST:trust": { - "id": "abc123", - "trustee_user_id": "123456", - "trustor_user_id": "333333", - "impersonation": 
False - } - }, - 'token_version': 'v3.0' -} - - -def create_v2_token(): - return { - "access": { - "token": { - "expires": utils.isotime(timeutils.utcnow() + - FUTURE_DELTA), - "issued_at": "2013-05-21T00:02:43.941473Z", - "tenant": { - "enabled": True, - "id": "01257", - "name": "service" - } - } - } - } - - -SAMPLE_V2_TOKEN_EXPIRED = { - "access": { - "token": { - "expires": utils.isotime(CURRENT_DATE), - "issued_at": "2013-05-21T00:02:43.941473Z", - "tenant": { - "enabled": True, - "id": "01257", - "name": "service" - } - } - } -} - - -def create_v3_token(): - return { - "token": { - 'methods': [], - "expires_at": utils.isotime(timeutils.utcnow() + FUTURE_DELTA), - "issued_at": "2013-05-21T00:02:43.941473Z", - } - } - - -SAMPLE_V3_TOKEN_EXPIRED = { - "token": { - "expires_at": utils.isotime(CURRENT_DATE), - "issued_at": "2013-05-21T00:02:43.941473Z", - } -} - -SAMPLE_MALFORMED_TOKEN = { - "token": { - "bogus": { - "no expiration data": None - } - } -} - - -class TestTokenProvider(unit.TestCase): - def setUp(self): - super(TestTokenProvider, self).setUp() - self.useFixture(database.Database()) - self.load_backends() - - def test_get_token_version(self): - self.assertEqual( - token.provider.V2, - self.token_provider_api.get_token_version(SAMPLE_V2_TOKEN)) - self.assertEqual( - token.provider.V2, - self.token_provider_api.get_token_version( - SAMPLE_V2_TOKEN_WITH_EMBEDED_VERSION)) - self.assertEqual( - token.provider.V3, - self.token_provider_api.get_token_version(SAMPLE_V3_TOKEN)) - self.assertEqual( - token.provider.V3, - self.token_provider_api.get_token_version( - SAMPLE_V3_TOKEN_WITH_EMBEDED_VERSION)) - self.assertRaises(exception.UnsupportedTokenVersionException, - self.token_provider_api.get_token_version, - 'bogus') - - def test_supported_token_providers(self): - # test default config - - dependency.reset() - self.assertIsInstance(token.provider.Manager().driver, - uuid.Provider) - - dependency.reset() - self.config_fixture.config(group='token', 
provider='uuid') - self.assertIsInstance(token.provider.Manager().driver, uuid.Provider) - - dependency.reset() - self.config_fixture.config(group='token', provider='pki') - self.assertIsInstance(token.provider.Manager().driver, pki.Provider) - - dependency.reset() - self.config_fixture.config(group='token', provider='pkiz') - self.assertIsInstance(token.provider.Manager().driver, pkiz.Provider) - - dependency.reset() - self.config_fixture.config(group='token', provider='fernet') - self.assertIsInstance(token.provider.Manager().driver, fernet.Provider) - - def test_unsupported_token_provider(self): - self.config_fixture.config(group='token', - provider='my.package.MyProvider') - self.assertRaises(ImportError, - token.provider.Manager) - - def test_provider_token_expiration_validation(self): - self.assertRaises(exception.TokenNotFound, - self.token_provider_api._is_valid_token, - SAMPLE_V2_TOKEN_EXPIRED) - self.assertRaises(exception.TokenNotFound, - self.token_provider_api._is_valid_token, - SAMPLE_V3_TOKEN_EXPIRED) - self.assertRaises(exception.TokenNotFound, - self.token_provider_api._is_valid_token, - SAMPLE_MALFORMED_TOKEN) - self.assertIsNone( - self.token_provider_api._is_valid_token(create_v2_token())) - self.assertIsNone( - self.token_provider_api._is_valid_token(create_v3_token())) - - def test_no_token_raises_token_not_found(self): - self.assertRaises( - exception.TokenNotFound, - self.token_provider_api.validate_token, - None) - - -# NOTE(ayoung): renamed to avoid automatic test detection -class PKIProviderTests(object): - - def setUp(self): - super(PKIProviderTests, self).setUp() - - from keystoneclient.common import cms - self.cms = cms - - from keystone.common import environment - self.environment = environment - - old_cms_subprocess = cms.subprocess - self.addCleanup(setattr, cms, 'subprocess', old_cms_subprocess) - - old_env_subprocess = environment.subprocess - self.addCleanup(setattr, environment, 'subprocess', old_env_subprocess) - - 
self.cms.subprocess = self.target_subprocess - self.environment.subprocess = self.target_subprocess - - # force module reload so the imports get re-evaluated - reload_module(pki) - - def test_get_token_id_error_handling(self): - # cause command-line failure - self.config_fixture.config(group='signing', - keyfile='--please-break-me') - - provider = pki.Provider() - token_data = {} - self.assertRaises(exception.UnexpectedError, - provider._get_token_id, - token_data) - - -class TestPKIProviderWithEventlet(PKIProviderTests, unit.TestCase): - - def setUp(self): - # force keystoneclient.common.cms to use eventlet's subprocess - from eventlet.green import subprocess - self.target_subprocess = subprocess - - super(TestPKIProviderWithEventlet, self).setUp() - - -class TestPKIProviderWithStdlib(PKIProviderTests, unit.TestCase): - - def setUp(self): - # force keystoneclient.common.cms to use the stdlib subprocess - import subprocess - self.target_subprocess = subprocess - - super(TestPKIProviderWithStdlib, self).setUp() diff --git a/keystone-moon/keystone/tests/unit/test_url_middleware.py b/keystone-moon/keystone/tests/unit/test_url_middleware.py deleted file mode 100644 index 3b160b93..00000000 --- a/keystone-moon/keystone/tests/unit/test_url_middleware.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import webob - -from keystone import middleware -from keystone.tests import unit - - -class FakeApp(object): - """Fakes a WSGI app URL normalized.""" - - def __call__(self, env, start_response): - resp = webob.Response() - resp.body = 'SUCCESS' - return resp(env, start_response) - - -class UrlMiddlewareTest(unit.TestCase): - def setUp(self): - self.middleware = middleware.NormalizingFilter(FakeApp()) - self.response_status = None - self.response_headers = None - super(UrlMiddlewareTest, self).setUp() - - def start_fake_response(self, status, headers): - self.response_status = int(status.split(' ', 1)[0]) - self.response_headers = dict(headers) - - def test_trailing_slash_normalization(self): - """Tests /v2.0/tokens and /v2.0/tokens/ normalized URLs match.""" - req1 = webob.Request.blank('/v2.0/tokens') - req2 = webob.Request.blank('/v2.0/tokens/') - self.middleware(req1.environ, self.start_fake_response) - self.middleware(req2.environ, self.start_fake_response) - self.assertEqual(req1.path_url, req2.path_url) - self.assertEqual('http://localhost/v2.0/tokens', req1.path_url) - - def test_rewrite_empty_path(self): - """Tests empty path is rewritten to root.""" - req = webob.Request.blank('') - self.middleware(req.environ, self.start_fake_response) - self.assertEqual('http://localhost/', req.path_url) diff --git a/keystone-moon/keystone/tests/unit/test_v2.py b/keystone-moon/keystone/tests/unit/test_v2.py deleted file mode 100644 index e81c6040..00000000 --- a/keystone-moon/keystone/tests/unit/test_v2.py +++ /dev/null @@ -1,1590 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import json -import time -import uuid - -from keystoneclient.common import cms -from oslo_config import cfg -import six -from six.moves import http_client -from testtools import matchers - -from keystone.common import extension as keystone_extension -from keystone.tests import unit -from keystone.tests.unit import default_fixtures -from keystone.tests.unit import ksfixtures -from keystone.tests.unit import rest -from keystone.tests.unit.schema import v2 - -CONF = cfg.CONF - - -class CoreApiTests(object): - def assertValidError(self, error): - self.assertIsNotNone(error.get('code')) - self.assertIsNotNone(error.get('title')) - self.assertIsNotNone(error.get('message')) - - def assertValidVersion(self, version): - self.assertIsNotNone(version) - self.assertIsNotNone(version.get('id')) - self.assertIsNotNone(version.get('status')) - self.assertIsNotNone(version.get('updated')) - - def assertValidExtension(self, extension): - self.assertIsNotNone(extension) - self.assertIsNotNone(extension.get('name')) - self.assertIsNotNone(extension.get('namespace')) - self.assertIsNotNone(extension.get('alias')) - self.assertIsNotNone(extension.get('updated')) - - def assertValidExtensionLink(self, link): - self.assertIsNotNone(link.get('rel')) - self.assertIsNotNone(link.get('type')) - self.assertIsNotNone(link.get('href')) - - def assertValidTenant(self, tenant): - self.assertIsNotNone(tenant.get('id')) - self.assertIsNotNone(tenant.get('name')) - self.assertNotIn('domain_id', tenant) - self.assertNotIn('parent_id', tenant) - - def assertValidUser(self, user): - 
self.assertIsNotNone(user.get('id')) - self.assertIsNotNone(user.get('name')) - - def assertValidRole(self, tenant): - self.assertIsNotNone(tenant.get('id')) - self.assertIsNotNone(tenant.get('name')) - - def test_public_not_found(self): - r = self.public_request( - path='/%s' % uuid.uuid4().hex, - expected_status=http_client.NOT_FOUND) - self.assertValidErrorResponse(r) - - def test_admin_not_found(self): - r = self.admin_request( - path='/%s' % uuid.uuid4().hex, - expected_status=http_client.NOT_FOUND) - self.assertValidErrorResponse(r) - - def test_public_multiple_choice(self): - r = self.public_request(path='/', expected_status=300) - self.assertValidMultipleChoiceResponse(r) - - def test_admin_multiple_choice(self): - r = self.admin_request(path='/', expected_status=300) - self.assertValidMultipleChoiceResponse(r) - - def test_public_version(self): - r = self.public_request(path='/v2.0/') - self.assertValidVersionResponse(r) - - def test_admin_version(self): - r = self.admin_request(path='/v2.0/') - self.assertValidVersionResponse(r) - - def test_public_extensions(self): - r = self.public_request(path='/v2.0/extensions') - self.assertValidExtensionListResponse( - r, keystone_extension.PUBLIC_EXTENSIONS) - - def test_admin_extensions(self): - r = self.admin_request(path='/v2.0/extensions') - self.assertValidExtensionListResponse( - r, keystone_extension.ADMIN_EXTENSIONS) - - def test_admin_extensions_returns_not_found(self): - self.admin_request(path='/v2.0/extensions/invalid-extension', - expected_status=http_client.NOT_FOUND) - - def test_public_osksadm_extension_returns_not_found(self): - self.public_request(path='/v2.0/extensions/OS-KSADM', - expected_status=http_client.NOT_FOUND) - - def test_admin_osksadm_extension(self): - r = self.admin_request(path='/v2.0/extensions/OS-KSADM') - self.assertValidExtensionResponse( - r, keystone_extension.ADMIN_EXTENSIONS) - - def test_authenticate(self): - r = self.public_request( - method='POST', - path='/v2.0/tokens', 
- body={ - 'auth': { - 'passwordCredentials': { - 'username': self.user_foo['name'], - 'password': self.user_foo['password'], - }, - 'tenantId': self.tenant_bar['id'], - }, - }, - expected_status=http_client.OK) - self.assertValidAuthenticationResponse(r, require_service_catalog=True) - - def test_authenticate_unscoped(self): - r = self.public_request( - method='POST', - path='/v2.0/tokens', - body={ - 'auth': { - 'passwordCredentials': { - 'username': self.user_foo['name'], - 'password': self.user_foo['password'], - }, - }, - }, - expected_status=http_client.OK) - self.assertValidAuthenticationResponse(r) - - def test_get_tenants_for_token(self): - r = self.public_request(path='/v2.0/tenants', - token=self.get_scoped_token()) - self.assertValidTenantListResponse(r) - - def test_validate_token(self): - token = self.get_scoped_token() - r = self.admin_request( - path='/v2.0/tokens/%(token_id)s' % { - 'token_id': token, - }, - token=token) - self.assertValidAuthenticationResponse(r) - - def test_invalid_token_returns_not_found(self): - token = self.get_scoped_token() - self.admin_request( - path='/v2.0/tokens/%(token_id)s' % { - 'token_id': 'invalid', - }, - token=token, - expected_status=http_client.NOT_FOUND) - - def test_validate_token_service_role(self): - self.md_foobar = self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], - self.tenant_service['id'], - self.role_service['id']) - - token = self.get_scoped_token( - tenant_id=default_fixtures.SERVICE_TENANT_ID) - r = self.admin_request( - path='/v2.0/tokens/%s' % token, - token=token) - self.assertValidAuthenticationResponse(r) - - def test_remove_role_revokes_token(self): - self.md_foobar = self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], - self.tenant_service['id'], - self.role_service['id']) - - token = self.get_scoped_token( - tenant_id=default_fixtures.SERVICE_TENANT_ID) - r = self.admin_request( - path='/v2.0/tokens/%s' % token, - token=token) - 
self.assertValidAuthenticationResponse(r) - - self.assignment_api.remove_role_from_user_and_project( - self.user_foo['id'], - self.tenant_service['id'], - self.role_service['id']) - - r = self.admin_request( - path='/v2.0/tokens/%s' % token, - token=token, - expected_status=http_client.UNAUTHORIZED) - - def test_validate_token_belongs_to(self): - token = self.get_scoped_token() - path = ('/v2.0/tokens/%s?belongsTo=%s' % (token, - self.tenant_bar['id'])) - r = self.admin_request(path=path, token=token) - self.assertValidAuthenticationResponse(r, require_service_catalog=True) - - def test_validate_token_no_belongs_to_still_returns_catalog(self): - token = self.get_scoped_token() - path = ('/v2.0/tokens/%s' % token) - r = self.admin_request(path=path, token=token) - self.assertValidAuthenticationResponse(r, require_service_catalog=True) - - def test_validate_token_head(self): - """The same call as above, except using HEAD. - - There's no response to validate here, but this is included for the - sake of completely covering the core API. 
- - """ - token = self.get_scoped_token() - self.admin_request( - method='HEAD', - path='/v2.0/tokens/%(token_id)s' % { - 'token_id': token, - }, - token=token, - expected_status=http_client.OK) - - def test_endpoints(self): - token = self.get_scoped_token() - r = self.admin_request( - path='/v2.0/tokens/%(token_id)s/endpoints' % { - 'token_id': token, - }, - token=token) - self.assertValidEndpointListResponse(r) - - def test_get_tenant(self): - token = self.get_scoped_token() - r = self.admin_request( - path='/v2.0/tenants/%(tenant_id)s' % { - 'tenant_id': self.tenant_bar['id'], - }, - token=token) - self.assertValidTenantResponse(r) - - def test_get_tenant_by_name(self): - token = self.get_scoped_token() - r = self.admin_request( - path='/v2.0/tenants?name=%(tenant_name)s' % { - 'tenant_name': self.tenant_bar['name'], - }, - token=token) - self.assertValidTenantResponse(r) - - def test_get_user_roles_with_tenant(self): - token = self.get_scoped_token() - r = self.admin_request( - path='/v2.0/tenants/%(tenant_id)s/users/%(user_id)s/roles' % { - 'tenant_id': self.tenant_bar['id'], - 'user_id': self.user_foo['id'], - }, - token=token) - self.assertValidRoleListResponse(r) - - def test_get_user_roles_without_tenant(self): - token = self.get_scoped_token() - self.admin_request( - path='/v2.0/users/%(user_id)s/roles' % { - 'user_id': self.user_foo['id'], - }, - token=token, expected_status=http_client.NOT_IMPLEMENTED) - - def test_get_user(self): - token = self.get_scoped_token() - r = self.admin_request( - path='/v2.0/users/%(user_id)s' % { - 'user_id': self.user_foo['id'], - }, - token=token) - self.assertValidUserResponse(r) - - def test_get_user_by_name(self): - token = self.get_scoped_token() - r = self.admin_request( - path='/v2.0/users?name=%(user_name)s' % { - 'user_name': self.user_foo['name'], - }, - token=token) - self.assertValidUserResponse(r) - - def test_create_update_user_invalid_enabled_type(self): - # Enforce usage of boolean for 'enabled' field - 
token = self.get_scoped_token() - - # Test CREATE request - r = self.admin_request( - method='POST', - path='/v2.0/users', - body={ - 'user': { - 'name': uuid.uuid4().hex, - 'password': uuid.uuid4().hex, - 'enabled': "False", - }, - }, - token=token, - expected_status=http_client.BAD_REQUEST) - self.assertValidErrorResponse(r) - - r = self.admin_request( - method='POST', - path='/v2.0/users', - body={ - 'user': { - 'name': uuid.uuid4().hex, - 'password': uuid.uuid4().hex, - # In JSON, 0|1 are not booleans - 'enabled': 0, - }, - }, - token=token, - expected_status=http_client.BAD_REQUEST) - self.assertValidErrorResponse(r) - - # Test UPDATE request - path = '/v2.0/users/%(user_id)s' % { - 'user_id': self.user_foo['id'], - } - - r = self.admin_request( - method='PUT', - path=path, - body={ - 'user': { - 'enabled': "False", - }, - }, - token=token, - expected_status=http_client.BAD_REQUEST) - self.assertValidErrorResponse(r) - - r = self.admin_request( - method='PUT', - path=path, - body={ - 'user': { - # In JSON, 0|1 are not booleans - 'enabled': 1, - }, - }, - token=token, - expected_status=http_client.BAD_REQUEST) - self.assertValidErrorResponse(r) - - def test_create_update_user_valid_enabled_type(self): - # Enforce usage of boolean for 'enabled' field - token = self.get_scoped_token() - - # Test CREATE request - self.admin_request(method='POST', - path='/v2.0/users', - body={ - 'user': { - 'name': uuid.uuid4().hex, - 'password': uuid.uuid4().hex, - 'enabled': False, - }, - }, - token=token, - expected_status=http_client.OK) - - def test_error_response(self): - """This triggers assertValidErrorResponse by convention.""" - self.public_request(path='/v2.0/tenants', - expected_status=http_client.UNAUTHORIZED) - - def test_invalid_parameter_error_response(self): - token = self.get_scoped_token() - bad_body = { - 'OS-KSADM:service%s' % uuid.uuid4().hex: { - 'name': uuid.uuid4().hex, - 'type': uuid.uuid4().hex, - }, - } - res = self.admin_request(method='POST', - 
path='/v2.0/OS-KSADM/services', - body=bad_body, - token=token, - expected_status=http_client.BAD_REQUEST) - self.assertValidErrorResponse(res) - res = self.admin_request(method='POST', - path='/v2.0/users', - body=bad_body, - token=token, - expected_status=http_client.BAD_REQUEST) - self.assertValidErrorResponse(res) - - def _get_user_id(self, r): - """Helper method to return user ID from a response. - - This needs to be overridden by child classes - based on their content type. - - """ - raise NotImplementedError() - - def _get_role_id(self, r): - """Helper method to return a role ID from a response. - - This needs to be overridden by child classes - based on their content type. - - """ - raise NotImplementedError() - - def _get_role_name(self, r): - """Helper method to return role NAME from a response. - - This needs to be overridden by child classes - based on their content type. - - """ - raise NotImplementedError() - - def _get_project_id(self, r): - """Helper method to return project ID from a response. - - This needs to be overridden by child classes - based on their content type. - - """ - raise NotImplementedError() - - def assertNoRoles(self, r): - """Helper method to assert No Roles - - This needs to be overridden by child classes - based on their content type. 
- - """ - raise NotImplementedError() - - def test_update_user_tenant(self): - token = self.get_scoped_token() - - # Create a new user - r = self.admin_request( - method='POST', - path='/v2.0/users', - body={ - 'user': { - 'name': uuid.uuid4().hex, - 'password': uuid.uuid4().hex, - 'tenantId': self.tenant_bar['id'], - 'enabled': True, - }, - }, - token=token, - expected_status=http_client.OK) - - user_id = self._get_user_id(r.result) - - # Check if member_role is in tenant_bar - r = self.admin_request( - path='/v2.0/tenants/%(project_id)s/users/%(user_id)s/roles' % { - 'project_id': self.tenant_bar['id'], - 'user_id': user_id - }, - token=token, - expected_status=http_client.OK) - self.assertEqual(CONF.member_role_name, self._get_role_name(r.result)) - - # Create a new tenant - r = self.admin_request( - method='POST', - path='/v2.0/tenants', - body={ - 'tenant': { - 'name': 'test_update_user', - 'description': 'A description ...', - 'enabled': True, - }, - }, - token=token, - expected_status=http_client.OK) - - project_id = self._get_project_id(r.result) - - # Update user's tenant - r = self.admin_request( - method='PUT', - path='/v2.0/users/%(user_id)s' % { - 'user_id': user_id, - }, - body={ - 'user': { - 'tenantId': project_id, - }, - }, - token=token, - expected_status=http_client.OK) - - # 'member_role' should be in new_tenant - r = self.admin_request( - path='/v2.0/tenants/%(project_id)s/users/%(user_id)s/roles' % { - 'project_id': project_id, - 'user_id': user_id - }, - token=token, - expected_status=http_client.OK) - self.assertEqual('_member_', self._get_role_name(r.result)) - - # 'member_role' should not be in tenant_bar any more - r = self.admin_request( - path='/v2.0/tenants/%(project_id)s/users/%(user_id)s/roles' % { - 'project_id': self.tenant_bar['id'], - 'user_id': user_id - }, - token=token, - expected_status=http_client.OK) - self.assertNoRoles(r.result) - - def test_update_user_with_invalid_tenant(self): - token = self.get_scoped_token() - - # 
Create a new user - r = self.admin_request( - method='POST', - path='/v2.0/users', - body={ - 'user': { - 'name': 'test_invalid_tenant', - 'password': uuid.uuid4().hex, - 'tenantId': self.tenant_bar['id'], - 'enabled': True, - }, - }, - token=token, - expected_status=http_client.OK) - user_id = self._get_user_id(r.result) - - # Update user with an invalid tenant - r = self.admin_request( - method='PUT', - path='/v2.0/users/%(user_id)s' % { - 'user_id': user_id, - }, - body={ - 'user': { - 'tenantId': 'abcde12345heha', - }, - }, - token=token, - expected_status=http_client.NOT_FOUND) - - def test_update_user_with_invalid_tenant_no_prev_tenant(self): - token = self.get_scoped_token() - - # Create a new user - r = self.admin_request( - method='POST', - path='/v2.0/users', - body={ - 'user': { - 'name': 'test_invalid_tenant', - 'password': uuid.uuid4().hex, - 'enabled': True, - }, - }, - token=token, - expected_status=http_client.OK) - user_id = self._get_user_id(r.result) - - # Update user with an invalid tenant - r = self.admin_request( - method='PUT', - path='/v2.0/users/%(user_id)s' % { - 'user_id': user_id, - }, - body={ - 'user': { - 'tenantId': 'abcde12345heha', - }, - }, - token=token, - expected_status=http_client.NOT_FOUND) - - def test_update_user_with_old_tenant(self): - token = self.get_scoped_token() - - # Create a new user - r = self.admin_request( - method='POST', - path='/v2.0/users', - body={ - 'user': { - 'name': uuid.uuid4().hex, - 'password': uuid.uuid4().hex, - 'tenantId': self.tenant_bar['id'], - 'enabled': True, - }, - }, - token=token, - expected_status=http_client.OK) - - user_id = self._get_user_id(r.result) - - # Check if member_role is in tenant_bar - r = self.admin_request( - path='/v2.0/tenants/%(project_id)s/users/%(user_id)s/roles' % { - 'project_id': self.tenant_bar['id'], - 'user_id': user_id - }, - token=token, - expected_status=http_client.OK) - self.assertEqual(CONF.member_role_name, self._get_role_name(r.result)) - - # Update 
user's tenant with old tenant id - r = self.admin_request( - method='PUT', - path='/v2.0/users/%(user_id)s' % { - 'user_id': user_id, - }, - body={ - 'user': { - 'tenantId': self.tenant_bar['id'], - }, - }, - token=token, - expected_status=http_client.OK) - - # 'member_role' should still be in tenant_bar - r = self.admin_request( - path='/v2.0/tenants/%(project_id)s/users/%(user_id)s/roles' % { - 'project_id': self.tenant_bar['id'], - 'user_id': user_id - }, - token=token, - expected_status=http_client.OK) - self.assertEqual('_member_', self._get_role_name(r.result)) - - def test_authenticating_a_user_with_no_password(self): - token = self.get_scoped_token() - - username = uuid.uuid4().hex - - # create the user - self.admin_request( - method='POST', - path='/v2.0/users', - body={ - 'user': { - 'name': username, - 'enabled': True, - }, - }, - token=token) - - # fail to authenticate - r = self.public_request( - method='POST', - path='/v2.0/tokens', - body={ - 'auth': { - 'passwordCredentials': { - 'username': username, - 'password': 'password', - }, - }, - }, - expected_status=http_client.UNAUTHORIZED) - self.assertValidErrorResponse(r) - - def test_www_authenticate_header(self): - r = self.public_request( - path='/v2.0/tenants', - expected_status=http_client.UNAUTHORIZED) - self.assertEqual('Keystone uri="http://localhost"', - r.headers.get('WWW-Authenticate')) - - def test_www_authenticate_header_host(self): - test_url = 'http://%s:4187' % uuid.uuid4().hex - self.config_fixture.config(public_endpoint=test_url) - r = self.public_request( - path='/v2.0/tenants', - expected_status=http_client.UNAUTHORIZED) - self.assertEqual('Keystone uri="%s"' % test_url, - r.headers.get('WWW-Authenticate')) - - -class LegacyV2UsernameTests(object): - """Tests to show the broken username behavior in V2. - - The V2 API is documented to use `username` instead of `name`. The - API forced used to use name and left the username to fall into the - `extra` field. 
- - These tests ensure this behavior works so fixes to `username`/`name` - will be backward compatible. - """ - - def create_user(self, **user_attrs): - """Creates a users and returns the response object. - - :param user_attrs: attributes added to the request body (optional) - """ - token = self.get_scoped_token() - body = { - 'user': { - 'name': uuid.uuid4().hex, - 'enabled': True, - }, - } - body['user'].update(user_attrs) - - return self.admin_request( - method='POST', - path='/v2.0/users', - token=token, - body=body, - expected_status=http_client.OK) - - def test_create_with_extra_username(self): - """The response for creating a user will contain the extra fields.""" - fake_username = uuid.uuid4().hex - r = self.create_user(username=fake_username) - - self.assertValidUserResponse(r) - - user = self.get_user_from_response(r) - self.assertEqual(fake_username, user.get('username')) - - def test_get_returns_username_from_extra(self): - """The response for getting a user will contain the extra fields.""" - token = self.get_scoped_token() - - fake_username = uuid.uuid4().hex - r = self.create_user(username=fake_username) - - id_ = self.get_user_attribute_from_response(r, 'id') - r = self.admin_request(path='/v2.0/users/%s' % id_, token=token) - - self.assertValidUserResponse(r) - - user = self.get_user_from_response(r) - self.assertEqual(fake_username, user.get('username')) - - def test_update_returns_new_username_when_adding_username(self): - """The response for updating a user will contain the extra fields. - - This is specifically testing for updating a username when a value - was not previously set. 
- """ - token = self.get_scoped_token() - - r = self.create_user() - - id_ = self.get_user_attribute_from_response(r, 'id') - name = self.get_user_attribute_from_response(r, 'name') - enabled = self.get_user_attribute_from_response(r, 'enabled') - r = self.admin_request( - method='PUT', - path='/v2.0/users/%s' % id_, - token=token, - body={ - 'user': { - 'name': name, - 'username': 'new_username', - 'enabled': enabled, - }, - }, - expected_status=http_client.OK) - - self.assertValidUserResponse(r) - - user = self.get_user_from_response(r) - self.assertEqual('new_username', user.get('username')) - - def test_update_returns_new_username_when_updating_username(self): - """The response for updating a user will contain the extra fields. - - This tests updating a username that was previously set. - """ - token = self.get_scoped_token() - - r = self.create_user(username='original_username') - - id_ = self.get_user_attribute_from_response(r, 'id') - name = self.get_user_attribute_from_response(r, 'name') - enabled = self.get_user_attribute_from_response(r, 'enabled') - r = self.admin_request( - method='PUT', - path='/v2.0/users/%s' % id_, - token=token, - body={ - 'user': { - 'name': name, - 'username': 'new_username', - 'enabled': enabled, - }, - }, - expected_status=http_client.OK) - - self.assertValidUserResponse(r) - - user = self.get_user_from_response(r) - self.assertEqual('new_username', user.get('username')) - - def test_username_is_always_returned_create(self): - """Username is set as the value of name if no username is provided. - - This matches the v2.0 spec where we really should be using username - and not name. - """ - r = self.create_user() - - self.assertValidUserResponse(r) - - user = self.get_user_from_response(r) - self.assertEqual(user.get('name'), user.get('username')) - - def test_username_is_always_returned_get(self): - """Username is set as the value of name if no username is provided. 
- - This matches the v2.0 spec where we really should be using username - and not name. - """ - token = self.get_scoped_token() - - r = self.create_user() - - id_ = self.get_user_attribute_from_response(r, 'id') - r = self.admin_request(path='/v2.0/users/%s' % id_, token=token) - - self.assertValidUserResponse(r) - - user = self.get_user_from_response(r) - self.assertEqual(user.get('name'), user.get('username')) - - def test_username_is_always_returned_get_by_name(self): - """Username is set as the value of name if no username is provided. - - This matches the v2.0 spec where we really should be using username - and not name. - """ - token = self.get_scoped_token() - - r = self.create_user() - - name = self.get_user_attribute_from_response(r, 'name') - r = self.admin_request(path='/v2.0/users?name=%s' % name, token=token) - - self.assertValidUserResponse(r) - - user = self.get_user_from_response(r) - self.assertEqual(user.get('name'), user.get('username')) - - def test_username_is_always_returned_update_no_username_provided(self): - """Username is set as the value of name if no username is provided. - - This matches the v2.0 spec where we really should be using username - and not name. - """ - token = self.get_scoped_token() - - r = self.create_user() - - id_ = self.get_user_attribute_from_response(r, 'id') - name = self.get_user_attribute_from_response(r, 'name') - enabled = self.get_user_attribute_from_response(r, 'enabled') - r = self.admin_request( - method='PUT', - path='/v2.0/users/%s' % id_, - token=token, - body={ - 'user': { - 'name': name, - 'enabled': enabled, - }, - }, - expected_status=http_client.OK) - - self.assertValidUserResponse(r) - - user = self.get_user_from_response(r) - self.assertEqual(user.get('name'), user.get('username')) - - def test_updated_username_is_returned(self): - """Username is set as the value of name if no username is provided. - - This matches the v2.0 spec where we really should be using username - and not name. 
- """ - token = self.get_scoped_token() - - r = self.create_user() - - id_ = self.get_user_attribute_from_response(r, 'id') - name = self.get_user_attribute_from_response(r, 'name') - enabled = self.get_user_attribute_from_response(r, 'enabled') - r = self.admin_request( - method='PUT', - path='/v2.0/users/%s' % id_, - token=token, - body={ - 'user': { - 'name': name, - 'enabled': enabled, - }, - }, - expected_status=http_client.OK) - - self.assertValidUserResponse(r) - - user = self.get_user_from_response(r) - self.assertEqual(user.get('name'), user.get('username')) - - def test_username_can_be_used_instead_of_name_create(self): - token = self.get_scoped_token() - - r = self.admin_request( - method='POST', - path='/v2.0/users', - token=token, - body={ - 'user': { - 'username': uuid.uuid4().hex, - 'enabled': True, - }, - }, - expected_status=http_client.OK) - - self.assertValidUserResponse(r) - - user = self.get_user_from_response(r) - self.assertEqual(user.get('name'), user.get('username')) - - def test_username_can_be_used_instead_of_name_update(self): - token = self.get_scoped_token() - - r = self.create_user() - - id_ = self.get_user_attribute_from_response(r, 'id') - new_username = uuid.uuid4().hex - enabled = self.get_user_attribute_from_response(r, 'enabled') - r = self.admin_request( - method='PUT', - path='/v2.0/users/%s' % id_, - token=token, - body={ - 'user': { - 'username': new_username, - 'enabled': enabled, - }, - }, - expected_status=http_client.OK) - - self.assertValidUserResponse(r) - - user = self.get_user_from_response(r) - self.assertEqual(new_username, user.get('name')) - self.assertEqual(user.get('name'), user.get('username')) - - -class RestfulTestCase(rest.RestfulTestCase): - - def setUp(self): - super(RestfulTestCase, self).setUp() - - # TODO(termie): add an admin user to the fixtures and use that user - # override the fixtures, for now - self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], - self.tenant_bar['id'], - 
self.role_admin['id']) - - -class V2TestCase(RestfulTestCase, CoreApiTests, LegacyV2UsernameTests): - - def config_overrides(self): - super(V2TestCase, self).config_overrides() - self.config_fixture.config( - group='catalog', - driver='templated', - template_file=unit.dirs.tests('default_catalog.templates')) - - def _get_user_id(self, r): - return r['user']['id'] - - def _get_role_name(self, r): - return r['roles'][0]['name'] - - def _get_role_id(self, r): - return r['roles'][0]['id'] - - def _get_project_id(self, r): - return r['tenant']['id'] - - def _get_token_id(self, r): - return r.result['access']['token']['id'] - - def assertNoRoles(self, r): - self.assertEqual([], r['roles']) - - def assertValidErrorResponse(self, r): - self.assertIsNotNone(r.result.get('error')) - self.assertValidError(r.result['error']) - self.assertEqual(r.result['error']['code'], r.status_code) - - def assertValidExtension(self, extension, expected): - super(V2TestCase, self).assertValidExtension(extension) - descriptions = [ext['description'] for ext in six.itervalues(expected)] - description = extension.get('description') - self.assertIsNotNone(description) - self.assertIn(description, descriptions) - self.assertIsNotNone(extension.get('links')) - self.assertNotEmpty(extension.get('links')) - for link in extension.get('links'): - self.assertValidExtensionLink(link) - - def assertValidExtensionListResponse(self, r, expected): - self.assertIsNotNone(r.result.get('extensions')) - self.assertIsNotNone(r.result['extensions'].get('values')) - self.assertNotEmpty(r.result['extensions'].get('values')) - for extension in r.result['extensions']['values']: - self.assertValidExtension(extension, expected) - - def assertValidExtensionResponse(self, r, expected): - self.assertValidExtension(r.result.get('extension'), expected) - - def assertValidUser(self, user): - super(V2TestCase, self).assertValidUser(user) - self.assertNotIn('default_project_id', user) - if 'tenantId' in user: - # 
NOTE(morganfainberg): tenantId should never be "None", it gets - # filtered out of the object if it is there. This is suspenders - # and a belt check to avoid unintended regressions. - self.assertIsNotNone(user.get('tenantId')) - - def assertValidAuthenticationResponse(self, r, - require_service_catalog=False): - self.assertIsNotNone(r.result.get('access')) - self.assertIsNotNone(r.result['access'].get('token')) - self.assertIsNotNone(r.result['access'].get('user')) - - # validate token - self.assertIsNotNone(r.result['access']['token'].get('id')) - self.assertIsNotNone(r.result['access']['token'].get('expires')) - tenant = r.result['access']['token'].get('tenant') - if tenant is not None: - # validate tenant - self.assertIsNotNone(tenant.get('id')) - self.assertIsNotNone(tenant.get('name')) - - # validate user - self.assertIsNotNone(r.result['access']['user'].get('id')) - self.assertIsNotNone(r.result['access']['user'].get('name')) - - if require_service_catalog: - # roles are only provided with a service catalog - roles = r.result['access']['user'].get('roles') - self.assertNotEmpty(roles) - for role in roles: - self.assertIsNotNone(role.get('name')) - - serviceCatalog = r.result['access'].get('serviceCatalog') - # validate service catalog - if require_service_catalog: - self.assertIsNotNone(serviceCatalog) - if serviceCatalog is not None: - self.assertIsInstance(serviceCatalog, list) - if require_service_catalog: - self.assertNotEmpty(serviceCatalog) - for service in r.result['access']['serviceCatalog']: - # validate service - self.assertIsNotNone(service.get('name')) - self.assertIsNotNone(service.get('type')) - - # services contain at least one endpoint - self.assertIsNotNone(service.get('endpoints')) - self.assertNotEmpty(service['endpoints']) - for endpoint in service['endpoints']: - # validate service endpoint - self.assertIsNotNone(endpoint.get('publicURL')) - - def assertValidTenantListResponse(self, r): - self.assertIsNotNone(r.result.get('tenants')) - 
self.assertNotEmpty(r.result['tenants']) - for tenant in r.result['tenants']: - self.assertValidTenant(tenant) - self.assertIsNotNone(tenant.get('enabled')) - self.assertIn(tenant.get('enabled'), [True, False]) - - def assertValidUserResponse(self, r): - self.assertIsNotNone(r.result.get('user')) - self.assertValidUser(r.result['user']) - - def assertValidTenantResponse(self, r): - self.assertIsNotNone(r.result.get('tenant')) - self.assertValidTenant(r.result['tenant']) - - def assertValidRoleListResponse(self, r): - self.assertIsNotNone(r.result.get('roles')) - self.assertNotEmpty(r.result['roles']) - for role in r.result['roles']: - self.assertValidRole(role) - - def assertValidVersion(self, version): - super(V2TestCase, self).assertValidVersion(version) - - self.assertIsNotNone(version.get('links')) - self.assertNotEmpty(version.get('links')) - for link in version.get('links'): - self.assertIsNotNone(link.get('rel')) - self.assertIsNotNone(link.get('href')) - - self.assertIsNotNone(version.get('media-types')) - self.assertNotEmpty(version.get('media-types')) - for media in version.get('media-types'): - self.assertIsNotNone(media.get('base')) - self.assertIsNotNone(media.get('type')) - - def assertValidMultipleChoiceResponse(self, r): - self.assertIsNotNone(r.result.get('versions')) - self.assertIsNotNone(r.result['versions'].get('values')) - self.assertNotEmpty(r.result['versions']['values']) - for version in r.result['versions']['values']: - self.assertValidVersion(version) - - def assertValidVersionResponse(self, r): - self.assertValidVersion(r.result.get('version')) - - def assertValidEndpointListResponse(self, r): - self.assertIsNotNone(r.result.get('endpoints')) - self.assertNotEmpty(r.result['endpoints']) - for endpoint in r.result['endpoints']: - self.assertIsNotNone(endpoint.get('id')) - self.assertIsNotNone(endpoint.get('name')) - self.assertIsNotNone(endpoint.get('type')) - self.assertIsNotNone(endpoint.get('publicURL')) - 
self.assertIsNotNone(endpoint.get('internalURL')) - self.assertIsNotNone(endpoint.get('adminURL')) - - def get_user_from_response(self, r): - return r.result.get('user') - - def get_user_attribute_from_response(self, r, attribute_name): - return r.result['user'][attribute_name] - - def test_service_crud_requires_auth(self): - """Service CRUD should return unauthorized without an X-Auth-Token.""" - # values here don't matter because it will be unauthorized before - # they're checked (bug 1006822). - service_path = '/v2.0/OS-KSADM/services/%s' % uuid.uuid4().hex - service_body = { - 'OS-KSADM:service': { - 'name': uuid.uuid4().hex, - 'type': uuid.uuid4().hex, - }, - } - - r = self.admin_request(method='GET', - path='/v2.0/OS-KSADM/services', - expected_status=http_client.UNAUTHORIZED) - self.assertValidErrorResponse(r) - - r = self.admin_request(method='POST', - path='/v2.0/OS-KSADM/services', - body=service_body, - expected_status=http_client.UNAUTHORIZED) - self.assertValidErrorResponse(r) - - r = self.admin_request(method='GET', - path=service_path, - expected_status=http_client.UNAUTHORIZED) - self.assertValidErrorResponse(r) - - r = self.admin_request(method='DELETE', - path=service_path, - expected_status=http_client.UNAUTHORIZED) - self.assertValidErrorResponse(r) - - def test_user_role_list_requires_auth(self): - """User role list return unauthorized without an X-Auth-Token.""" - # values here don't matter because it will be unauthorized before - # they're checked (bug 1006815). 
- path = '/v2.0/tenants/%(tenant_id)s/users/%(user_id)s/roles' % { - 'tenant_id': uuid.uuid4().hex, - 'user_id': uuid.uuid4().hex, - } - - r = self.admin_request(path=path, - expected_status=http_client.UNAUTHORIZED) - self.assertValidErrorResponse(r) - - def test_fetch_revocation_list_nonadmin_fails(self): - self.admin_request( - method='GET', - path='/v2.0/tokens/revoked', - expected_status=http_client.UNAUTHORIZED) - - def test_fetch_revocation_list_admin_200(self): - token = self.get_scoped_token() - r = self.admin_request( - method='GET', - path='/v2.0/tokens/revoked', - token=token, - expected_status=http_client.OK) - self.assertValidRevocationListResponse(r) - - def assertValidRevocationListResponse(self, response): - self.assertIsNotNone(response.result['signed']) - - def _fetch_parse_revocation_list(self): - - token1 = self.get_scoped_token() - - # TODO(morganfainberg): Because this is making a restful call to the - # app a change to UTCNOW via mock.patch will not affect the returned - # token. The only surefire way to ensure there is not a transient bug - # based upon when the second token is issued is with a sleep. This - # issue all stems from the limited resolution (no microseconds) on the - # expiry time of tokens and the way revocation events utilizes token - # expiry to revoke individual tokens. This is a stop-gap until all - # associated issues with resolution on expiration and revocation events - # are resolved. 
- time.sleep(1) - - token2 = self.get_scoped_token() - - self.admin_request(method='DELETE', - path='/v2.0/tokens/%s' % token2, - token=token1) - - r = self.admin_request( - method='GET', - path='/v2.0/tokens/revoked', - token=token1, - expected_status=http_client.OK) - signed_text = r.result['signed'] - - data_json = cms.cms_verify(signed_text, CONF.signing.certfile, - CONF.signing.ca_certs) - - data = json.loads(data_json) - - return (data, token2) - - def test_fetch_revocation_list_md5(self): - """Hash for tokens in revocation list and server config should match. - - If the server is configured for md5, then the revocation list has - tokens hashed with MD5. - """ - # The default hash algorithm is md5. - hash_algorithm = 'md5' - - (data, token) = self._fetch_parse_revocation_list() - token_hash = cms.cms_hash_token(token, mode=hash_algorithm) - self.assertThat(token_hash, matchers.Equals(data['revoked'][0]['id'])) - - def test_fetch_revocation_list_sha256(self): - """Hash for tokens in revocation list and server config should match. - - If the server is configured for sha256, then the revocation list has - tokens hashed with SHA256. 
- """ - hash_algorithm = 'sha256' - self.config_fixture.config(group='token', - hash_algorithm=hash_algorithm) - - (data, token) = self._fetch_parse_revocation_list() - token_hash = cms.cms_hash_token(token, mode=hash_algorithm) - self.assertThat(token_hash, matchers.Equals(data['revoked'][0]['id'])) - - def test_create_update_user_invalid_enabled_type(self): - # Enforce usage of boolean for 'enabled' field - token = self.get_scoped_token() - - # Test CREATE request - r = self.admin_request( - method='POST', - path='/v2.0/users', - body={ - 'user': { - 'name': uuid.uuid4().hex, - 'password': uuid.uuid4().hex, - # In JSON, "true|false" are not boolean - 'enabled': "true", - }, - }, - token=token, - expected_status=http_client.BAD_REQUEST) - self.assertValidErrorResponse(r) - - # Test UPDATE request - r = self.admin_request( - method='PUT', - path='/v2.0/users/%(user_id)s' % { - 'user_id': self.user_foo['id'], - }, - body={ - 'user': { - # In JSON, "true|false" are not boolean - 'enabled': "true", - }, - }, - token=token, - expected_status=http_client.BAD_REQUEST) - self.assertValidErrorResponse(r) - - def test_authenticating_a_user_with_an_OSKSADM_password(self): - token = self.get_scoped_token() - - username = uuid.uuid4().hex - password = uuid.uuid4().hex - - # create the user - r = self.admin_request( - method='POST', - path='/v2.0/users', - body={ - 'user': { - 'name': username, - 'OS-KSADM:password': password, - 'enabled': True, - }, - }, - token=token) - - # successfully authenticate - self.public_request( - method='POST', - path='/v2.0/tokens', - body={ - 'auth': { - 'passwordCredentials': { - 'username': username, - 'password': password, - }, - }, - }, - expected_status=http_client.OK) - - # ensure password doesn't leak - user_id = r.result['user']['id'] - r = self.admin_request( - method='GET', - path='/v2.0/users/%s' % user_id, - token=token, - expected_status=http_client.OK) - self.assertNotIn('OS-KSADM:password', r.result['user']) - - def 
test_updating_a_user_with_an_OSKSADM_password(self): - token = self.get_scoped_token() - - user_id = self.user_foo['id'] - password = uuid.uuid4().hex - - # update the user - self.admin_request( - method='PUT', - path='/v2.0/users/%s/OS-KSADM/password' % user_id, - body={ - 'user': { - 'password': password, - }, - }, - token=token, - expected_status=http_client.OK) - - # successfully authenticate - self.public_request( - method='POST', - path='/v2.0/tokens', - body={ - 'auth': { - 'passwordCredentials': { - 'username': self.user_foo['name'], - 'password': password, - }, - }, - }, - expected_status=http_client.OK) - - -class RevokeApiTestCase(V2TestCase): - def config_overrides(self): - super(RevokeApiTestCase, self).config_overrides() - self.config_fixture.config( - group='token', - provider='pki', - revoke_by_id=False) - - def test_fetch_revocation_list_admin_200(self): - self.skipTest('Revoke API disables revocation_list.') - - def test_fetch_revocation_list_md5(self): - self.skipTest('Revoke API disables revocation_list.') - - def test_fetch_revocation_list_sha256(self): - self.skipTest('Revoke API disables revocation_list.') - - -class TestFernetTokenProviderV2(RestfulTestCase): - - def setUp(self): - super(TestFernetTokenProviderV2, self).setUp() - self.useFixture(ksfixtures.KeyRepository(self.config_fixture)) - - # Add catalog data - self.region = unit.new_region_ref() - self.region_id = self.region['id'] - self.catalog_api.create_region(self.region) - - self.service = unit.new_service_ref() - self.service_id = self.service['id'] - self.catalog_api.create_service(self.service_id, self.service) - - self.endpoint = unit.new_endpoint_ref(service_id=self.service_id, - interface='public', - region_id=self.region_id) - self.endpoint_id = self.endpoint['id'] - self.catalog_api.create_endpoint(self.endpoint_id, self.endpoint) - - def assertValidUnscopedTokenResponse(self, r): - v2.unscoped_validator.validate(r.json['access']) - - def 
assertValidScopedTokenResponse(self, r): - v2.scoped_validator.validate(r.json['access']) - - # Used by RestfulTestCase - def _get_token_id(self, r): - return r.result['access']['token']['id'] - - def new_project_ref(self): - return {'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex, - 'description': uuid.uuid4().hex, - 'domain_id': 'default', - 'enabled': True} - - def config_overrides(self): - super(TestFernetTokenProviderV2, self).config_overrides() - self.config_fixture.config(group='token', provider='fernet') - - def test_authenticate_unscoped_token(self): - unscoped_token = self.get_unscoped_token() - # Fernet token must be of length 255 per usability requirements - self.assertLess(len(unscoped_token), 255) - - def test_validate_unscoped_token(self): - # Grab an admin token to validate with - project_ref = self.new_project_ref() - self.resource_api.create_project(project_ref['id'], project_ref) - self.assignment_api.add_role_to_user_and_project(self.user_foo['id'], - project_ref['id'], - self.role_admin['id']) - admin_token = self.get_scoped_token(tenant_id=project_ref['id']) - unscoped_token = self.get_unscoped_token() - path = ('/v2.0/tokens/%s' % unscoped_token) - resp = self.admin_request( - method='GET', - path=path, - token=admin_token, - expected_status=http_client.OK) - self.assertValidUnscopedTokenResponse(resp) - - def test_authenticate_scoped_token(self): - project_ref = self.new_project_ref() - self.resource_api.create_project(project_ref['id'], project_ref) - self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], project_ref['id'], self.role_service['id']) - token = self.get_scoped_token(tenant_id=project_ref['id']) - # Fernet token must be of length 255 per usability requirements - self.assertLess(len(token), 255) - - def test_validate_scoped_token(self): - project_ref = self.new_project_ref() - self.resource_api.create_project(project_ref['id'], project_ref) - 
self.assignment_api.add_role_to_user_and_project(self.user_foo['id'], - project_ref['id'], - self.role_admin['id']) - project2_ref = self.new_project_ref() - self.resource_api.create_project(project2_ref['id'], project2_ref) - self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], project2_ref['id'], self.role_member['id']) - admin_token = self.get_scoped_token(tenant_id=project_ref['id']) - member_token = self.get_scoped_token(tenant_id=project2_ref['id']) - path = ('/v2.0/tokens/%s?belongsTo=%s' % (member_token, - project2_ref['id'])) - # Validate token belongs to project - resp = self.admin_request( - method='GET', - path=path, - token=admin_token, - expected_status=http_client.OK) - self.assertValidScopedTokenResponse(resp) - - def test_token_authentication_and_validation(self): - """Test token authentication for Fernet token provider. - - Verify that token authentication returns validate response code and - valid token belongs to project. - """ - project_ref = self.new_project_ref() - self.resource_api.create_project(project_ref['id'], project_ref) - unscoped_token = self.get_unscoped_token() - self.assignment_api.add_role_to_user_and_project(self.user_foo['id'], - project_ref['id'], - self.role_admin['id']) - r = self.public_request( - method='POST', - path='/v2.0/tokens', - body={ - 'auth': { - 'tenantName': project_ref['name'], - 'token': { - 'id': unscoped_token.encode('ascii') - } - } - }, - expected_status=http_client.OK) - - token_id = self._get_token_id(r) - path = ('/v2.0/tokens/%s?belongsTo=%s' % (token_id, project_ref['id'])) - # Validate token belongs to project - resp = self.admin_request( - method='GET', - path=path, - token=self.get_admin_token(), - expected_status=http_client.OK) - self.assertValidScopedTokenResponse(resp) - - def test_rescoped_tokens_maintain_original_expiration(self): - project_ref = self.new_project_ref() - self.resource_api.create_project(project_ref['id'], project_ref) - 
self.assignment_api.add_role_to_user_and_project(self.user_foo['id'], - project_ref['id'], - self.role_admin['id']) - resp = self.public_request( - method='POST', - path='/v2.0/tokens', - body={ - 'auth': { - 'tenantName': project_ref['name'], - 'passwordCredentials': { - 'username': self.user_foo['name'], - 'password': self.user_foo['password'] - } - } - }, - # NOTE(lbragstad): This test may need to be refactored if Keystone - # decides to disallow rescoping using a scoped token. - expected_status=http_client.OK) - original_token = resp.result['access']['token']['id'] - original_expiration = resp.result['access']['token']['expires'] - - resp = self.public_request( - method='POST', - path='/v2.0/tokens', - body={ - 'auth': { - 'tenantName': project_ref['name'], - 'token': { - 'id': original_token, - } - } - }, - expected_status=http_client.OK) - rescoped_token = resp.result['access']['token']['id'] - rescoped_expiration = resp.result['access']['token']['expires'] - self.assertNotEqual(original_token, rescoped_token) - self.assertEqual(original_expiration, rescoped_expiration) - self.assertValidScopedTokenResponse(resp) diff --git a/keystone-moon/keystone/tests/unit/test_v2_controller.py b/keystone-moon/keystone/tests/unit/test_v2_controller.py deleted file mode 100644 index 6cf8bc53..00000000 --- a/keystone-moon/keystone/tests/unit/test_v2_controller.py +++ /dev/null @@ -1,186 +0,0 @@ -# Copyright 2014 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- - -import copy -import uuid - -from testtools import matchers - -from keystone.assignment import controllers as assignment_controllers -from keystone import exception -from keystone.resource import controllers as resource_controllers -from keystone.tests import unit -from keystone.tests.unit import default_fixtures -from keystone.tests.unit.ksfixtures import database - - -_ADMIN_CONTEXT = {'is_admin': True, 'query_string': {}} - - -class TenantTestCase(unit.TestCase): - """Tests for the V2 Tenant controller. - - These tests exercise :class:`keystone.assignment.controllers.Tenant`. - - """ - - def setUp(self): - super(TenantTestCase, self).setUp() - self.useFixture(database.Database()) - self.load_backends() - self.load_fixtures(default_fixtures) - self.tenant_controller = resource_controllers.Tenant() - self.assignment_tenant_controller = ( - assignment_controllers.TenantAssignment()) - self.assignment_role_controller = ( - assignment_controllers.RoleAssignmentV2()) - - def test_get_project_users_no_user(self): - """get_project_users when user doesn't exist. - - When a user that's not known to `identity` has a role on a project, - then `get_project_users` just skips that user. - - """ - project_id = self.tenant_bar['id'] - - orig_project_users = ( - self.assignment_tenant_controller.get_project_users(_ADMIN_CONTEXT, - project_id)) - - # Assign a role to a user that doesn't exist to the `bar` project. - - user_id = uuid.uuid4().hex - self.assignment_role_controller.add_role_to_user( - _ADMIN_CONTEXT, user_id, self.role_other['id'], project_id) - - new_project_users = ( - self.assignment_tenant_controller.get_project_users(_ADMIN_CONTEXT, - project_id)) - - # The new user isn't included in the result, so no change. 
- # asserting that the expected values appear in the list, - # without asserting the order of the results - self.assertEqual(sorted(orig_project_users), sorted(new_project_users)) - - def test_list_projects_default_domain(self): - """Test that list projects only returns those in the default domain.""" - domain = unit.new_domain_ref() - self.resource_api.create_domain(domain['id'], domain) - project1 = unit.new_project_ref(domain_id=domain['id']) - self.resource_api.create_project(project1['id'], project1) - # Check the real total number of projects, we should have the: - # - tenants in the default fixtures - # - the project representing the default domain - # - the project representing the domain we created above - # - the project we created above - refs = self.resource_api.list_projects() - self.assertThat( - refs, matchers.HasLength(len(default_fixtures.TENANTS) + 3)) - - # Now list all projects using the v2 API - we should only get - # back those in the default features, since only those are in the - # default domain. 
- refs = self.tenant_controller.get_all_projects(_ADMIN_CONTEXT) - self.assertEqual(len(default_fixtures.TENANTS), len(refs['tenants'])) - for tenant in default_fixtures.TENANTS: - tenant_copy = tenant.copy() - tenant_copy.pop('domain_id') - tenant_copy.pop('parent_id') - tenant_copy.pop('is_domain') - self.assertIn(tenant_copy, refs['tenants']) - - def _create_is_domain_project(self): - project = unit.new_project_ref(is_domain=True) - project_ref = self.resource_api.create_project(project['id'], project) - return self.tenant_controller.v3_to_v2_project(project_ref) - - def test_get_is_domain_project_not_found(self): - """Test that get project does not return is_domain projects.""" - project = self._create_is_domain_project() - - context = copy.deepcopy(_ADMIN_CONTEXT) - context['query_string']['name'] = project['name'] - - self.assertRaises( - exception.ProjectNotFound, - self.tenant_controller.get_all_projects, - context) - - context = copy.deepcopy(_ADMIN_CONTEXT) - context['query_string']['name'] = project['id'] - - self.assertRaises( - exception.ProjectNotFound, - self.tenant_controller.get_all_projects, - context) - - def test_create_is_domain_project_fails(self): - """Test that the creation of a project acting as a domain fails.""" - project = {'name': uuid.uuid4().hex, 'domain_id': 'default', - 'is_domain': True} - - self.assertRaises( - exception.ValidationError, - self.tenant_controller.create_project, - _ADMIN_CONTEXT, - project) - - def test_create_project_passing_is_domain_false_fails(self): - """Test that passing is_domain=False is not allowed.""" - project = {'name': uuid.uuid4().hex, 'domain_id': 'default', - 'is_domain': False} - - self.assertRaises( - exception.ValidationError, - self.tenant_controller.create_project, - _ADMIN_CONTEXT, - project) - - def test_update_is_domain_project_not_found(self): - """Test that update is_domain project is not allowed in v2.""" - project = self._create_is_domain_project() - - project['name'] = uuid.uuid4().hex 
- self.assertRaises( - exception.ProjectNotFound, - self.tenant_controller.update_project, - _ADMIN_CONTEXT, - project['id'], - project) - - def test_delete_is_domain_project_not_found(self): - """Test that delete is_domain project is not allowed in v2.""" - project = self._create_is_domain_project() - - self.assertRaises( - exception.ProjectNotFound, - self.tenant_controller.delete_project, - _ADMIN_CONTEXT, - project['id']) - - def test_list_is_domain_project_not_found(self): - """Test v2 get_all_projects having projects that act as a domain. - - In v2 no project with the is_domain flag enabled should be returned. - """ - project1 = self._create_is_domain_project() - project2 = self._create_is_domain_project() - - refs = self.tenant_controller.get_all_projects(_ADMIN_CONTEXT) - projects = refs.get('tenants') - - self.assertNotIn(project1, projects) - self.assertNotIn(project2, projects) diff --git a/keystone-moon/keystone/tests/unit/test_v2_keystoneclient.py b/keystone-moon/keystone/tests/unit/test_v2_keystoneclient.py deleted file mode 100644 index 2a3fad86..00000000 --- a/keystone-moon/keystone/tests/unit/test_v2_keystoneclient.py +++ /dev/null @@ -1,1376 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import datetime -import uuid - -from keystoneclient.contrib.ec2 import utils as ec2_utils -from keystoneclient import exceptions as client_exceptions -from keystoneclient.v2_0 import client as ks_client -import mock -from oslo_config import cfg -from oslo_serialization import jsonutils -from oslo_utils import timeutils -from six.moves import http_client -from six.moves import range -import webob - -from keystone.tests import unit -from keystone.tests.unit import default_fixtures -from keystone.tests.unit.ksfixtures import appserver -from keystone.tests.unit.ksfixtures import database - - -CONF = cfg.CONF -DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id - - -class ClientDrivenTestCase(unit.TestCase): - - def config_files(self): - config_files = super(ClientDrivenTestCase, self).config_files() - config_files.append(unit.dirs.tests_conf('backend_sql.conf')) - return config_files - - def setUp(self): - super(ClientDrivenTestCase, self).setUp() - - # FIXME(morganfainberg): Since we are running tests through the - # controllers and some internal api drivers are SQL-only, the correct - # approach is to ensure we have the correct backing store. The - # credential api makes some very SQL specific assumptions that should - # be addressed allowing for non-SQL based testing to occur. 
- self.useFixture(database.Database()) - self.load_backends() - - self.load_fixtures(default_fixtures) - - # TODO(termie): add an admin user to the fixtures and use that user - # override the fixtures, for now - self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], - self.tenant_bar['id'], - self.role_admin['id']) - - conf = self._paste_config('keystone') - fixture = self.useFixture(appserver.AppServer(conf, appserver.MAIN)) - self.public_server = fixture.server - fixture = self.useFixture(appserver.AppServer(conf, appserver.ADMIN)) - self.admin_server = fixture.server - - self.default_client = self.get_client() - - self.addCleanup(self.cleanup_instance('public_server', 'admin_server', - 'default_client')) - - def _public_url(self): - public_port = self.public_server.socket_info['socket'][1] - return "http://localhost:%s/v2.0" % public_port - - def _admin_url(self): - admin_port = self.admin_server.socket_info['socket'][1] - return "http://localhost:%s/v2.0" % admin_port - - def _client(self, admin=False, **kwargs): - url = self._admin_url() if admin else self._public_url() - kc = ks_client.Client(endpoint=url, - auth_url=self._public_url(), - **kwargs) - kc.authenticate() - # have to manually overwrite the management url after authentication - kc.management_url = url - return kc - - def get_client(self, user_ref=None, tenant_ref=None, admin=False): - if user_ref is None: - user_ref = self.user_foo - if tenant_ref is None: - for user in default_fixtures.USERS: - # The fixture ID is no longer used as the ID in the database - # The fixture ID, however, is still used as part of the - # attribute name when storing the created object on the test - # case. This means that we need to use the fixture ID below to - # find the actial object so that we can get the ID as stored - # in the database to compare against. 
- if (getattr(self, 'user_%s' % user['id'])['id'] == - user_ref['id']): - tenant_id = user['tenants'][0] - else: - tenant_id = tenant_ref['id'] - - return self._client(username=user_ref['name'], - password=user_ref['password'], - tenant_id=tenant_id, - admin=admin) - - def test_authenticate_tenant_name_and_tenants(self): - client = self.get_client() - tenants = client.tenants.list() - self.assertEqual(self.tenant_bar['id'], tenants[0].id) - - def test_authenticate_tenant_id_and_tenants(self): - client = self._client(username=self.user_foo['name'], - password=self.user_foo['password'], - tenant_id='bar') - tenants = client.tenants.list() - self.assertEqual(self.tenant_bar['id'], tenants[0].id) - - def test_authenticate_invalid_tenant_id(self): - self.assertRaises(client_exceptions.Unauthorized, - self._client, - username=self.user_foo['name'], - password=self.user_foo['password'], - tenant_id='baz') - - def test_authenticate_token_no_tenant(self): - client = self.get_client() - token = client.auth_token - token_client = self._client(token=token) - tenants = token_client.tenants.list() - self.assertEqual(self.tenant_bar['id'], tenants[0].id) - - def test_authenticate_token_tenant_id(self): - client = self.get_client() - token = client.auth_token - token_client = self._client(token=token, tenant_id='bar') - tenants = token_client.tenants.list() - self.assertEqual(self.tenant_bar['id'], tenants[0].id) - - def test_authenticate_token_invalid_tenant_id(self): - client = self.get_client() - token = client.auth_token - self.assertRaises(client_exceptions.Unauthorized, - self._client, token=token, - tenant_id=uuid.uuid4().hex) - - def test_authenticate_token_invalid_tenant_name(self): - client = self.get_client() - token = client.auth_token - self.assertRaises(client_exceptions.Unauthorized, - self._client, token=token, - tenant_name=uuid.uuid4().hex) - - def test_authenticate_token_tenant_name(self): - client = self.get_client() - token = client.auth_token - token_client = 
self._client(token=token, tenant_name='BAR') - tenants = token_client.tenants.list() - self.assertEqual(self.tenant_bar['id'], tenants[0].id) - self.assertEqual(self.tenant_bar['id'], tenants[0].id) - - def test_authenticate_and_delete_token(self): - client = self.get_client(admin=True) - token = client.auth_token - token_client = self._client(token=token) - tenants = token_client.tenants.list() - self.assertEqual(self.tenant_bar['id'], tenants[0].id) - - client.tokens.delete(token_client.auth_token) - - self.assertRaises(client_exceptions.Unauthorized, - token_client.tenants.list) - - def test_authenticate_no_password(self): - user_ref = self.user_foo.copy() - user_ref['password'] = None - self.assertRaises(client_exceptions.AuthorizationFailure, - self.get_client, - user_ref) - - def test_authenticate_no_username(self): - user_ref = self.user_foo.copy() - user_ref['name'] = None - self.assertRaises(client_exceptions.AuthorizationFailure, - self.get_client, - user_ref) - - def test_authenticate_disabled_tenant(self): - admin_client = self.get_client(admin=True) - - tenant = { - 'name': uuid.uuid4().hex, - 'description': uuid.uuid4().hex, - 'enabled': False, - } - tenant_ref = admin_client.tenants.create( - tenant_name=tenant['name'], - description=tenant['description'], - enabled=tenant['enabled']) - tenant['id'] = tenant_ref.id - - user = { - 'name': uuid.uuid4().hex, - 'password': uuid.uuid4().hex, - 'email': uuid.uuid4().hex, - 'tenant_id': tenant['id'], - } - user_ref = admin_client.users.create( - name=user['name'], - password=user['password'], - email=user['email'], - tenant_id=user['tenant_id']) - user['id'] = user_ref.id - - # password authentication - self.assertRaises( - client_exceptions.Unauthorized, - self._client, - username=user['name'], - password=user['password'], - tenant_id=tenant['id']) - - # token authentication - client = self._client( - username=user['name'], - password=user['password']) - self.assertRaises( - client_exceptions.Unauthorized, 
- self._client, - token=client.auth_token, - tenant_id=tenant['id']) - - # FIXME(ja): this test should require the "keystone:admin" roled - # (probably the role set via --keystone_admin_role flag) - # FIXME(ja): add a test that admin endpoint is only sent to admin user - # FIXME(ja): add a test that admin endpoint returns unauthorized if not - # admin - def test_tenant_create_update_and_delete(self): - tenant_name = 'original_tenant' - tenant_description = 'My original tenant!' - tenant_enabled = True - client = self.get_client(admin=True) - - # create, get, and list a tenant - tenant = client.tenants.create(tenant_name=tenant_name, - description=tenant_description, - enabled=tenant_enabled) - self.assertEqual(tenant_name, tenant.name) - self.assertEqual(tenant_description, tenant.description) - self.assertEqual(tenant_enabled, tenant.enabled) - - tenant = client.tenants.get(tenant_id=tenant.id) - self.assertEqual(tenant_name, tenant.name) - self.assertEqual(tenant_description, tenant.description) - self.assertEqual(tenant_enabled, tenant.enabled) - - tenant = [t for t in client.tenants.list() if t.id == tenant.id].pop() - self.assertEqual(tenant_name, tenant.name) - self.assertEqual(tenant_description, tenant.description) - self.assertEqual(tenant_enabled, tenant.enabled) - - # update, get, and list a tenant - tenant_name = 'updated_tenant' - tenant_description = 'Updated tenant!' 
- tenant_enabled = False - tenant = client.tenants.update(tenant_id=tenant.id, - tenant_name=tenant_name, - enabled=tenant_enabled, - description=tenant_description) - self.assertEqual(tenant_name, tenant.name) - self.assertEqual(tenant_description, tenant.description) - self.assertEqual(tenant_enabled, tenant.enabled) - - tenant = client.tenants.get(tenant_id=tenant.id) - self.assertEqual(tenant_name, tenant.name) - self.assertEqual(tenant_description, tenant.description) - self.assertEqual(tenant_enabled, tenant.enabled) - - tenant = [t for t in client.tenants.list() if t.id == tenant.id].pop() - self.assertEqual(tenant_name, tenant.name) - self.assertEqual(tenant_description, tenant.description) - self.assertEqual(tenant_enabled, tenant.enabled) - - # delete, get, and list a tenant - client.tenants.delete(tenant=tenant.id) - self.assertRaises(client_exceptions.NotFound, client.tenants.get, - tenant.id) - self.assertFalse([t for t in client.tenants.list() - if t.id == tenant.id]) - - def test_tenant_create_update_and_delete_unicode(self): - tenant_name = u'original \u540d\u5b57' - tenant_description = 'My original tenant!' 
- tenant_enabled = True - client = self.get_client(admin=True) - - # create, get, and list a tenant - tenant = client.tenants.create(tenant_name, - description=tenant_description, - enabled=tenant_enabled) - self.assertEqual(tenant_name, tenant.name) - self.assertEqual(tenant_description, tenant.description) - self.assertIs(tenant.enabled, tenant_enabled) - - tenant = client.tenants.get(tenant.id) - self.assertEqual(tenant_name, tenant.name) - self.assertEqual(tenant_description, tenant.description) - self.assertIs(tenant.enabled, tenant_enabled) - - # multiple tenants exist due to fixtures, so find the one we're testing - tenant = [t for t in client.tenants.list() if t.id == tenant.id].pop() - self.assertEqual(tenant_name, tenant.name) - self.assertEqual(tenant_description, tenant.description) - self.assertIs(tenant.enabled, tenant_enabled) - - # update, get, and list a tenant - tenant_name = u'updated \u540d\u5b57' - tenant_description = 'Updated tenant!' - tenant_enabled = False - tenant = client.tenants.update(tenant.id, - tenant_name=tenant_name, - enabled=tenant_enabled, - description=tenant_description) - self.assertEqual(tenant_name, tenant.name) - self.assertEqual(tenant_description, tenant.description) - self.assertIs(tenant.enabled, tenant_enabled) - - tenant = client.tenants.get(tenant.id) - self.assertEqual(tenant_name, tenant.name) - self.assertEqual(tenant_description, tenant.description) - self.assertIs(tenant.enabled, tenant_enabled) - - tenant = [t for t in client.tenants.list() if t.id == tenant.id].pop() - self.assertEqual(tenant_name, tenant.name) - self.assertEqual(tenant_description, tenant.description) - self.assertIs(tenant.enabled, tenant_enabled) - - # delete, get, and list a tenant - client.tenants.delete(tenant.id) - self.assertRaises(client_exceptions.NotFound, client.tenants.get, - tenant.id) - self.assertFalse([t for t in client.tenants.list() - if t.id == tenant.id]) - - def test_tenant_create_no_name(self): - client = 
self.get_client(admin=True) - self.assertRaises(client_exceptions.BadRequest, - client.tenants.create, - tenant_name="") - - def test_tenant_delete_404(self): - client = self.get_client(admin=True) - self.assertRaises(client_exceptions.NotFound, - client.tenants.delete, - tenant=uuid.uuid4().hex) - - def test_tenant_get_404(self): - client = self.get_client(admin=True) - self.assertRaises(client_exceptions.NotFound, - client.tenants.get, - tenant_id=uuid.uuid4().hex) - - def test_tenant_update_404(self): - client = self.get_client(admin=True) - self.assertRaises(client_exceptions.NotFound, - client.tenants.update, - tenant_id=uuid.uuid4().hex) - - def test_tenant_list(self): - client = self.get_client() - tenants = client.tenants.list() - self.assertEqual(1, len(tenants)) - - # Admin endpoint should return *all* tenants - client = self.get_client(admin=True) - tenants = client.tenants.list() - self.assertEqual(len(default_fixtures.TENANTS), len(tenants)) - - def test_invalid_password(self): - good_client = self._client(username=self.user_foo['name'], - password=self.user_foo['password']) - good_client.tenants.list() - - self.assertRaises(client_exceptions.Unauthorized, - self._client, - username=self.user_foo['name'], - password=uuid.uuid4().hex) - - def test_invalid_user_and_password(self): - self.assertRaises(client_exceptions.Unauthorized, - self._client, - username=uuid.uuid4().hex, - password=uuid.uuid4().hex) - - def test_change_password_invalidates_token(self): - admin_client = self.get_client(admin=True) - - username = uuid.uuid4().hex - password = uuid.uuid4().hex - user = admin_client.users.create(name=username, password=password, - email=uuid.uuid4().hex) - - # auth as user should work before a password change - client = self._client(username=username, password=password) - - # auth as user with a token should work before a password change - self._client(token=client.auth_token) - - # administrative password reset - admin_client.users.update_password( - 
user=user.id, - password=uuid.uuid4().hex) - - # auth as user with original password should not work after change - self.assertRaises(client_exceptions.Unauthorized, - self._client, - username=username, - password=password) - - # authenticate with an old token should not work after change - self.assertRaises(client_exceptions.Unauthorized, - self._client, - token=client.auth_token) - - def test_user_change_own_password_invalidates_token(self): - # bootstrap a user as admin - client = self.get_client(admin=True) - username = uuid.uuid4().hex - password = uuid.uuid4().hex - client.users.create(name=username, password=password, - email=uuid.uuid4().hex) - - # auth as user should work before a password change - client = self._client(username=username, password=password) - - # auth as user with a token should work before a password change - self._client(token=client.auth_token) - - # change the user's own password - # TODO(dolphm): This should NOT raise an HTTPError at all, but rather - # this should succeed with a 2xx. This 500 does not prevent the test - # from demonstrating the desired consequences below, though. - self.assertRaises(client_exceptions.HTTPError, - client.users.update_own_password, - password, uuid.uuid4().hex) - - # auth as user with original password should not work after change - self.assertRaises(client_exceptions.Unauthorized, - self._client, - username=username, - password=password) - - # auth as user with an old token should not work after change - self.assertRaises(client_exceptions.Unauthorized, - self._client, - token=client.auth_token) - - def test_disable_tenant_invalidates_token(self): - admin_client = self.get_client(admin=True) - foo_client = self.get_client(self.user_foo) - tenant_bar = admin_client.tenants.get(self.tenant_bar['id']) - - # Disable the tenant. - tenant_bar.update(enabled=False) - - # Test that the token has been removed. 
- self.assertRaises(client_exceptions.Unauthorized, - foo_client.tokens.authenticate, - token=foo_client.auth_token) - - # Test that the user access has been disabled. - self.assertRaises(client_exceptions.Unauthorized, - self.get_client, - self.user_foo) - - def test_delete_tenant_invalidates_token(self): - admin_client = self.get_client(admin=True) - foo_client = self.get_client(self.user_foo) - tenant_bar = admin_client.tenants.get(self.tenant_bar['id']) - - # Delete the tenant. - tenant_bar.delete() - - # Test that the token has been removed. - self.assertRaises(client_exceptions.Unauthorized, - foo_client.tokens.authenticate, - token=foo_client.auth_token) - - # Test that the user access has been disabled. - self.assertRaises(client_exceptions.Unauthorized, - self.get_client, - self.user_foo) - - def test_disable_user_invalidates_token(self): - admin_client = self.get_client(admin=True) - foo_client = self.get_client(self.user_foo) - - admin_client.users.update_enabled(user=self.user_foo['id'], - enabled=False) - - self.assertRaises(client_exceptions.Unauthorized, - foo_client.tokens.authenticate, - token=foo_client.auth_token) - - self.assertRaises(client_exceptions.Unauthorized, - self.get_client, - self.user_foo) - - def test_delete_user_invalidates_token(self): - admin_client = self.get_client(admin=True) - client = self.get_client(admin=False) - - username = uuid.uuid4().hex - password = uuid.uuid4().hex - user_id = admin_client.users.create( - name=username, password=password, email=uuid.uuid4().hex).id - - token_id = client.tokens.authenticate( - username=username, password=password).id - - # token should be usable before the user is deleted - client.tokens.authenticate(token=token_id) - - admin_client.users.delete(user=user_id) - - # authenticate with a token should not work after the user is deleted - self.assertRaises(client_exceptions.Unauthorized, - client.tokens.authenticate, - token=token_id) - - @mock.patch.object(timeutils, 'utcnow') - def 
test_token_expiry_maintained(self, mock_utcnow): - now = datetime.datetime.utcnow() - mock_utcnow.return_value = now - foo_client = self.get_client(self.user_foo) - - orig_token = foo_client.service_catalog.catalog['token'] - mock_utcnow.return_value = now + datetime.timedelta(seconds=1) - reauthenticated_token = foo_client.tokens.authenticate( - token=foo_client.auth_token) - - self.assertCloseEnoughForGovernmentWork( - timeutils.parse_isotime(orig_token['expires']), - timeutils.parse_isotime(reauthenticated_token.expires)) - - def test_user_create_update_delete(self): - test_username = 'new_user' - client = self.get_client(admin=True) - user = client.users.create(name=test_username, - password='password', - email='user1@test.com') - self.assertEqual(test_username, user.name) - - user = client.users.get(user=user.id) - self.assertEqual(test_username, user.name) - - user = client.users.update(user=user, - name=test_username, - email='user2@test.com') - self.assertEqual('user2@test.com', user.email) - - # NOTE(termie): update_enabled doesn't return anything, probably a bug - client.users.update_enabled(user=user, enabled=False) - user = client.users.get(user.id) - self.assertFalse(user.enabled) - - self.assertRaises(client_exceptions.Unauthorized, - self._client, - username=test_username, - password='password') - client.users.update_enabled(user, True) - - user = client.users.update_password(user=user, password='password2') - - self._client(username=test_username, - password='password2') - - user = client.users.update_tenant(user=user, tenant='bar') - # TODO(ja): once keystonelight supports default tenant - # when you login without specifying tenant, the - # token should be scoped to tenant 'bar' - - client.users.delete(user.id) - self.assertRaises(client_exceptions.NotFound, client.users.get, - user.id) - - # Test creating a user with a tenant (auto-add to tenant) - user2 = client.users.create(name=test_username, - password='password', - email='user1@test.com', - 
tenant_id='bar') - self.assertEqual(test_username, user2.name) - - def test_update_default_tenant_to_existing_value(self): - client = self.get_client(admin=True) - - user = client.users.create( - name=uuid.uuid4().hex, - password=uuid.uuid4().hex, - email=uuid.uuid4().hex, - tenant_id=self.tenant_bar['id']) - - # attempting to update the tenant with the existing value should work - user = client.users.update_tenant( - user=user, tenant=self.tenant_bar['id']) - - def test_user_create_no_string_password(self): - client = self.get_client(admin=True) - self.assertRaises(client_exceptions.BadRequest, - client.users.create, - name='test_user', - password=12345, - email=uuid.uuid4().hex) - - def test_user_create_no_name(self): - client = self.get_client(admin=True) - self.assertRaises(client_exceptions.BadRequest, - client.users.create, - name="", - password=uuid.uuid4().hex, - email=uuid.uuid4().hex) - - def test_user_create_404(self): - client = self.get_client(admin=True) - self.assertRaises(client_exceptions.NotFound, - client.users.create, - name=uuid.uuid4().hex, - password=uuid.uuid4().hex, - email=uuid.uuid4().hex, - tenant_id=uuid.uuid4().hex) - - def test_user_get_404(self): - client = self.get_client(admin=True) - self.assertRaises(client_exceptions.NotFound, - client.users.get, - user=uuid.uuid4().hex) - - def test_user_list_404(self): - client = self.get_client(admin=True) - self.assertRaises(client_exceptions.NotFound, - client.users.list, - tenant_id=uuid.uuid4().hex) - - def test_user_update_404(self): - client = self.get_client(admin=True) - self.assertRaises(client_exceptions.NotFound, - client.users.update, - user=uuid.uuid4().hex) - - def test_user_update_tenant(self): - client = self.get_client(admin=True) - tenant_id = uuid.uuid4().hex - user = client.users.update(user=self.user_foo['id'], - tenant_id=tenant_id) - self.assertEqual(tenant_id, user.tenant_id) - - def test_user_update_password_404(self): - client = self.get_client(admin=True) - 
self.assertRaises(client_exceptions.NotFound, - client.users.update_password, - user=uuid.uuid4().hex, - password=uuid.uuid4().hex) - - def test_user_delete_404(self): - client = self.get_client(admin=True) - self.assertRaises(client_exceptions.NotFound, - client.users.delete, - user=uuid.uuid4().hex) - - def test_user_list(self): - client = self.get_client(admin=True) - users = client.users.list() - self.assertTrue(len(users) > 0) - user = users[0] - self.assertRaises(AttributeError, lambda: user.password) - - def test_user_get(self): - client = self.get_client(admin=True) - user = client.users.get(user=self.user_foo['id']) - self.assertRaises(AttributeError, lambda: user.password) - - def test_role_get(self): - client = self.get_client(admin=True) - role = client.roles.get(role=self.role_admin['id']) - self.assertEqual(self.role_admin['id'], role.id) - - def test_role_crud(self): - test_role = 'new_role' - client = self.get_client(admin=True) - role = client.roles.create(name=test_role) - self.assertEqual(test_role, role.name) - - role = client.roles.get(role=role.id) - self.assertEqual(test_role, role.name) - - client.roles.delete(role=role.id) - - self.assertRaises(client_exceptions.NotFound, - client.roles.delete, - role=role.id) - self.assertRaises(client_exceptions.NotFound, - client.roles.get, - role=role.id) - - def test_role_create_no_name(self): - client = self.get_client(admin=True) - self.assertRaises(client_exceptions.BadRequest, - client.roles.create, - name="") - - def test_role_create_member_role(self): - # delete the member role so that we can recreate it - client = self.get_client(admin=True) - client.roles.delete(role=CONF.member_role_id) - - # deleting the member role revokes our token, so re-authenticate - client = self.get_client(admin=True) - - # specify only the role name on creation - role = client.roles.create(name=CONF.member_role_name) - - # the ID should be set as defined in CONF - self.assertEqual(CONF.member_role_id, role.id) - - def 
test_role_get_404(self): - client = self.get_client(admin=True) - self.assertRaises(client_exceptions.NotFound, - client.roles.get, - role=uuid.uuid4().hex) - - def test_role_delete_404(self): - client = self.get_client(admin=True) - self.assertRaises(client_exceptions.NotFound, - client.roles.delete, - role=uuid.uuid4().hex) - - def test_role_list_404(self): - client = self.get_client(admin=True) - self.assertRaises(client_exceptions.NotFound, - client.roles.roles_for_user, - user=uuid.uuid4().hex, - tenant=uuid.uuid4().hex) - self.assertRaises(client_exceptions.NotFound, - client.roles.roles_for_user, - user=self.user_foo['id'], - tenant=uuid.uuid4().hex) - self.assertRaises(client_exceptions.NotFound, - client.roles.roles_for_user, - user=uuid.uuid4().hex, - tenant=self.tenant_bar['id']) - - def test_role_list(self): - client = self.get_client(admin=True) - roles = client.roles.list() - # TODO(devcamcar): This assert should be more specific. - self.assertTrue(len(roles) > 0) - - def test_service_crud(self): - client = self.get_client(admin=True) - - service_name = uuid.uuid4().hex - service_type = uuid.uuid4().hex - service_desc = uuid.uuid4().hex - - # create & read - service = client.services.create(name=service_name, - service_type=service_type, - description=service_desc) - self.assertEqual(service_name, service.name) - self.assertEqual(service_type, service.type) - self.assertEqual(service_desc, service.description) - - service = client.services.get(id=service.id) - self.assertEqual(service_name, service.name) - self.assertEqual(service_type, service.type) - self.assertEqual(service_desc, service.description) - - service = [x for x in client.services.list() if x.id == service.id][0] - self.assertEqual(service_name, service.name) - self.assertEqual(service_type, service.type) - self.assertEqual(service_desc, service.description) - - # update is not supported in API v2... 
- - # delete & read - client.services.delete(id=service.id) - self.assertRaises(client_exceptions.NotFound, - client.services.get, - id=service.id) - services = [x for x in client.services.list() if x.id == service.id] - self.assertEqual(0, len(services)) - - def test_service_delete_404(self): - client = self.get_client(admin=True) - self.assertRaises(client_exceptions.NotFound, - client.services.delete, - id=uuid.uuid4().hex) - - def test_service_get_404(self): - client = self.get_client(admin=True) - self.assertRaises(client_exceptions.NotFound, - client.services.get, - id=uuid.uuid4().hex) - - def test_endpoint_delete_404(self): - client = self.get_client(admin=True) - self.assertRaises(client_exceptions.NotFound, - client.endpoints.delete, - id=uuid.uuid4().hex) - - def test_admin_requires_adminness(self): - # FIXME(ja): this should be Unauthorized - exception = client_exceptions.ClientException - - two = self.get_client(self.user_two, admin=True) # non-admin user - - # USER CRUD - self.assertRaises(exception, - two.users.list) - self.assertRaises(exception, - two.users.get, - user=self.user_two['id']) - self.assertRaises(exception, - two.users.create, - name='oops', - password='password', - email='oops@test.com') - self.assertRaises(exception, - two.users.delete, - user=self.user_foo['id']) - - # TENANT CRUD - self.assertRaises(exception, - two.tenants.list) - self.assertRaises(exception, - two.tenants.get, - tenant_id=self.tenant_bar['id']) - self.assertRaises(exception, - two.tenants.create, - tenant_name='oops', - description="shouldn't work!", - enabled=True) - self.assertRaises(exception, - two.tenants.delete, - tenant=self.tenant_baz['id']) - - # ROLE CRUD - self.assertRaises(exception, - two.roles.get, - role=self.role_admin['id']) - self.assertRaises(exception, - two.roles.list) - self.assertRaises(exception, - two.roles.create, - name='oops') - self.assertRaises(exception, - two.roles.delete, - role=self.role_admin['id']) - - # TODO(ja): MEMBERSHIP 
CRUD - # TODO(ja): determine what else todo - - def test_tenant_add_and_remove_user(self): - client = self.get_client(admin=True) - client.roles.add_user_role(tenant=self.tenant_bar['id'], - user=self.user_two['id'], - role=self.role_other['id']) - user_refs = client.tenants.list_users(tenant=self.tenant_bar['id']) - self.assertIn(self.user_two['id'], [x.id for x in user_refs]) - client.roles.remove_user_role(tenant=self.tenant_bar['id'], - user=self.user_two['id'], - role=self.role_other['id']) - roles = client.roles.roles_for_user(user=self.user_foo['id'], - tenant=self.tenant_bar['id']) - self.assertNotIn(self.role_other['id'], roles) - user_refs = client.tenants.list_users(tenant=self.tenant_bar['id']) - self.assertNotIn(self.user_two['id'], [x.id for x in user_refs]) - - def test_user_role_add_404(self): - client = self.get_client(admin=True) - self.assertRaises(client_exceptions.NotFound, - client.roles.add_user_role, - tenant=uuid.uuid4().hex, - user=self.user_foo['id'], - role=self.role_member['id']) - self.assertRaises(client_exceptions.NotFound, - client.roles.add_user_role, - tenant=self.tenant_baz['id'], - user=self.user_foo['id'], - role=uuid.uuid4().hex) - - def test_user_role_add_no_user(self): - # If add_user_role and user doesn't exist, doesn't fail. 
- client = self.get_client(admin=True) - client.roles.add_user_role(tenant=self.tenant_baz['id'], - user=uuid.uuid4().hex, - role=self.role_member['id']) - - def test_user_role_remove_404(self): - client = self.get_client(admin=True) - self.assertRaises(client_exceptions.NotFound, - client.roles.remove_user_role, - tenant=uuid.uuid4().hex, - user=self.user_foo['id'], - role=self.role_member['id']) - self.assertRaises(client_exceptions.NotFound, - client.roles.remove_user_role, - tenant=self.tenant_baz['id'], - user=uuid.uuid4().hex, - role=self.role_member['id']) - self.assertRaises(client_exceptions.NotFound, - client.roles.remove_user_role, - tenant=self.tenant_baz['id'], - user=self.user_foo['id'], - role=uuid.uuid4().hex) - self.assertRaises(client_exceptions.NotFound, - client.roles.remove_user_role, - tenant=self.tenant_baz['id'], - user=self.user_foo['id'], - role=self.role_member['id']) - - def test_tenant_list_marker(self): - client = self.get_client() - - # Add two arbitrary tenants to user for testing purposes - for i in range(2): - tenant_id = uuid.uuid4().hex - tenant = {'name': 'tenant-%s' % tenant_id, 'id': tenant_id, - 'domain_id': DEFAULT_DOMAIN_ID} - self.resource_api.create_project(tenant_id, tenant) - self.assignment_api.add_user_to_project(tenant_id, - self.user_foo['id']) - - tenants = client.tenants.list() - self.assertEqual(3, len(tenants)) - - tenants_marker = client.tenants.list(marker=tenants[0].id) - self.assertEqual(2, len(tenants_marker)) - self.assertEqual(tenants_marker[0].name, tenants[1].name) - self.assertEqual(tenants_marker[1].name, tenants[2].name) - - def test_tenant_list_marker_not_found(self): - client = self.get_client() - self.assertRaises(client_exceptions.BadRequest, - client.tenants.list, marker=uuid.uuid4().hex) - - def test_tenant_list_limit(self): - client = self.get_client() - - # Add two arbitrary tenants to user for testing purposes - for i in range(2): - tenant_id = uuid.uuid4().hex - tenant = {'name': 
'tenant-%s' % tenant_id, 'id': tenant_id, - 'domain_id': DEFAULT_DOMAIN_ID} - self.resource_api.create_project(tenant_id, tenant) - self.assignment_api.add_user_to_project(tenant_id, - self.user_foo['id']) - - tenants = client.tenants.list() - self.assertEqual(3, len(tenants)) - - tenants_limited = client.tenants.list(limit=2) - self.assertEqual(2, len(tenants_limited)) - self.assertEqual(tenants[0].name, tenants_limited[0].name) - self.assertEqual(tenants[1].name, tenants_limited[1].name) - - def test_tenant_list_limit_bad_value(self): - client = self.get_client() - self.assertRaises(client_exceptions.BadRequest, - client.tenants.list, limit='a') - self.assertRaises(client_exceptions.BadRequest, - client.tenants.list, limit=-1) - - def test_roles_get_by_user(self): - client = self.get_client(admin=True) - roles = client.roles.roles_for_user(user=self.user_foo['id'], - tenant=self.tenant_bar['id']) - self.assertTrue(len(roles) > 0) - - def test_user_can_update_passwd(self): - client = self.get_client(self.user_two) - - token_id = client.auth_token - new_password = uuid.uuid4().hex - - # TODO(derekh): Update to use keystoneclient when available - class FakeResponse(object): - def start_fake_response(self, status, headers): - self.response_status = int(status.split(' ', 1)[0]) - self.response_headers = dict(headers) - responseobject = FakeResponse() - - req = webob.Request.blank( - '/v2.0/OS-KSCRUD/users/%s' % self.user_two['id'], - headers={'X-Auth-Token': token_id}) - req.method = 'PATCH' - req.body = ('{"user":{"password":"%s","original_password":"%s"}}' % - (new_password, self.user_two['password'])) - self.public_server.application(req.environ, - responseobject.start_fake_response) - - self.user_two['password'] = new_password - self.get_client(self.user_two) - - def test_user_cannot_update_other_users_passwd(self): - client = self.get_client(self.user_two) - - token_id = client.auth_token - new_password = uuid.uuid4().hex - - # TODO(derekh): Update to use 
keystoneclient when available - class FakeResponse(object): - def start_fake_response(self, status, headers): - self.response_status = int(status.split(' ', 1)[0]) - self.response_headers = dict(headers) - responseobject = FakeResponse() - - req = webob.Request.blank( - '/v2.0/OS-KSCRUD/users/%s' % self.user_foo['id'], - headers={'X-Auth-Token': token_id}) - req.method = 'PATCH' - req.body = ('{"user":{"password":"%s","original_password":"%s"}}' % - (new_password, self.user_two['password'])) - self.public_server.application(req.environ, - responseobject.start_fake_response) - self.assertEqual(http_client.FORBIDDEN, - responseobject.response_status) - - self.user_two['password'] = new_password - self.assertRaises(client_exceptions.Unauthorized, - self.get_client, self.user_two) - - def test_tokens_after_user_update_passwd(self): - client = self.get_client(self.user_two) - - token_id = client.auth_token - new_password = uuid.uuid4().hex - - # TODO(derekh): Update to use keystoneclient when available - class FakeResponse(object): - def start_fake_response(self, status, headers): - self.response_status = int(status.split(' ', 1)[0]) - self.response_headers = dict(headers) - responseobject = FakeResponse() - - req = webob.Request.blank( - '/v2.0/OS-KSCRUD/users/%s' % self.user_two['id'], - headers={'X-Auth-Token': token_id}) - req.method = 'PATCH' - req.body = ('{"user":{"password":"%s","original_password":"%s"}}' % - (new_password, self.user_two['password'])) - - rv = self.public_server.application( - req.environ, - responseobject.start_fake_response) - response_json = jsonutils.loads(rv.pop()) - new_token_id = response_json['access']['token']['id'] - - self.assertRaises(client_exceptions.Unauthorized, client.tenants.list) - client.auth_token = new_token_id - client.tenants.list() - - def test_endpoint_crud(self): - client = self.get_client(admin=True) - - service = client.services.create(name=uuid.uuid4().hex, - service_type=uuid.uuid4().hex, - 
description=uuid.uuid4().hex) - - endpoint_region = uuid.uuid4().hex - invalid_service_id = uuid.uuid4().hex - endpoint_publicurl = uuid.uuid4().hex - endpoint_internalurl = uuid.uuid4().hex - endpoint_adminurl = uuid.uuid4().hex - - # a non-existent service ID should trigger a 400 - self.assertRaises(client_exceptions.BadRequest, - client.endpoints.create, - region=endpoint_region, - service_id=invalid_service_id, - publicurl=endpoint_publicurl, - adminurl=endpoint_adminurl, - internalurl=endpoint_internalurl) - - endpoint = client.endpoints.create(region=endpoint_region, - service_id=service.id, - publicurl=endpoint_publicurl, - adminurl=endpoint_adminurl, - internalurl=endpoint_internalurl) - - self.assertEqual(endpoint_region, endpoint.region) - self.assertEqual(service.id, endpoint.service_id) - self.assertEqual(endpoint_publicurl, endpoint.publicurl) - self.assertEqual(endpoint_internalurl, endpoint.internalurl) - self.assertEqual(endpoint_adminurl, endpoint.adminurl) - - client.endpoints.delete(id=endpoint.id) - self.assertRaises(client_exceptions.NotFound, client.endpoints.delete, - id=endpoint.id) - - def _send_ec2_auth_request(self, credentials, client=None): - if not client: - client = self.default_client - url = '%s/ec2tokens' % self.default_client.auth_url - resp = client.session.request( - url=url, method='POST', - json={'credentials': credentials}) - return resp, resp.json() - - def _generate_default_user_ec2_credentials(self): - cred = self. 
default_client.ec2.create( - user_id=self.user_foo['id'], - tenant_id=self.tenant_bar['id']) - return self._generate_user_ec2_credentials(cred.access, cred.secret) - - def _generate_user_ec2_credentials(self, access, secret): - signer = ec2_utils.Ec2Signer(secret) - credentials = {'params': {'SignatureVersion': '2'}, - 'access': access, - 'verb': 'GET', - 'host': 'localhost', - 'path': '/service/cloud'} - signature = signer.generate(credentials) - return credentials, signature - - def test_ec2_auth_success(self): - credentials, signature = self._generate_default_user_ec2_credentials() - credentials['signature'] = signature - resp, token = self._send_ec2_auth_request(credentials) - self.assertEqual(200, resp.status_code) - self.assertIn('access', token) - - def test_ec2_auth_success_trust(self): - # Add "other" role user_foo and create trust delegating it to user_two - self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], - self.tenant_bar['id'], - self.role_other['id']) - trust_id = 'atrust123' - trust = {'trustor_user_id': self.user_foo['id'], - 'trustee_user_id': self.user_two['id'], - 'project_id': self.tenant_bar['id'], - 'impersonation': True} - roles = [self.role_other] - self.trust_api.create_trust(trust_id, trust, roles) - - # Create a client for user_two, scoped to the trust - client = self.get_client(self.user_two) - ret = client.authenticate(trust_id=trust_id, - tenant_id=self.tenant_bar['id']) - self.assertTrue(ret) - self.assertTrue(client.auth_ref.trust_scoped) - self.assertEqual(trust_id, client.auth_ref.trust_id) - - # Create an ec2 keypair using the trust client impersonating user_foo - cred = client.ec2.create(user_id=self.user_foo['id'], - tenant_id=self.tenant_bar['id']) - credentials, signature = self._generate_user_ec2_credentials( - cred.access, cred.secret) - credentials['signature'] = signature - resp, token = self._send_ec2_auth_request(credentials) - self.assertEqual(200, resp.status_code) - self.assertEqual(trust_id, 
token['access']['trust']['id']) - # TODO(shardy) we really want to check the roles and trustee - # but because of where the stubbing happens we don't seem to - # hit the necessary code in controllers.py _authenticate_token - # so although all is OK via a real request, it incorrect in - # this test.. - - def test_ec2_auth_failure(self): - credentials, signature = self._generate_default_user_ec2_credentials() - credentials['signature'] = uuid.uuid4().hex - self.assertRaises(client_exceptions.Unauthorized, - self._send_ec2_auth_request, - credentials) - - def test_ec2_credential_crud(self): - creds = self.default_client.ec2.list(user_id=self.user_foo['id']) - self.assertEqual([], creds) - - cred = self.default_client.ec2.create(user_id=self.user_foo['id'], - tenant_id=self.tenant_bar['id']) - creds = self.default_client.ec2.list(user_id=self.user_foo['id']) - self.assertEqual(creds, [cred]) - got = self.default_client.ec2.get(user_id=self.user_foo['id'], - access=cred.access) - self.assertEqual(cred, got) - - self.default_client.ec2.delete(user_id=self.user_foo['id'], - access=cred.access) - creds = self.default_client.ec2.list(user_id=self.user_foo['id']) - self.assertEqual([], creds) - - def test_ec2_credential_crud_non_admin(self): - na_client = self.get_client(self.user_two) - creds = na_client.ec2.list(user_id=self.user_two['id']) - self.assertEqual([], creds) - - cred = na_client.ec2.create(user_id=self.user_two['id'], - tenant_id=self.tenant_baz['id']) - creds = na_client.ec2.list(user_id=self.user_two['id']) - self.assertEqual(creds, [cred]) - got = na_client.ec2.get(user_id=self.user_two['id'], - access=cred.access) - self.assertEqual(cred, got) - - na_client.ec2.delete(user_id=self.user_two['id'], - access=cred.access) - creds = na_client.ec2.list(user_id=self.user_two['id']) - self.assertEqual([], creds) - - def test_ec2_list_credentials(self): - cred_1 = self.default_client.ec2.create( - user_id=self.user_foo['id'], - tenant_id=self.tenant_bar['id']) - 
cred_2 = self.default_client.ec2.create( - user_id=self.user_foo['id'], - tenant_id=self.tenant_service['id']) - cred_3 = self.default_client.ec2.create( - user_id=self.user_foo['id'], - tenant_id=self.tenant_mtu['id']) - two = self.get_client(self.user_two) - cred_4 = two.ec2.create(user_id=self.user_two['id'], - tenant_id=self.tenant_bar['id']) - creds = self.default_client.ec2.list(user_id=self.user_foo['id']) - self.assertEqual(3, len(creds)) - self.assertEqual(sorted([cred_1, cred_2, cred_3], - key=lambda x: x.access), - sorted(creds, key=lambda x: x.access)) - self.assertNotIn(cred_4, creds) - - def test_ec2_credentials_create_404(self): - self.assertRaises(client_exceptions.NotFound, - self.default_client.ec2.create, - user_id=uuid.uuid4().hex, - tenant_id=self.tenant_bar['id']) - self.assertRaises(client_exceptions.NotFound, - self.default_client.ec2.create, - user_id=self.user_foo['id'], - tenant_id=uuid.uuid4().hex) - - def test_ec2_credentials_delete_404(self): - self.assertRaises(client_exceptions.NotFound, - self.default_client.ec2.delete, - user_id=uuid.uuid4().hex, - access=uuid.uuid4().hex) - - def test_ec2_credentials_get_404(self): - self.assertRaises(client_exceptions.NotFound, - self.default_client.ec2.get, - user_id=uuid.uuid4().hex, - access=uuid.uuid4().hex) - - def test_ec2_credentials_list_404(self): - self.assertRaises(client_exceptions.NotFound, - self.default_client.ec2.list, - user_id=uuid.uuid4().hex) - - def test_ec2_credentials_list_user_forbidden(self): - two = self.get_client(self.user_two) - self.assertRaises(client_exceptions.Forbidden, two.ec2.list, - user_id=self.user_foo['id']) - - def test_ec2_credentials_get_user_forbidden(self): - cred = self.default_client.ec2.create(user_id=self.user_foo['id'], - tenant_id=self.tenant_bar['id']) - - two = self.get_client(self.user_two) - self.assertRaises(client_exceptions.Forbidden, two.ec2.get, - user_id=self.user_foo['id'], access=cred.access) - - 
self.default_client.ec2.delete(user_id=self.user_foo['id'], - access=cred.access) - - def test_ec2_credentials_delete_user_forbidden(self): - cred = self.default_client.ec2.create(user_id=self.user_foo['id'], - tenant_id=self.tenant_bar['id']) - - two = self.get_client(self.user_two) - self.assertRaises(client_exceptions.Forbidden, two.ec2.delete, - user_id=self.user_foo['id'], access=cred.access) - - self.default_client.ec2.delete(user_id=self.user_foo['id'], - access=cred.access) - - def test_endpoint_create_nonexistent_service(self): - client = self.get_client(admin=True) - self.assertRaises(client_exceptions.BadRequest, - client.endpoints.create, - region=uuid.uuid4().hex, - service_id=uuid.uuid4().hex, - publicurl=uuid.uuid4().hex, - adminurl=uuid.uuid4().hex, - internalurl=uuid.uuid4().hex) - - def test_policy_crud(self): - # FIXME(dolph): this test was written prior to the v3 implementation of - # the client and essentially refers to a non-existent - # policy manager in the v2 client. 
this test needs to be - # moved to a test suite running against the v3 api - self.skipTest('Written prior to v3 client; needs refactor') - - client = self.get_client(admin=True) - - policy_blob = uuid.uuid4().hex - policy_type = uuid.uuid4().hex - service = client.services.create( - name=uuid.uuid4().hex, - service_type=uuid.uuid4().hex, - description=uuid.uuid4().hex) - endpoint = client.endpoints.create( - service_id=service.id, - region=uuid.uuid4().hex, - adminurl=uuid.uuid4().hex, - internalurl=uuid.uuid4().hex, - publicurl=uuid.uuid4().hex) - - # create - policy = client.policies.create( - blob=policy_blob, - type=policy_type, - endpoint=endpoint.id) - self.assertEqual(policy_blob, policy.policy) - self.assertEqual(policy_type, policy.type) - self.assertEqual(endpoint.id, policy.endpoint_id) - - policy = client.policies.get(policy=policy.id) - self.assertEqual(policy_blob, policy.policy) - self.assertEqual(policy_type, policy.type) - self.assertEqual(endpoint.id, policy.endpoint_id) - - endpoints = [x for x in client.endpoints.list() if x.id == endpoint.id] - endpoint = endpoints[0] - self.assertEqual(policy_blob, policy.policy) - self.assertEqual(policy_type, policy.type) - self.assertEqual(endpoint.id, policy.endpoint_id) - - # update - policy_blob = uuid.uuid4().hex - policy_type = uuid.uuid4().hex - endpoint = client.endpoints.create( - service_id=service.id, - region=uuid.uuid4().hex, - adminurl=uuid.uuid4().hex, - internalurl=uuid.uuid4().hex, - publicurl=uuid.uuid4().hex) - - policy = client.policies.update( - policy=policy.id, - blob=policy_blob, - type=policy_type, - endpoint=endpoint.id) - - policy = client.policies.get(policy=policy.id) - self.assertEqual(policy_blob, policy.policy) - self.assertEqual(policy_type, policy.type) - self.assertEqual(endpoint.id, policy.endpoint_id) - - # delete - client.policies.delete(policy=policy.id) - self.assertRaises( - client_exceptions.NotFound, - client.policies.get, - policy=policy.id) - policies = [x for x 
in client.policies.list() if x.id == policy.id] - self.assertEqual(0, len(policies)) diff --git a/keystone-moon/keystone/tests/unit/test_v2_keystoneclient_sql.py b/keystone-moon/keystone/tests/unit/test_v2_keystoneclient_sql.py deleted file mode 100644 index 0fb60fd9..00000000 --- a/keystone-moon/keystone/tests/unit/test_v2_keystoneclient_sql.py +++ /dev/null @@ -1,344 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import uuid - -from keystoneclient.contrib.ec2 import utils as ec2_utils -from keystoneclient import exceptions as client_exceptions - -from keystone.tests import unit as tests -from keystone.tests.unit import test_v2_keystoneclient - - -class ClientDrivenSqlTestCase(test_v2_keystoneclient.ClientDrivenTestCase): - def config_files(self): - config_files = super(ClientDrivenSqlTestCase, self).config_files() - config_files.append(tests.dirs.tests_conf('backend_sql.conf')) - return config_files - - def setUp(self): - super(ClientDrivenSqlTestCase, self).setUp() - self.default_client = self.get_client() - self.addCleanup(self.cleanup_instance('default_client')) - - def test_endpoint_crud(self): - client = self.get_client(admin=True) - - service = client.services.create(name=uuid.uuid4().hex, - service_type=uuid.uuid4().hex, - description=uuid.uuid4().hex) - - endpoint_region = uuid.uuid4().hex - invalid_service_id = uuid.uuid4().hex - endpoint_publicurl = uuid.uuid4().hex - endpoint_internalurl = uuid.uuid4().hex 
- endpoint_adminurl = uuid.uuid4().hex - - # a non-existent service ID should trigger a 400 - self.assertRaises(client_exceptions.BadRequest, - client.endpoints.create, - region=endpoint_region, - service_id=invalid_service_id, - publicurl=endpoint_publicurl, - adminurl=endpoint_adminurl, - internalurl=endpoint_internalurl) - - endpoint = client.endpoints.create(region=endpoint_region, - service_id=service.id, - publicurl=endpoint_publicurl, - adminurl=endpoint_adminurl, - internalurl=endpoint_internalurl) - - self.assertEqual(endpoint_region, endpoint.region) - self.assertEqual(service.id, endpoint.service_id) - self.assertEqual(endpoint_publicurl, endpoint.publicurl) - self.assertEqual(endpoint_internalurl, endpoint.internalurl) - self.assertEqual(endpoint_adminurl, endpoint.adminurl) - - client.endpoints.delete(id=endpoint.id) - self.assertRaises(client_exceptions.NotFound, client.endpoints.delete, - id=endpoint.id) - - def _send_ec2_auth_request(self, credentials, client=None): - if not client: - client = self.default_client - url = '%s/ec2tokens' % self.default_client.auth_url - (resp, token) = client.request( - url=url, method='POST', - body={'credentials': credentials}) - return resp, token - - def _generate_default_user_ec2_credentials(self): - cred = self. 
default_client.ec2.create( - user_id=self.user_foo['id'], - tenant_id=self.tenant_bar['id']) - return self._generate_user_ec2_credentials(cred.access, cred.secret) - - def _generate_user_ec2_credentials(self, access, secret): - signer = ec2_utils.Ec2Signer(secret) - credentials = {'params': {'SignatureVersion': '2'}, - 'access': access, - 'verb': 'GET', - 'host': 'localhost', - 'path': '/service/cloud'} - signature = signer.generate(credentials) - return credentials, signature - - def test_ec2_auth_success(self): - credentials, signature = self._generate_default_user_ec2_credentials() - credentials['signature'] = signature - resp, token = self._send_ec2_auth_request(credentials) - self.assertEqual(200, resp.status_code) - self.assertIn('access', token) - - def test_ec2_auth_success_trust(self): - # Add "other" role user_foo and create trust delegating it to user_two - self.assignment_api.add_role_to_user_and_project( - self.user_foo['id'], - self.tenant_bar['id'], - self.role_other['id']) - trust_id = 'atrust123' - trust = {'trustor_user_id': self.user_foo['id'], - 'trustee_user_id': self.user_two['id'], - 'project_id': self.tenant_bar['id'], - 'impersonation': True} - roles = [self.role_other] - self.trust_api.create_trust(trust_id, trust, roles) - - # Create a client for user_two, scoped to the trust - client = self.get_client(self.user_two) - ret = client.authenticate(trust_id=trust_id, - tenant_id=self.tenant_bar['id']) - self.assertTrue(ret) - self.assertTrue(client.auth_ref.trust_scoped) - self.assertEqual(trust_id, client.auth_ref.trust_id) - - # Create an ec2 keypair using the trust client impersonating user_foo - cred = client.ec2.create(user_id=self.user_foo['id'], - tenant_id=self.tenant_bar['id']) - credentials, signature = self._generate_user_ec2_credentials( - cred.access, cred.secret) - credentials['signature'] = signature - resp, token = self._send_ec2_auth_request(credentials) - self.assertEqual(200, resp.status_code) - self.assertEqual(trust_id, 
token['access']['trust']['id']) - # TODO(shardy) we really want to check the roles and trustee - # but because of where the stubbing happens we don't seem to - # hit the necessary code in controllers.py _authenticate_token - # so although all is OK via a real request, it incorrect in - # this test.. - - def test_ec2_auth_failure(self): - credentials, signature = self._generate_default_user_ec2_credentials() - credentials['signature'] = uuid.uuid4().hex - self.assertRaises(client_exceptions.Unauthorized, - self._send_ec2_auth_request, - credentials) - - def test_ec2_credential_crud(self): - creds = self.default_client.ec2.list(user_id=self.user_foo['id']) - self.assertEqual([], creds) - - cred = self.default_client.ec2.create(user_id=self.user_foo['id'], - tenant_id=self.tenant_bar['id']) - creds = self.default_client.ec2.list(user_id=self.user_foo['id']) - self.assertEqual(creds, [cred]) - got = self.default_client.ec2.get(user_id=self.user_foo['id'], - access=cred.access) - self.assertEqual(cred, got) - - self.default_client.ec2.delete(user_id=self.user_foo['id'], - access=cred.access) - creds = self.default_client.ec2.list(user_id=self.user_foo['id']) - self.assertEqual([], creds) - - def test_ec2_credential_crud_non_admin(self): - na_client = self.get_client(self.user_two) - creds = na_client.ec2.list(user_id=self.user_two['id']) - self.assertEqual([], creds) - - cred = na_client.ec2.create(user_id=self.user_two['id'], - tenant_id=self.tenant_baz['id']) - creds = na_client.ec2.list(user_id=self.user_two['id']) - self.assertEqual(creds, [cred]) - got = na_client.ec2.get(user_id=self.user_two['id'], - access=cred.access) - self.assertEqual(cred, got) - - na_client.ec2.delete(user_id=self.user_two['id'], - access=cred.access) - creds = na_client.ec2.list(user_id=self.user_two['id']) - self.assertEqual([], creds) - - def test_ec2_list_credentials(self): - cred_1 = self.default_client.ec2.create( - user_id=self.user_foo['id'], - tenant_id=self.tenant_bar['id']) - 
cred_2 = self.default_client.ec2.create( - user_id=self.user_foo['id'], - tenant_id=self.tenant_service['id']) - cred_3 = self.default_client.ec2.create( - user_id=self.user_foo['id'], - tenant_id=self.tenant_mtu['id']) - two = self.get_client(self.user_two) - cred_4 = two.ec2.create(user_id=self.user_two['id'], - tenant_id=self.tenant_bar['id']) - creds = self.default_client.ec2.list(user_id=self.user_foo['id']) - self.assertEqual(3, len(creds)) - self.assertEqual(sorted([cred_1, cred_2, cred_3], - key=lambda x: x.access), - sorted(creds, key=lambda x: x.access)) - self.assertNotIn(cred_4, creds) - - def test_ec2_credentials_create_404(self): - self.assertRaises(client_exceptions.NotFound, - self.default_client.ec2.create, - user_id=uuid.uuid4().hex, - tenant_id=self.tenant_bar['id']) - self.assertRaises(client_exceptions.NotFound, - self.default_client.ec2.create, - user_id=self.user_foo['id'], - tenant_id=uuid.uuid4().hex) - - def test_ec2_credentials_delete_404(self): - self.assertRaises(client_exceptions.NotFound, - self.default_client.ec2.delete, - user_id=uuid.uuid4().hex, - access=uuid.uuid4().hex) - - def test_ec2_credentials_get_404(self): - self.assertRaises(client_exceptions.NotFound, - self.default_client.ec2.get, - user_id=uuid.uuid4().hex, - access=uuid.uuid4().hex) - - def test_ec2_credentials_list_404(self): - self.assertRaises(client_exceptions.NotFound, - self.default_client.ec2.list, - user_id=uuid.uuid4().hex) - - def test_ec2_credentials_list_user_forbidden(self): - two = self.get_client(self.user_two) - self.assertRaises(client_exceptions.Forbidden, two.ec2.list, - user_id=self.user_foo['id']) - - def test_ec2_credentials_get_user_forbidden(self): - cred = self.default_client.ec2.create(user_id=self.user_foo['id'], - tenant_id=self.tenant_bar['id']) - - two = self.get_client(self.user_two) - self.assertRaises(client_exceptions.Forbidden, two.ec2.get, - user_id=self.user_foo['id'], access=cred.access) - - 
self.default_client.ec2.delete(user_id=self.user_foo['id'], - access=cred.access) - - def test_ec2_credentials_delete_user_forbidden(self): - cred = self.default_client.ec2.create(user_id=self.user_foo['id'], - tenant_id=self.tenant_bar['id']) - - two = self.get_client(self.user_two) - self.assertRaises(client_exceptions.Forbidden, two.ec2.delete, - user_id=self.user_foo['id'], access=cred.access) - - self.default_client.ec2.delete(user_id=self.user_foo['id'], - access=cred.access) - - def test_endpoint_create_nonexistent_service(self): - client = self.get_client(admin=True) - self.assertRaises(client_exceptions.BadRequest, - client.endpoints.create, - region=uuid.uuid4().hex, - service_id=uuid.uuid4().hex, - publicurl=uuid.uuid4().hex, - adminurl=uuid.uuid4().hex, - internalurl=uuid.uuid4().hex) - - def test_endpoint_delete_404(self): - client = self.get_client(admin=True) - self.assertRaises(client_exceptions.NotFound, - client.endpoints.delete, - id=uuid.uuid4().hex) - - def test_policy_crud(self): - # FIXME(dolph): this test was written prior to the v3 implementation of - # the client and essentially refers to a non-existent - # policy manager in the v2 client. 
this test needs to be - # moved to a test suite running against the v3 api - self.skipTest('Written prior to v3 client; needs refactor') - - client = self.get_client(admin=True) - - policy_blob = uuid.uuid4().hex - policy_type = uuid.uuid4().hex - service = client.services.create( - name=uuid.uuid4().hex, - service_type=uuid.uuid4().hex, - description=uuid.uuid4().hex) - endpoint = client.endpoints.create( - service_id=service.id, - region=uuid.uuid4().hex, - adminurl=uuid.uuid4().hex, - internalurl=uuid.uuid4().hex, - publicurl=uuid.uuid4().hex) - - # create - policy = client.policies.create( - blob=policy_blob, - type=policy_type, - endpoint=endpoint.id) - self.assertEqual(policy_blob, policy.policy) - self.assertEqual(policy_type, policy.type) - self.assertEqual(endpoint.id, policy.endpoint_id) - - policy = client.policies.get(policy=policy.id) - self.assertEqual(policy_blob, policy.policy) - self.assertEqual(policy_type, policy.type) - self.assertEqual(endpoint.id, policy.endpoint_id) - - endpoints = [x for x in client.endpoints.list() if x.id == endpoint.id] - endpoint = endpoints[0] - self.assertEqual(policy_blob, policy.policy) - self.assertEqual(policy_type, policy.type) - self.assertEqual(endpoint.id, policy.endpoint_id) - - # update - policy_blob = uuid.uuid4().hex - policy_type = uuid.uuid4().hex - endpoint = client.endpoints.create( - service_id=service.id, - region=uuid.uuid4().hex, - adminurl=uuid.uuid4().hex, - internalurl=uuid.uuid4().hex, - publicurl=uuid.uuid4().hex) - - policy = client.policies.update( - policy=policy.id, - blob=policy_blob, - type=policy_type, - endpoint=endpoint.id) - - policy = client.policies.get(policy=policy.id) - self.assertEqual(policy_blob, policy.policy) - self.assertEqual(policy_type, policy.type) - self.assertEqual(endpoint.id, policy.endpoint_id) - - # delete - client.policies.delete(policy=policy.id) - self.assertRaises( - client_exceptions.NotFound, - client.policies.get, - policy=policy.id) - policies = [x for x 
in client.policies.list() if x.id == policy.id] - self.assertEqual(0, len(policies)) diff --git a/keystone-moon/keystone/tests/unit/test_v3.py b/keystone-moon/keystone/tests/unit/test_v3.py deleted file mode 100644 index 216d8c79..00000000 --- a/keystone-moon/keystone/tests/unit/test_v3.py +++ /dev/null @@ -1,1640 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import uuid - -import mock -from oslo_config import cfg -import oslo_context.context -from oslo_serialization import jsonutils -from oslo_utils import timeutils -from six.moves import http_client -from testtools import matchers -import webtest - -from keystone import auth -from keystone.common import authorization -from keystone.common import cache -from keystone.common.validation import validators -from keystone import exception -from keystone import middleware -from keystone.middleware import auth as middleware_auth -from keystone.tests.common import auth as common_auth -from keystone.tests import unit -from keystone.tests.unit import rest - - -CONF = cfg.CONF -DEFAULT_DOMAIN_ID = 'default' - -TIME_FORMAT = unit.TIME_FORMAT - - -class AuthTestMixin(object): - """To hold auth building helper functions.""" - - def build_auth_scope(self, project_id=None, project_name=None, - project_domain_id=None, project_domain_name=None, - domain_id=None, domain_name=None, trust_id=None, - unscoped=None): - scope_data = {} - if unscoped: - scope_data['unscoped'] = {} - if 
project_id or project_name: - scope_data['project'] = {} - if project_id: - scope_data['project']['id'] = project_id - else: - scope_data['project']['name'] = project_name - if project_domain_id or project_domain_name: - project_domain_json = {} - if project_domain_id: - project_domain_json['id'] = project_domain_id - else: - project_domain_json['name'] = project_domain_name - scope_data['project']['domain'] = project_domain_json - if domain_id or domain_name: - scope_data['domain'] = {} - if domain_id: - scope_data['domain']['id'] = domain_id - else: - scope_data['domain']['name'] = domain_name - if trust_id: - scope_data['OS-TRUST:trust'] = {} - scope_data['OS-TRUST:trust']['id'] = trust_id - return scope_data - - def build_password_auth(self, user_id=None, username=None, - user_domain_id=None, user_domain_name=None, - password=None): - password_data = {'user': {}} - if user_id: - password_data['user']['id'] = user_id - else: - password_data['user']['name'] = username - if user_domain_id or user_domain_name: - password_data['user']['domain'] = {} - if user_domain_id: - password_data['user']['domain']['id'] = user_domain_id - else: - password_data['user']['domain']['name'] = user_domain_name - password_data['user']['password'] = password - return password_data - - def build_token_auth(self, token): - return {'id': token} - - def build_authentication_request(self, token=None, user_id=None, - username=None, user_domain_id=None, - user_domain_name=None, password=None, - kerberos=False, **kwargs): - """Build auth dictionary. - - It will create an auth dictionary based on all the arguments - that it receives. 
- """ - auth_data = {} - auth_data['identity'] = {'methods': []} - if kerberos: - auth_data['identity']['methods'].append('kerberos') - auth_data['identity']['kerberos'] = {} - if token: - auth_data['identity']['methods'].append('token') - auth_data['identity']['token'] = self.build_token_auth(token) - if user_id or username: - auth_data['identity']['methods'].append('password') - auth_data['identity']['password'] = self.build_password_auth( - user_id, username, user_domain_id, user_domain_name, password) - if kwargs: - auth_data['scope'] = self.build_auth_scope(**kwargs) - return {'auth': auth_data} - - -class RestfulTestCase(unit.SQLDriverOverrides, rest.RestfulTestCase, - common_auth.AuthTestMixin): - - def generate_token_schema(self, domain_scoped=False, project_scoped=False): - """Return a dictionary of token properties to validate against.""" - properties = { - 'audit_ids': { - 'type': 'array', - 'items': { - 'type': 'string', - }, - 'minItems': 1, - 'maxItems': 2, - }, - 'bind': { - 'type': 'object', - 'properties': { - 'kerberos': { - 'type': 'string', - }, - }, - 'required': ['kerberos'], - 'additionalProperties': False, - }, - 'expires_at': {'type': 'string'}, - 'issued_at': {'type': 'string'}, - 'methods': { - 'type': 'array', - 'items': { - 'type': 'string', - }, - }, - 'user': { - 'type': 'object', - 'required': ['id', 'name', 'domain'], - 'properties': { - 'id': {'type': 'string'}, - 'name': {'type': 'string'}, - 'domain': { - 'type': 'object', - 'properties': { - 'id': {'type': 'string'}, - 'name': {'type': 'string'} - }, - 'required': ['id', 'name'], - 'additonalProperties': False, - } - }, - 'additionalProperties': False, - } - } - - if domain_scoped: - properties['catalog'] = {'type': 'array'} - properties['roles'] = { - 'type': 'array', - 'items': { - 'type': 'object', - 'properties': { - 'id': {'type': 'string', }, - 'name': {'type': 'string', }, - }, - 'required': ['id', 'name', ], - 'additionalProperties': False, - }, - 'minItems': 1, - } - 
properties['domain'] = { - 'domain': { - 'type': 'object', - 'required': ['id', 'name'], - 'properties': { - 'id': {'type': 'string'}, - 'name': {'type': 'string'} - }, - 'additionalProperties': False - } - } - elif project_scoped: - properties['is_admin_project'] = {'type': 'boolean'} - properties['catalog'] = {'type': 'array'} - properties['roles'] = {'type': 'array'} - properties['project'] = { - 'type': ['object'], - 'required': ['id', 'name', 'domain'], - 'properties': { - 'id': {'type': 'string'}, - 'name': {'type': 'string'}, - 'domain': { - 'type': ['object'], - 'required': ['id', 'name'], - 'properties': { - 'id': {'type': 'string'}, - 'name': {'type': 'string'} - }, - 'additionalProperties': False - } - }, - 'additionalProperties': False - } - - schema = { - 'type': 'object', - 'properties': properties, - 'required': ['audit_ids', 'expires_at', 'issued_at', 'methods', - 'user'], - 'optional': ['bind'], - 'additionalProperties': False - } - - if domain_scoped: - schema['required'].extend(['domain', 'roles']) - schema['optional'].append('catalog') - elif project_scoped: - schema['required'].append('project') - schema['optional'].append('bind') - schema['optional'].append('catalog') - schema['optional'].append('OS-TRUST:trust') - schema['optional'].append('is_admin_project') - - return schema - - def config_files(self): - config_files = super(RestfulTestCase, self).config_files() - config_files.append(unit.dirs.tests_conf('backend_sql.conf')) - return config_files - - def get_extensions(self): - extensions = set(['revoke']) - if hasattr(self, 'EXTENSION_NAME'): - extensions.add(self.EXTENSION_NAME) - return extensions - - def generate_paste_config(self): - new_paste_file = None - try: - new_paste_file = unit.generate_paste_config(self.EXTENSION_TO_ADD) - except AttributeError: - # no need to report this error here, as most tests will not have - # EXTENSION_TO_ADD defined. 
- pass - finally: - return new_paste_file - - def remove_generated_paste_config(self): - try: - unit.remove_generated_paste_config(self.EXTENSION_TO_ADD) - except AttributeError: - pass - - def setUp(self, app_conf='keystone'): - """Setup for v3 Restful Test Cases.""" - new_paste_file = self.generate_paste_config() - self.addCleanup(self.remove_generated_paste_config) - if new_paste_file: - app_conf = 'config:%s' % (new_paste_file) - - super(RestfulTestCase, self).setUp(app_conf=app_conf) - - self.empty_context = {'environment': {}} - - def load_backends(self): - # ensure the cache region instance is setup - cache.configure_cache() - - super(RestfulTestCase, self).load_backends() - - def load_fixtures(self, fixtures): - self.load_sample_data() - - def _populate_default_domain(self): - if CONF.database.connection == unit.IN_MEM_DB_CONN_STRING: - # NOTE(morganfainberg): If an in-memory db is being used, be sure - # to populate the default domain, this is typically done by - # a migration, but the in-mem db uses model definitions to create - # the schema (no migrations are run). 
- try: - self.resource_api.get_domain(DEFAULT_DOMAIN_ID) - except exception.DomainNotFound: - domain = unit.new_domain_ref( - description=(u'The default domain'), - id=DEFAULT_DOMAIN_ID, - name=u'Default') - self.resource_api.create_domain(DEFAULT_DOMAIN_ID, domain) - - def load_sample_data(self): - self._populate_default_domain() - self.domain = unit.new_domain_ref() - self.domain_id = self.domain['id'] - self.resource_api.create_domain(self.domain_id, self.domain) - - self.project = unit.new_project_ref(domain_id=self.domain_id) - self.project_id = self.project['id'] - self.project = self.resource_api.create_project(self.project_id, - self.project) - - self.user = unit.create_user(self.identity_api, - domain_id=self.domain_id) - self.user_id = self.user['id'] - - self.default_domain_project_id = uuid.uuid4().hex - self.default_domain_project = unit.new_project_ref( - domain_id=DEFAULT_DOMAIN_ID) - self.default_domain_project['id'] = self.default_domain_project_id - self.resource_api.create_project(self.default_domain_project_id, - self.default_domain_project) - - self.default_domain_user = unit.create_user( - self.identity_api, - domain_id=DEFAULT_DOMAIN_ID) - self.default_domain_user_id = self.default_domain_user['id'] - - # create & grant policy.json's default role for admin_required - self.role = unit.new_role_ref(name='admin') - self.role_id = self.role['id'] - self.role_api.create_role(self.role_id, self.role) - self.assignment_api.add_role_to_user_and_project( - self.user_id, self.project_id, self.role_id) - self.assignment_api.add_role_to_user_and_project( - self.default_domain_user_id, self.default_domain_project_id, - self.role_id) - self.assignment_api.add_role_to_user_and_project( - self.default_domain_user_id, self.project_id, - self.role_id) - - # Create "req_admin" user for simulating a real user instead of the - # admin_token_auth middleware - self.user_reqadmin = unit.create_user(self.identity_api, - DEFAULT_DOMAIN_ID) - 
self.assignment_api.add_role_to_user_and_project( - self.user_reqadmin['id'], - self.default_domain_project_id, - self.role_id) - - self.region = unit.new_region_ref() - self.region_id = self.region['id'] - self.catalog_api.create_region(self.region) - - self.service = unit.new_service_ref() - self.service_id = self.service['id'] - self.catalog_api.create_service(self.service_id, self.service.copy()) - - self.endpoint = unit.new_endpoint_ref(service_id=self.service_id, - interface='public', - region_id=self.region_id) - self.endpoint_id = self.endpoint['id'] - self.catalog_api.create_endpoint(self.endpoint_id, - self.endpoint.copy()) - # The server adds 'enabled' and defaults to True. - self.endpoint['enabled'] = True - - def create_new_default_project_for_user(self, user_id, domain_id, - enable_project=True): - ref = unit.new_project_ref(domain_id=domain_id, enabled=enable_project) - r = self.post('/projects', body={'project': ref}) - project = self.assertValidProjectResponse(r, ref) - # set the user's preferred project - body = {'user': {'default_project_id': project['id']}} - r = self.patch('/users/%(user_id)s' % { - 'user_id': user_id}, - body=body) - self.assertValidUserResponse(r) - - return project - - def get_admin_token(self): - """Convenience method so that we can test authenticated requests.""" - r = self.admin_request( - method='POST', - path='/v3/auth/tokens', - body={ - 'auth': { - 'identity': { - 'methods': ['password'], - 'password': { - 'user': { - 'name': self.user_reqadmin['name'], - 'password': self.user_reqadmin['password'], - 'domain': { - 'id': self.user_reqadmin['domain_id'] - } - } - } - }, - 'scope': { - 'project': { - 'id': self.default_domain_project_id, - } - } - } - }) - return r.headers.get('X-Subject-Token') - - def get_unscoped_token(self): - """Convenience method so that we can test authenticated requests.""" - r = self.admin_request( - method='POST', - path='/v3/auth/tokens', - body={ - 'auth': { - 'identity': { - 'methods': 
['password'], - 'password': { - 'user': { - 'name': self.user['name'], - 'password': self.user['password'], - 'domain': { - 'id': self.user['domain_id'] - } - } - } - } - } - }) - return r.headers.get('X-Subject-Token') - - def get_scoped_token(self): - """Convenience method so that we can test authenticated requests.""" - r = self.admin_request( - method='POST', - path='/v3/auth/tokens', - body={ - 'auth': { - 'identity': { - 'methods': ['password'], - 'password': { - 'user': { - 'name': self.user['name'], - 'password': self.user['password'], - 'domain': { - 'id': self.user['domain_id'] - } - } - } - }, - 'scope': { - 'project': { - 'id': self.project['id'], - } - } - } - }) - return r.headers.get('X-Subject-Token') - - def get_domain_scoped_token(self): - """Convenience method for requesting domain scoped token.""" - r = self.admin_request( - method='POST', - path='/v3/auth/tokens', - body={ - 'auth': { - 'identity': { - 'methods': ['password'], - 'password': { - 'user': { - 'name': self.user['name'], - 'password': self.user['password'], - 'domain': { - 'id': self.user['domain_id'] - } - } - } - }, - 'scope': { - 'domain': { - 'id': self.domain['id'], - } - } - } - }) - return r.headers.get('X-Subject-Token') - - def get_requested_token(self, auth): - """Request the specific token we want.""" - r = self.v3_create_token(auth) - return r.headers.get('X-Subject-Token') - - def v3_create_token(self, auth, expected_status=http_client.CREATED): - return self.admin_request(method='POST', - path='/v3/auth/tokens', - body=auth, - expected_status=expected_status) - - def v3_noauth_request(self, path, **kwargs): - # request does not require auth token header - path = '/v3' + path - return self.admin_request(path=path, **kwargs) - - def v3_request(self, path, **kwargs): - # check to see if caller requires token for the API call. 
- if kwargs.pop('noauth', None): - return self.v3_noauth_request(path, **kwargs) - - # Check if the caller has passed in auth details for - # use in requesting the token - auth_arg = kwargs.pop('auth', None) - if auth_arg: - token = self.get_requested_token(auth_arg) - else: - token = kwargs.pop('token', None) - if not token: - token = self.get_scoped_token() - path = '/v3' + path - - return self.admin_request(path=path, token=token, **kwargs) - - def get(self, path, expected_status=http_client.OK, **kwargs): - return self.v3_request(path, method='GET', - expected_status=expected_status, **kwargs) - - def head(self, path, expected_status=http_client.NO_CONTENT, **kwargs): - r = self.v3_request(path, method='HEAD', - expected_status=expected_status, **kwargs) - self.assertEqual(b'', r.body) - return r - - def post(self, path, expected_status=http_client.CREATED, **kwargs): - return self.v3_request(path, method='POST', - expected_status=expected_status, **kwargs) - - def put(self, path, expected_status=http_client.NO_CONTENT, **kwargs): - return self.v3_request(path, method='PUT', - expected_status=expected_status, **kwargs) - - def patch(self, path, expected_status=http_client.OK, **kwargs): - return self.v3_request(path, method='PATCH', - expected_status=expected_status, **kwargs) - - def delete(self, path, expected_status=http_client.NO_CONTENT, **kwargs): - return self.v3_request(path, method='DELETE', - expected_status=expected_status, **kwargs) - - def assertValidErrorResponse(self, r): - resp = r.result - self.assertIsNotNone(resp.get('error')) - self.assertIsNotNone(resp['error'].get('code')) - self.assertIsNotNone(resp['error'].get('title')) - self.assertIsNotNone(resp['error'].get('message')) - self.assertEqual(int(resp['error']['code']), r.status_code) - - def assertValidListLinks(self, links, resource_url=None): - self.assertIsNotNone(links) - self.assertIsNotNone(links.get('self')) - self.assertThat(links['self'], matchers.StartsWith('http://localhost')) 
- - if resource_url: - self.assertThat(links['self'], matchers.EndsWith(resource_url)) - - self.assertIn('next', links) - if links['next'] is not None: - self.assertThat(links['next'], - matchers.StartsWith('http://localhost')) - - self.assertIn('previous', links) - if links['previous'] is not None: - self.assertThat(links['previous'], - matchers.StartsWith('http://localhost')) - - def assertValidListResponse(self, resp, key, entity_validator, ref=None, - expected_length=None, keys_to_check=None, - resource_url=None): - """Make assertions common to all API list responses. - - If a reference is provided, it's ID will be searched for in the - response, and asserted to be equal. - - """ - entities = resp.result.get(key) - self.assertIsNotNone(entities) - - if expected_length is not None: - self.assertEqual(expected_length, len(entities)) - elif ref is not None: - # we're at least expecting the ref - self.assertNotEmpty(entities) - - # collections should have relational links - self.assertValidListLinks(resp.result.get('links'), - resource_url=resource_url) - - for entity in entities: - self.assertIsNotNone(entity) - self.assertValidEntity(entity, keys_to_check=keys_to_check) - entity_validator(entity) - if ref: - entity = [x for x in entities if x['id'] == ref['id']][0] - self.assertValidEntity(entity, ref=ref, - keys_to_check=keys_to_check) - entity_validator(entity, ref) - return entities - - def assertValidResponse(self, resp, key, entity_validator, *args, - **kwargs): - """Make assertions common to all API responses.""" - entity = resp.result.get(key) - self.assertIsNotNone(entity) - keys = kwargs.pop('keys_to_check', None) - self.assertValidEntity(entity, keys_to_check=keys, *args, **kwargs) - entity_validator(entity, *args, **kwargs) - return entity - - def assertValidEntity(self, entity, ref=None, keys_to_check=None): - """Make assertions common to all API entities. - - If a reference is provided, the entity will also be compared against - the reference. 
- """ - if keys_to_check is not None: - keys = keys_to_check - else: - keys = ['name', 'description', 'enabled'] - - for k in ['id'] + keys: - msg = '%s unexpectedly None in %s' % (k, entity) - self.assertIsNotNone(entity.get(k), msg) - - self.assertIsNotNone(entity.get('links')) - self.assertIsNotNone(entity['links'].get('self')) - self.assertThat(entity['links']['self'], - matchers.StartsWith('http://localhost')) - self.assertIn(entity['id'], entity['links']['self']) - - if ref: - for k in keys: - msg = '%s not equal: %s != %s' % (k, ref[k], entity[k]) - self.assertEqual(ref[k], entity[k]) - - return entity - - # auth validation - - def assertValidISO8601ExtendedFormatDatetime(self, dt): - try: - return timeutils.parse_strtime(dt, fmt=TIME_FORMAT) - except Exception: - msg = '%s is not a valid ISO 8601 extended format date time.' % dt - raise AssertionError(msg) - - def assertValidTokenResponse(self, r, user=None): - self.assertTrue(r.headers.get('X-Subject-Token')) - token = r.result['token'] - - self.assertIsNotNone(token.get('expires_at')) - expires_at = self.assertValidISO8601ExtendedFormatDatetime( - token['expires_at']) - self.assertIsNotNone(token.get('issued_at')) - issued_at = self.assertValidISO8601ExtendedFormatDatetime( - token['issued_at']) - self.assertTrue(issued_at < expires_at) - - self.assertIn('user', token) - self.assertIn('id', token['user']) - self.assertIn('name', token['user']) - self.assertIn('domain', token['user']) - self.assertIn('id', token['user']['domain']) - - if user is not None: - self.assertEqual(user['id'], token['user']['id']) - self.assertEqual(user['name'], token['user']['name']) - self.assertEqual(user['domain_id'], token['user']['domain']['id']) - - return token - - def assertValidUnscopedTokenResponse(self, r, *args, **kwargs): - token = self.assertValidTokenResponse(r, *args, **kwargs) - validator_object = validators.SchemaValidator( - self.generate_token_schema() - ) - validator_object.validate(token) - - return token - 
- def assertValidScopedTokenResponse(self, r, *args, **kwargs): - require_catalog = kwargs.pop('require_catalog', True) - endpoint_filter = kwargs.pop('endpoint_filter', False) - ep_filter_assoc = kwargs.pop('ep_filter_assoc', 0) - is_admin_project = kwargs.pop('is_admin_project', False) - token = self.assertValidTokenResponse(r, *args, **kwargs) - - if require_catalog: - endpoint_num = 0 - self.assertIn('catalog', token) - - if isinstance(token['catalog'], list): - # only test JSON - for service in token['catalog']: - for endpoint in service['endpoints']: - self.assertNotIn('enabled', endpoint) - self.assertNotIn('legacy_endpoint_id', endpoint) - self.assertNotIn('service_id', endpoint) - endpoint_num += 1 - - # sub test for the OS-EP-FILTER extension enabled - if endpoint_filter: - self.assertEqual(ep_filter_assoc, endpoint_num) - else: - self.assertNotIn('catalog', token) - - self.assertIn('roles', token) - self.assertTrue(token['roles']) - for role in token['roles']: - self.assertIn('id', role) - self.assertIn('name', role) - - if is_admin_project: - # NOTE(samueldmq): We want to explicitly test for boolean - self.assertIs(True, token['is_admin_project']) - else: - self.assertNotIn('is_admin_project', token) - - return token - - def assertValidProjectScopedTokenResponse(self, r, *args, **kwargs): - token = self.assertValidScopedTokenResponse(r, *args, **kwargs) - - project_scoped_token_schema = self.generate_token_schema( - project_scoped=True) - - if token.get('OS-TRUST:trust'): - trust_properties = { - 'OS-TRUST:trust': { - 'type': ['object'], - 'required': ['id', 'impersonation', 'trustor_user', - 'trustee_user'], - 'properties': { - 'id': {'type': 'string'}, - 'impersonation': {'type': 'boolean'}, - 'trustor_user': { - 'type': 'object', - 'required': ['id'], - 'properties': { - 'id': {'type': 'string'} - }, - 'additionalProperties': False - }, - 'trustee_user': { - 'type': 'object', - 'required': ['id'], - 'properties': { - 'id': {'type': 'string'} - }, - 
'additionalProperties': False - } - }, - 'additionalProperties': False - } - } - project_scoped_token_schema['properties'].update(trust_properties) - - validator_object = validators.SchemaValidator( - project_scoped_token_schema) - validator_object.validate(token) - - self.assertEqual(self.role_id, token['roles'][0]['id']) - - return token - - def assertValidDomainScopedTokenResponse(self, r, *args, **kwargs): - token = self.assertValidScopedTokenResponse(r, *args, **kwargs) - - validator_object = validators.SchemaValidator( - self.generate_token_schema(domain_scoped=True) - ) - validator_object.validate(token) - - return token - - def assertEqualTokens(self, a, b): - """Assert that two tokens are equal. - - Compare two tokens except for their ids. This also truncates - the time in the comparison. - """ - def normalize(token): - del token['token']['expires_at'] - del token['token']['issued_at'] - return token - - a_expires_at = self.assertValidISO8601ExtendedFormatDatetime( - a['token']['expires_at']) - b_expires_at = self.assertValidISO8601ExtendedFormatDatetime( - b['token']['expires_at']) - self.assertCloseEnoughForGovernmentWork(a_expires_at, b_expires_at) - - a_issued_at = self.assertValidISO8601ExtendedFormatDatetime( - a['token']['issued_at']) - b_issued_at = self.assertValidISO8601ExtendedFormatDatetime( - b['token']['issued_at']) - self.assertCloseEnoughForGovernmentWork(a_issued_at, b_issued_at) - - return self.assertDictEqual(normalize(a), normalize(b)) - - # catalog validation - - def assertValidCatalogResponse(self, resp, *args, **kwargs): - self.assertEqual(set(['catalog', 'links']), set(resp.json.keys())) - self.assertValidCatalog(resp.json['catalog']) - self.assertIn('links', resp.json) - self.assertIsInstance(resp.json['links'], dict) - self.assertEqual(['self'], list(resp.json['links'].keys())) - self.assertEqual( - 'http://localhost/v3/auth/catalog', - resp.json['links']['self']) - - def assertValidCatalog(self, entity): - 
self.assertIsInstance(entity, list) - self.assertTrue(len(entity) > 0) - for service in entity: - self.assertIsNotNone(service.get('id')) - self.assertIsNotNone(service.get('name')) - self.assertIsNotNone(service.get('type')) - self.assertNotIn('enabled', service) - self.assertTrue(len(service['endpoints']) > 0) - for endpoint in service['endpoints']: - self.assertIsNotNone(endpoint.get('id')) - self.assertIsNotNone(endpoint.get('interface')) - self.assertIsNotNone(endpoint.get('url')) - self.assertNotIn('enabled', endpoint) - self.assertNotIn('legacy_endpoint_id', endpoint) - self.assertNotIn('service_id', endpoint) - - # region validation - - def assertValidRegionListResponse(self, resp, *args, **kwargs): - # NOTE(jaypipes): I have to pass in a blank keys_to_check parameter - # below otherwise the base assertValidEntity method - # tries to find a "name" and an "enabled" key in the - # returned ref dicts. The issue is, I don't understand - # how the service and endpoint entity assertions below - # actually work (they don't raise assertions), since - # AFAICT, the service and endpoint tables don't have - # a "name" column either... 
:( - return self.assertValidListResponse( - resp, - 'regions', - self.assertValidRegion, - keys_to_check=[], - *args, - **kwargs) - - def assertValidRegionResponse(self, resp, *args, **kwargs): - return self.assertValidResponse( - resp, - 'region', - self.assertValidRegion, - keys_to_check=[], - *args, - **kwargs) - - def assertValidRegion(self, entity, ref=None): - self.assertIsNotNone(entity.get('description')) - if ref: - self.assertEqual(ref['description'], entity['description']) - return entity - - # service validation - - def assertValidServiceListResponse(self, resp, *args, **kwargs): - return self.assertValidListResponse( - resp, - 'services', - self.assertValidService, - *args, - **kwargs) - - def assertValidServiceResponse(self, resp, *args, **kwargs): - return self.assertValidResponse( - resp, - 'service', - self.assertValidService, - *args, - **kwargs) - - def assertValidService(self, entity, ref=None): - self.assertIsNotNone(entity.get('type')) - self.assertIsInstance(entity.get('enabled'), bool) - if ref: - self.assertEqual(ref['type'], entity['type']) - return entity - - # endpoint validation - - def assertValidEndpointListResponse(self, resp, *args, **kwargs): - return self.assertValidListResponse( - resp, - 'endpoints', - self.assertValidEndpoint, - *args, - **kwargs) - - def assertValidEndpointResponse(self, resp, *args, **kwargs): - return self.assertValidResponse( - resp, - 'endpoint', - self.assertValidEndpoint, - *args, - **kwargs) - - def assertValidEndpoint(self, entity, ref=None): - self.assertIsNotNone(entity.get('interface')) - self.assertIsNotNone(entity.get('service_id')) - self.assertIsInstance(entity['enabled'], bool) - - # this is intended to be an unexposed implementation detail - self.assertNotIn('legacy_endpoint_id', entity) - - if ref: - self.assertEqual(ref['interface'], entity['interface']) - self.assertEqual(ref['service_id'], entity['service_id']) - if ref.get('region') is not None: - self.assertEqual(ref['region_id'], 
entity.get('region_id')) - - return entity - - # domain validation - - def assertValidDomainListResponse(self, resp, *args, **kwargs): - return self.assertValidListResponse( - resp, - 'domains', - self.assertValidDomain, - *args, - **kwargs) - - def assertValidDomainResponse(self, resp, *args, **kwargs): - return self.assertValidResponse( - resp, - 'domain', - self.assertValidDomain, - *args, - **kwargs) - - def assertValidDomain(self, entity, ref=None): - if ref: - pass - return entity - - # project validation - - def assertValidProjectListResponse(self, resp, *args, **kwargs): - return self.assertValidListResponse( - resp, - 'projects', - self.assertValidProject, - *args, - **kwargs) - - def assertValidProjectResponse(self, resp, *args, **kwargs): - return self.assertValidResponse( - resp, - 'project', - self.assertValidProject, - *args, - **kwargs) - - def assertValidProject(self, entity, ref=None): - if ref: - self.assertEqual(ref['domain_id'], entity['domain_id']) - return entity - - # user validation - - def assertValidUserListResponse(self, resp, *args, **kwargs): - return self.assertValidListResponse( - resp, - 'users', - self.assertValidUser, - keys_to_check=['name', 'enabled'], - *args, - **kwargs) - - def assertValidUserResponse(self, resp, *args, **kwargs): - return self.assertValidResponse( - resp, - 'user', - self.assertValidUser, - keys_to_check=['name', 'enabled'], - *args, - **kwargs) - - def assertValidUser(self, entity, ref=None): - self.assertIsNotNone(entity.get('domain_id')) - self.assertIsNotNone(entity.get('email')) - self.assertIsNone(entity.get('password')) - self.assertNotIn('tenantId', entity) - if ref: - self.assertEqual(ref['domain_id'], entity['domain_id']) - self.assertEqual(ref['email'], entity['email']) - if 'default_project_id' in ref: - self.assertIsNotNone(ref['default_project_id']) - self.assertEqual(ref['default_project_id'], - entity['default_project_id']) - return entity - - # group validation - - def 
assertValidGroupListResponse(self, resp, *args, **kwargs): - return self.assertValidListResponse( - resp, - 'groups', - self.assertValidGroup, - keys_to_check=['name', 'description', 'domain_id'], - *args, - **kwargs) - - def assertValidGroupResponse(self, resp, *args, **kwargs): - return self.assertValidResponse( - resp, - 'group', - self.assertValidGroup, - keys_to_check=['name', 'description', 'domain_id'], - *args, - **kwargs) - - def assertValidGroup(self, entity, ref=None): - self.assertIsNotNone(entity.get('name')) - if ref: - self.assertEqual(ref['name'], entity['name']) - return entity - - # credential validation - - def assertValidCredentialListResponse(self, resp, *args, **kwargs): - return self.assertValidListResponse( - resp, - 'credentials', - self.assertValidCredential, - keys_to_check=['blob', 'user_id', 'type'], - *args, - **kwargs) - - def assertValidCredentialResponse(self, resp, *args, **kwargs): - return self.assertValidResponse( - resp, - 'credential', - self.assertValidCredential, - keys_to_check=['blob', 'user_id', 'type'], - *args, - **kwargs) - - def assertValidCredential(self, entity, ref=None): - self.assertIsNotNone(entity.get('user_id')) - self.assertIsNotNone(entity.get('blob')) - self.assertIsNotNone(entity.get('type')) - if ref: - self.assertEqual(ref['user_id'], entity['user_id']) - self.assertEqual(ref['blob'], entity['blob']) - self.assertEqual(ref['type'], entity['type']) - self.assertEqual(ref.get('project_id'), entity.get('project_id')) - return entity - - # role validation - - def assertValidRoleListResponse(self, resp, *args, **kwargs): - return self.assertValidListResponse( - resp, - 'roles', - self.assertValidRole, - keys_to_check=['name'], - *args, - **kwargs) - - def assertRoleInListResponse(self, resp, ref, expected=1): - found_count = 0 - for entity in resp.result.get('roles'): - try: - self.assertValidRole(entity, ref=ref) - except Exception: - # It doesn't match, so let's go onto the next one - pass - else: - 
found_count += 1 - self.assertEqual(expected, found_count) - - def assertRoleNotInListResponse(self, resp, ref): - self.assertRoleInListResponse(resp, ref=ref, expected=0) - - def assertValidRoleResponse(self, resp, *args, **kwargs): - return self.assertValidResponse( - resp, - 'role', - self.assertValidRole, - keys_to_check=['name'], - *args, - **kwargs) - - def assertValidRole(self, entity, ref=None): - self.assertIsNotNone(entity.get('name')) - if ref: - self.assertEqual(ref['name'], entity['name']) - self.assertEqual(ref['domain_id'], entity['domain_id']) - return entity - - # role assignment validation - - def assertValidRoleAssignmentListResponse(self, resp, expected_length=None, - resource_url=None): - entities = resp.result.get('role_assignments') - - if expected_length: - self.assertEqual(expected_length, len(entities)) - - # Collections should have relational links - self.assertValidListLinks(resp.result.get('links'), - resource_url=resource_url) - - for entity in entities: - self.assertIsNotNone(entity) - self.assertValidRoleAssignment(entity) - return entities - - def assertValidRoleAssignment(self, entity, ref=None): - # A role should be present - self.assertIsNotNone(entity.get('role')) - self.assertIsNotNone(entity['role'].get('id')) - - # Only one of user or group should be present - if entity.get('user'): - self.assertIsNone(entity.get('group')) - self.assertIsNotNone(entity['user'].get('id')) - else: - self.assertIsNotNone(entity.get('group')) - self.assertIsNotNone(entity['group'].get('id')) - - # A scope should be present and have only one of domain or project - self.assertIsNotNone(entity.get('scope')) - - if entity['scope'].get('project'): - self.assertIsNone(entity['scope'].get('domain')) - self.assertIsNotNone(entity['scope']['project'].get('id')) - else: - self.assertIsNotNone(entity['scope'].get('domain')) - self.assertIsNotNone(entity['scope']['domain'].get('id')) - - # An assignment link should be present - 
self.assertIsNotNone(entity.get('links')) - self.assertIsNotNone(entity['links'].get('assignment')) - - if ref: - links = ref.pop('links') - try: - self.assertDictContainsSubset(ref, entity) - self.assertIn(links['assignment'], - entity['links']['assignment']) - finally: - if links: - ref['links'] = links - - def assertRoleAssignmentInListResponse(self, resp, ref, expected=1): - - found_count = 0 - for entity in resp.result.get('role_assignments'): - try: - self.assertValidRoleAssignment(entity, ref=ref) - except Exception: - # It doesn't match, so let's go onto the next one - pass - else: - found_count += 1 - self.assertEqual(expected, found_count) - - def assertRoleAssignmentNotInListResponse(self, resp, ref): - self.assertRoleAssignmentInListResponse(resp, ref=ref, expected=0) - - # policy validation - - def assertValidPolicyListResponse(self, resp, *args, **kwargs): - return self.assertValidListResponse( - resp, - 'policies', - self.assertValidPolicy, - *args, - **kwargs) - - def assertValidPolicyResponse(self, resp, *args, **kwargs): - return self.assertValidResponse( - resp, - 'policy', - self.assertValidPolicy, - *args, - **kwargs) - - def assertValidPolicy(self, entity, ref=None): - self.assertIsNotNone(entity.get('blob')) - self.assertIsNotNone(entity.get('type')) - if ref: - self.assertEqual(ref['blob'], entity['blob']) - self.assertEqual(ref['type'], entity['type']) - return entity - - # trust validation - - def assertValidTrustListResponse(self, resp, *args, **kwargs): - return self.assertValidListResponse( - resp, - 'trusts', - self.assertValidTrustSummary, - keys_to_check=['trustor_user_id', - 'trustee_user_id', - 'impersonation'], - *args, - **kwargs) - - def assertValidTrustResponse(self, resp, *args, **kwargs): - return self.assertValidResponse( - resp, - 'trust', - self.assertValidTrust, - keys_to_check=['trustor_user_id', - 'trustee_user_id', - 'impersonation'], - *args, - **kwargs) - - def assertValidTrustSummary(self, entity, ref=None): - 
return self.assertValidTrust(entity, ref, summary=True) - - def assertValidTrust(self, entity, ref=None, summary=False): - self.assertIsNotNone(entity.get('trustor_user_id')) - self.assertIsNotNone(entity.get('trustee_user_id')) - self.assertIsNotNone(entity.get('impersonation')) - - self.assertIn('expires_at', entity) - if entity['expires_at'] is not None: - self.assertValidISO8601ExtendedFormatDatetime(entity['expires_at']) - - if summary: - # Trust list contains no roles, but getting a specific - # trust by ID provides the detailed response containing roles - self.assertNotIn('roles', entity) - self.assertIn('project_id', entity) - else: - for role in entity['roles']: - self.assertIsNotNone(role) - self.assertValidEntity(role, keys_to_check=['name']) - self.assertValidRole(role) - - self.assertValidListLinks(entity.get('roles_links')) - - # always disallow role xor project_id (neither or both is allowed) - has_roles = bool(entity.get('roles')) - has_project = bool(entity.get('project_id')) - self.assertFalse(has_roles ^ has_project) - - if ref: - self.assertEqual(ref['trustor_user_id'], entity['trustor_user_id']) - self.assertEqual(ref['trustee_user_id'], entity['trustee_user_id']) - self.assertEqual(ref['project_id'], entity['project_id']) - if entity.get('expires_at') or ref.get('expires_at'): - entity_exp = self.assertValidISO8601ExtendedFormatDatetime( - entity['expires_at']) - ref_exp = self.assertValidISO8601ExtendedFormatDatetime( - ref['expires_at']) - self.assertCloseEnoughForGovernmentWork(entity_exp, ref_exp) - else: - self.assertEqual(ref.get('expires_at'), - entity.get('expires_at')) - - return entity - - # Service providers (federation) - - def assertValidServiceProvider(self, entity, ref=None, *args, **kwargs): - - attributes = frozenset(['auth_url', 'id', 'enabled', 'description', - 'links', 'relay_state_prefix', 'sp_url']) - for attribute in attributes: - self.assertIsNotNone(entity.get(attribute)) - - def 
assertValidServiceProviderListResponse(self, resp, *args, **kwargs): - if kwargs.get('keys_to_check') is None: - kwargs['keys_to_check'] = ['auth_url', 'id', 'enabled', - 'description', 'relay_state_prefix', - 'sp_url'] - return self.assertValidListResponse( - resp, - 'service_providers', - self.assertValidServiceProvider, - *args, - **kwargs) - - def build_external_auth_request(self, remote_user, - remote_domain=None, auth_data=None, - kerberos=False): - context = {'environment': {'REMOTE_USER': remote_user, - 'AUTH_TYPE': 'Negotiate'}} - if remote_domain: - context['environment']['REMOTE_DOMAIN'] = remote_domain - if not auth_data: - auth_data = self.build_authentication_request( - kerberos=kerberos)['auth'] - no_context = None - auth_info = auth.controllers.AuthInfo.create(no_context, auth_data) - auth_context = {'extras': {}, 'method_names': []} - return context, auth_info, auth_context - - -class VersionTestCase(RestfulTestCase): - def test_get_version(self): - pass - - -# NOTE(morganfainberg): To be removed when admin_token_auth is removed. This -# has been split out to allow testing admin_token auth without enabling it -# for other tests. -class AuthContextMiddlewareAdminTokenTestCase(RestfulTestCase): - EXTENSION_TO_ADD = 'admin_token_auth' - - def config_overrides(self): - super(AuthContextMiddlewareAdminTokenTestCase, self).config_overrides() - self.config_fixture.config( - admin_token='ADMIN') - - # NOTE(morganfainberg): This is knowingly copied from below for simplicity - # during the deprecation cycle. 
- def _middleware_request(self, token, extra_environ=None): - - def application(environ, start_response): - body = b'body' - headers = [('Content-Type', 'text/html; charset=utf8'), - ('Content-Length', str(len(body)))] - start_response('200 OK', headers) - return [body] - - app = webtest.TestApp(middleware.AuthContextMiddleware(application), - extra_environ=extra_environ) - resp = app.get('/', headers={middleware.AUTH_TOKEN_HEADER: token}) - self.assertEqual('body', resp.text) # just to make sure it worked - return resp.request - - def test_admin_auth_context(self): - # test to make sure AuthContextMiddleware does not attempt to build the - # auth context if the admin_token middleware indicates it's admin - # already. - token_id = uuid.uuid4().hex # token doesn't matter. - # the admin_token middleware sets is_admin in the context. - extra_environ = {middleware.CONTEXT_ENV: {'is_admin': True}} - req = self._middleware_request(token_id, extra_environ) - auth_context = req.environ.get(authorization.AUTH_CONTEXT_ENV) - self.assertDictEqual({}, auth_context) - - @mock.patch.object(middleware_auth.versionutils, - 'report_deprecated_feature') - def test_admin_token_auth_context_deprecated(self, mock_report_deprecated): - # For backwards compatibility AuthContextMiddleware will check that the - # admin token (as configured in the CONF file) is present and not - # attempt to build the auth context. This is deprecated. 
- req = self._middleware_request('ADMIN') - auth_context = req.environ.get(authorization.AUTH_CONTEXT_ENV) - self.assertDictEqual({}, auth_context) - self.assertEqual(1, mock_report_deprecated.call_count) - - -# NOTE(gyee): test AuthContextMiddleware here instead of test_middleware.py -# because we need the token -class AuthContextMiddlewareTestCase(RestfulTestCase): - - def _middleware_request(self, token, extra_environ=None): - - def application(environ, start_response): - body = b'body' - headers = [('Content-Type', 'text/html; charset=utf8'), - ('Content-Length', str(len(body)))] - start_response('200 OK', headers) - return [body] - - app = webtest.TestApp(middleware.AuthContextMiddleware(application), - extra_environ=extra_environ) - resp = app.get('/', headers={middleware.AUTH_TOKEN_HEADER: token}) - self.assertEqual(b'body', resp.body) # just to make sure it worked - return resp.request - - def test_auth_context_build_by_middleware(self): - # test to make sure AuthContextMiddleware successful build the auth - # context from the incoming auth token - admin_token = self.get_scoped_token() - req = self._middleware_request(admin_token) - self.assertEqual( - self.user['id'], - req.environ.get(authorization.AUTH_CONTEXT_ENV)['user_id']) - - def test_auth_context_override(self): - overridden_context = 'OVERRIDDEN_CONTEXT' - # this token should not be used - token = uuid.uuid4().hex - - extra_environ = {authorization.AUTH_CONTEXT_ENV: overridden_context} - req = self._middleware_request(token, extra_environ=extra_environ) - # make sure overridden context take precedence - self.assertEqual(overridden_context, - req.environ.get(authorization.AUTH_CONTEXT_ENV)) - - def test_unscoped_token_auth_context(self): - unscoped_token = self.get_unscoped_token() - req = self._middleware_request(unscoped_token) - for key in ['project_id', 'domain_id', 'domain_name']: - self.assertNotIn( - key, - req.environ.get(authorization.AUTH_CONTEXT_ENV)) - - def 
test_project_scoped_token_auth_context(self): - project_scoped_token = self.get_scoped_token() - req = self._middleware_request(project_scoped_token) - self.assertEqual( - self.project['id'], - req.environ.get(authorization.AUTH_CONTEXT_ENV)['project_id']) - - def test_domain_scoped_token_auth_context(self): - # grant the domain role to user - path = '/domains/%s/users/%s/roles/%s' % ( - self.domain['id'], self.user['id'], self.role['id']) - self.put(path=path) - - domain_scoped_token = self.get_domain_scoped_token() - req = self._middleware_request(domain_scoped_token) - self.assertEqual( - self.domain['id'], - req.environ.get(authorization.AUTH_CONTEXT_ENV)['domain_id']) - self.assertEqual( - self.domain['name'], - req.environ.get(authorization.AUTH_CONTEXT_ENV)['domain_name']) - - def test_oslo_context(self): - # After AuthContextMiddleware runs, an - # oslo_context.context.RequestContext was created so that its fields - # can be logged. This test validates that the RequestContext was - # created and the fields are set as expected. - - # Use a scoped token so more fields can be set. - token = self.get_scoped_token() - - # oslo_middleware RequestId middleware sets openstack.request_id. - request_id = uuid.uuid4().hex - environ = {'openstack.request_id': request_id} - self._middleware_request(token, extra_environ=environ) - - req_context = oslo_context.context.get_current() - self.assertEqual(request_id, req_context.request_id) - self.assertEqual(token, req_context.auth_token) - self.assertEqual(self.user['id'], req_context.user) - self.assertEqual(self.project['id'], req_context.tenant) - self.assertIsNone(req_context.domain) - self.assertEqual(self.user['domain_id'], req_context.user_domain) - self.assertEqual(self.project['domain_id'], req_context.project_domain) - self.assertFalse(req_context.is_admin) - - -class JsonHomeTestMixin(object): - """JSON Home test - - Mixin this class to provide a test for the JSON-Home response for an - extension. 
- - The base class must set JSON_HOME_DATA to a dict of relationship URLs - (rels) to the JSON-Home data for the relationship. The rels and associated - data must be in the response. - - """ - - def test_get_json_home(self): - resp = self.get('/', convert=False, - headers={'Accept': 'application/json-home'}) - self.assertThat(resp.headers['Content-Type'], - matchers.Equals('application/json-home')) - resp_data = jsonutils.loads(resp.body) - - # Check that the example relationships are present. - for rel in self.JSON_HOME_DATA: - self.assertThat(resp_data['resources'][rel], - matchers.Equals(self.JSON_HOME_DATA[rel])) - - -class AssignmentTestMixin(object): - """To hold assignment helper functions.""" - - def build_role_assignment_query_url(self, effective=False, **filters): - """Build and return a role assignment query url with provided params. - - Available filters are: domain_id, project_id, user_id, group_id, - role_id and inherited_to_projects. - """ - query_params = '?effective' if effective else '' - - for k, v in filters.items(): - query_params += '?' if not query_params else '&' - - if k == 'inherited_to_projects': - query_params += 'scope.OS-INHERIT:inherited_to=projects' - else: - if k in ['domain_id', 'project_id']: - query_params += 'scope.' - elif k not in ['user_id', 'group_id', 'role_id']: - raise ValueError( - 'Invalid key \'%s\' in provided filters.' % k) - - query_params += '%s=%s' % (k.replace('_', '.'), v) - - return '/role_assignments%s' % query_params - - def build_role_assignment_link(self, **attribs): - """Build and return a role assignment link with provided attributes. - - Provided attributes are expected to contain: domain_id or project_id, - user_id or group_id, role_id and, optionally, inherited_to_projects. 
- """ - if attribs.get('domain_id'): - link = '/domains/' + attribs['domain_id'] - else: - link = '/projects/' + attribs['project_id'] - - if attribs.get('user_id'): - link += '/users/' + attribs['user_id'] - else: - link += '/groups/' + attribs['group_id'] - - link += '/roles/' + attribs['role_id'] - - if attribs.get('inherited_to_projects'): - return '/OS-INHERIT%s/inherited_to_projects' % link - - return link - - def build_role_assignment_entity( - self, link=None, prior_role_link=None, **attribs): - """Build and return a role assignment entity with provided attributes. - - Provided attributes are expected to contain: domain_id or project_id, - user_id or group_id, role_id and, optionally, inherited_to_projects. - """ - entity = {'links': {'assignment': ( - link or self.build_role_assignment_link(**attribs))}} - - if attribs.get('domain_id'): - entity['scope'] = {'domain': {'id': attribs['domain_id']}} - else: - entity['scope'] = {'project': {'id': attribs['project_id']}} - - if attribs.get('user_id'): - entity['user'] = {'id': attribs['user_id']} - - if attribs.get('group_id'): - entity['links']['membership'] = ('/groups/%s/users/%s' % - (attribs['group_id'], - attribs['user_id'])) - else: - entity['group'] = {'id': attribs['group_id']} - - entity['role'] = {'id': attribs['role_id']} - - if attribs.get('inherited_to_projects'): - entity['scope']['OS-INHERIT:inherited_to'] = 'projects' - - if prior_role_link: - entity['links']['prior_role'] = prior_role_link - - return entity - - def build_role_assignment_entity_include_names(self, - domain_ref=None, - role_ref=None, - group_ref=None, - user_ref=None, - project_ref=None, - inherited_assignment=None): - """Build and return a role assignment entity with provided attributes. - - The expected attributes are: domain_ref or project_ref, - user_ref or group_ref, role_ref and, optionally, inherited_to_projects. 
- """ - entity = {'links': {}} - attributes_for_links = {} - if project_ref: - dmn_name = self.resource_api.get_domain( - project_ref['domain_id'])['name'] - - entity['scope'] = {'project': { - 'id': project_ref['id'], - 'name': project_ref['name'], - 'domain': { - 'id': project_ref['domain_id'], - 'name': dmn_name}}} - attributes_for_links['project_id'] = project_ref['id'] - else: - entity['scope'] = {'domain': {'id': domain_ref['id'], - 'name': domain_ref['name']}} - attributes_for_links['domain_id'] = domain_ref['id'] - if user_ref: - dmn_name = self.resource_api.get_domain( - user_ref['domain_id'])['name'] - entity['user'] = {'id': user_ref['id'], - 'name': user_ref['name'], - 'domain': {'id': user_ref['domain_id'], - 'name': dmn_name}} - attributes_for_links['user_id'] = user_ref['id'] - else: - dmn_name = self.resource_api.get_domain( - group_ref['domain_id'])['name'] - entity['group'] = {'id': group_ref['id'], - 'name': group_ref['name'], - 'domain': { - 'id': group_ref['domain_id'], - 'name': dmn_name}} - attributes_for_links['group_id'] = group_ref['id'] - - if role_ref: - entity['role'] = {'id': role_ref['id'], - 'name': role_ref['name']} - attributes_for_links['role_id'] = role_ref['id'] - - if inherited_assignment: - entity['scope']['OS-INHERIT:inherited_to'] = 'projects' - attributes_for_links['inherited_to_projects'] = True - - entity['links']['assignment'] = self.build_role_assignment_link( - **attributes_for_links) - - return entity diff --git a/keystone-moon/keystone/tests/unit/test_v3_assignment.py b/keystone-moon/keystone/tests/unit/test_v3_assignment.py deleted file mode 100644 index 86fb9f74..00000000 --- a/keystone-moon/keystone/tests/unit/test_v3_assignment.py +++ /dev/null @@ -1,2871 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import random -import uuid - -from oslo_config import cfg -from six.moves import http_client -from six.moves import range -from testtools import matchers - -from keystone.tests import unit -from keystone.tests.unit import test_v3 - - -CONF = cfg.CONF - - -class AssignmentTestCase(test_v3.RestfulTestCase, - test_v3.AssignmentTestMixin): - """Test roles and role assignments.""" - - def setUp(self): - super(AssignmentTestCase, self).setUp() - - self.group = unit.new_group_ref(domain_id=self.domain_id) - self.group = self.identity_api.create_group(self.group) - self.group_id = self.group['id'] - - # Role CRUD tests - - def test_create_role(self): - """Call ``POST /roles``.""" - ref = unit.new_role_ref() - r = self.post( - '/roles', - body={'role': ref}) - return self.assertValidRoleResponse(r, ref) - - def test_create_role_bad_request(self): - """Call ``POST /roles``.""" - self.post('/roles', body={'role': {}}, - expected_status=http_client.BAD_REQUEST) - - def test_list_roles(self): - """Call ``GET /roles``.""" - resource_url = '/roles' - r = self.get(resource_url) - self.assertValidRoleListResponse(r, ref=self.role, - resource_url=resource_url) - - def test_get_role(self): - """Call ``GET /roles/{role_id}``.""" - r = self.get('/roles/%(role_id)s' % { - 'role_id': self.role_id}) - self.assertValidRoleResponse(r, self.role) - - def test_update_role(self): - """Call ``PATCH /roles/{role_id}``.""" - ref = unit.new_role_ref() - del ref['id'] - r = self.patch('/roles/%(role_id)s' % { - 'role_id': self.role_id}, - body={'role': ref}) - 
self.assertValidRoleResponse(r, ref) - - def test_delete_role(self): - """Call ``DELETE /roles/{role_id}``.""" - self.delete('/roles/%(role_id)s' % { - 'role_id': self.role_id}) - - def test_create_member_role(self): - """Call ``POST /roles``.""" - # specify only the name on creation - ref = unit.new_role_ref(name=CONF.member_role_name) - r = self.post( - '/roles', - body={'role': ref}) - self.assertValidRoleResponse(r, ref) - - # but the ID should be set as defined in CONF - self.assertEqual(CONF.member_role_id, r.json['role']['id']) - - # Role Grants tests - - def test_crud_user_project_role_grants(self): - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - - collection_url = ( - '/projects/%(project_id)s/users/%(user_id)s/roles' % { - 'project_id': self.project['id'], - 'user_id': self.user['id']}) - member_url = '%(collection_url)s/%(role_id)s' % { - 'collection_url': collection_url, - 'role_id': role['id']} - - # There is a role assignment for self.user on self.project - r = self.get(collection_url) - self.assertValidRoleListResponse(r, ref=self.role, - expected_length=1) - - self.put(member_url) - self.head(member_url) - r = self.get(collection_url) - self.assertValidRoleListResponse(r, ref=role, - resource_url=collection_url, - expected_length=2) - - self.delete(member_url) - r = self.get(collection_url) - self.assertValidRoleListResponse(r, ref=self.role, expected_length=1) - self.assertIn(collection_url, r.result['links']['self']) - - def test_crud_user_project_role_grants_no_user(self): - """Grant role on a project to a user that doesn't exist. - - When grant a role on a project to a user that doesn't exist, the server - returns Not Found for the user. 
- - """ - user_id = uuid.uuid4().hex - - collection_url = ( - '/projects/%(project_id)s/users/%(user_id)s/roles' % { - 'project_id': self.project['id'], 'user_id': user_id}) - member_url = '%(collection_url)s/%(role_id)s' % { - 'collection_url': collection_url, - 'role_id': self.role_id} - - self.put(member_url, expected_status=http_client.NOT_FOUND) - - def test_crud_user_domain_role_grants(self): - collection_url = ( - '/domains/%(domain_id)s/users/%(user_id)s/roles' % { - 'domain_id': self.domain_id, - 'user_id': self.user['id']}) - member_url = '%(collection_url)s/%(role_id)s' % { - 'collection_url': collection_url, - 'role_id': self.role_id} - - self.put(member_url) - self.head(member_url) - r = self.get(collection_url) - self.assertValidRoleListResponse(r, ref=self.role, - resource_url=collection_url) - - self.delete(member_url) - r = self.get(collection_url) - self.assertValidRoleListResponse(r, expected_length=0, - resource_url=collection_url) - - def test_crud_user_domain_role_grants_no_user(self): - """Grant role on a domain to a user that doesn't exist. - - When grant a role on a domain to a user that doesn't exist, the server - returns 404 Not Found for the user. 
- - """ - user_id = uuid.uuid4().hex - - collection_url = ( - '/domains/%(domain_id)s/users/%(user_id)s/roles' % { - 'domain_id': self.domain_id, 'user_id': user_id}) - member_url = '%(collection_url)s/%(role_id)s' % { - 'collection_url': collection_url, - 'role_id': self.role_id} - - self.put(member_url, expected_status=http_client.NOT_FOUND) - - def test_crud_group_project_role_grants(self): - collection_url = ( - '/projects/%(project_id)s/groups/%(group_id)s/roles' % { - 'project_id': self.project_id, - 'group_id': self.group_id}) - member_url = '%(collection_url)s/%(role_id)s' % { - 'collection_url': collection_url, - 'role_id': self.role_id} - - self.put(member_url) - self.head(member_url) - r = self.get(collection_url) - self.assertValidRoleListResponse(r, ref=self.role, - resource_url=collection_url) - - self.delete(member_url) - r = self.get(collection_url) - self.assertValidRoleListResponse(r, expected_length=0, - resource_url=collection_url) - - def test_crud_group_project_role_grants_no_group(self): - """Grant role on a project to a group that doesn't exist. - - When grant a role on a project to a group that doesn't exist, the - server returns 404 Not Found for the group. 
- - """ - group_id = uuid.uuid4().hex - - collection_url = ( - '/projects/%(project_id)s/groups/%(group_id)s/roles' % { - 'project_id': self.project_id, - 'group_id': group_id}) - member_url = '%(collection_url)s/%(role_id)s' % { - 'collection_url': collection_url, - 'role_id': self.role_id} - - self.put(member_url, expected_status=http_client.NOT_FOUND) - - def test_crud_group_domain_role_grants(self): - collection_url = ( - '/domains/%(domain_id)s/groups/%(group_id)s/roles' % { - 'domain_id': self.domain_id, - 'group_id': self.group_id}) - member_url = '%(collection_url)s/%(role_id)s' % { - 'collection_url': collection_url, - 'role_id': self.role_id} - - self.put(member_url) - self.head(member_url) - r = self.get(collection_url) - self.assertValidRoleListResponse(r, ref=self.role, - resource_url=collection_url) - - self.delete(member_url) - r = self.get(collection_url) - self.assertValidRoleListResponse(r, expected_length=0, - resource_url=collection_url) - - def test_crud_group_domain_role_grants_no_group(self): - """Grant role on a domain to a group that doesn't exist. - - When grant a role on a domain to a group that doesn't exist, the server - returns 404 Not Found for the group. 
- - """ - group_id = uuid.uuid4().hex - - collection_url = ( - '/domains/%(domain_id)s/groups/%(group_id)s/roles' % { - 'domain_id': self.domain_id, - 'group_id': group_id}) - member_url = '%(collection_url)s/%(role_id)s' % { - 'collection_url': collection_url, - 'role_id': self.role_id} - - self.put(member_url, expected_status=http_client.NOT_FOUND) - - def _create_new_user_and_assign_role_on_project(self): - """Create a new user and assign user a role on a project.""" - # Create a new user - new_user = unit.new_user_ref(domain_id=self.domain_id) - user_ref = self.identity_api.create_user(new_user) - # Assign the user a role on the project - collection_url = ( - '/projects/%(project_id)s/users/%(user_id)s/roles' % { - 'project_id': self.project_id, - 'user_id': user_ref['id']}) - member_url = ('%(collection_url)s/%(role_id)s' % { - 'collection_url': collection_url, - 'role_id': self.role_id}) - self.put(member_url) - # Check the user has the role assigned - self.head(member_url) - return member_url, user_ref - - def test_delete_user_before_removing_role_assignment_succeeds(self): - """Call ``DELETE`` on the user before the role assignment.""" - member_url, user = self._create_new_user_and_assign_role_on_project() - # Delete the user from identity backend - self.identity_api.driver.delete_user(user['id']) - # Clean up the role assignment - self.delete(member_url) - # Make sure the role is gone - self.head(member_url, expected_status=http_client.NOT_FOUND) - - def test_delete_user_and_check_role_assignment_fails(self): - """Call ``DELETE`` on the user and check the role assignment.""" - member_url, user = self._create_new_user_and_assign_role_on_project() - # Delete the user from identity backend - self.identity_api.delete_user(user['id']) - # We should get a 404 Not Found when looking for the user in the - # identity backend because we're not performing a delete operation on - # the role. 
- self.head(member_url, expected_status=http_client.NOT_FOUND) - - def test_token_revoked_once_group_role_grant_revoked(self): - """Test token is revoked when group role grant is revoked - - When a role granted to a group is revoked for a given scope, - all tokens related to this scope and belonging to one of the members - of this group should be revoked. - - The revocation should be independently to the presence - of the revoke API. - """ - # creates grant from group on project. - self.assignment_api.create_grant(role_id=self.role['id'], - project_id=self.project['id'], - group_id=self.group['id']) - - # adds user to the group. - self.identity_api.add_user_to_group(user_id=self.user['id'], - group_id=self.group['id']) - - # creates a token for the user - auth_body = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project['id']) - token_resp = self.post('/auth/tokens', body=auth_body) - token = token_resp.headers.get('x-subject-token') - - # validates the returned token; it should be valid. - self.head('/auth/tokens', - headers={'x-subject-token': token}, - expected_status=http_client.OK) - - # revokes the grant from group on project. - self.assignment_api.delete_grant(role_id=self.role['id'], - project_id=self.project['id'], - group_id=self.group['id']) - - # validates the same token again; it should not longer be valid. 
- self.head('/auth/tokens', - headers={'x-subject-token': token}, - expected_status=http_client.NOT_FOUND) - - @unit.skip_if_cache_disabled('assignment') - def test_delete_grant_from_user_and_project_invalidate_cache(self): - # create a new project - new_project = unit.new_project_ref(domain_id=self.domain_id) - self.resource_api.create_project(new_project['id'], new_project) - - collection_url = ( - '/projects/%(project_id)s/users/%(user_id)s/roles' % { - 'project_id': new_project['id'], - 'user_id': self.user['id']}) - member_url = '%(collection_url)s/%(role_id)s' % { - 'collection_url': collection_url, - 'role_id': self.role_id} - - # create the user a grant on the new project - self.put(member_url) - - # check the grant that was just created - self.head(member_url) - resp = self.get(collection_url) - self.assertValidRoleListResponse(resp, ref=self.role, - resource_url=collection_url) - - # delete the grant - self.delete(member_url) - - # get the collection and ensure there are no roles on the project - resp = self.get(collection_url) - self.assertListEqual(resp.json_body['roles'], []) - - @unit.skip_if_cache_disabled('assignment') - def test_delete_grant_from_user_and_domain_invalidates_cache(self): - # create a new domain - new_domain = unit.new_domain_ref() - self.resource_api.create_domain(new_domain['id'], new_domain) - - collection_url = ( - '/domains/%(domain_id)s/users/%(user_id)s/roles' % { - 'domain_id': new_domain['id'], - 'user_id': self.user['id']}) - member_url = '%(collection_url)s/%(role_id)s' % { - 'collection_url': collection_url, - 'role_id': self.role_id} - - # create the user a grant on the new domain - self.put(member_url) - - # check the grant that was just created - self.head(member_url) - resp = self.get(collection_url) - self.assertValidRoleListResponse(resp, ref=self.role, - resource_url=collection_url) - - # delete the grant - self.delete(member_url) - - # get the collection and ensure there are no roles on the domain - resp = 
self.get(collection_url) - self.assertListEqual(resp.json_body['roles'], []) - - @unit.skip_if_cache_disabled('assignment') - def test_delete_grant_from_group_and_project_invalidates_cache(self): - # create a new project - new_project = unit.new_project_ref(domain_id=self.domain_id) - self.resource_api.create_project(new_project['id'], new_project) - - collection_url = ( - '/projects/%(project_id)s/groups/%(group_id)s/roles' % { - 'project_id': new_project['id'], - 'group_id': self.group['id']}) - member_url = '%(collection_url)s/%(role_id)s' % { - 'collection_url': collection_url, - 'role_id': self.role_id} - - # create the group a grant on the new project - self.put(member_url) - - # check the grant that was just created - self.head(member_url) - resp = self.get(collection_url) - self.assertValidRoleListResponse(resp, ref=self.role, - resource_url=collection_url) - - # delete the grant - self.delete(member_url) - - # get the collection and ensure there are no roles on the project - resp = self.get(collection_url) - self.assertListEqual(resp.json_body['roles'], []) - - @unit.skip_if_cache_disabled('assignment') - def test_delete_grant_from_group_and_domain_invalidates_cache(self): - # create a new domain - new_domain = unit.new_domain_ref() - self.resource_api.create_domain(new_domain['id'], new_domain) - - collection_url = ( - '/domains/%(domain_id)s/groups/%(group_id)s/roles' % { - 'domain_id': new_domain['id'], - 'group_id': self.group['id']}) - member_url = '%(collection_url)s/%(role_id)s' % { - 'collection_url': collection_url, - 'role_id': self.role_id} - - # create the group a grant on the new domain - self.put(member_url) - - # check the grant that was just created - self.head(member_url) - resp = self.get(collection_url) - self.assertValidRoleListResponse(resp, ref=self.role, - resource_url=collection_url) - - # delete the grant - self.delete(member_url) - - # get the collection and ensure there are no roles on the domain - resp = self.get(collection_url) 
- self.assertListEqual(resp.json_body['roles'], []) - - # Role Assignments tests - - def test_get_role_assignments(self): - """Call ``GET /role_assignments``. - - The sample data set up already has a user, group and project - that is part of self.domain. We use these plus a new user - we create as our data set, making sure we ignore any - role assignments that are already in existence. - - Since we don't yet support a first class entity for role - assignments, we are only testing the LIST API. To create - and delete the role assignments we use the old grant APIs. - - Test Plan: - - - Create extra user for tests - - Get a list of all existing role assignments - - Add a new assignment for each of the four combinations, i.e. - group+domain, user+domain, group+project, user+project, using - the same role each time - - Get a new list of all role assignments, checking these four new - ones have been added - - Then delete the four we added - - Get a new list of all role assignments, checking the four have - been removed - - """ - # Since the default fixtures already assign some roles to the - # user it creates, we also need a new user that will not have any - # existing assignments - user1 = unit.new_user_ref(domain_id=self.domain['id']) - user1 = self.identity_api.create_user(user1) - - collection_url = '/role_assignments' - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - resource_url=collection_url) - existing_assignments = len(r.result.get('role_assignments')) - - # Now add one of each of the four types of assignment, making sure - # that we get them all back. 
- gd_entity = self.build_role_assignment_entity(domain_id=self.domain_id, - group_id=self.group_id, - role_id=self.role_id) - self.put(gd_entity['links']['assignment']) - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse( - r, - expected_length=existing_assignments + 1, - resource_url=collection_url) - self.assertRoleAssignmentInListResponse(r, gd_entity) - - ud_entity = self.build_role_assignment_entity(domain_id=self.domain_id, - user_id=user1['id'], - role_id=self.role_id) - self.put(ud_entity['links']['assignment']) - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse( - r, - expected_length=existing_assignments + 2, - resource_url=collection_url) - self.assertRoleAssignmentInListResponse(r, ud_entity) - - gp_entity = self.build_role_assignment_entity( - project_id=self.project_id, group_id=self.group_id, - role_id=self.role_id) - self.put(gp_entity['links']['assignment']) - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse( - r, - expected_length=existing_assignments + 3, - resource_url=collection_url) - self.assertRoleAssignmentInListResponse(r, gp_entity) - - up_entity = self.build_role_assignment_entity( - project_id=self.project_id, user_id=user1['id'], - role_id=self.role_id) - self.put(up_entity['links']['assignment']) - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse( - r, - expected_length=existing_assignments + 4, - resource_url=collection_url) - self.assertRoleAssignmentInListResponse(r, up_entity) - - # Now delete the four we added and make sure they are removed - # from the collection. 
- - self.delete(gd_entity['links']['assignment']) - self.delete(ud_entity['links']['assignment']) - self.delete(gp_entity['links']['assignment']) - self.delete(up_entity['links']['assignment']) - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse( - r, - expected_length=existing_assignments, - resource_url=collection_url) - self.assertRoleAssignmentNotInListResponse(r, gd_entity) - self.assertRoleAssignmentNotInListResponse(r, ud_entity) - self.assertRoleAssignmentNotInListResponse(r, gp_entity) - self.assertRoleAssignmentNotInListResponse(r, up_entity) - - def test_get_effective_role_assignments(self): - """Call ``GET /role_assignments?effective``. - - Test Plan: - - - Create two extra user for tests - - Add these users to a group - - Add a role assignment for the group on a domain - - Get a list of all role assignments, checking one has been added - - Then get a list of all effective role assignments - the group - assignment should have turned into assignments on the domain - for each of the group members. 
- - """ - user1 = unit.create_user(self.identity_api, - domain_id=self.domain['id']) - user2 = unit.create_user(self.identity_api, - domain_id=self.domain['id']) - - self.identity_api.add_user_to_group(user1['id'], self.group['id']) - self.identity_api.add_user_to_group(user2['id'], self.group['id']) - - collection_url = '/role_assignments' - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - resource_url=collection_url) - existing_assignments = len(r.result.get('role_assignments')) - - gd_entity = self.build_role_assignment_entity(domain_id=self.domain_id, - group_id=self.group_id, - role_id=self.role_id) - self.put(gd_entity['links']['assignment']) - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse( - r, - expected_length=existing_assignments + 1, - resource_url=collection_url) - self.assertRoleAssignmentInListResponse(r, gd_entity) - - # Now re-read the collection asking for effective roles - this - # should mean the group assignment is translated into the two - # member user assignments - collection_url = '/role_assignments?effective' - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse( - r, - expected_length=existing_assignments + 2, - resource_url=collection_url) - ud_entity = self.build_role_assignment_entity( - link=gd_entity['links']['assignment'], domain_id=self.domain_id, - user_id=user1['id'], role_id=self.role_id) - self.assertRoleAssignmentInListResponse(r, ud_entity) - ud_entity = self.build_role_assignment_entity( - link=gd_entity['links']['assignment'], domain_id=self.domain_id, - user_id=user2['id'], role_id=self.role_id) - self.assertRoleAssignmentInListResponse(r, ud_entity) - - def test_check_effective_values_for_role_assignments(self): - """Call ``GET /role_assignments?effective=value``. - - Check the various ways of specifying the 'effective' - query parameter. 
If the 'effective' query parameter - is included then this should always be treated as meaning 'True' - unless it is specified as: - - {url}?effective=0 - - This is by design to match the agreed way of handling - policy checking on query/filter parameters. - - Test Plan: - - - Create two extra user for tests - - Add these users to a group - - Add a role assignment for the group on a domain - - Get a list of all role assignments, checking one has been added - - Then issue various request with different ways of defining - the 'effective' query parameter. As we have tested the - correctness of the data coming back when we get effective roles - in other tests, here we just use the count of entities to - know if we are getting effective roles or not - - """ - user1 = unit.create_user(self.identity_api, - domain_id=self.domain['id']) - user2 = unit.create_user(self.identity_api, - domain_id=self.domain['id']) - - self.identity_api.add_user_to_group(user1['id'], self.group['id']) - self.identity_api.add_user_to_group(user2['id'], self.group['id']) - - collection_url = '/role_assignments' - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - resource_url=collection_url) - existing_assignments = len(r.result.get('role_assignments')) - - gd_entity = self.build_role_assignment_entity(domain_id=self.domain_id, - group_id=self.group_id, - role_id=self.role_id) - self.put(gd_entity['links']['assignment']) - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse( - r, - expected_length=existing_assignments + 1, - resource_url=collection_url) - self.assertRoleAssignmentInListResponse(r, gd_entity) - - # Now re-read the collection asking for effective roles, - # using the most common way of defining "effective'. 
This - # should mean the group assignment is translated into the two - # member user assignments - collection_url = '/role_assignments?effective' - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse( - r, - expected_length=existing_assignments + 2, - resource_url=collection_url) - # Now set 'effective' to false explicitly - should get - # back the regular roles - collection_url = '/role_assignments?effective=0' - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse( - r, - expected_length=existing_assignments + 1, - resource_url=collection_url) - # Now try setting 'effective' to 'False' explicitly- this is - # NOT supported as a way of setting a query or filter - # parameter to false by design. Hence we should get back - # effective roles. - collection_url = '/role_assignments?effective=False' - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse( - r, - expected_length=existing_assignments + 2, - resource_url=collection_url) - # Now set 'effective' to True explicitly - collection_url = '/role_assignments?effective=True' - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse( - r, - expected_length=existing_assignments + 2, - resource_url=collection_url) - - def test_filtered_role_assignments(self): - """Call ``GET /role_assignments?filters``. 
- - Test Plan: - - - Create extra users, group, role and project for tests - - Make the following assignments: - Give group1, role1 on project1 and domain - Give user1, role2 on project1 and domain - Make User1 a member of Group1 - - Test a series of single filter list calls, checking that - the correct results are obtained - - Test a multi-filtered list call - - Test listing all effective roles for a given user - - Test the equivalent of the list of roles in a project scoped - token (all effective roles for a user on a project) - - """ - # Since the default fixtures already assign some roles to the - # user it creates, we also need a new user that will not have any - # existing assignments - user1 = unit.create_user(self.identity_api, - domain_id=self.domain['id']) - user2 = unit.create_user(self.identity_api, - domain_id=self.domain['id']) - - group1 = unit.new_group_ref(domain_id=self.domain['id']) - group1 = self.identity_api.create_group(group1) - self.identity_api.add_user_to_group(user1['id'], group1['id']) - self.identity_api.add_user_to_group(user2['id'], group1['id']) - project1 = unit.new_project_ref(domain_id=self.domain['id']) - self.resource_api.create_project(project1['id'], project1) - self.role1 = unit.new_role_ref() - self.role_api.create_role(self.role1['id'], self.role1) - self.role2 = unit.new_role_ref() - self.role_api.create_role(self.role2['id'], self.role2) - - # Now add one of each of the four types of assignment - - gd_entity = self.build_role_assignment_entity( - domain_id=self.domain_id, group_id=group1['id'], - role_id=self.role1['id']) - self.put(gd_entity['links']['assignment']) - - ud_entity = self.build_role_assignment_entity(domain_id=self.domain_id, - user_id=user1['id'], - role_id=self.role2['id']) - self.put(ud_entity['links']['assignment']) - - gp_entity = self.build_role_assignment_entity( - project_id=project1['id'], - group_id=group1['id'], - role_id=self.role1['id']) - self.put(gp_entity['links']['assignment']) - - 
up_entity = self.build_role_assignment_entity( - project_id=project1['id'], - user_id=user1['id'], - role_id=self.role2['id']) - self.put(up_entity['links']['assignment']) - - # Now list by various filters to make sure we get back the right ones - - collection_url = ('/role_assignments?scope.project.id=%s' % - project1['id']) - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - expected_length=2, - resource_url=collection_url) - self.assertRoleAssignmentInListResponse(r, up_entity) - self.assertRoleAssignmentInListResponse(r, gp_entity) - - collection_url = ('/role_assignments?scope.domain.id=%s' % - self.domain['id']) - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - expected_length=2, - resource_url=collection_url) - self.assertRoleAssignmentInListResponse(r, ud_entity) - self.assertRoleAssignmentInListResponse(r, gd_entity) - - collection_url = '/role_assignments?user.id=%s' % user1['id'] - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - expected_length=2, - resource_url=collection_url) - self.assertRoleAssignmentInListResponse(r, up_entity) - self.assertRoleAssignmentInListResponse(r, ud_entity) - - collection_url = '/role_assignments?group.id=%s' % group1['id'] - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - expected_length=2, - resource_url=collection_url) - self.assertRoleAssignmentInListResponse(r, gd_entity) - self.assertRoleAssignmentInListResponse(r, gp_entity) - - collection_url = '/role_assignments?role.id=%s' % self.role1['id'] - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - expected_length=2, - resource_url=collection_url) - self.assertRoleAssignmentInListResponse(r, gd_entity) - self.assertRoleAssignmentInListResponse(r, gp_entity) - - # Let's try combining two filers together.... 
- - collection_url = ( - '/role_assignments?user.id=%(user_id)s' - '&scope.project.id=%(project_id)s' % { - 'user_id': user1['id'], - 'project_id': project1['id']}) - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - expected_length=1, - resource_url=collection_url) - self.assertRoleAssignmentInListResponse(r, up_entity) - - # Now for a harder one - filter for user with effective - # roles - this should return role assignment that were directly - # assigned as well as by virtue of group membership - - collection_url = ('/role_assignments?effective&user.id=%s' % - user1['id']) - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - expected_length=4, - resource_url=collection_url) - # Should have the two direct roles... - self.assertRoleAssignmentInListResponse(r, up_entity) - self.assertRoleAssignmentInListResponse(r, ud_entity) - # ...and the two via group membership... - gp1_link = self.build_role_assignment_link( - project_id=project1['id'], - group_id=group1['id'], - role_id=self.role1['id']) - gd1_link = self.build_role_assignment_link(domain_id=self.domain_id, - group_id=group1['id'], - role_id=self.role1['id']) - - up1_entity = self.build_role_assignment_entity( - link=gp1_link, project_id=project1['id'], - user_id=user1['id'], role_id=self.role1['id']) - ud1_entity = self.build_role_assignment_entity( - link=gd1_link, domain_id=self.domain_id, user_id=user1['id'], - role_id=self.role1['id']) - self.assertRoleAssignmentInListResponse(r, up1_entity) - self.assertRoleAssignmentInListResponse(r, ud1_entity) - - # ...and for the grand-daddy of them all, simulate the request - # that would generate the list of effective roles in a project - # scoped token. 
- - collection_url = ( - '/role_assignments?effective&user.id=%(user_id)s' - '&scope.project.id=%(project_id)s' % { - 'user_id': user1['id'], - 'project_id': project1['id']}) - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - expected_length=2, - resource_url=collection_url) - # Should have one direct role and one from group membership... - self.assertRoleAssignmentInListResponse(r, up_entity) - self.assertRoleAssignmentInListResponse(r, up1_entity) - - -class RoleAssignmentBaseTestCase(test_v3.RestfulTestCase, - test_v3.AssignmentTestMixin): - """Base class for testing /v3/role_assignments API behavior.""" - - MAX_HIERARCHY_BREADTH = 3 - MAX_HIERARCHY_DEPTH = CONF.max_project_tree_depth - 1 - - def load_sample_data(self): - """Creates sample data to be used on tests. - - Created data are i) a role and ii) a domain containing: a project - hierarchy and 3 users within 3 groups. - - """ - def create_project_hierarchy(parent_id, depth): - """Creates a random project hierarchy.""" - if depth == 0: - return - - breadth = random.randint(1, self.MAX_HIERARCHY_BREADTH) - - subprojects = [] - for i in range(breadth): - subprojects.append(unit.new_project_ref( - domain_id=self.domain_id, parent_id=parent_id)) - self.resource_api.create_project(subprojects[-1]['id'], - subprojects[-1]) - - new_parent = subprojects[random.randint(0, breadth - 1)] - create_project_hierarchy(new_parent['id'], depth - 1) - - super(RoleAssignmentBaseTestCase, self).load_sample_data() - - # Create a domain - self.domain = unit.new_domain_ref() - self.domain_id = self.domain['id'] - self.resource_api.create_domain(self.domain_id, self.domain) - - # Create a project hierarchy - self.project = unit.new_project_ref(domain_id=self.domain_id) - self.project_id = self.project['id'] - self.resource_api.create_project(self.project_id, self.project) - - # Create a random project hierarchy - create_project_hierarchy(self.project_id, - random.randint(1, self.MAX_HIERARCHY_DEPTH)) 
- - # Create 3 users - self.user_ids = [] - for i in range(3): - user = unit.new_user_ref(domain_id=self.domain_id) - user = self.identity_api.create_user(user) - self.user_ids.append(user['id']) - - # Create 3 groups - self.group_ids = [] - for i in range(3): - group = unit.new_group_ref(domain_id=self.domain_id) - group = self.identity_api.create_group(group) - self.group_ids.append(group['id']) - - # Put 2 members on each group - self.identity_api.add_user_to_group(user_id=self.user_ids[i], - group_id=group['id']) - self.identity_api.add_user_to_group(user_id=self.user_ids[i % 2], - group_id=group['id']) - - self.assignment_api.create_grant(user_id=self.user_id, - project_id=self.project_id, - role_id=self.role_id) - - # Create a role - self.role = unit.new_role_ref() - self.role_id = self.role['id'] - self.role_api.create_role(self.role_id, self.role) - - # Set default user and group to be used on tests - self.default_user_id = self.user_ids[0] - self.default_group_id = self.group_ids[0] - - def get_role_assignments(self, expected_status=http_client.OK, **filters): - """Returns the result from querying role assignment API + queried URL. - - Calls GET /v3/role_assignments? and returns its result, where - is the HTTP query parameters form of effective option plus - filters, if provided. Queried URL is returned as well. - - :returns: a tuple containing the list role assignments API response and - queried URL. - - """ - query_url = self._get_role_assignments_query_url(**filters) - response = self.get(query_url, expected_status=expected_status) - - return (response, query_url) - - def _get_role_assignments_query_url(self, **filters): - """Returns non-effective role assignments query URL from given filters. - - :param filters: query parameters are created with the provided filters - on role assignments attributes. Valid filters are: - role_id, domain_id, project_id, group_id, user_id and - inherited_to_projects. - - :returns: role assignments query URL. 
- - """ - return self.build_role_assignment_query_url(**filters) - - -class RoleAssignmentFailureTestCase(RoleAssignmentBaseTestCase): - """Class for testing invalid query params on /v3/role_assignments API. - - Querying domain and project, or user and group results in a HTTP 400 Bad - Request, since a role assignment must contain only a single pair of (actor, - target). In addition, since filtering on role assignments applies only to - the final result, effective mode cannot be combined with i) group or ii) - domain and inherited, because it would always result in an empty list. - - """ - - def test_get_role_assignments_by_domain_and_project(self): - self.get_role_assignments(domain_id=self.domain_id, - project_id=self.project_id, - expected_status=http_client.BAD_REQUEST) - - def test_get_role_assignments_by_user_and_group(self): - self.get_role_assignments(user_id=self.default_user_id, - group_id=self.default_group_id, - expected_status=http_client.BAD_REQUEST) - - def test_get_role_assignments_by_effective_and_inherited(self): - self.config_fixture.config(group='os_inherit', enabled=True) - - self.get_role_assignments(domain_id=self.domain_id, effective=True, - inherited_to_projects=True, - expected_status=http_client.BAD_REQUEST) - - def test_get_role_assignments_by_effective_and_group(self): - self.get_role_assignments(effective=True, - group_id=self.default_group_id, - expected_status=http_client.BAD_REQUEST) - - -class RoleAssignmentDirectTestCase(RoleAssignmentBaseTestCase): - """Class for testing direct assignments on /v3/role_assignments API. - - Direct assignments on a domain or project have effect on them directly, - instead of on their project hierarchy, i.e they are non-inherited. In - addition, group direct assignments are not expanded to group's users. - - Tests on this class make assertions on the representation and API filtering - of direct assignments. 
- - """ - - def _test_get_role_assignments(self, **filters): - """Generic filtering test method. - - According to the provided filters, this method: - - creates a new role assignment; - - asserts that list role assignments API reponds correctly; - - deletes the created role assignment. - - :param filters: filters to be considered when listing role assignments. - Valid filters are: role_id, domain_id, project_id, - group_id, user_id and inherited_to_projects. - - """ - # Fills default assignment with provided filters - test_assignment = self._set_default_assignment_attributes(**filters) - - # Create new role assignment for this test - self.assignment_api.create_grant(**test_assignment) - - # Get expected role assignments - expected_assignments = self._list_expected_role_assignments( - **test_assignment) - - # Get role assignments from API - response, query_url = self.get_role_assignments(**test_assignment) - self.assertValidRoleAssignmentListResponse(response, - resource_url=query_url) - self.assertEqual(len(expected_assignments), - len(response.result.get('role_assignments'))) - - # Assert that expected role assignments were returned by the API call - for assignment in expected_assignments: - self.assertRoleAssignmentInListResponse(response, assignment) - - # Delete created role assignment - self.assignment_api.delete_grant(**test_assignment) - - def _set_default_assignment_attributes(self, **attribs): - """Inserts default values for missing attributes of role assignment. - - If no actor, target or role are provided, they will default to values - from sample data. - - :param attribs: info from a role assignment entity. Valid attributes - are: role_id, domain_id, project_id, group_id, user_id - and inherited_to_projects. 
- - """ - if not any(target in attribs - for target in ('domain_id', 'projects_id')): - attribs['project_id'] = self.project_id - - if not any(actor in attribs for actor in ('user_id', 'group_id')): - attribs['user_id'] = self.default_user_id - - if 'role_id' not in attribs: - attribs['role_id'] = self.role_id - - return attribs - - def _list_expected_role_assignments(self, **filters): - """Given the filters, it returns expected direct role assignments. - - :param filters: filters that will be considered when listing role - assignments. Valid filters are: role_id, domain_id, - project_id, group_id, user_id and - inherited_to_projects. - - :returns: the list of the expected role assignments. - - """ - return [self.build_role_assignment_entity(**filters)] - - # Test cases below call the generic test method, providing different filter - # combinations. Filters are provided as specified in the method name, after - # 'by'. For example, test_get_role_assignments_by_project_user_and_role - # calls the generic test method with project_id, user_id and role_id. 
- - def test_get_role_assignments_by_domain(self, **filters): - self._test_get_role_assignments(domain_id=self.domain_id, **filters) - - def test_get_role_assignments_by_project(self, **filters): - self._test_get_role_assignments(project_id=self.project_id, **filters) - - def test_get_role_assignments_by_user(self, **filters): - self._test_get_role_assignments(user_id=self.default_user_id, - **filters) - - def test_get_role_assignments_by_group(self, **filters): - self._test_get_role_assignments(group_id=self.default_group_id, - **filters) - - def test_get_role_assignments_by_role(self, **filters): - self._test_get_role_assignments(role_id=self.role_id, **filters) - - def test_get_role_assignments_by_domain_and_user(self, **filters): - self.test_get_role_assignments_by_domain(user_id=self.default_user_id, - **filters) - - def test_get_role_assignments_by_domain_and_group(self, **filters): - self.test_get_role_assignments_by_domain( - group_id=self.default_group_id, **filters) - - def test_get_role_assignments_by_project_and_user(self, **filters): - self.test_get_role_assignments_by_project(user_id=self.default_user_id, - **filters) - - def test_get_role_assignments_by_project_and_group(self, **filters): - self.test_get_role_assignments_by_project( - group_id=self.default_group_id, **filters) - - def test_get_role_assignments_by_domain_user_and_role(self, **filters): - self.test_get_role_assignments_by_domain_and_user(role_id=self.role_id, - **filters) - - def test_get_role_assignments_by_domain_group_and_role(self, **filters): - self.test_get_role_assignments_by_domain_and_group( - role_id=self.role_id, **filters) - - def test_get_role_assignments_by_project_user_and_role(self, **filters): - self.test_get_role_assignments_by_project_and_user( - role_id=self.role_id, **filters) - - def test_get_role_assignments_by_project_group_and_role(self, **filters): - self.test_get_role_assignments_by_project_and_group( - role_id=self.role_id, **filters) - - -class 
RoleAssignmentInheritedTestCase(RoleAssignmentDirectTestCase): - """Class for testing inherited assignments on /v3/role_assignments API. - - Inherited assignments on a domain or project have no effect on them - directly, but on the projects under them instead. - - Tests on this class do not make assertions on the effect of inherited - assignments, but in their representation and API filtering. - - """ - - def config_overrides(self): - super(RoleAssignmentBaseTestCase, self).config_overrides() - self.config_fixture.config(group='os_inherit', enabled=True) - - def _test_get_role_assignments(self, **filters): - """Adds inherited_to_project filter to expected entity in tests.""" - super(RoleAssignmentInheritedTestCase, - self)._test_get_role_assignments(inherited_to_projects=True, - **filters) - - -class RoleAssignmentEffectiveTestCase(RoleAssignmentInheritedTestCase): - """Class for testing inheritance effects on /v3/role_assignments API. - - Inherited assignments on a domain or project have no effect on them - directly, but on the projects under them instead. - - Tests on this class make assertions on the effect of inherited assignments - and API filtering. - - """ - - def _get_role_assignments_query_url(self, **filters): - """Returns effective role assignments query URL from given filters. - - For test methods in this class, effetive will always be true. As in - effective mode, inherited_to_projects, group_id, domain_id and - project_id will always be desconsidered from provided filters. - - :param filters: query parameters are created with the provided filters. - Valid filters are: role_id, domain_id, project_id, - group_id, user_id and inherited_to_projects. - - :returns: role assignments query URL. 
- - """ - query_filters = filters.copy() - query_filters.pop('inherited_to_projects') - - query_filters.pop('group_id', None) - query_filters.pop('domain_id', None) - query_filters.pop('project_id', None) - - return self.build_role_assignment_query_url(effective=True, - **query_filters) - - def _list_expected_role_assignments(self, **filters): - """Given the filters, it returns expected direct role assignments. - - :param filters: filters that will be considered when listing role - assignments. Valid filters are: role_id, domain_id, - project_id, group_id, user_id and - inherited_to_projects. - - :returns: the list of the expected role assignments. - - """ - # Get assignment link, to be put on 'links': {'assignment': link} - assignment_link = self.build_role_assignment_link(**filters) - - # Expand group membership - user_ids = [None] - if filters.get('group_id'): - user_ids = [user['id'] for user in - self.identity_api.list_users_in_group( - filters['group_id'])] - else: - user_ids = [self.default_user_id] - - # Expand role inheritance - project_ids = [None] - if filters.get('domain_id'): - project_ids = [project['id'] for project in - self.resource_api.list_projects_in_domain( - filters.pop('domain_id'))] - else: - project_ids = [project['id'] for project in - self.resource_api.list_projects_in_subtree( - self.project_id)] - - # Compute expected role assignments - assignments = [] - for project_id in project_ids: - filters['project_id'] = project_id - for user_id in user_ids: - filters['user_id'] = user_id - assignments.append(self.build_role_assignment_entity( - link=assignment_link, **filters)) - - return assignments - - -class AssignmentInheritanceTestCase(test_v3.RestfulTestCase, - test_v3.AssignmentTestMixin): - """Test inheritance crud and its effects.""" - - def config_overrides(self): - super(AssignmentInheritanceTestCase, self).config_overrides() - self.config_fixture.config(group='os_inherit', enabled=True) - - def 
test_get_token_from_inherited_user_domain_role_grants(self): - # Create a new user to ensure that no grant is loaded from sample data - user = unit.create_user(self.identity_api, domain_id=self.domain_id) - - # Define domain and project authentication data - domain_auth_data = self.build_authentication_request( - user_id=user['id'], - password=user['password'], - domain_id=self.domain_id) - project_auth_data = self.build_authentication_request( - user_id=user['id'], - password=user['password'], - project_id=self.project_id) - - # Check the user cannot get a domain nor a project token - self.v3_create_token(domain_auth_data, - expected_status=http_client.UNAUTHORIZED) - self.v3_create_token(project_auth_data, - expected_status=http_client.UNAUTHORIZED) - - # Grant non-inherited role for user on domain - non_inher_ud_link = self.build_role_assignment_link( - domain_id=self.domain_id, user_id=user['id'], role_id=self.role_id) - self.put(non_inher_ud_link) - - # Check the user can get only a domain token - self.v3_create_token(domain_auth_data) - self.v3_create_token(project_auth_data, - expected_status=http_client.UNAUTHORIZED) - - # Create inherited role - inherited_role = unit.new_role_ref(name='inherited') - self.role_api.create_role(inherited_role['id'], inherited_role) - - # Grant inherited role for user on domain - inher_ud_link = self.build_role_assignment_link( - domain_id=self.domain_id, user_id=user['id'], - role_id=inherited_role['id'], inherited_to_projects=True) - self.put(inher_ud_link) - - # Check the user can get both a domain and a project token - self.v3_create_token(domain_auth_data) - self.v3_create_token(project_auth_data) - - # Delete inherited grant - self.delete(inher_ud_link) - - # Check the user can only get a domain token - self.v3_create_token(domain_auth_data) - self.v3_create_token(project_auth_data, - expected_status=http_client.UNAUTHORIZED) - - # Delete non-inherited grant - self.delete(non_inher_ud_link) - - # Check the user cannot 
get a domain token anymore - self.v3_create_token(domain_auth_data, - expected_status=http_client.UNAUTHORIZED) - - def test_get_token_from_inherited_group_domain_role_grants(self): - # Create a new group and put a new user in it to - # ensure that no grant is loaded from sample data - user = unit.create_user(self.identity_api, domain_id=self.domain_id) - - group = unit.new_group_ref(domain_id=self.domain['id']) - group = self.identity_api.create_group(group) - self.identity_api.add_user_to_group(user['id'], group['id']) - - # Define domain and project authentication data - domain_auth_data = self.build_authentication_request( - user_id=user['id'], - password=user['password'], - domain_id=self.domain_id) - project_auth_data = self.build_authentication_request( - user_id=user['id'], - password=user['password'], - project_id=self.project_id) - - # Check the user cannot get a domain nor a project token - self.v3_create_token(domain_auth_data, - expected_status=http_client.UNAUTHORIZED) - self.v3_create_token(project_auth_data, - expected_status=http_client.UNAUTHORIZED) - - # Grant non-inherited role for user on domain - non_inher_gd_link = self.build_role_assignment_link( - domain_id=self.domain_id, user_id=user['id'], role_id=self.role_id) - self.put(non_inher_gd_link) - - # Check the user can get only a domain token - self.v3_create_token(domain_auth_data) - self.v3_create_token(project_auth_data, - expected_status=http_client.UNAUTHORIZED) - - # Create inherited role - inherited_role = unit.new_role_ref(name='inherited') - self.role_api.create_role(inherited_role['id'], inherited_role) - - # Grant inherited role for user on domain - inher_gd_link = self.build_role_assignment_link( - domain_id=self.domain_id, user_id=user['id'], - role_id=inherited_role['id'], inherited_to_projects=True) - self.put(inher_gd_link) - - # Check the user can get both a domain and a project token - self.v3_create_token(domain_auth_data) - self.v3_create_token(project_auth_data) - - # 
Delete inherited grant - self.delete(inher_gd_link) - - # Check the user can only get a domain token - self.v3_create_token(domain_auth_data) - self.v3_create_token(project_auth_data, - expected_status=http_client.UNAUTHORIZED) - - # Delete non-inherited grant - self.delete(non_inher_gd_link) - - # Check the user cannot get a domain token anymore - self.v3_create_token(domain_auth_data, - expected_status=http_client.UNAUTHORIZED) - - def _test_crud_inherited_and_direct_assignment_on_target(self, target_url): - # Create a new role to avoid assignments loaded from sample data - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - - # Define URLs - direct_url = '%s/users/%s/roles/%s' % ( - target_url, self.user_id, role['id']) - inherited_url = '/OS-INHERIT/%s/inherited_to_projects' % direct_url - - # Create the direct assignment - self.put(direct_url) - # Check the direct assignment exists, but the inherited one does not - self.head(direct_url) - self.head(inherited_url, expected_status=http_client.NOT_FOUND) - - # Now add the inherited assignment - self.put(inherited_url) - # Check both the direct and inherited assignment exist - self.head(direct_url) - self.head(inherited_url) - - # Delete indirect assignment - self.delete(inherited_url) - # Check the direct assignment exists, but the inherited one does not - self.head(direct_url) - self.head(inherited_url, expected_status=http_client.NOT_FOUND) - - # Now delete the inherited assignment - self.delete(direct_url) - # Check that none of them exist - self.head(direct_url, expected_status=http_client.NOT_FOUND) - self.head(inherited_url, expected_status=http_client.NOT_FOUND) - - def test_crud_inherited_and_direct_assignment_on_domains(self): - self._test_crud_inherited_and_direct_assignment_on_target( - '/domains/%s' % self.domain_id) - - def test_crud_inherited_and_direct_assignment_on_projects(self): - self._test_crud_inherited_and_direct_assignment_on_target( - '/projects/%s' % 
self.project_id) - - def test_crud_user_inherited_domain_role_grants(self): - role_list = [] - for _ in range(2): - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - role_list.append(role) - - # Create a non-inherited role as a spoiler - self.assignment_api.create_grant( - role_list[1]['id'], user_id=self.user['id'], - domain_id=self.domain_id) - - base_collection_url = ( - '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % { - 'domain_id': self.domain_id, - 'user_id': self.user['id']}) - member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % { - 'collection_url': base_collection_url, - 'role_id': role_list[0]['id']} - collection_url = base_collection_url + '/inherited_to_projects' - - self.put(member_url) - - # Check we can read it back - self.head(member_url) - r = self.get(collection_url) - self.assertValidRoleListResponse(r, ref=role_list[0], - resource_url=collection_url) - - # Now delete and check its gone - self.delete(member_url) - r = self.get(collection_url) - self.assertValidRoleListResponse(r, expected_length=0, - resource_url=collection_url) - - def test_list_role_assignments_for_inherited_domain_grants(self): - """Call ``GET /role_assignments with inherited domain grants``. - - Test Plan: - - - Create 4 roles - - Create a domain with a user and two projects - - Assign two direct roles to project1 - - Assign a spoiler role to project2 - - Issue the URL to add inherited role to the domain - - Issue the URL to check it is indeed on the domain - - Issue the URL to check effective roles on project1 - this - should return 3 roles. 
- - """ - role_list = [] - for _ in range(4): - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - role_list.append(role) - - domain = unit.new_domain_ref() - self.resource_api.create_domain(domain['id'], domain) - user1 = unit.create_user(self.identity_api, domain_id=domain['id']) - project1 = unit.new_project_ref(domain_id=domain['id']) - self.resource_api.create_project(project1['id'], project1) - project2 = unit.new_project_ref(domain_id=domain['id']) - self.resource_api.create_project(project2['id'], project2) - # Add some roles to the project - self.assignment_api.add_role_to_user_and_project( - user1['id'], project1['id'], role_list[0]['id']) - self.assignment_api.add_role_to_user_and_project( - user1['id'], project1['id'], role_list[1]['id']) - # ..and one on a different project as a spoiler - self.assignment_api.add_role_to_user_and_project( - user1['id'], project2['id'], role_list[2]['id']) - - # Now create our inherited role on the domain - base_collection_url = ( - '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % { - 'domain_id': domain['id'], - 'user_id': user1['id']}) - member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % { - 'collection_url': base_collection_url, - 'role_id': role_list[3]['id']} - collection_url = base_collection_url + '/inherited_to_projects' - - self.put(member_url) - self.head(member_url) - r = self.get(collection_url) - self.assertValidRoleListResponse(r, ref=role_list[3], - resource_url=collection_url) - - # Now use the list domain role assignments api to check if this - # is included - collection_url = ( - '/role_assignments?user.id=%(user_id)s' - '&scope.domain.id=%(domain_id)s' % { - 'user_id': user1['id'], - 'domain_id': domain['id']}) - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - expected_length=1, - resource_url=collection_url) - ud_entity = self.build_role_assignment_entity( - domain_id=domain['id'], user_id=user1['id'], - 
role_id=role_list[3]['id'], inherited_to_projects=True) - self.assertRoleAssignmentInListResponse(r, ud_entity) - - # Now ask for effective list role assignments - the role should - # turn into a project role, along with the two direct roles that are - # on the project - collection_url = ( - '/role_assignments?effective&user.id=%(user_id)s' - '&scope.project.id=%(project_id)s' % { - 'user_id': user1['id'], - 'project_id': project1['id']}) - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - expected_length=3, - resource_url=collection_url) - # An effective role for an inherited role will be a project - # entity, with a domain link to the inherited assignment - ud_url = self.build_role_assignment_link( - domain_id=domain['id'], user_id=user1['id'], - role_id=role_list[3]['id'], inherited_to_projects=True) - up_entity = self.build_role_assignment_entity( - link=ud_url, project_id=project1['id'], - user_id=user1['id'], role_id=role_list[3]['id'], - inherited_to_projects=True) - self.assertRoleAssignmentInListResponse(r, up_entity) - - def test_list_role_assignments_include_names(self): - """Call ``GET /role_assignments with include names``. 
- - Test Plan: - - - Create a domain with a group and a user - - Create a project with a group and a user - - """ - role1 = unit.new_role_ref() - self.role_api.create_role(role1['id'], role1) - user1 = unit.create_user(self.identity_api, domain_id=self.domain_id) - group = unit.new_group_ref(domain_id=self.domain_id) - group = self.identity_api.create_group(group) - project1 = unit.new_project_ref(domain_id=self.domain_id) - self.resource_api.create_project(project1['id'], project1) - - expected_entity1 = self.build_role_assignment_entity_include_names( - role_ref=role1, - project_ref=project1, - user_ref=user1) - self.put(expected_entity1['links']['assignment']) - expected_entity2 = self.build_role_assignment_entity_include_names( - role_ref=role1, - domain_ref=self.domain, - group_ref=group) - self.put(expected_entity2['links']['assignment']) - expected_entity3 = self.build_role_assignment_entity_include_names( - role_ref=role1, - domain_ref=self.domain, - user_ref=user1) - self.put(expected_entity3['links']['assignment']) - expected_entity4 = self.build_role_assignment_entity_include_names( - role_ref=role1, - project_ref=project1, - group_ref=group) - self.put(expected_entity4['links']['assignment']) - - collection_url_domain = ( - '/role_assignments?include_names&scope.domain.id=%(domain_id)s' % { - 'domain_id': self.domain_id}) - rs_domain = self.get(collection_url_domain) - collection_url_project = ( - '/role_assignments?include_names&' - 'scope.project.id=%(project_id)s' % { - 'project_id': project1['id']}) - rs_project = self.get(collection_url_project) - collection_url_group = ( - '/role_assignments?include_names&group.id=%(group_id)s' % { - 'group_id': group['id']}) - rs_group = self.get(collection_url_group) - collection_url_user = ( - '/role_assignments?include_names&user.id=%(user_id)s' % { - 'user_id': user1['id']}) - rs_user = self.get(collection_url_user) - collection_url_role = ( - '/role_assignments?include_names&role.id=%(role_id)s' % { - 
'role_id': role1['id']}) - rs_role = self.get(collection_url_role) - # Make sure all entities were created successfully - self.assertEqual(rs_domain.status_int, http_client.OK) - self.assertEqual(rs_project.status_int, http_client.OK) - self.assertEqual(rs_group.status_int, http_client.OK) - self.assertEqual(rs_user.status_int, http_client.OK) - # Make sure we can get back the correct number of entities - self.assertValidRoleAssignmentListResponse( - rs_domain, - expected_length=2, - resource_url=collection_url_domain) - self.assertValidRoleAssignmentListResponse( - rs_project, - expected_length=2, - resource_url=collection_url_project) - self.assertValidRoleAssignmentListResponse( - rs_group, - expected_length=2, - resource_url=collection_url_group) - self.assertValidRoleAssignmentListResponse( - rs_user, - expected_length=2, - resource_url=collection_url_user) - self.assertValidRoleAssignmentListResponse( - rs_role, - expected_length=4, - resource_url=collection_url_role) - # Verify all types of entities have the correct format - self.assertRoleAssignmentInListResponse(rs_domain, expected_entity2) - self.assertRoleAssignmentInListResponse(rs_project, expected_entity1) - self.assertRoleAssignmentInListResponse(rs_group, expected_entity4) - self.assertRoleAssignmentInListResponse(rs_user, expected_entity3) - self.assertRoleAssignmentInListResponse(rs_role, expected_entity1) - - def test_list_role_assignments_for_disabled_inheritance_extension(self): - """Call ``GET /role_assignments with inherited domain grants``. - - Test Plan: - - - Issue the URL to add inherited role to the domain - - Issue the URL to check effective roles on project include the - inherited role - - Disable the extension - - Re-check the effective roles, proving the inherited role no longer - shows up. 
- - """ - role_list = [] - for _ in range(4): - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - role_list.append(role) - - domain = unit.new_domain_ref() - self.resource_api.create_domain(domain['id'], domain) - user1 = unit.create_user(self.identity_api, domain_id=domain['id']) - project1 = unit.new_project_ref(domain_id=domain['id']) - self.resource_api.create_project(project1['id'], project1) - project2 = unit.new_project_ref(domain_id=domain['id']) - self.resource_api.create_project(project2['id'], project2) - # Add some roles to the project - self.assignment_api.add_role_to_user_and_project( - user1['id'], project1['id'], role_list[0]['id']) - self.assignment_api.add_role_to_user_and_project( - user1['id'], project1['id'], role_list[1]['id']) - # ..and one on a different project as a spoiler - self.assignment_api.add_role_to_user_and_project( - user1['id'], project2['id'], role_list[2]['id']) - - # Now create our inherited role on the domain - base_collection_url = ( - '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % { - 'domain_id': domain['id'], - 'user_id': user1['id']}) - member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % { - 'collection_url': base_collection_url, - 'role_id': role_list[3]['id']} - collection_url = base_collection_url + '/inherited_to_projects' - - self.put(member_url) - self.head(member_url) - r = self.get(collection_url) - self.assertValidRoleListResponse(r, ref=role_list[3], - resource_url=collection_url) - - # Get effective list role assignments - the role should - # turn into a project role, along with the two direct roles that are - # on the project - collection_url = ( - '/role_assignments?effective&user.id=%(user_id)s' - '&scope.project.id=%(project_id)s' % { - 'user_id': user1['id'], - 'project_id': project1['id']}) - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - expected_length=3, - resource_url=collection_url) - - ud_url = 
self.build_role_assignment_link( - domain_id=domain['id'], user_id=user1['id'], - role_id=role_list[3]['id'], inherited_to_projects=True) - up_entity = self.build_role_assignment_entity( - link=ud_url, project_id=project1['id'], - user_id=user1['id'], role_id=role_list[3]['id'], - inherited_to_projects=True) - - self.assertRoleAssignmentInListResponse(r, up_entity) - - # Disable the extension and re-check the list, the role inherited - # from the project should no longer show up - self.config_fixture.config(group='os_inherit', enabled=False) - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - expected_length=2, - resource_url=collection_url) - - self.assertRoleAssignmentNotInListResponse(r, up_entity) - - def test_list_role_assignments_for_inherited_group_domain_grants(self): - """Call ``GET /role_assignments with inherited group domain grants``. - - Test Plan: - - - Create 4 roles - - Create a domain with a user and two projects - - Assign two direct roles to project1 - - Assign a spoiler role to project2 - - Issue the URL to add inherited role to the domain - - Issue the URL to check it is indeed on the domain - - Issue the URL to check effective roles on project1 - this - should return 3 roles. 
- - """ - role_list = [] - for _ in range(4): - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - role_list.append(role) - - domain = unit.new_domain_ref() - self.resource_api.create_domain(domain['id'], domain) - user1 = unit.create_user(self.identity_api, domain_id=domain['id']) - user2 = unit.create_user(self.identity_api, domain_id=domain['id']) - group1 = unit.new_group_ref(domain_id=domain['id']) - group1 = self.identity_api.create_group(group1) - self.identity_api.add_user_to_group(user1['id'], - group1['id']) - self.identity_api.add_user_to_group(user2['id'], - group1['id']) - project1 = unit.new_project_ref(domain_id=domain['id']) - self.resource_api.create_project(project1['id'], project1) - project2 = unit.new_project_ref(domain_id=domain['id']) - self.resource_api.create_project(project2['id'], project2) - # Add some roles to the project - self.assignment_api.add_role_to_user_and_project( - user1['id'], project1['id'], role_list[0]['id']) - self.assignment_api.add_role_to_user_and_project( - user1['id'], project1['id'], role_list[1]['id']) - # ..and one on a different project as a spoiler - self.assignment_api.add_role_to_user_and_project( - user1['id'], project2['id'], role_list[2]['id']) - - # Now create our inherited role on the domain - base_collection_url = ( - '/OS-INHERIT/domains/%(domain_id)s/groups/%(group_id)s/roles' % { - 'domain_id': domain['id'], - 'group_id': group1['id']}) - member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % { - 'collection_url': base_collection_url, - 'role_id': role_list[3]['id']} - collection_url = base_collection_url + '/inherited_to_projects' - - self.put(member_url) - self.head(member_url) - r = self.get(collection_url) - self.assertValidRoleListResponse(r, ref=role_list[3], - resource_url=collection_url) - - # Now use the list domain role assignments api to check if this - # is included - collection_url = ( - '/role_assignments?group.id=%(group_id)s' - 
'&scope.domain.id=%(domain_id)s' % { - 'group_id': group1['id'], - 'domain_id': domain['id']}) - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - expected_length=1, - resource_url=collection_url) - gd_entity = self.build_role_assignment_entity( - domain_id=domain['id'], group_id=group1['id'], - role_id=role_list[3]['id'], inherited_to_projects=True) - self.assertRoleAssignmentInListResponse(r, gd_entity) - - # Now ask for effective list role assignments - the role should - # turn into a user project role, along with the two direct roles - # that are on the project - collection_url = ( - '/role_assignments?effective&user.id=%(user_id)s' - '&scope.project.id=%(project_id)s' % { - 'user_id': user1['id'], - 'project_id': project1['id']}) - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - expected_length=3, - resource_url=collection_url) - # An effective role for an inherited role will be a project - # entity, with a domain link to the inherited assignment - up_entity = self.build_role_assignment_entity( - link=gd_entity['links']['assignment'], project_id=project1['id'], - user_id=user1['id'], role_id=role_list[3]['id'], - inherited_to_projects=True) - self.assertRoleAssignmentInListResponse(r, up_entity) - - def test_filtered_role_assignments_for_inherited_grants(self): - """Call ``GET /role_assignments?scope.OS-INHERIT:inherited_to``. - - Test Plan: - - - Create 5 roles - - Create a domain with a user, group and two projects - - Assign three direct spoiler roles to projects - - Issue the URL to add an inherited user role to the domain - - Issue the URL to add an inherited group role to the domain - - Issue the URL to filter by inherited roles - this should - return just the 2 inherited roles. 
- - """ - role_list = [] - for _ in range(5): - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - role_list.append(role) - - domain = unit.new_domain_ref() - self.resource_api.create_domain(domain['id'], domain) - user1 = unit.create_user(self.identity_api, domain_id=domain['id']) - group1 = unit.new_group_ref(domain_id=domain['id']) - group1 = self.identity_api.create_group(group1) - project1 = unit.new_project_ref(domain_id=domain['id']) - self.resource_api.create_project(project1['id'], project1) - project2 = unit.new_project_ref(domain_id=domain['id']) - self.resource_api.create_project(project2['id'], project2) - # Add some spoiler roles to the projects - self.assignment_api.add_role_to_user_and_project( - user1['id'], project1['id'], role_list[0]['id']) - self.assignment_api.add_role_to_user_and_project( - user1['id'], project2['id'], role_list[1]['id']) - # Create a non-inherited role as a spoiler - self.assignment_api.create_grant( - role_list[2]['id'], user_id=user1['id'], domain_id=domain['id']) - - # Now create two inherited roles on the domain, one for a user - # and one for a domain - base_collection_url = ( - '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % { - 'domain_id': domain['id'], - 'user_id': user1['id']}) - member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % { - 'collection_url': base_collection_url, - 'role_id': role_list[3]['id']} - collection_url = base_collection_url + '/inherited_to_projects' - - self.put(member_url) - self.head(member_url) - r = self.get(collection_url) - self.assertValidRoleListResponse(r, ref=role_list[3], - resource_url=collection_url) - - base_collection_url = ( - '/OS-INHERIT/domains/%(domain_id)s/groups/%(group_id)s/roles' % { - 'domain_id': domain['id'], - 'group_id': group1['id']}) - member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % { - 'collection_url': base_collection_url, - 'role_id': role_list[4]['id']} - collection_url = 
base_collection_url + '/inherited_to_projects' - - self.put(member_url) - self.head(member_url) - r = self.get(collection_url) - self.assertValidRoleListResponse(r, ref=role_list[4], - resource_url=collection_url) - - # Now use the list role assignments api to get a list of inherited - # roles on the domain - should get back the two roles - collection_url = ( - '/role_assignments?scope.OS-INHERIT:inherited_to=projects') - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - expected_length=2, - resource_url=collection_url) - ud_entity = self.build_role_assignment_entity( - domain_id=domain['id'], user_id=user1['id'], - role_id=role_list[3]['id'], inherited_to_projects=True) - gd_entity = self.build_role_assignment_entity( - domain_id=domain['id'], group_id=group1['id'], - role_id=role_list[4]['id'], inherited_to_projects=True) - self.assertRoleAssignmentInListResponse(r, ud_entity) - self.assertRoleAssignmentInListResponse(r, gd_entity) - - def _setup_hierarchical_projects_scenario(self): - """Creates basic hierarchical projects scenario. - - This basic scenario contains a root with one leaf project and - two roles with the following names: non-inherited and inherited. 
- - """ - # Create project hierarchy - root = unit.new_project_ref(domain_id=self.domain['id']) - leaf = unit.new_project_ref(domain_id=self.domain['id'], - parent_id=root['id']) - - self.resource_api.create_project(root['id'], root) - self.resource_api.create_project(leaf['id'], leaf) - - # Create 'non-inherited' and 'inherited' roles - non_inherited_role = unit.new_role_ref(name='non-inherited') - self.role_api.create_role(non_inherited_role['id'], non_inherited_role) - inherited_role = unit.new_role_ref(name='inherited') - self.role_api.create_role(inherited_role['id'], inherited_role) - - return (root['id'], leaf['id'], - non_inherited_role['id'], inherited_role['id']) - - def test_get_token_from_inherited_user_project_role_grants(self): - # Create default scenario - root_id, leaf_id, non_inherited_role_id, inherited_role_id = ( - self._setup_hierarchical_projects_scenario()) - - # Define root and leaf projects authentication data - root_project_auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=root_id) - leaf_project_auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=leaf_id) - - # Check the user cannot get a token on root nor leaf project - self.v3_create_token(root_project_auth_data, - expected_status=http_client.UNAUTHORIZED) - self.v3_create_token(leaf_project_auth_data, - expected_status=http_client.UNAUTHORIZED) - - # Grant non-inherited role for user on leaf project - non_inher_up_link = self.build_role_assignment_link( - project_id=leaf_id, user_id=self.user['id'], - role_id=non_inherited_role_id) - self.put(non_inher_up_link) - - # Check the user can only get a token on leaf project - self.v3_create_token(root_project_auth_data, - expected_status=http_client.UNAUTHORIZED) - self.v3_create_token(leaf_project_auth_data) - - # Grant inherited role for user on root project - inher_up_link = 
self.build_role_assignment_link( - project_id=root_id, user_id=self.user['id'], - role_id=inherited_role_id, inherited_to_projects=True) - self.put(inher_up_link) - - # Check the user still can get a token only on leaf project - self.v3_create_token(root_project_auth_data, - expected_status=http_client.UNAUTHORIZED) - self.v3_create_token(leaf_project_auth_data) - - # Delete non-inherited grant - self.delete(non_inher_up_link) - - # Check the inherited role still applies for leaf project - self.v3_create_token(root_project_auth_data, - expected_status=http_client.UNAUTHORIZED) - self.v3_create_token(leaf_project_auth_data) - - # Delete inherited grant - self.delete(inher_up_link) - - # Check the user cannot get a token on leaf project anymore - self.v3_create_token(leaf_project_auth_data, - expected_status=http_client.UNAUTHORIZED) - - def test_get_token_from_inherited_group_project_role_grants(self): - # Create default scenario - root_id, leaf_id, non_inherited_role_id, inherited_role_id = ( - self._setup_hierarchical_projects_scenario()) - - # Create group and add user to it - group = unit.new_group_ref(domain_id=self.domain['id']) - group = self.identity_api.create_group(group) - self.identity_api.add_user_to_group(self.user['id'], group['id']) - - # Define root and leaf projects authentication data - root_project_auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=root_id) - leaf_project_auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=leaf_id) - - # Check the user cannot get a token on root nor leaf project - self.v3_create_token(root_project_auth_data, - expected_status=http_client.UNAUTHORIZED) - self.v3_create_token(leaf_project_auth_data, - expected_status=http_client.UNAUTHORIZED) - - # Grant non-inherited role for group on leaf project - non_inher_gp_link = self.build_role_assignment_link( - project_id=leaf_id, 
group_id=group['id'], - role_id=non_inherited_role_id) - self.put(non_inher_gp_link) - - # Check the user can only get a token on leaf project - self.v3_create_token(root_project_auth_data, - expected_status=http_client.UNAUTHORIZED) - self.v3_create_token(leaf_project_auth_data) - - # Grant inherited role for group on root project - inher_gp_link = self.build_role_assignment_link( - project_id=root_id, group_id=group['id'], - role_id=inherited_role_id, inherited_to_projects=True) - self.put(inher_gp_link) - - # Check the user still can get a token only on leaf project - self.v3_create_token(root_project_auth_data, - expected_status=http_client.UNAUTHORIZED) - self.v3_create_token(leaf_project_auth_data) - - # Delete no-inherited grant - self.delete(non_inher_gp_link) - - # Check the inherited role still applies for leaf project - self.v3_create_token(leaf_project_auth_data) - - # Delete inherited grant - self.delete(inher_gp_link) - - # Check the user cannot get a token on leaf project anymore - self.v3_create_token(leaf_project_auth_data, - expected_status=http_client.UNAUTHORIZED) - - def test_get_role_assignments_for_project_hierarchy(self): - """Call ``GET /role_assignments``. - - Test Plan: - - - Create 2 roles - - Create a hierarchy of projects with one root and one leaf project - - Issue the URL to add a non-inherited user role to the root project - - Issue the URL to add an inherited user role to the root project - - Issue the URL to get all role assignments - this should return just - 2 roles (non-inherited and inherited) in the root project. 
- - """ - # Create default scenario - root_id, leaf_id, non_inherited_role_id, inherited_role_id = ( - self._setup_hierarchical_projects_scenario()) - - # Grant non-inherited role - non_inher_up_entity = self.build_role_assignment_entity( - project_id=root_id, user_id=self.user['id'], - role_id=non_inherited_role_id) - self.put(non_inher_up_entity['links']['assignment']) - - # Grant inherited role - inher_up_entity = self.build_role_assignment_entity( - project_id=root_id, user_id=self.user['id'], - role_id=inherited_role_id, inherited_to_projects=True) - self.put(inher_up_entity['links']['assignment']) - - # Get role assignments - collection_url = '/role_assignments' - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - resource_url=collection_url) - - # Assert that the user has non-inherited role on root project - self.assertRoleAssignmentInListResponse(r, non_inher_up_entity) - - # Assert that the user has inherited role on root project - self.assertRoleAssignmentInListResponse(r, inher_up_entity) - - # Assert that the user does not have non-inherited role on leaf project - non_inher_up_entity = self.build_role_assignment_entity( - project_id=leaf_id, user_id=self.user['id'], - role_id=non_inherited_role_id) - self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity) - - # Assert that the user does not have inherited role on leaf project - inher_up_entity['scope']['project']['id'] = leaf_id - self.assertRoleAssignmentNotInListResponse(r, inher_up_entity) - - def test_get_effective_role_assignments_for_project_hierarchy(self): - """Call ``GET /role_assignments?effective``. 
- - Test Plan: - - - Create 2 roles - - Create a hierarchy of projects with one root and one leaf project - - Issue the URL to add a non-inherited user role to the root project - - Issue the URL to add an inherited user role to the root project - - Issue the URL to get effective role assignments - this should return - 1 role (non-inherited) on the root project and 1 role (inherited) on - the leaf project. - - """ - # Create default scenario - root_id, leaf_id, non_inherited_role_id, inherited_role_id = ( - self._setup_hierarchical_projects_scenario()) - - # Grant non-inherited role - non_inher_up_entity = self.build_role_assignment_entity( - project_id=root_id, user_id=self.user['id'], - role_id=non_inherited_role_id) - self.put(non_inher_up_entity['links']['assignment']) - - # Grant inherited role - inher_up_entity = self.build_role_assignment_entity( - project_id=root_id, user_id=self.user['id'], - role_id=inherited_role_id, inherited_to_projects=True) - self.put(inher_up_entity['links']['assignment']) - - # Get effective role assignments - collection_url = '/role_assignments?effective' - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - resource_url=collection_url) - - # Assert that the user has non-inherited role on root project - self.assertRoleAssignmentInListResponse(r, non_inher_up_entity) - - # Assert that the user does not have inherited role on root project - self.assertRoleAssignmentNotInListResponse(r, inher_up_entity) - - # Assert that the user does not have non-inherited role on leaf project - non_inher_up_entity = self.build_role_assignment_entity( - project_id=leaf_id, user_id=self.user['id'], - role_id=non_inherited_role_id) - self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity) - - # Assert that the user has inherited role on leaf project - inher_up_entity['scope']['project']['id'] = leaf_id - self.assertRoleAssignmentInListResponse(r, inher_up_entity) - - def 
test_project_id_specified_if_include_subtree_specified(self): - """When using include_subtree, you must specify a project ID.""" - self.get('/role_assignments?include_subtree=True', - expected_status=http_client.BAD_REQUEST) - self.get('/role_assignments?scope.project.id&' - 'include_subtree=True', - expected_status=http_client.BAD_REQUEST) - - def test_get_role_assignments_for_project_tree(self): - """Get role_assignment?scope.project.id=X?include_subtree``. - - Test Plan: - - - Create 2 roles and a hierarchy of projects with one root and one leaf - - Issue the URL to add a non-inherited user role to the root project - and the leaf project - - Issue the URL to get role assignments for the root project but - not the subtree - this should return just the root assignment - - Issue the URL to get role assignments for the root project and - it's subtree - this should return both assignments - - Check that explicitly setting include_subtree to False is the - equivalent to not including it at all in the query. 
- - """ - # Create default scenario - root_id, leaf_id, non_inherited_role_id, unused_role_id = ( - self._setup_hierarchical_projects_scenario()) - - # Grant non-inherited role to root and leaf projects - non_inher_entity_root = self.build_role_assignment_entity( - project_id=root_id, user_id=self.user['id'], - role_id=non_inherited_role_id) - self.put(non_inher_entity_root['links']['assignment']) - non_inher_entity_leaf = self.build_role_assignment_entity( - project_id=leaf_id, user_id=self.user['id'], - role_id=non_inherited_role_id) - self.put(non_inher_entity_leaf['links']['assignment']) - - # Without the subtree, we should get the one assignment on the - # root project - collection_url = ( - '/role_assignments?scope.project.id=%(project)s' % { - 'project': root_id}) - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse( - r, resource_url=collection_url) - - self.assertThat(r.result['role_assignments'], matchers.HasLength(1)) - self.assertRoleAssignmentInListResponse(r, non_inher_entity_root) - - # With the subtree, we should get both assignments - collection_url = ( - '/role_assignments?scope.project.id=%(project)s' - '&include_subtree=True' % { - 'project': root_id}) - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse( - r, resource_url=collection_url) - - self.assertThat(r.result['role_assignments'], matchers.HasLength(2)) - self.assertRoleAssignmentInListResponse(r, non_inher_entity_root) - self.assertRoleAssignmentInListResponse(r, non_inher_entity_leaf) - - # With subtree=0, we should also only get the one assignment on the - # root project - collection_url = ( - '/role_assignments?scope.project.id=%(project)s' - '&include_subtree=0' % { - 'project': root_id}) - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse( - r, resource_url=collection_url) - - self.assertThat(r.result['role_assignments'], matchers.HasLength(1)) - self.assertRoleAssignmentInListResponse(r, non_inher_entity_root) - 
- def test_get_effective_role_assignments_for_project_tree(self): - """Get role_assignment ?project_id=X?include_subtree=True?effective``. - - Test Plan: - - - Create 2 roles and a hierarchy of projects with one root and 4 levels - of child project - - Issue the URL to add a non-inherited user role to the root project - and a level 1 project - - Issue the URL to add an inherited user role on the level 2 project - - Issue the URL to get effective role assignments for the level 1 - project and it's subtree - this should return a role (non-inherited) - on the level 1 project and roles (inherited) on each of the level - 2, 3 and 4 projects - - """ - # Create default scenario - root_id, leaf_id, non_inherited_role_id, inherited_role_id = ( - self._setup_hierarchical_projects_scenario()) - - # Add some extra projects to the project hierarchy - level2 = unit.new_project_ref(domain_id=self.domain['id'], - parent_id=leaf_id) - level3 = unit.new_project_ref(domain_id=self.domain['id'], - parent_id=level2['id']) - level4 = unit.new_project_ref(domain_id=self.domain['id'], - parent_id=level3['id']) - self.resource_api.create_project(level2['id'], level2) - self.resource_api.create_project(level3['id'], level3) - self.resource_api.create_project(level4['id'], level4) - - # Grant non-inherited role to root (as a spoiler) and to - # the level 1 (leaf) project - non_inher_entity_root = self.build_role_assignment_entity( - project_id=root_id, user_id=self.user['id'], - role_id=non_inherited_role_id) - self.put(non_inher_entity_root['links']['assignment']) - non_inher_entity_leaf = self.build_role_assignment_entity( - project_id=leaf_id, user_id=self.user['id'], - role_id=non_inherited_role_id) - self.put(non_inher_entity_leaf['links']['assignment']) - - # Grant inherited role to level 2 - inher_entity = self.build_role_assignment_entity( - project_id=level2['id'], user_id=self.user['id'], - role_id=inherited_role_id, inherited_to_projects=True) - 
self.put(inher_entity['links']['assignment']) - - # Get effective role assignments - collection_url = ( - '/role_assignments?scope.project.id=%(project)s' - '&include_subtree=True&effective' % { - 'project': leaf_id}) - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse( - r, resource_url=collection_url) - - # There should be three assignments returned in total - self.assertThat(r.result['role_assignments'], matchers.HasLength(3)) - - # Assert that the user does not non-inherited role on root project - self.assertRoleAssignmentNotInListResponse(r, non_inher_entity_root) - - # Assert that the user does have non-inherited role on leaf project - self.assertRoleAssignmentInListResponse(r, non_inher_entity_leaf) - - # Assert that the user has inherited role on levels 3 and 4 - inher_entity['scope']['project']['id'] = level3['id'] - self.assertRoleAssignmentInListResponse(r, inher_entity) - inher_entity['scope']['project']['id'] = level4['id'] - self.assertRoleAssignmentInListResponse(r, inher_entity) - - def test_get_inherited_role_assignments_for_project_hierarchy(self): - """Call ``GET /role_assignments?scope.OS-INHERIT:inherited_to``. - - Test Plan: - - - Create 2 roles - - Create a hierarchy of projects with one root and one leaf project - - Issue the URL to add a non-inherited user role to the root project - - Issue the URL to add an inherited user role to the root project - - Issue the URL to filter inherited to projects role assignments - this - should return 1 role (inherited) on the root project. 
- - """ - # Create default scenario - root_id, leaf_id, non_inherited_role_id, inherited_role_id = ( - self._setup_hierarchical_projects_scenario()) - - # Grant non-inherited role - non_inher_up_entity = self.build_role_assignment_entity( - project_id=root_id, user_id=self.user['id'], - role_id=non_inherited_role_id) - self.put(non_inher_up_entity['links']['assignment']) - - # Grant inherited role - inher_up_entity = self.build_role_assignment_entity( - project_id=root_id, user_id=self.user['id'], - role_id=inherited_role_id, inherited_to_projects=True) - self.put(inher_up_entity['links']['assignment']) - - # Get inherited role assignments - collection_url = ('/role_assignments' - '?scope.OS-INHERIT:inherited_to=projects') - r = self.get(collection_url) - self.assertValidRoleAssignmentListResponse(r, - resource_url=collection_url) - - # Assert that the user does not have non-inherited role on root project - self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity) - - # Assert that the user has inherited role on root project - self.assertRoleAssignmentInListResponse(r, inher_up_entity) - - # Assert that the user does not have non-inherited role on leaf project - non_inher_up_entity = self.build_role_assignment_entity( - project_id=leaf_id, user_id=self.user['id'], - role_id=non_inherited_role_id) - self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity) - - # Assert that the user does not have inherited role on leaf project - inher_up_entity['scope']['project']['id'] = leaf_id - self.assertRoleAssignmentNotInListResponse(r, inher_up_entity) - - -class AssignmentInheritanceDisabledTestCase(test_v3.RestfulTestCase): - """Test inheritance crud and its effects.""" - - def config_overrides(self): - super(AssignmentInheritanceDisabledTestCase, self).config_overrides() - self.config_fixture.config(group='os_inherit', enabled=False) - - def test_crud_inherited_role_grants_failed_if_disabled(self): - role = unit.new_role_ref() - 
self.role_api.create_role(role['id'], role) - - base_collection_url = ( - '/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % { - 'domain_id': self.domain_id, - 'user_id': self.user['id']}) - member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % { - 'collection_url': base_collection_url, - 'role_id': role['id']} - collection_url = base_collection_url + '/inherited_to_projects' - - self.put(member_url, expected_status=http_client.NOT_FOUND) - self.head(member_url, expected_status=http_client.NOT_FOUND) - self.get(collection_url, expected_status=http_client.NOT_FOUND) - self.delete(member_url, expected_status=http_client.NOT_FOUND) - - -class ImpliedRolesTests(test_v3.RestfulTestCase, test_v3.AssignmentTestMixin, - unit.TestCase): - def _create_role(self): - """Call ``POST /roles``.""" - ref = unit.new_role_ref() - r = self.post('/roles', body={'role': ref}) - return self.assertValidRoleResponse(r, ref) - - def test_list_implied_roles_none(self): - self.prior = self._create_role() - url = '/roles/%s/implies' % (self.prior['id']) - response = self.get(url).json["role_inference"] - self.assertEqual(self.prior['id'], response['prior_role']['id']) - self.assertEqual(0, len(response['implies'])) - - def _create_implied_role(self, prior, implied): - self.put('/roles/%s/implies/%s' % (prior['id'], implied['id']), - expected_status=http_client.CREATED) - - def _delete_implied_role(self, prior, implied): - self.delete('/roles/%s/implies/%s' % (prior['id'], implied['id'])) - - def _setup_prior_two_implied(self): - self.prior = self._create_role() - self.implied1 = self._create_role() - self._create_implied_role(self.prior, self.implied1) - self.implied2 = self._create_role() - self._create_implied_role(self.prior, self.implied2) - - def _assert_expected_implied_role_response( - self, expected_prior_id, expected_implied_ids): - r = self.get('/roles/%s/implies' % expected_prior_id) - response = r.json["role_inference"] - 
self.assertEqual(expected_prior_id, response['prior_role']['id']) - - actual_implied_ids = [implied['id'] for implied in response['implies']] - - for expected_id in expected_implied_ids: - self.assertIn(expected_id, actual_implied_ids) - self.assertEqual(len(expected_implied_ids), len(response['implies'])) - - self.assertIsNotNone(response['prior_role']['links']['self']) - for implied in response['implies']: - self.assertIsNotNone(implied['links']['self']) - - def _assert_two_roles_implied(self): - self._assert_expected_implied_role_response( - self.prior['id'], [self.implied1['id'], self.implied2['id']]) - - def _assert_one_role_implied(self): - self._assert_expected_implied_role_response( - self.prior['id'], [self.implied1['id']]) - - self.get('/roles/%s/implies/%s' % - (self.prior['id'], self.implied2['id']), - expected_status=http_client.NOT_FOUND) - - def _assert_two_rules_defined(self): - r = self.get('/role_inferences/') - - rules = r.result['role_inferences'] - - self.assertEqual(self.prior['id'], rules[0]['prior_role']['id']) - self.assertEqual(2, len(rules[0]['implies'])) - implied_ids = [implied['id'] for implied in rules[0]['implies']] - implied_names = [implied['name'] for implied in rules[0]['implies']] - - self.assertIn(self.implied1['id'], implied_ids) - self.assertIn(self.implied2['id'], implied_ids) - self.assertIn(self.implied1['name'], implied_names) - self.assertIn(self.implied2['name'], implied_names) - - def _assert_one_rule_defined(self): - r = self.get('/role_inferences/') - rules = r.result['role_inferences'] - self.assertEqual(self.prior['id'], rules[0]['prior_role']['id']) - self.assertEqual(self.implied1['id'], rules[0]['implies'][0]['id']) - self.assertEqual(self.implied1['name'], rules[0]['implies'][0]['name']) - self.assertEqual(1, len(rules[0]['implies'])) - - def test_list_all_rules(self): - self._setup_prior_two_implied() - self._assert_two_rules_defined() - - self._delete_implied_role(self.prior, self.implied2) - 
self._assert_one_rule_defined() - - def test_CRD_implied_roles(self): - - self._setup_prior_two_implied() - self._assert_two_roles_implied() - - self._delete_implied_role(self.prior, self.implied2) - self._assert_one_role_implied() - - def _create_three_roles(self): - self.role_list = [] - for _ in range(3): - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - self.role_list.append(role) - - def _create_test_domain_user_project(self): - domain = unit.new_domain_ref() - self.resource_api.create_domain(domain['id'], domain) - user = unit.create_user(self.identity_api, domain_id=domain['id']) - project = unit.new_project_ref(domain_id=domain['id']) - self.resource_api.create_project(project['id'], project) - return domain, user, project - - def _assign_top_role_to_user_on_project(self, user, project): - self.assignment_api.add_role_to_user_and_project( - user['id'], project['id'], self.role_list[0]['id']) - - def _build_effective_role_assignments_url(self, user): - return '/role_assignments?effective&user.id=%(user_id)s' % { - 'user_id': user['id']} - - def _assert_all_roles_in_assignment(self, response, user): - # Now use the list role assignments api to check that all three roles - # appear in the collection - self.assertValidRoleAssignmentListResponse( - response, - expected_length=len(self.role_list), - resource_url=self._build_effective_role_assignments_url(user)) - - def _assert_initial_assignment_in_effective(self, response, user, project): - # The initial assignment should be there (the link url will be - # generated and checked automatically since it matches the assignment) - entity = self.build_role_assignment_entity( - project_id=project['id'], - user_id=user['id'], role_id=self.role_list[0]['id']) - self.assertRoleAssignmentInListResponse(response, entity) - - def _assert_effective_role_for_implied_has_prior_in_links( - self, response, user, project, prior_index, implied_index): - # An effective role for an implied role will have 
the prior role - # assignment in the links - prior_link = '/prior_roles/%(prior)s/implies/%(implied)s' % { - 'prior': self.role_list[prior_index]['id'], - 'implied': self.role_list[implied_index]['id']} - link = self.build_role_assignment_link( - project_id=project['id'], user_id=user['id'], - role_id=self.role_list[prior_index]['id']) - entity = self.build_role_assignment_entity( - link=link, project_id=project['id'], - user_id=user['id'], role_id=self.role_list[implied_index]['id'], - prior_link=prior_link) - self.assertRoleAssignmentInListResponse(response, entity) - - def test_list_role_assignments_with_implied_roles(self): - """Call ``GET /role_assignments`` with implied role grant. - - Test Plan: - - - Create a domain with a user and a project - - Create 3 roles - - Role 0 implies role 1 and role 1 implies role 2 - - Assign the top role to the project - - Issue the URL to check effective roles on project - this - should return all 3 roles. - - Check the links of the 3 roles indicate the prior role where - appropriate - - """ - (domain, user, project) = self._create_test_domain_user_project() - self._create_three_roles() - self._create_implied_role(self.role_list[0], self.role_list[1]) - self._create_implied_role(self.role_list[1], self.role_list[2]) - self._assign_top_role_to_user_on_project(user, project) - - response = self.get(self._build_effective_role_assignments_url(user)) - r = response - - self._assert_all_roles_in_assignment(r, user) - self._assert_initial_assignment_in_effective(response, user, project) - self._assert_effective_role_for_implied_has_prior_in_links( - response, user, project, 0, 1) - self._assert_effective_role_for_implied_has_prior_in_links( - response, user, project, 1, 2) - - def _create_named_role(self, name): - role = unit.new_role_ref() - role['name'] = name - self.role_api.create_role(role['id'], role) - return role - - def test_root_role_as_implied_role_forbidden(self): - """Test root role is forbidden to be set as an implied 
role. - - Create 2 roles that are prohibited from being an implied role. - Create 1 additional role which should be accepted as an implied - role. Assure the prohibited role names cannot be set as an implied - role. Assure the accepted role name which is not a member of the - prohibited implied role list can be successfully set an implied - role. - """ - prohibited_name1 = 'root1' - prohibited_name2 = 'root2' - accepted_name1 = 'implied1' - - prohibited_names = [prohibited_name1, prohibited_name2] - self.config_fixture.config(group='assignment', - prohibited_implied_role=prohibited_names) - - prior_role = self._create_role() - - prohibited_role1 = self._create_named_role(prohibited_name1) - url = '/roles/{prior_role_id}/implies/{implied_role_id}'.format( - prior_role_id=prior_role['id'], - implied_role_id=prohibited_role1['id']) - self.put(url, expected_status=http_client.FORBIDDEN) - - prohibited_role2 = self._create_named_role(prohibited_name2) - url = '/roles/{prior_role_id}/implies/{implied_role_id}'.format( - prior_role_id=prior_role['id'], - implied_role_id=prohibited_role2['id']) - self.put(url, expected_status=http_client.FORBIDDEN) - - accepted_role1 = self._create_named_role(accepted_name1) - url = '/roles/{prior_role_id}/implies/{implied_role_id}'.format( - prior_role_id=prior_role['id'], - implied_role_id=accepted_role1['id']) - self.put(url, expected_status=http_client.CREATED) - - def test_trusts_from_implied_role(self): - self._create_three_roles() - self._create_implied_role(self.role_list[0], self.role_list[1]) - self._create_implied_role(self.role_list[1], self.role_list[2]) - self._assign_top_role_to_user_on_project(self.user, self.project) - - # Create a trustee and assign the prior role to her - trustee = unit.create_user(self.identity_api, domain_id=self.domain_id) - ref = unit.new_trust_ref( - trustor_user_id=self.user['id'], - trustee_user_id=trustee['id'], - project_id=self.project['id'], - role_ids=[self.role_list[0]['id']]) - r = 
self.post('/OS-TRUST/trusts', body={'trust': ref}) - trust = r.result['trust'] - - # Only the role that was specified is in the trust, NOT implied roles - self.assertEqual(self.role_list[0]['id'], trust['roles'][0]['id']) - self.assertThat(trust['roles'], matchers.HasLength(1)) - - # Authenticate as the trustee - auth_data = self.build_authentication_request( - user_id=trustee['id'], - password=trustee['password'], - trust_id=trust['id']) - r = self.v3_create_token(auth_data) - token = r.result['token'] - self.assertThat(token['roles'], - matchers.HasLength(len(self.role_list))) - for role in token['roles']: - self.assertIn(role, self.role_list) - for role in self.role_list: - self.assertIn(role, token['roles']) - - def test_trusts_from_domain_specific_implied_role(self): - self._create_three_roles() - # Overwrite the first role with a domain specific role - role = unit.new_role_ref(domain_id=self.domain_id) - self.role_list[0] = self.role_api.create_role(role['id'], role) - self._create_implied_role(self.role_list[0], self.role_list[1]) - self._create_implied_role(self.role_list[1], self.role_list[2]) - self._assign_top_role_to_user_on_project(self.user, self.project) - - # Create a trustee and assign the prior role to her - trustee = unit.create_user(self.identity_api, domain_id=self.domain_id) - ref = unit.new_trust_ref( - trustor_user_id=self.user['id'], - trustee_user_id=trustee['id'], - project_id=self.project['id'], - role_ids=[self.role_list[0]['id']]) - r = self.post('/OS-TRUST/trusts', body={'trust': ref}) - trust = r.result['trust'] - - # Only the role that was specified is in the trust, NOT implied roles - self.assertEqual(self.role_list[0]['id'], trust['roles'][0]['id']) - self.assertThat(trust['roles'], matchers.HasLength(1)) - - # Authenticate as the trustee - auth_data = self.build_authentication_request( - user_id=trustee['id'], - password=trustee['password'], - trust_id=trust['id']) - r = self.v3_create_token(auth_data) - token = r.result['token'] 
- - # The token should have the roles implies by the domain specific role, - # but not the domain specific role itself. - self.assertThat(token['roles'], - matchers.HasLength(len(self.role_list) - 1)) - for role in token['roles']: - self.assertIn(role, self.role_list) - for role in [self.role_list[1], self.role_list[2]]: - self.assertIn(role, token['roles']) - self.assertNotIn(self.role_list[0], token['roles']) - - -class DomainSpecificRoleTests(test_v3.RestfulTestCase, unit.TestCase): - def setUp(self): - def create_role(domain_id=None): - """Call ``POST /roles``.""" - ref = unit.new_role_ref(domain_id=domain_id) - r = self.post( - '/roles', - body={'role': ref}) - return self.assertValidRoleResponse(r, ref) - - super(DomainSpecificRoleTests, self).setUp() - self.domainA = unit.new_domain_ref() - self.resource_api.create_domain(self.domainA['id'], self.domainA) - self.domainB = unit.new_domain_ref() - self.resource_api.create_domain(self.domainB['id'], self.domainB) - - self.global_role1 = create_role() - self.global_role2 = create_role() - # Since there maybe other global roles already created, let's count - # them, so we can ensure we can check subsequent list responses - # are correct - r = self.get('/roles') - self.existing_global_roles = len(r.result['roles']) - - # And now create some domain specific roles - self.domainA_role1 = create_role(domain_id=self.domainA['id']) - self.domainA_role2 = create_role(domain_id=self.domainA['id']) - self.domainB_role = create_role(domain_id=self.domainB['id']) - - def test_get_and_list_domain_specific_roles(self): - # Check we can get a domain specific role - r = self.get('/roles/%s' % self.domainA_role1['id']) - self.assertValidRoleResponse(r, self.domainA_role1) - - # If we list without specifying a domain, we should only get global - # roles back. 
- r = self.get('/roles') - self.assertValidRoleListResponse( - r, expected_length=self.existing_global_roles) - self.assertRoleInListResponse(r, self.global_role1) - self.assertRoleInListResponse(r, self.global_role2) - self.assertRoleNotInListResponse(r, self.domainA_role1) - self.assertRoleNotInListResponse(r, self.domainA_role2) - self.assertRoleNotInListResponse(r, self.domainB_role) - - # Now list those in domainA, making sure that's all we get back - r = self.get('/roles?domain_id=%s' % self.domainA['id']) - self.assertValidRoleListResponse(r, expected_length=2) - self.assertRoleInListResponse(r, self.domainA_role1) - self.assertRoleInListResponse(r, self.domainA_role2) - - def test_update_domain_specific_roles(self): - self.domainA_role1['name'] = uuid.uuid4().hex - self.patch('/roles/%(role_id)s' % { - 'role_id': self.domainA_role1['id']}, - body={'role': self.domainA_role1}) - r = self.get('/roles/%s' % self.domainA_role1['id']) - self.assertValidRoleResponse(r, self.domainA_role1) - - def test_delete_domain_specific_roles(self): - # Check delete only removes that one domain role - self.delete('/roles/%(role_id)s' % { - 'role_id': self.domainA_role1['id']}) - - self.get('/roles/%s' % self.domainA_role1['id'], - expected_status=http_client.NOT_FOUND) - # Now re-list those in domainA, making sure there's only one left - r = self.get('/roles?domain_id=%s' % self.domainA['id']) - self.assertValidRoleListResponse(r, expected_length=1) - self.assertRoleInListResponse(r, self.domainA_role2) - - -class ListUserProjectsTestCase(test_v3.RestfulTestCase): - """Tests for /users//projects""" - - def load_sample_data(self): - # do not load base class's data, keep it focused on the tests - - self.auths = [] - self.domains = [] - self.projects = [] - self.roles = [] - self.users = [] - - # Create 3 sets of domain, roles, projects, and users to demonstrate - # the right user's data is loaded and only projects they can access - # are returned. 
- - for _ in range(3): - domain = unit.new_domain_ref() - self.resource_api.create_domain(domain['id'], domain) - - user = unit.create_user(self.identity_api, domain_id=domain['id']) - - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - - self.assignment_api.create_grant(role['id'], - user_id=user['id'], - domain_id=domain['id']) - - project = unit.new_project_ref(domain_id=domain['id']) - self.resource_api.create_project(project['id'], project) - - self.assignment_api.create_grant(role['id'], - user_id=user['id'], - project_id=project['id']) - - auth = self.build_authentication_request( - user_id=user['id'], - password=user['password'], - domain_id=domain['id']) - - self.auths.append(auth) - self.domains.append(domain) - self.projects.append(project) - self.roles.append(role) - self.users.append(user) - - def test_list_all(self): - for i in range(len(self.users)): - user = self.users[i] - auth = self.auths[i] - - url = '/users/%s/projects' % user['id'] - result = self.get(url, auth=auth) - projects_result = result.json['projects'] - self.assertEqual(1, len(projects_result)) - self.assertEqual(self.projects[i]['id'], projects_result[0]['id']) - - def test_list_enabled(self): - for i in range(len(self.users)): - user = self.users[i] - auth = self.auths[i] - - # There are no disabled projects - url = '/users/%s/projects?enabled=True' % user['id'] - result = self.get(url, auth=auth) - projects_result = result.json['projects'] - self.assertEqual(1, len(projects_result)) - self.assertEqual(self.projects[i]['id'], projects_result[0]['id']) - - def test_list_disabled(self): - for i in range(len(self.users)): - user = self.users[i] - auth = self.auths[i] - project = self.projects[i] - - # There are no disabled projects - url = '/users/%s/projects?enabled=False' % user['id'] - result = self.get(url, auth=auth) - self.assertEqual(0, len(result.json['projects'])) - - # disable this one and check again - project['enabled'] = False - 
self.resource_api.update_project(project['id'], project) - result = self.get(url, auth=auth) - projects_result = result.json['projects'] - self.assertEqual(1, len(projects_result)) - self.assertEqual(self.projects[i]['id'], projects_result[0]['id']) - - def test_list_by_domain_id(self): - for i in range(len(self.users)): - user = self.users[i] - domain = self.domains[i] - auth = self.auths[i] - - # Try looking for projects with a non-existent domain_id - url = '/users/%s/projects?domain_id=%s' % (user['id'], - uuid.uuid4().hex) - result = self.get(url, auth=auth) - self.assertEqual(0, len(result.json['projects'])) - - # Now try a valid one - url = '/users/%s/projects?domain_id=%s' % (user['id'], - domain['id']) - result = self.get(url, auth=auth) - projects_result = result.json['projects'] - self.assertEqual(1, len(projects_result)) - self.assertEqual(self.projects[i]['id'], projects_result[0]['id']) diff --git a/keystone-moon/keystone/tests/unit/test_v3_auth.py b/keystone-moon/keystone/tests/unit/test_v3_auth.py deleted file mode 100644 index 698feeb8..00000000 --- a/keystone-moon/keystone/tests/unit/test_v3_auth.py +++ /dev/null @@ -1,4955 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import copy -import datetime -import itertools -import json -import operator -import uuid - -from keystoneclient.common import cms -import mock -from oslo_config import cfg -from oslo_log import versionutils -from oslo_utils import fixture -from oslo_utils import timeutils -from six.moves import http_client -from six.moves import range -from testtools import matchers -from testtools import testcase - -from keystone import auth -from keystone.auth.plugins import totp -from keystone.common import utils -from keystone.contrib.revoke import routers -from keystone import exception -from keystone.policy.backends import rules -from keystone.tests.common import auth as common_auth -from keystone.tests import unit -from keystone.tests.unit import ksfixtures -from keystone.tests.unit import test_v3 - -CONF = cfg.CONF - - -class TestAuthInfo(common_auth.AuthTestMixin, testcase.TestCase): - def setUp(self): - super(TestAuthInfo, self).setUp() - auth.controllers.load_auth_methods() - - def test_missing_auth_methods(self): - auth_data = {'identity': {}} - auth_data['identity']['token'] = {'id': uuid.uuid4().hex} - self.assertRaises(exception.ValidationError, - auth.controllers.AuthInfo.create, - None, - auth_data) - - def test_unsupported_auth_method(self): - auth_data = {'methods': ['abc']} - auth_data['abc'] = {'test': 'test'} - auth_data = {'identity': auth_data} - self.assertRaises(exception.AuthMethodNotSupported, - auth.controllers.AuthInfo.create, - None, - auth_data) - - def test_missing_auth_method_data(self): - auth_data = {'methods': ['password']} - auth_data = {'identity': auth_data} - self.assertRaises(exception.ValidationError, - auth.controllers.AuthInfo.create, - None, - auth_data) - - def test_project_name_no_domain(self): - auth_data = self.build_authentication_request( - username='test', - password='test', - project_name='abc')['auth'] - self.assertRaises(exception.ValidationError, - auth.controllers.AuthInfo.create, - None, - auth_data) - - def 
test_both_project_and_domain_in_scope(self): - auth_data = self.build_authentication_request( - user_id='test', - password='test', - project_name='test', - domain_name='test')['auth'] - self.assertRaises(exception.ValidationError, - auth.controllers.AuthInfo.create, - None, - auth_data) - - def test_get_method_names_duplicates(self): - auth_data = self.build_authentication_request( - token='test', - user_id='test', - password='test')['auth'] - auth_data['identity']['methods'] = ['password', 'token', - 'password', 'password'] - context = None - auth_info = auth.controllers.AuthInfo.create(context, auth_data) - self.assertEqual(['password', 'token'], - auth_info.get_method_names()) - - def test_get_method_data_invalid_method(self): - auth_data = self.build_authentication_request( - user_id='test', - password='test')['auth'] - context = None - auth_info = auth.controllers.AuthInfo.create(context, auth_data) - - method_name = uuid.uuid4().hex - self.assertRaises(exception.ValidationError, - auth_info.get_method_data, - method_name) - - -class TokenAPITests(object): - # Why is this not just setUp? Because TokenAPITests is not a test class - # itself. If TokenAPITests became a subclass of the testcase, it would get - # called by the enumerate-tests-in-file code. The way the functions get - # resolved in Python for multiple inheritance means that a setUp in this - # would get skipped by the testrunner. 
- def doSetUp(self): - r = self.v3_create_token(self.build_authentication_request( - username=self.user['name'], - user_domain_id=self.domain_id, - password=self.user['password'])) - self.v3_token_data = r.result - self.v3_token = r.headers.get('X-Subject-Token') - self.headers = {'X-Subject-Token': r.headers.get('X-Subject-Token')} - - def _make_auth_request(self, auth_data): - resp = self.post('/auth/tokens', body=auth_data) - token = resp.headers.get('X-Subject-Token') - return token - - def _get_unscoped_token(self): - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password']) - return self._make_auth_request(auth_data) - - def _get_domain_scoped_token(self): - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - domain_id=self.domain_id) - return self._make_auth_request(auth_data) - - def _get_project_scoped_token(self): - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project_id) - return self._make_auth_request(auth_data) - - def _get_trust_scoped_token(self, trustee_user, trust): - auth_data = self.build_authentication_request( - user_id=trustee_user['id'], - password=trustee_user['password'], - trust_id=trust['id']) - return self._make_auth_request(auth_data) - - def _create_trust(self, impersonation=False): - # Create a trustee user - trustee_user = unit.create_user(self.identity_api, - domain_id=self.domain_id) - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=trustee_user['id'], - project_id=self.project_id, - impersonation=impersonation, - role_ids=[self.role_id]) - - # Create a trust - r = self.post('/OS-TRUST/trusts', body={'trust': ref}) - trust = self.assertValidTrustResponse(r) - return (trustee_user, trust) - - def _validate_token(self, token, expected_status=http_client.OK): - return self.get( - '/auth/tokens', - 
headers={'X-Subject-Token': token}, - expected_status=expected_status) - - def _revoke_token(self, token, expected_status=http_client.NO_CONTENT): - return self.delete( - '/auth/tokens', - headers={'x-subject-token': token}, - expected_status=expected_status) - - def _set_user_enabled(self, user, enabled=True): - user['enabled'] = enabled - self.identity_api.update_user(user['id'], user) - - def test_validate_unscoped_token(self): - unscoped_token = self._get_unscoped_token() - self._validate_token(unscoped_token) - - def test_revoke_unscoped_token(self): - unscoped_token = self._get_unscoped_token() - self._validate_token(unscoped_token) - self._revoke_token(unscoped_token) - self._validate_token(unscoped_token, - expected_status=http_client.NOT_FOUND) - - def test_unscoped_token_is_invalid_after_disabling_user(self): - unscoped_token = self._get_unscoped_token() - # Make sure the token is valid - self._validate_token(unscoped_token) - # Disable the user - self._set_user_enabled(self.user, enabled=False) - # Ensure validating a token for a disabled user fails - self.assertRaises(exception.TokenNotFound, - self.token_provider_api.validate_token, - unscoped_token) - - def test_unscoped_token_is_invalid_after_enabling_disabled_user(self): - unscoped_token = self._get_unscoped_token() - # Make sure the token is valid - self._validate_token(unscoped_token) - # Disable the user - self._set_user_enabled(self.user, enabled=False) - # Ensure validating a token for a disabled user fails - self.assertRaises(exception.TokenNotFound, - self.token_provider_api.validate_token, - unscoped_token) - # Enable the user - self._set_user_enabled(self.user) - # Ensure validating a token for a re-enabled user fails - self.assertRaises(exception.TokenNotFound, - self.token_provider_api.validate_token, - unscoped_token) - - def test_unscoped_token_is_invalid_after_disabling_user_domain(self): - unscoped_token = self._get_unscoped_token() - # Make sure the token is valid - 
self._validate_token(unscoped_token) - # Disable the user's domain - self.domain['enabled'] = False - self.resource_api.update_domain(self.domain['id'], self.domain) - # Ensure validating a token for a disabled user fails - self.assertRaises(exception.TokenNotFound, - self.token_provider_api.validate_token, - unscoped_token) - - def test_unscoped_token_is_invalid_after_changing_user_password(self): - unscoped_token = self._get_unscoped_token() - # Make sure the token is valid - self._validate_token(unscoped_token) - # Change user's password - self.user['password'] = 'Password1' - self.identity_api.update_user(self.user['id'], self.user) - # Ensure updating user's password revokes existing user's tokens - self.assertRaises(exception.TokenNotFound, - self.token_provider_api.validate_token, - unscoped_token) - - def test_validate_domain_scoped_token(self): - # Grant user access to domain - self.assignment_api.create_grant(self.role['id'], - user_id=self.user['id'], - domain_id=self.domain['id']) - domain_scoped_token = self._get_domain_scoped_token() - resp = self._validate_token(domain_scoped_token) - resp_json = json.loads(resp.body) - self.assertIsNotNone(resp_json['token']['catalog']) - self.assertIsNotNone(resp_json['token']['roles']) - self.assertIsNotNone(resp_json['token']['domain']) - - def test_domain_scoped_token_is_invalid_after_disabling_user(self): - # Grant user access to domain - self.assignment_api.create_grant(self.role['id'], - user_id=self.user['id'], - domain_id=self.domain['id']) - domain_scoped_token = self._get_domain_scoped_token() - # Make sure the token is valid - self._validate_token(domain_scoped_token) - # Disable user - self._set_user_enabled(self.user, enabled=False) - # Ensure validating a token for a disabled user fails - self.assertRaises(exception.TokenNotFound, - self.token_provider_api.validate_token, - domain_scoped_token) - - def test_domain_scoped_token_is_invalid_after_deleting_grant(self): - # Grant user access to domain - 
self.assignment_api.create_grant(self.role['id'], - user_id=self.user['id'], - domain_id=self.domain['id']) - domain_scoped_token = self._get_domain_scoped_token() - # Make sure the token is valid - self._validate_token(domain_scoped_token) - # Delete access to domain - self.assignment_api.delete_grant(self.role['id'], - user_id=self.user['id'], - domain_id=self.domain['id']) - # Ensure validating a token for a disabled user fails - self.assertRaises(exception.TokenNotFound, - self.token_provider_api.validate_token, - domain_scoped_token) - - def test_domain_scoped_token_invalid_after_disabling_domain(self): - # Grant user access to domain - self.assignment_api.create_grant(self.role['id'], - user_id=self.user['id'], - domain_id=self.domain['id']) - domain_scoped_token = self._get_domain_scoped_token() - # Make sure the token is valid - self._validate_token(domain_scoped_token) - # Disable domain - self.domain['enabled'] = False - self.resource_api.update_domain(self.domain['id'], self.domain) - # Ensure validating a token for a disabled domain fails - self.assertRaises(exception.TokenNotFound, - self.token_provider_api.validate_token, - domain_scoped_token) - - def test_v2_validate_domain_scoped_token_returns_unauthorized(self): - # Test that validating a domain scoped token in v2.0 returns - # unauthorized. 
- # Grant user access to domain - self.assignment_api.create_grant(self.role['id'], - user_id=self.user['id'], - domain_id=self.domain['id']) - - scoped_token = self._get_domain_scoped_token() - self.assertRaises(exception.Unauthorized, - self.token_provider_api.validate_v2_token, - scoped_token) - - def test_validate_project_scoped_token(self): - project_scoped_token = self._get_project_scoped_token() - self._validate_token(project_scoped_token) - - def test_revoke_project_scoped_token(self): - project_scoped_token = self._get_project_scoped_token() - self._validate_token(project_scoped_token) - self._revoke_token(project_scoped_token) - self._validate_token(project_scoped_token, - expected_status=http_client.NOT_FOUND) - - def test_project_scoped_token_is_invalid_after_disabling_user(self): - project_scoped_token = self._get_project_scoped_token() - # Make sure the token is valid - self._validate_token(project_scoped_token) - # Disable the user - self._set_user_enabled(self.user, enabled=False) - # Ensure validating a token for a disabled user fails - self.assertRaises(exception.TokenNotFound, - self.token_provider_api.validate_token, - project_scoped_token) - - def test_project_scoped_token_invalid_after_changing_user_password(self): - project_scoped_token = self._get_project_scoped_token() - # Make sure the token is valid - self._validate_token(project_scoped_token) - # Update user's password - self.user['password'] = 'Password1' - self.identity_api.update_user(self.user['id'], self.user) - # Ensure updating user's password revokes existing tokens - self.assertRaises(exception.TokenNotFound, - self.token_provider_api.validate_token, - project_scoped_token) - - def test_project_scoped_token_invalid_after_disabling_project(self): - project_scoped_token = self._get_project_scoped_token() - # Make sure the token is valid - self._validate_token(project_scoped_token) - # Disable project - self.project['enabled'] = False - 
self.resource_api.update_project(self.project['id'], self.project) - # Ensure validating a token for a disabled project fails - self.assertRaises(exception.TokenNotFound, - self.token_provider_api.validate_token, - project_scoped_token) - - def test_rescope_unscoped_token_with_trust(self): - trustee_user, trust = self._create_trust() - self._get_trust_scoped_token(trustee_user, trust) - - def test_validate_a_trust_scoped_token(self): - trustee_user, trust = self._create_trust() - trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust) - # Validate a trust scoped token - self._validate_token(trust_scoped_token) - - def test_validate_a_trust_scoped_token_impersonated(self): - trustee_user, trust = self._create_trust(impersonation=True) - trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust) - # Validate a trust scoped token - self._validate_token(trust_scoped_token) - - def test_revoke_trust_scoped_token(self): - trustee_user, trust = self._create_trust() - trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust) - # Validate a trust scoped token - self._validate_token(trust_scoped_token) - self._revoke_token(trust_scoped_token) - self._validate_token(trust_scoped_token, - expected_status=http_client.NOT_FOUND) - - def test_trust_scoped_token_is_invalid_after_disabling_trustee(self): - trustee_user, trust = self._create_trust() - trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust) - # Validate a trust scoped token - self._validate_token(trust_scoped_token) - - # Disable trustee - trustee_update_ref = dict(enabled=False) - self.identity_api.update_user(trustee_user['id'], trustee_update_ref) - # Ensure validating a token for a disabled user fails - self.assertRaises(exception.TokenNotFound, - self.token_provider_api.validate_token, - trust_scoped_token) - - def test_trust_scoped_token_invalid_after_changing_trustee_password(self): - trustee_user, trust = self._create_trust() - trust_scoped_token = 
self._get_trust_scoped_token(trustee_user, trust) - # Validate a trust scoped token - self._validate_token(trust_scoped_token) - # Change trustee's password - trustee_update_ref = dict(password='Password1') - self.identity_api.update_user(trustee_user['id'], trustee_update_ref) - # Ensure updating trustee's password revokes existing tokens - self.assertRaises(exception.TokenNotFound, - self.token_provider_api.validate_token, - trust_scoped_token) - - def test_trust_scoped_token_is_invalid_after_disabling_trustor(self): - trustee_user, trust = self._create_trust() - trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust) - # Validate a trust scoped token - self._validate_token(trust_scoped_token) - - # Disable the trustor - trustor_update_ref = dict(enabled=False) - self.identity_api.update_user(self.user['id'], trustor_update_ref) - # Ensure validating a token for a disabled user fails - self.assertRaises(exception.TokenNotFound, - self.token_provider_api.validate_token, - trust_scoped_token) - - def test_trust_scoped_token_invalid_after_changing_trustor_password(self): - trustee_user, trust = self._create_trust() - trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust) - # Validate a trust scoped token - self._validate_token(trust_scoped_token) - - # Change trustor's password - trustor_update_ref = dict(password='Password1') - self.identity_api.update_user(self.user['id'], trustor_update_ref) - # Ensure updating trustor's password revokes existing user's tokens - self.assertRaises(exception.TokenNotFound, - self.token_provider_api.validate_token, - trust_scoped_token) - - def test_trust_scoped_token_invalid_after_disabled_trustor_domain(self): - trustee_user, trust = self._create_trust() - trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust) - # Validate a trust scoped token - self._validate_token(trust_scoped_token) - - # Disable trustor's domain - self.domain['enabled'] = False - 
self.resource_api.update_domain(self.domain['id'], self.domain) - - trustor_update_ref = dict(password='Password1') - self.identity_api.update_user(self.user['id'], trustor_update_ref) - # Ensure updating trustor's password revokes existing user's tokens - self.assertRaises(exception.TokenNotFound, - self.token_provider_api.validate_token, - trust_scoped_token) - - def test_v2_validate_trust_scoped_token(self): - # Test that validating an trust scoped token in v2.0 returns - # unauthorized. - trustee_user, trust = self._create_trust() - trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust) - self.assertRaises(exception.Unauthorized, - self.token_provider_api.validate_v2_token, - trust_scoped_token) - - def test_default_fixture_scope_token(self): - self.assertIsNotNone(self.get_scoped_token()) - - def test_v3_v2_intermix_new_default_domain(self): - # If the default_domain_id config option is changed, then should be - # able to validate a v3 token with user in the new domain. - - # 1) Create a new domain for the user. - new_domain = unit.new_domain_ref() - self.resource_api.create_domain(new_domain['id'], new_domain) - - # 2) Create user in new domain. - new_user = unit.create_user(self.identity_api, - domain_id=new_domain['id']) - - # 3) Update the default_domain_id config option to the new domain - self.config_fixture.config( - group='identity', - default_domain_id=new_domain['id']) - - # 4) Get a token using v3 API. - v3_token = self.get_requested_token(self.build_authentication_request( - user_id=new_user['id'], - password=new_user['password'])) - - # 5) Validate token using v2 API. 
- self.admin_request( - path='/v2.0/tokens/%s' % v3_token, - token=self.get_admin_token(), - method='GET') - - def test_v3_v2_intermix_domain_scoped_token_failed(self): - # grant the domain role to user - self.put( - path='/domains/%s/users/%s/roles/%s' % ( - self.domain['id'], self.user['id'], self.role['id'])) - - # generate a domain-scoped v3 token - v3_token = self.get_requested_token(self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - domain_id=self.domain['id'])) - - # domain-scoped tokens are not supported by v2 - self.admin_request( - method='GET', - path='/v2.0/tokens/%s' % v3_token, - token=self.get_admin_token(), - expected_status=http_client.UNAUTHORIZED) - - def test_v3_v2_intermix_non_default_project_succeed(self): - # self.project is in a non-default domain - v3_token = self.get_requested_token(self.build_authentication_request( - user_id=self.default_domain_user['id'], - password=self.default_domain_user['password'], - project_id=self.project['id'])) - - # v2 cannot reference projects outside the default domain - self.admin_request( - method='GET', - path='/v2.0/tokens/%s' % v3_token, - token=self.get_admin_token()) - - def test_v3_v2_intermix_non_default_user_succeed(self): - self.assignment_api.create_grant( - self.role['id'], - user_id=self.user['id'], - project_id=self.default_domain_project['id']) - - # self.user is in a non-default domain - v3_token = self.get_requested_token(self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.default_domain_project['id'])) - - # v2 cannot reference projects outside the default domain - self.admin_request( - method='GET', - path='/v2.0/tokens/%s' % v3_token, - token=self.get_admin_token()) - - def test_v3_v2_intermix_domain_scope_failed(self): - self.assignment_api.create_grant( - self.role['id'], - user_id=self.default_domain_user['id'], - domain_id=self.domain['id']) - - v3_token = 
self.get_requested_token(self.build_authentication_request( - user_id=self.default_domain_user['id'], - password=self.default_domain_user['password'], - domain_id=self.domain['id'])) - - # v2 cannot reference projects outside the default domain - self.admin_request( - path='/v2.0/tokens/%s' % v3_token, - token=self.get_admin_token(), - method='GET', - expected_status=http_client.UNAUTHORIZED) - - def test_v3_v2_unscoped_token_intermix(self): - r = self.v3_create_token(self.build_authentication_request( - user_id=self.default_domain_user['id'], - password=self.default_domain_user['password'])) - self.assertValidUnscopedTokenResponse(r) - v3_token_data = r.result - v3_token = r.headers.get('X-Subject-Token') - - # now validate the v3 token with v2 API - r = self.admin_request( - path='/v2.0/tokens/%s' % v3_token, - token=self.get_admin_token(), - method='GET') - v2_token_data = r.result - - self.assertEqual(v2_token_data['access']['user']['id'], - v3_token_data['token']['user']['id']) - # v2 token time has not fraction of second precision so - # just need to make sure the non fraction part agrees - self.assertIn(v2_token_data['access']['token']['expires'][:-1], - v3_token_data['token']['expires_at']) - - def test_v3_v2_token_intermix(self): - # FIXME(gyee): PKI tokens are not interchangeable because token - # data is baked into the token itself. 
- r = self.v3_create_token(self.build_authentication_request( - user_id=self.default_domain_user['id'], - password=self.default_domain_user['password'], - project_id=self.default_domain_project['id'])) - self.assertValidProjectScopedTokenResponse(r) - v3_token_data = r.result - v3_token = r.headers.get('X-Subject-Token') - - # now validate the v3 token with v2 API - r = self.admin_request( - method='GET', - path='/v2.0/tokens/%s' % v3_token, - token=self.get_admin_token()) - v2_token_data = r.result - - self.assertEqual(v2_token_data['access']['user']['id'], - v3_token_data['token']['user']['id']) - # v2 token time has not fraction of second precision so - # just need to make sure the non fraction part agrees - self.assertIn(v2_token_data['access']['token']['expires'][:-1], - v3_token_data['token']['expires_at']) - self.assertEqual(v2_token_data['access']['user']['roles'][0]['name'], - v3_token_data['token']['roles'][0]['name']) - - def test_v2_v3_unscoped_token_intermix(self): - r = self.admin_request( - method='POST', - path='/v2.0/tokens', - body={ - 'auth': { - 'passwordCredentials': { - 'userId': self.default_domain_user['id'], - 'password': self.default_domain_user['password'] - } - } - }) - v2_token_data = r.result - v2_token = v2_token_data['access']['token']['id'] - - r = self.get('/auth/tokens', headers={'X-Subject-Token': v2_token}) - self.assertValidUnscopedTokenResponse(r) - v3_token_data = r.result - - self.assertEqual(v2_token_data['access']['user']['id'], - v3_token_data['token']['user']['id']) - # v2 token time has not fraction of second precision so - # just need to make sure the non fraction part agrees - self.assertIn(v2_token_data['access']['token']['expires'][-1], - v3_token_data['token']['expires_at']) - - def test_v2_v3_token_intermix(self): - r = self.admin_request( - path='/v2.0/tokens', - method='POST', - body={ - 'auth': { - 'passwordCredentials': { - 'userId': self.default_domain_user['id'], - 'password': 
self.default_domain_user['password'] - }, - 'tenantId': self.default_domain_project['id'] - } - }) - v2_token_data = r.result - v2_token = v2_token_data['access']['token']['id'] - - r = self.get('/auth/tokens', headers={'X-Subject-Token': v2_token}) - self.assertValidProjectScopedTokenResponse(r) - v3_token_data = r.result - - self.assertEqual(v2_token_data['access']['user']['id'], - v3_token_data['token']['user']['id']) - # v2 token time has not fraction of second precision so - # just need to make sure the non fraction part agrees - self.assertIn(v2_token_data['access']['token']['expires'][-1], - v3_token_data['token']['expires_at']) - self.assertEqual(v2_token_data['access']['user']['roles'][0]['name'], - v3_token_data['token']['roles'][0]['name']) - - v2_issued_at = timeutils.parse_isotime( - v2_token_data['access']['token']['issued_at']) - v3_issued_at = timeutils.parse_isotime( - v3_token_data['token']['issued_at']) - - self.assertEqual(v2_issued_at, v3_issued_at) - - def test_v2_token_deleted_on_v3(self): - # Create a v2 token. - body = { - 'auth': { - 'passwordCredentials': { - 'userId': self.default_domain_user['id'], - 'password': self.default_domain_user['password'] - }, - 'tenantId': self.default_domain_project['id'] - } - } - r = self.admin_request( - path='/v2.0/tokens', method='POST', body=body) - v2_token = r.result['access']['token']['id'] - - # Delete the v2 token using v3. - self.delete( - '/auth/tokens', headers={'X-Subject-Token': v2_token}) - - # Attempting to use the deleted token on v2 should fail. 
- self.admin_request( - path='/v2.0/tenants', method='GET', token=v2_token, - expected_status=http_client.UNAUTHORIZED) - - def test_rescoping_token(self): - expires = self.v3_token_data['token']['expires_at'] - - # rescope the token - r = self.v3_create_token(self.build_authentication_request( - token=self.v3_token, - project_id=self.project_id)) - self.assertValidProjectScopedTokenResponse(r) - - # ensure token expiration stayed the same - self.assertEqual(expires, r.result['token']['expires_at']) - - def test_check_token(self): - self.head('/auth/tokens', headers=self.headers, - expected_status=http_client.OK) - - def test_validate_token(self): - r = self.get('/auth/tokens', headers=self.headers) - self.assertValidUnscopedTokenResponse(r) - - def test_validate_missing_subject_token(self): - self.get('/auth/tokens', - expected_status=http_client.NOT_FOUND) - - def test_validate_missing_auth_token(self): - self.admin_request( - method='GET', - path='/v3/projects', - token=None, - expected_status=http_client.UNAUTHORIZED) - - def test_validate_token_nocatalog(self): - v3_token = self.get_requested_token(self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project['id'])) - r = self.get( - '/auth/tokens?nocatalog', - headers={'X-Subject-Token': v3_token}) - self.assertValidProjectScopedTokenResponse(r, require_catalog=False) - - def test_is_admin_token_by_ids(self): - self.config_fixture.config( - group='resource', - admin_project_domain_name=self.domain['name'], - admin_project_name=self.project['name']) - r = self.v3_create_token(self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project['id'])) - self.assertValidProjectScopedTokenResponse(r, is_admin_project=True) - v3_token = r.headers.get('X-Subject-Token') - r = self.get('/auth/tokens', headers={'X-Subject-Token': v3_token}) - self.assertValidProjectScopedTokenResponse(r, 
is_admin_project=True) - - def test_is_admin_token_by_names(self): - self.config_fixture.config( - group='resource', - admin_project_domain_name=self.domain['name'], - admin_project_name=self.project['name']) - r = self.v3_create_token(self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_domain_name=self.domain['name'], - project_name=self.project['name'])) - self.assertValidProjectScopedTokenResponse(r, is_admin_project=True) - v3_token = r.headers.get('X-Subject-Token') - r = self.get('/auth/tokens', headers={'X-Subject-Token': v3_token}) - self.assertValidProjectScopedTokenResponse(r, is_admin_project=True) - - def test_token_for_non_admin_project_is_not_admin(self): - self.config_fixture.config( - group='resource', - admin_project_domain_name=self.domain['name'], - admin_project_name=uuid.uuid4().hex) - r = self.v3_create_token(self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project['id'])) - self.assertValidProjectScopedTokenResponse(r, is_admin_project=False) - v3_token = r.headers.get('X-Subject-Token') - r = self.get('/auth/tokens', headers={'X-Subject-Token': v3_token}) - self.assertValidProjectScopedTokenResponse(r, is_admin_project=False) - - def test_token_for_non_admin_domain_same_project_name_is_not_admin(self): - self.config_fixture.config( - group='resource', - admin_project_domain_name=uuid.uuid4().hex, - admin_project_name=self.project['name']) - r = self.v3_create_token(self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project['id'])) - self.assertValidProjectScopedTokenResponse(r, is_admin_project=False) - v3_token = r.headers.get('X-Subject-Token') - r = self.get('/auth/tokens', headers={'X-Subject-Token': v3_token}) - self.assertValidProjectScopedTokenResponse(r, is_admin_project=False) - - def test_only_admin_project_set_acts_as_non_admin(self): - 
self.config_fixture.config( - group='resource', - admin_project_name=self.project['name']) - r = self.v3_create_token(self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project['id'])) - self.assertValidProjectScopedTokenResponse(r, is_admin_project=False) - v3_token = r.headers.get('X-Subject-Token') - r = self.get('/auth/tokens', headers={'X-Subject-Token': v3_token}) - self.assertValidProjectScopedTokenResponse(r, is_admin_project=False) - - def _create_role(self, domain_id=None): - """Call ``POST /roles``.""" - ref = unit.new_role_ref(domain_id=domain_id) - r = self.post('/roles', body={'role': ref}) - return self.assertValidRoleResponse(r, ref) - - def _create_implied_role(self, prior_id): - implied = self._create_role() - url = '/roles/%s/implies/%s' % (prior_id, implied['id']) - self.put(url, expected_status=http_client.CREATED) - return implied - - def _delete_implied_role(self, prior_role_id, implied_role_id): - url = '/roles/%s/implies/%s' % (prior_role_id, implied_role_id) - self.delete(url) - - def _get_scoped_token_roles(self, is_domain=False): - if is_domain: - v3_token = self.get_domain_scoped_token() - else: - v3_token = self.get_scoped_token() - - r = self.get('/auth/tokens', headers={'X-Subject-Token': v3_token}) - v3_token_data = r.result - token_roles = v3_token_data['token']['roles'] - return token_roles - - def _create_implied_role_shows_in_v3_token(self, is_domain): - token_roles = self._get_scoped_token_roles(is_domain) - self.assertEqual(1, len(token_roles)) - - prior = token_roles[0]['id'] - implied1 = self._create_implied_role(prior) - - token_roles = self._get_scoped_token_roles(is_domain) - self.assertEqual(2, len(token_roles)) - - implied2 = self._create_implied_role(prior) - token_roles = self._get_scoped_token_roles(is_domain) - self.assertEqual(3, len(token_roles)) - - token_role_ids = [role['id'] for role in token_roles] - self.assertIn(prior, token_role_ids) - 
self.assertIn(implied1['id'], token_role_ids) - self.assertIn(implied2['id'], token_role_ids) - - def test_create_implied_role_shows_in_v3_project_token(self): - # regardless of the default chosen, this should always - # test with the option set. - self.config_fixture.config(group='token', infer_roles=True) - self._create_implied_role_shows_in_v3_token(False) - - def test_create_implied_role_shows_in_v3_domain_token(self): - self.config_fixture.config(group='token', infer_roles=True) - self.assignment_api.create_grant(self.role['id'], - user_id=self.user['id'], - domain_id=self.domain['id']) - - self._create_implied_role_shows_in_v3_token(True) - - def test_group_assigned_implied_role_shows_in_v3_token(self): - self.config_fixture.config(group='token', infer_roles=True) - is_domain = False - token_roles = self._get_scoped_token_roles(is_domain) - self.assertEqual(1, len(token_roles)) - - new_role = self._create_role() - prior = new_role['id'] - - new_group_ref = unit.new_group_ref(domain_id=self.domain['id']) - new_group = self.identity_api.create_group(new_group_ref) - self.assignment_api.create_grant(prior, - group_id=new_group['id'], - project_id=self.project['id']) - - token_roles = self._get_scoped_token_roles(is_domain) - self.assertEqual(1, len(token_roles)) - - self.identity_api.add_user_to_group(self.user['id'], - new_group['id']) - - token_roles = self._get_scoped_token_roles(is_domain) - self.assertEqual(2, len(token_roles)) - - implied1 = self._create_implied_role(prior) - - token_roles = self._get_scoped_token_roles(is_domain) - self.assertEqual(3, len(token_roles)) - - implied2 = self._create_implied_role(prior) - token_roles = self._get_scoped_token_roles(is_domain) - self.assertEqual(4, len(token_roles)) - - token_role_ids = [role['id'] for role in token_roles] - self.assertIn(prior, token_role_ids) - self.assertIn(implied1['id'], token_role_ids) - self.assertIn(implied2['id'], token_role_ids) - - def 
test_multiple_implied_roles_show_in_v3_token(self): - self.config_fixture.config(group='token', infer_roles=True) - token_roles = self._get_scoped_token_roles() - self.assertEqual(1, len(token_roles)) - - prior = token_roles[0]['id'] - implied1 = self._create_implied_role(prior) - implied2 = self._create_implied_role(prior) - implied3 = self._create_implied_role(prior) - - token_roles = self._get_scoped_token_roles() - self.assertEqual(4, len(token_roles)) - - token_role_ids = [role['id'] for role in token_roles] - self.assertIn(prior, token_role_ids) - self.assertIn(implied1['id'], token_role_ids) - self.assertIn(implied2['id'], token_role_ids) - self.assertIn(implied3['id'], token_role_ids) - - def test_chained_implied_role_shows_in_v3_token(self): - self.config_fixture.config(group='token', infer_roles=True) - token_roles = self._get_scoped_token_roles() - self.assertEqual(1, len(token_roles)) - - prior = token_roles[0]['id'] - implied1 = self._create_implied_role(prior) - implied2 = self._create_implied_role(implied1['id']) - implied3 = self._create_implied_role(implied2['id']) - - token_roles = self._get_scoped_token_roles() - self.assertEqual(4, len(token_roles)) - - token_role_ids = [role['id'] for role in token_roles] - - self.assertIn(prior, token_role_ids) - self.assertIn(implied1['id'], token_role_ids) - self.assertIn(implied2['id'], token_role_ids) - self.assertIn(implied3['id'], token_role_ids) - - def test_implied_role_disabled_by_config(self): - self.config_fixture.config(group='token', infer_roles=False) - token_roles = self._get_scoped_token_roles() - self.assertEqual(1, len(token_roles)) - - prior = token_roles[0]['id'] - implied1 = self._create_implied_role(prior) - implied2 = self._create_implied_role(implied1['id']) - self._create_implied_role(implied2['id']) - - token_roles = self._get_scoped_token_roles() - self.assertEqual(1, len(token_roles)) - token_role_ids = [role['id'] for role in token_roles] - self.assertIn(prior, token_role_ids) - - 
def test_delete_implied_role_do_not_show_in_v3_token(self): - self.config_fixture.config(group='token', infer_roles=True) - token_roles = self._get_scoped_token_roles() - prior = token_roles[0]['id'] - implied = self._create_implied_role(prior) - - token_roles = self._get_scoped_token_roles() - self.assertEqual(2, len(token_roles)) - self._delete_implied_role(prior, implied['id']) - - token_roles = self._get_scoped_token_roles() - self.assertEqual(1, len(token_roles)) - - def test_unrelated_implied_roles_do_not_change_v3_token(self): - self.config_fixture.config(group='token', infer_roles=True) - token_roles = self._get_scoped_token_roles() - prior = token_roles[0]['id'] - implied = self._create_implied_role(prior) - - token_roles = self._get_scoped_token_roles() - self.assertEqual(2, len(token_roles)) - - unrelated = self._create_role() - url = '/roles/%s/implies/%s' % (unrelated['id'], implied['id']) - self.put(url, expected_status=http_client.CREATED) - - token_roles = self._get_scoped_token_roles() - self.assertEqual(2, len(token_roles)) - - self._delete_implied_role(unrelated['id'], implied['id']) - token_roles = self._get_scoped_token_roles() - self.assertEqual(2, len(token_roles)) - - def test_domain_scpecific_roles_do_not_show_v3_token(self): - self.config_fixture.config(group='token', infer_roles=True) - initial_token_roles = self._get_scoped_token_roles() - - new_role = self._create_role(domain_id=self.domain_id) - self.assignment_api.create_grant(new_role['id'], - user_id=self.user['id'], - project_id=self.project['id']) - implied = self._create_implied_role(new_role['id']) - - token_roles = self._get_scoped_token_roles() - self.assertEqual(len(initial_token_roles) + 1, len(token_roles)) - - # The implied role from the domain specific role should be in the - # token, but not the domain specific role itself. 
- token_role_ids = [role['id'] for role in token_roles] - self.assertIn(implied['id'], token_role_ids) - self.assertNotIn(new_role['id'], token_role_ids) - - def test_remove_all_roles_from_scope_result_in_404(self): - # create a new user - new_user = unit.create_user(self.identity_api, - domain_id=self.domain['id']) - - # give the new user a role on a project - path = '/projects/%s/users/%s/roles/%s' % ( - self.project['id'], new_user['id'], self.role['id']) - self.put(path=path) - - # authenticate as the new user and get a project-scoped token - auth_data = self.build_authentication_request( - user_id=new_user['id'], - password=new_user['password'], - project_id=self.project['id']) - subject_token_id = self.v3_create_token(auth_data).headers.get( - 'X-Subject-Token') - - # make sure the project-scoped token is valid - headers = {'X-Subject-Token': subject_token_id} - r = self.get('/auth/tokens', headers=headers) - self.assertValidProjectScopedTokenResponse(r) - - # remove the roles from the user for the given scope - path = '/projects/%s/users/%s/roles/%s' % ( - self.project['id'], new_user['id'], self.role['id']) - self.delete(path=path) - - # token validation should now result in 404 - self.get('/auth/tokens', headers=headers, - expected_status=http_client.NOT_FOUND) - - -class TokenDataTests(object): - """Test the data in specific token types.""" - - def test_unscoped_token_format(self): - # ensure the unscoped token response contains the appropriate data - r = self.get('/auth/tokens', headers=self.headers) - self.assertValidUnscopedTokenResponse(r) - - def test_domain_scoped_token_format(self): - # ensure the domain scoped token response contains the appropriate data - self.assignment_api.create_grant( - self.role['id'], - user_id=self.default_domain_user['id'], - domain_id=self.domain['id']) - - domain_scoped_token = self.get_requested_token( - self.build_authentication_request( - user_id=self.default_domain_user['id'], - 
password=self.default_domain_user['password'], - domain_id=self.domain['id']) - ) - self.headers['X-Subject-Token'] = domain_scoped_token - r = self.get('/auth/tokens', headers=self.headers) - self.assertValidDomainScopedTokenResponse(r) - - def test_project_scoped_token_format(self): - # ensure project scoped token responses contains the appropriate data - project_scoped_token = self.get_requested_token( - self.build_authentication_request( - user_id=self.default_domain_user['id'], - password=self.default_domain_user['password'], - project_id=self.default_domain_project['id']) - ) - self.headers['X-Subject-Token'] = project_scoped_token - r = self.get('/auth/tokens', headers=self.headers) - self.assertValidProjectScopedTokenResponse(r) - - def test_extra_data_in_unscoped_token_fails_validation(self): - # ensure unscoped token response contains the appropriate data - r = self.get('/auth/tokens', headers=self.headers) - - # populate the response result with some extra data - r.result['token'][u'extra'] = unicode(uuid.uuid4().hex) - self.assertRaises(exception.SchemaValidationError, - self.assertValidUnscopedTokenResponse, - r) - - def test_extra_data_in_domain_scoped_token_fails_validation(self): - # ensure domain scoped token response contains the appropriate data - self.assignment_api.create_grant( - self.role['id'], - user_id=self.default_domain_user['id'], - domain_id=self.domain['id']) - - domain_scoped_token = self.get_requested_token( - self.build_authentication_request( - user_id=self.default_domain_user['id'], - password=self.default_domain_user['password'], - domain_id=self.domain['id']) - ) - self.headers['X-Subject-Token'] = domain_scoped_token - r = self.get('/auth/tokens', headers=self.headers) - - # populate the response result with some extra data - r.result['token'][u'extra'] = unicode(uuid.uuid4().hex) - self.assertRaises(exception.SchemaValidationError, - self.assertValidDomainScopedTokenResponse, - r) - - def 
test_extra_data_in_project_scoped_token_fails_validation(self): - # ensure project scoped token responses contains the appropriate data - project_scoped_token = self.get_requested_token( - self.build_authentication_request( - user_id=self.default_domain_user['id'], - password=self.default_domain_user['password'], - project_id=self.default_domain_project['id']) - ) - self.headers['X-Subject-Token'] = project_scoped_token - resp = self.get('/auth/tokens', headers=self.headers) - - # populate the response result with some extra data - resp.result['token'][u'extra'] = unicode(uuid.uuid4().hex) - self.assertRaises(exception.SchemaValidationError, - self.assertValidProjectScopedTokenResponse, - resp) - - -class AllowRescopeScopedTokenDisabledTests(test_v3.RestfulTestCase): - def config_overrides(self): - super(AllowRescopeScopedTokenDisabledTests, self).config_overrides() - self.config_fixture.config( - group='token', - allow_rescope_scoped_token=False) - - def test_rescoping_v3_to_v3_disabled(self): - self.v3_create_token( - self.build_authentication_request( - token=self.get_scoped_token(), - project_id=self.project_id), - expected_status=http_client.FORBIDDEN) - - def _v2_token(self): - body = { - 'auth': { - "tenantId": self.default_domain_project['id'], - 'passwordCredentials': { - 'userId': self.default_domain_user['id'], - 'password': self.default_domain_user['password'] - } - }} - resp = self.admin_request(path='/v2.0/tokens', - method='POST', - body=body) - v2_token_data = resp.result - return v2_token_data - - def _v2_token_from_token(self, token): - body = { - 'auth': { - "tenantId": self.project['id'], - "token": token - }} - self.admin_request(path='/v2.0/tokens', - method='POST', - body=body, - expected_status=http_client.FORBIDDEN) - - def test_rescoping_v2_to_v3_disabled(self): - token = self._v2_token() - self.v3_create_token( - self.build_authentication_request( - token=token['access']['token']['id'], - project_id=self.project_id), - 
expected_status=http_client.FORBIDDEN) - - def test_rescoping_v3_to_v2_disabled(self): - token = {'id': self.get_scoped_token()} - self._v2_token_from_token(token) - - def test_rescoping_v2_to_v2_disabled(self): - token = self._v2_token() - self._v2_token_from_token(token['access']['token']) - - def test_rescoped_domain_token_disabled(self): - - self.domainA = unit.new_domain_ref() - self.resource_api.create_domain(self.domainA['id'], self.domainA) - self.assignment_api.create_grant(self.role['id'], - user_id=self.user['id'], - domain_id=self.domainA['id']) - unscoped_token = self.get_requested_token( - self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'])) - # Get a domain-scoped token from the unscoped token - domain_scoped_token = self.get_requested_token( - self.build_authentication_request( - token=unscoped_token, - domain_id=self.domainA['id'])) - self.v3_create_token( - self.build_authentication_request( - token=domain_scoped_token, - project_id=self.project_id), - expected_status=http_client.FORBIDDEN) - - -class TestPKITokenAPIs(test_v3.RestfulTestCase, TokenAPITests, TokenDataTests): - def config_overrides(self): - super(TestPKITokenAPIs, self).config_overrides() - self.config_fixture.config(group='token', provider='pki') - - def setUp(self): - super(TestPKITokenAPIs, self).setUp() - self.doSetUp() - - def verify_token(self, *args, **kwargs): - return cms.verify_token(*args, **kwargs) - - def test_v3_token_id(self): - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password']) - resp = self.v3_create_token(auth_data) - token_data = resp.result - token_id = resp.headers.get('X-Subject-Token') - self.assertIn('expires_at', token_data['token']) - - decoded_token = self.verify_token(token_id, CONF.signing.certfile, - CONF.signing.ca_certs) - decoded_token_dict = json.loads(decoded_token) - - token_resp_dict = json.loads(resp.body) - - self.assertEqual(decoded_token_dict, 
token_resp_dict) - # should be able to validate hash PKI token as well - hash_token_id = cms.cms_hash_token(token_id) - headers = {'X-Subject-Token': hash_token_id} - resp = self.get('/auth/tokens', headers=headers) - expected_token_data = resp.result - self.assertDictEqual(expected_token_data, token_data) - - def test_v3_v2_hashed_pki_token_intermix(self): - auth_data = self.build_authentication_request( - user_id=self.default_domain_user['id'], - password=self.default_domain_user['password'], - project_id=self.default_domain_project['id']) - resp = self.v3_create_token(auth_data) - token_data = resp.result - token = resp.headers.get('X-Subject-Token') - - # should be able to validate a hash PKI token in v2 too - token = cms.cms_hash_token(token) - path = '/v2.0/tokens/%s' % (token) - resp = self.admin_request(path=path, - token=self.get_admin_token(), - method='GET') - v2_token = resp.result - self.assertEqual(v2_token['access']['user']['id'], - token_data['token']['user']['id']) - # v2 token time has not fraction of second precision so - # just need to make sure the non fraction part agrees - self.assertIn(v2_token['access']['token']['expires'][:-1], - token_data['token']['expires_at']) - self.assertEqual(v2_token['access']['user']['roles'][0]['name'], - token_data['token']['roles'][0]['name']) - - -class TestPKIZTokenAPIs(TestPKITokenAPIs): - def config_overrides(self): - super(TestPKIZTokenAPIs, self).config_overrides() - self.config_fixture.config(group='token', provider='pkiz') - - def verify_token(self, *args, **kwargs): - return cms.pkiz_verify(*args, **kwargs) - - -class TestUUIDTokenAPIs(test_v3.RestfulTestCase, TokenAPITests, - TokenDataTests): - def config_overrides(self): - super(TestUUIDTokenAPIs, self).config_overrides() - self.config_fixture.config(group='token', provider='uuid') - - def setUp(self): - super(TestUUIDTokenAPIs, self).setUp() - self.doSetUp() - - def test_v3_token_id(self): - auth_data = self.build_authentication_request( - 
user_id=self.user['id'], - password=self.user['password']) - resp = self.v3_create_token(auth_data) - token_data = resp.result - token_id = resp.headers.get('X-Subject-Token') - self.assertIn('expires_at', token_data['token']) - self.assertFalse(cms.is_asn1_token(token_id)) - - -class TestFernetTokenAPIs(test_v3.RestfulTestCase, TokenAPITests, - TokenDataTests): - def config_overrides(self): - super(TestFernetTokenAPIs, self).config_overrides() - self.config_fixture.config(group='token', provider='fernet') - self.useFixture(ksfixtures.KeyRepository(self.config_fixture)) - - def setUp(self): - super(TestFernetTokenAPIs, self).setUp() - self.doSetUp() - - def _make_auth_request(self, auth_data): - token = super(TestFernetTokenAPIs, self)._make_auth_request(auth_data) - self.assertLess(len(token), 255) - return token - - def test_validate_tampered_unscoped_token_fails(self): - unscoped_token = self._get_unscoped_token() - tampered_token = (unscoped_token[:50] + uuid.uuid4().hex + - unscoped_token[50 + 32:]) - self._validate_token(tampered_token, - expected_status=http_client.NOT_FOUND) - - def test_validate_tampered_project_scoped_token_fails(self): - project_scoped_token = self._get_project_scoped_token() - tampered_token = (project_scoped_token[:50] + uuid.uuid4().hex + - project_scoped_token[50 + 32:]) - self._validate_token(tampered_token, - expected_status=http_client.NOT_FOUND) - - def test_validate_tampered_trust_scoped_token_fails(self): - trustee_user, trust = self._create_trust() - trust_scoped_token = self._get_trust_scoped_token(trustee_user, trust) - # Get a trust scoped token - tampered_token = (trust_scoped_token[:50] + uuid.uuid4().hex + - trust_scoped_token[50 + 32:]) - self._validate_token(tampered_token, - expected_status=http_client.NOT_FOUND) - - -class TestTokenRevokeSelfAndAdmin(test_v3.RestfulTestCase): - """Test token revoke using v3 Identity API by token owner and admin.""" - - def load_sample_data(self): - """Load Sample Data for Test Cases. 
- - Two domains, domainA and domainB - Two users in domainA, userNormalA and userAdminA - One user in domainB, userAdminB - - """ - super(TestTokenRevokeSelfAndAdmin, self).load_sample_data() - # DomainA setup - self.domainA = unit.new_domain_ref() - self.resource_api.create_domain(self.domainA['id'], self.domainA) - - self.userAdminA = unit.create_user(self.identity_api, - domain_id=self.domainA['id']) - - self.userNormalA = unit.create_user(self.identity_api, - domain_id=self.domainA['id']) - - self.assignment_api.create_grant(self.role['id'], - user_id=self.userAdminA['id'], - domain_id=self.domainA['id']) - - def _policy_fixture(self): - return ksfixtures.Policy(unit.dirs.etc('policy.v3cloudsample.json'), - self.config_fixture) - - def test_user_revokes_own_token(self): - user_token = self.get_requested_token( - self.build_authentication_request( - user_id=self.userNormalA['id'], - password=self.userNormalA['password'], - user_domain_id=self.domainA['id'])) - self.assertNotEmpty(user_token) - headers = {'X-Subject-Token': user_token} - - adminA_token = self.get_requested_token( - self.build_authentication_request( - user_id=self.userAdminA['id'], - password=self.userAdminA['password'], - domain_name=self.domainA['name'])) - - self.head('/auth/tokens', headers=headers, - expected_status=http_client.OK, - token=adminA_token) - self.head('/auth/tokens', headers=headers, - expected_status=http_client.OK, - token=user_token) - self.delete('/auth/tokens', headers=headers, - token=user_token) - # invalid X-Auth-Token and invalid X-Subject-Token - self.head('/auth/tokens', headers=headers, - expected_status=http_client.UNAUTHORIZED, - token=user_token) - # invalid X-Auth-Token and invalid X-Subject-Token - self.delete('/auth/tokens', headers=headers, - expected_status=http_client.UNAUTHORIZED, - token=user_token) - # valid X-Auth-Token and invalid X-Subject-Token - self.delete('/auth/tokens', headers=headers, - expected_status=http_client.NOT_FOUND, - 
token=adminA_token) - # valid X-Auth-Token and invalid X-Subject-Token - self.head('/auth/tokens', headers=headers, - expected_status=http_client.NOT_FOUND, - token=adminA_token) - - def test_adminA_revokes_userA_token(self): - user_token = self.get_requested_token( - self.build_authentication_request( - user_id=self.userNormalA['id'], - password=self.userNormalA['password'], - user_domain_id=self.domainA['id'])) - self.assertNotEmpty(user_token) - headers = {'X-Subject-Token': user_token} - - adminA_token = self.get_requested_token( - self.build_authentication_request( - user_id=self.userAdminA['id'], - password=self.userAdminA['password'], - domain_name=self.domainA['name'])) - - self.head('/auth/tokens', headers=headers, - expected_status=http_client.OK, - token=adminA_token) - self.head('/auth/tokens', headers=headers, - expected_status=http_client.OK, - token=user_token) - self.delete('/auth/tokens', headers=headers, - token=adminA_token) - # invalid X-Auth-Token and invalid X-Subject-Token - self.head('/auth/tokens', headers=headers, - expected_status=http_client.UNAUTHORIZED, - token=user_token) - # valid X-Auth-Token and invalid X-Subject-Token - self.delete('/auth/tokens', headers=headers, - expected_status=http_client.NOT_FOUND, - token=adminA_token) - # valid X-Auth-Token and invalid X-Subject-Token - self.head('/auth/tokens', headers=headers, - expected_status=http_client.NOT_FOUND, - token=adminA_token) - - def test_adminB_fails_revoking_userA_token(self): - # DomainB setup - self.domainB = unit.new_domain_ref() - self.resource_api.create_domain(self.domainB['id'], self.domainB) - userAdminB = unit.create_user(self.identity_api, - domain_id=self.domainB['id']) - self.assignment_api.create_grant(self.role['id'], - user_id=userAdminB['id'], - domain_id=self.domainB['id']) - - user_token = self.get_requested_token( - self.build_authentication_request( - user_id=self.userNormalA['id'], - password=self.userNormalA['password'], - 
user_domain_id=self.domainA['id'])) - headers = {'X-Subject-Token': user_token} - - adminB_token = self.get_requested_token( - self.build_authentication_request( - user_id=userAdminB['id'], - password=userAdminB['password'], - domain_name=self.domainB['name'])) - - self.head('/auth/tokens', headers=headers, - expected_status=http_client.FORBIDDEN, - token=adminB_token) - self.delete('/auth/tokens', headers=headers, - expected_status=http_client.FORBIDDEN, - token=adminB_token) - - -class TestTokenRevokeById(test_v3.RestfulTestCase): - """Test token revocation on the v3 Identity API.""" - - def config_overrides(self): - super(TestTokenRevokeById, self).config_overrides() - self.config_fixture.config( - group='token', - provider='pki', - revoke_by_id=False) - - def setUp(self): - """Setup for Token Revoking Test Cases. - - As well as the usual housekeeping, create a set of domains, - users, groups, roles and projects for the subsequent tests: - - - Two domains: A & B - - Three users (1, 2 and 3) - - Three groups (1, 2 and 3) - - Two roles (1 and 2) - - DomainA owns user1, domainB owns user2 and user3 - - DomainA owns group1 and group2, domainB owns group3 - - User1 and user2 are members of group1 - - User3 is a member of group2 - - Two projects: A & B, both in domainA - - Group1 has role1 on Project A and B, meaning that user1 and user2 - will get these roles by virtue of membership - - User1, 2 and 3 have role1 assigned to projectA - - Group1 has role1 on Project A and B, meaning that user1 and user2 - will get role1 (duplicated) by virtue of membership - - User1 has role2 assigned to domainA - - """ - super(TestTokenRevokeById, self).setUp() - - # Start by creating a couple of domains and projects - self.domainA = unit.new_domain_ref() - self.resource_api.create_domain(self.domainA['id'], self.domainA) - self.domainB = unit.new_domain_ref() - self.resource_api.create_domain(self.domainB['id'], self.domainB) - self.projectA = 
unit.new_project_ref(domain_id=self.domainA['id']) - self.resource_api.create_project(self.projectA['id'], self.projectA) - self.projectB = unit.new_project_ref(domain_id=self.domainA['id']) - self.resource_api.create_project(self.projectB['id'], self.projectB) - - # Now create some users - self.user1 = unit.create_user(self.identity_api, - domain_id=self.domainA['id']) - - self.user2 = unit.create_user(self.identity_api, - domain_id=self.domainB['id']) - - self.user3 = unit.create_user(self.identity_api, - domain_id=self.domainB['id']) - - self.group1 = unit.new_group_ref(domain_id=self.domainA['id']) - self.group1 = self.identity_api.create_group(self.group1) - - self.group2 = unit.new_group_ref(domain_id=self.domainA['id']) - self.group2 = self.identity_api.create_group(self.group2) - - self.group3 = unit.new_group_ref(domain_id=self.domainB['id']) - self.group3 = self.identity_api.create_group(self.group3) - - self.identity_api.add_user_to_group(self.user1['id'], - self.group1['id']) - self.identity_api.add_user_to_group(self.user2['id'], - self.group1['id']) - self.identity_api.add_user_to_group(self.user3['id'], - self.group2['id']) - - self.role1 = unit.new_role_ref() - self.role_api.create_role(self.role1['id'], self.role1) - self.role2 = unit.new_role_ref() - self.role_api.create_role(self.role2['id'], self.role2) - - self.assignment_api.create_grant(self.role2['id'], - user_id=self.user1['id'], - domain_id=self.domainA['id']) - self.assignment_api.create_grant(self.role1['id'], - user_id=self.user1['id'], - project_id=self.projectA['id']) - self.assignment_api.create_grant(self.role1['id'], - user_id=self.user2['id'], - project_id=self.projectA['id']) - self.assignment_api.create_grant(self.role1['id'], - user_id=self.user3['id'], - project_id=self.projectA['id']) - self.assignment_api.create_grant(self.role1['id'], - group_id=self.group1['id'], - project_id=self.projectA['id']) - - def test_unscoped_token_remains_valid_after_role_assignment(self): - 
unscoped_token = self.get_requested_token( - self.build_authentication_request( - user_id=self.user1['id'], - password=self.user1['password'])) - - scoped_token = self.get_requested_token( - self.build_authentication_request( - token=unscoped_token, - project_id=self.projectA['id'])) - - # confirm both tokens are valid - self.head('/auth/tokens', - headers={'X-Subject-Token': unscoped_token}, - expected_status=http_client.OK) - self.head('/auth/tokens', - headers={'X-Subject-Token': scoped_token}, - expected_status=http_client.OK) - - # create a new role - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - - # assign a new role - self.put( - '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % { - 'project_id': self.projectA['id'], - 'user_id': self.user1['id'], - 'role_id': role['id']}) - - # both tokens should remain valid - self.head('/auth/tokens', - headers={'X-Subject-Token': unscoped_token}, - expected_status=http_client.OK) - self.head('/auth/tokens', - headers={'X-Subject-Token': scoped_token}, - expected_status=http_client.OK) - - def test_deleting_user_grant_revokes_token(self): - """Test deleting a user grant revokes token. 
- - Test Plan: - - - Get a token for user1, scoped to ProjectA - - Delete the grant user1 has on ProjectA - - Check token is no longer valid - - """ - auth_data = self.build_authentication_request( - user_id=self.user1['id'], - password=self.user1['password'], - project_id=self.projectA['id']) - token = self.get_requested_token(auth_data) - # Confirm token is valid - self.head('/auth/tokens', - headers={'X-Subject-Token': token}, - expected_status=http_client.OK) - # Delete the grant, which should invalidate the token - grant_url = ( - '/projects/%(project_id)s/users/%(user_id)s/' - 'roles/%(role_id)s' % { - 'project_id': self.projectA['id'], - 'user_id': self.user1['id'], - 'role_id': self.role1['id']}) - self.delete(grant_url) - self.head('/auth/tokens', - headers={'X-Subject-Token': token}, - expected_status=http_client.NOT_FOUND) - - def role_data_fixtures(self): - self.projectC = unit.new_project_ref(domain_id=self.domainA['id']) - self.resource_api.create_project(self.projectC['id'], self.projectC) - self.user4 = unit.create_user(self.identity_api, - domain_id=self.domainB['id']) - self.user5 = unit.create_user(self.identity_api, - domain_id=self.domainA['id']) - self.user6 = unit.create_user(self.identity_api, - domain_id=self.domainA['id']) - self.identity_api.add_user_to_group(self.user5['id'], - self.group1['id']) - self.assignment_api.create_grant(self.role1['id'], - group_id=self.group1['id'], - project_id=self.projectB['id']) - self.assignment_api.create_grant(self.role2['id'], - user_id=self.user4['id'], - project_id=self.projectC['id']) - self.assignment_api.create_grant(self.role1['id'], - user_id=self.user6['id'], - project_id=self.projectA['id']) - self.assignment_api.create_grant(self.role1['id'], - user_id=self.user6['id'], - domain_id=self.domainA['id']) - - def test_deleting_role_revokes_token(self): - """Test deleting a role revokes token. 
- - Add some additional test data, namely: - - - A third project (project C) - - Three additional users - user4 owned by domainB and user5 and 6 owned - by domainA (different domain ownership should not affect the test - results, just provided to broaden test coverage) - - User5 is a member of group1 - - Group1 gets an additional assignment - role1 on projectB as well as - its existing role1 on projectA - - User4 has role2 on Project C - - User6 has role1 on projectA and domainA - - This allows us to create 5 tokens by virtue of different types of - role assignment: - - user1, scoped to ProjectA by virtue of user role1 assignment - - user5, scoped to ProjectB by virtue of group role1 assignment - - user4, scoped to ProjectC by virtue of user role2 assignment - - user6, scoped to ProjectA by virtue of user role1 assignment - - user6, scoped to DomainA by virtue of user role1 assignment - - role1 is then deleted - - Check the tokens on Project A and B, and DomainA are revoked, but not - the one for Project C - - """ - self.role_data_fixtures() - - # Now we are ready to start issuing requests - auth_data = self.build_authentication_request( - user_id=self.user1['id'], - password=self.user1['password'], - project_id=self.projectA['id']) - tokenA = self.get_requested_token(auth_data) - auth_data = self.build_authentication_request( - user_id=self.user5['id'], - password=self.user5['password'], - project_id=self.projectB['id']) - tokenB = self.get_requested_token(auth_data) - auth_data = self.build_authentication_request( - user_id=self.user4['id'], - password=self.user4['password'], - project_id=self.projectC['id']) - tokenC = self.get_requested_token(auth_data) - auth_data = self.build_authentication_request( - user_id=self.user6['id'], - password=self.user6['password'], - project_id=self.projectA['id']) - tokenD = self.get_requested_token(auth_data) - auth_data = self.build_authentication_request( - user_id=self.user6['id'], - password=self.user6['password'], - 
domain_id=self.domainA['id']) - tokenE = self.get_requested_token(auth_data) - # Confirm tokens are valid - self.head('/auth/tokens', - headers={'X-Subject-Token': tokenA}, - expected_status=http_client.OK) - self.head('/auth/tokens', - headers={'X-Subject-Token': tokenB}, - expected_status=http_client.OK) - self.head('/auth/tokens', - headers={'X-Subject-Token': tokenC}, - expected_status=http_client.OK) - self.head('/auth/tokens', - headers={'X-Subject-Token': tokenD}, - expected_status=http_client.OK) - self.head('/auth/tokens', - headers={'X-Subject-Token': tokenE}, - expected_status=http_client.OK) - - # Delete the role, which should invalidate the tokens - role_url = '/roles/%s' % self.role1['id'] - self.delete(role_url) - - # Check the tokens that used role1 is invalid - self.head('/auth/tokens', - headers={'X-Subject-Token': tokenA}, - expected_status=http_client.NOT_FOUND) - self.head('/auth/tokens', - headers={'X-Subject-Token': tokenB}, - expected_status=http_client.NOT_FOUND) - self.head('/auth/tokens', - headers={'X-Subject-Token': tokenD}, - expected_status=http_client.NOT_FOUND) - self.head('/auth/tokens', - headers={'X-Subject-Token': tokenE}, - expected_status=http_client.NOT_FOUND) - - # ...but the one using role2 is still valid - self.head('/auth/tokens', - headers={'X-Subject-Token': tokenC}, - expected_status=http_client.OK) - - def test_domain_user_role_assignment_maintains_token(self): - """Test user-domain role assignment maintains existing token. 
- - Test Plan: - - - Get a token for user1, scoped to ProjectA - - Create a grant for user1 on DomainB - - Check token is still valid - - """ - auth_data = self.build_authentication_request( - user_id=self.user1['id'], - password=self.user1['password'], - project_id=self.projectA['id']) - token = self.get_requested_token(auth_data) - # Confirm token is valid - self.head('/auth/tokens', - headers={'X-Subject-Token': token}, - expected_status=http_client.OK) - # Assign a role, which should not affect the token - grant_url = ( - '/domains/%(domain_id)s/users/%(user_id)s/' - 'roles/%(role_id)s' % { - 'domain_id': self.domainB['id'], - 'user_id': self.user1['id'], - 'role_id': self.role1['id']}) - self.put(grant_url) - self.head('/auth/tokens', - headers={'X-Subject-Token': token}, - expected_status=http_client.OK) - - def test_disabling_project_revokes_token(self): - token = self.get_requested_token( - self.build_authentication_request( - user_id=self.user3['id'], - password=self.user3['password'], - project_id=self.projectA['id'])) - - # confirm token is valid - self.head('/auth/tokens', - headers={'X-Subject-Token': token}, - expected_status=http_client.OK) - - # disable the project, which should invalidate the token - self.patch( - '/projects/%(project_id)s' % {'project_id': self.projectA['id']}, - body={'project': {'enabled': False}}) - - # user should no longer have access to the project - self.head('/auth/tokens', - headers={'X-Subject-Token': token}, - expected_status=http_client.NOT_FOUND) - self.v3_create_token( - self.build_authentication_request( - user_id=self.user3['id'], - password=self.user3['password'], - project_id=self.projectA['id']), - expected_status=http_client.UNAUTHORIZED) - - def test_deleting_project_revokes_token(self): - token = self.get_requested_token( - self.build_authentication_request( - user_id=self.user3['id'], - password=self.user3['password'], - project_id=self.projectA['id'])) - - # confirm token is valid - 
self.head('/auth/tokens', - headers={'X-Subject-Token': token}, - expected_status=http_client.OK) - - # delete the project, which should invalidate the token - self.delete( - '/projects/%(project_id)s' % {'project_id': self.projectA['id']}) - - # user should no longer have access to the project - self.head('/auth/tokens', - headers={'X-Subject-Token': token}, - expected_status=http_client.NOT_FOUND) - self.v3_create_token( - self.build_authentication_request( - user_id=self.user3['id'], - password=self.user3['password'], - project_id=self.projectA['id']), - expected_status=http_client.UNAUTHORIZED) - - def test_deleting_group_grant_revokes_tokens(self): - """Test deleting a group grant revokes tokens. - - Test Plan: - - - Get a token for user1, scoped to ProjectA - - Get a token for user2, scoped to ProjectA - - Get a token for user3, scoped to ProjectA - - Delete the grant group1 has on ProjectA - - Check tokens for user1 & user2 are no longer valid, - since user1 and user2 are members of group1 - - Check token for user3 is invalid too - - """ - auth_data = self.build_authentication_request( - user_id=self.user1['id'], - password=self.user1['password'], - project_id=self.projectA['id']) - token1 = self.get_requested_token(auth_data) - auth_data = self.build_authentication_request( - user_id=self.user2['id'], - password=self.user2['password'], - project_id=self.projectA['id']) - token2 = self.get_requested_token(auth_data) - auth_data = self.build_authentication_request( - user_id=self.user3['id'], - password=self.user3['password'], - project_id=self.projectA['id']) - token3 = self.get_requested_token(auth_data) - # Confirm tokens are valid - self.head('/auth/tokens', - headers={'X-Subject-Token': token1}, - expected_status=http_client.OK) - self.head('/auth/tokens', - headers={'X-Subject-Token': token2}, - expected_status=http_client.OK) - self.head('/auth/tokens', - headers={'X-Subject-Token': token3}, - expected_status=http_client.OK) - # Delete the group grant, 
which should invalidate the - # tokens for user1 and user2 - grant_url = ( - '/projects/%(project_id)s/groups/%(group_id)s/' - 'roles/%(role_id)s' % { - 'project_id': self.projectA['id'], - 'group_id': self.group1['id'], - 'role_id': self.role1['id']}) - self.delete(grant_url) - self.head('/auth/tokens', - headers={'X-Subject-Token': token1}, - expected_status=http_client.NOT_FOUND) - self.head('/auth/tokens', - headers={'X-Subject-Token': token2}, - expected_status=http_client.NOT_FOUND) - # But user3's token should be invalid too as revocation is done for - # scope role & project - self.head('/auth/tokens', - headers={'X-Subject-Token': token3}, - expected_status=http_client.NOT_FOUND) - - def test_domain_group_role_assignment_maintains_token(self): - """Test domain-group role assignment maintains existing token. - - Test Plan: - - - Get a token for user1, scoped to ProjectA - - Create a grant for group1 on DomainB - - Check token is still longer valid - - """ - auth_data = self.build_authentication_request( - user_id=self.user1['id'], - password=self.user1['password'], - project_id=self.projectA['id']) - token = self.get_requested_token(auth_data) - # Confirm token is valid - self.head('/auth/tokens', - headers={'X-Subject-Token': token}, - expected_status=http_client.OK) - # Delete the grant, which should invalidate the token - grant_url = ( - '/domains/%(domain_id)s/groups/%(group_id)s/' - 'roles/%(role_id)s' % { - 'domain_id': self.domainB['id'], - 'group_id': self.group1['id'], - 'role_id': self.role1['id']}) - self.put(grant_url) - self.head('/auth/tokens', - headers={'X-Subject-Token': token}, - expected_status=http_client.OK) - - def test_group_membership_changes_revokes_token(self): - """Test add/removal to/from group revokes token. 
- - Test Plan: - - - Get a token for user1, scoped to ProjectA - - Get a token for user2, scoped to ProjectA - - Remove user1 from group1 - - Check token for user1 is no longer valid - - Check token for user2 is still valid, even though - user2 is also part of group1 - - Add user2 to group2 - - Check token for user2 is now no longer valid - - """ - auth_data = self.build_authentication_request( - user_id=self.user1['id'], - password=self.user1['password'], - project_id=self.projectA['id']) - token1 = self.get_requested_token(auth_data) - auth_data = self.build_authentication_request( - user_id=self.user2['id'], - password=self.user2['password'], - project_id=self.projectA['id']) - token2 = self.get_requested_token(auth_data) - # Confirm tokens are valid - self.head('/auth/tokens', - headers={'X-Subject-Token': token1}, - expected_status=http_client.OK) - self.head('/auth/tokens', - headers={'X-Subject-Token': token2}, - expected_status=http_client.OK) - # Remove user1 from group1, which should invalidate - # the token - self.delete('/groups/%(group_id)s/users/%(user_id)s' % { - 'group_id': self.group1['id'], - 'user_id': self.user1['id']}) - self.head('/auth/tokens', - headers={'X-Subject-Token': token1}, - expected_status=http_client.NOT_FOUND) - # But user2's token should still be valid - self.head('/auth/tokens', - headers={'X-Subject-Token': token2}, - expected_status=http_client.OK) - # Adding user2 to a group should not invalidate token - self.put('/groups/%(group_id)s/users/%(user_id)s' % { - 'group_id': self.group2['id'], - 'user_id': self.user2['id']}) - self.head('/auth/tokens', - headers={'X-Subject-Token': token2}, - expected_status=http_client.OK) - - def test_removing_role_assignment_does_not_affect_other_users(self): - """Revoking a role from one user should not affect other users.""" - # This group grant is not needed for the test - self.delete( - '/projects/%(project_id)s/groups/%(group_id)s/roles/%(role_id)s' % - {'project_id': 
self.projectA['id'], - 'group_id': self.group1['id'], - 'role_id': self.role1['id']}) - - user1_token = self.get_requested_token( - self.build_authentication_request( - user_id=self.user1['id'], - password=self.user1['password'], - project_id=self.projectA['id'])) - - user3_token = self.get_requested_token( - self.build_authentication_request( - user_id=self.user3['id'], - password=self.user3['password'], - project_id=self.projectA['id'])) - - # delete relationships between user1 and projectA from setUp - self.delete( - '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % { - 'project_id': self.projectA['id'], - 'user_id': self.user1['id'], - 'role_id': self.role1['id']}) - # authorization for the first user should now fail - self.head('/auth/tokens', - headers={'X-Subject-Token': user1_token}, - expected_status=http_client.NOT_FOUND) - self.v3_create_token( - self.build_authentication_request( - user_id=self.user1['id'], - password=self.user1['password'], - project_id=self.projectA['id']), - expected_status=http_client.UNAUTHORIZED) - - # authorization for the second user should still succeed - self.head('/auth/tokens', - headers={'X-Subject-Token': user3_token}, - expected_status=http_client.OK) - self.v3_create_token( - self.build_authentication_request( - user_id=self.user3['id'], - password=self.user3['password'], - project_id=self.projectA['id'])) - - def test_deleting_project_deletes_grants(self): - # This is to make it a little bit more pretty with PEP8 - role_path = ('/projects/%(project_id)s/users/%(user_id)s/' - 'roles/%(role_id)s') - role_path = role_path % {'user_id': self.user['id'], - 'project_id': self.projectA['id'], - 'role_id': self.role['id']} - - # grant the user a role on the project - self.put(role_path) - - # delete the project, which should remove the roles - self.delete( - '/projects/%(project_id)s' % {'project_id': self.projectA['id']}) - - # Make sure that we get a 404 Not Found when heading that role. 
- self.head(role_path, expected_status=http_client.NOT_FOUND) - - def get_v2_token(self, token=None, project_id=None): - body = {'auth': {}, } - - if token: - body['auth']['token'] = { - 'id': token - } - else: - body['auth']['passwordCredentials'] = { - 'username': self.default_domain_user['name'], - 'password': self.default_domain_user['password'], - } - - if project_id: - body['auth']['tenantId'] = project_id - - r = self.admin_request(method='POST', path='/v2.0/tokens', body=body) - return r.json_body['access']['token']['id'] - - def test_revoke_v2_token_no_check(self): - # Test that a V2 token can be revoked without validating it first. - - token = self.get_v2_token() - - self.delete('/auth/tokens', - headers={'X-Subject-Token': token}) - - self.head('/auth/tokens', - headers={'X-Subject-Token': token}, - expected_status=http_client.NOT_FOUND) - - def test_revoke_token_from_token(self): - # Test that a scoped token can be requested from an unscoped token, - # the scoped token can be revoked, and the unscoped token remains - # valid. - - unscoped_token = self.get_requested_token( - self.build_authentication_request( - user_id=self.user1['id'], - password=self.user1['password'])) - - # Get a project-scoped token from the unscoped token - project_scoped_token = self.get_requested_token( - self.build_authentication_request( - token=unscoped_token, - project_id=self.projectA['id'])) - - # Get a domain-scoped token from the unscoped token - domain_scoped_token = self.get_requested_token( - self.build_authentication_request( - token=unscoped_token, - domain_id=self.domainA['id'])) - - # revoke the project-scoped token. - self.delete('/auth/tokens', - headers={'X-Subject-Token': project_scoped_token}) - - # The project-scoped token is invalidated. - self.head('/auth/tokens', - headers={'X-Subject-Token': project_scoped_token}, - expected_status=http_client.NOT_FOUND) - - # The unscoped token should still be valid. 
- self.head('/auth/tokens', - headers={'X-Subject-Token': unscoped_token}, - expected_status=http_client.OK) - - # The domain-scoped token should still be valid. - self.head('/auth/tokens', - headers={'X-Subject-Token': domain_scoped_token}, - expected_status=http_client.OK) - - # revoke the domain-scoped token. - self.delete('/auth/tokens', - headers={'X-Subject-Token': domain_scoped_token}) - - # The domain-scoped token is invalid. - self.head('/auth/tokens', - headers={'X-Subject-Token': domain_scoped_token}, - expected_status=http_client.NOT_FOUND) - - # The unscoped token should still be valid. - self.head('/auth/tokens', - headers={'X-Subject-Token': unscoped_token}, - expected_status=http_client.OK) - - def test_revoke_token_from_token_v2(self): - # Test that a scoped token can be requested from an unscoped token, - # the scoped token can be revoked, and the unscoped token remains - # valid. - - unscoped_token = self.get_v2_token() - - # Get a project-scoped token from the unscoped token - project_scoped_token = self.get_v2_token( - token=unscoped_token, project_id=self.default_domain_project['id']) - - # revoke the project-scoped token. - self.delete('/auth/tokens', - headers={'X-Subject-Token': project_scoped_token}) - - # The project-scoped token is invalidated. - self.head('/auth/tokens', - headers={'X-Subject-Token': project_scoped_token}, - expected_status=http_client.NOT_FOUND) - - # The unscoped token should still be valid. - self.head('/auth/tokens', - headers={'X-Subject-Token': unscoped_token}, - expected_status=http_client.OK) - - -class TestTokenRevokeByAssignment(TestTokenRevokeById): - - def config_overrides(self): - super(TestTokenRevokeById, self).config_overrides() - self.config_fixture.config( - group='token', - provider='uuid', - revoke_by_id=True) - - def test_removing_role_assignment_keeps_other_project_token_groups(self): - """Test assignment isolation. 
- - Revoking a group role from one project should not invalidate all group - users' tokens - """ - self.assignment_api.create_grant(self.role1['id'], - group_id=self.group1['id'], - project_id=self.projectB['id']) - - project_token = self.get_requested_token( - self.build_authentication_request( - user_id=self.user1['id'], - password=self.user1['password'], - project_id=self.projectB['id'])) - - other_project_token = self.get_requested_token( - self.build_authentication_request( - user_id=self.user1['id'], - password=self.user1['password'], - project_id=self.projectA['id'])) - - self.assignment_api.delete_grant(self.role1['id'], - group_id=self.group1['id'], - project_id=self.projectB['id']) - - # authorization for the projectA should still succeed - self.head('/auth/tokens', - headers={'X-Subject-Token': other_project_token}, - expected_status=http_client.OK) - # while token for the projectB should not - self.head('/auth/tokens', - headers={'X-Subject-Token': project_token}, - expected_status=http_client.NOT_FOUND) - revoked_tokens = [ - t['id'] for t in self.token_provider_api.list_revoked_tokens()] - # token is in token revocation list - self.assertIn(project_token, revoked_tokens) - - -class RevokeContribTests(test_v3.RestfulTestCase): - - @mock.patch.object(versionutils, 'report_deprecated_feature') - def test_exception_happens(self, mock_deprecator): - routers.RevokeExtension(mock.ANY) - mock_deprecator.assert_called_once_with(mock.ANY, mock.ANY) - args, _kwargs = mock_deprecator.call_args - self.assertIn("Remove revoke_extension from", args[1]) - - -class TestTokenRevokeApi(TestTokenRevokeById): - """Test token revocation on the v3 Identity API.""" - - def config_overrides(self): - super(TestTokenRevokeApi, self).config_overrides() - self.config_fixture.config( - group='token', - provider='pki', - revoke_by_id=False) - - def assertValidDeletedProjectResponse(self, events_response, project_id): - events = events_response['events'] - self.assertEqual(1, 
len(events)) - self.assertEqual(project_id, events[0]['project_id']) - self.assertIsNotNone(events[0]['issued_before']) - self.assertIsNotNone(events_response['links']) - del (events_response['events'][0]['issued_before']) - del (events_response['links']) - expected_response = {'events': [{'project_id': project_id}]} - self.assertEqual(expected_response, events_response) - - def assertDomainAndProjectInList(self, events_response, domain_id): - events = events_response['events'] - self.assertEqual(2, len(events)) - self.assertEqual(domain_id, events[0]['project_id']) - self.assertEqual(domain_id, events[1]['domain_id']) - self.assertIsNotNone(events[0]['issued_before']) - self.assertIsNotNone(events[1]['issued_before']) - self.assertIsNotNone(events_response['links']) - del (events_response['events'][0]['issued_before']) - del (events_response['events'][1]['issued_before']) - del (events_response['links']) - expected_response = {'events': [{'project_id': domain_id}, - {'domain_id': domain_id}]} - self.assertEqual(expected_response, events_response) - - def assertValidRevokedTokenResponse(self, events_response, **kwargs): - events = events_response['events'] - self.assertEqual(1, len(events)) - for k, v in kwargs.items(): - self.assertEqual(v, events[0].get(k)) - self.assertIsNotNone(events[0]['issued_before']) - self.assertIsNotNone(events_response['links']) - del (events_response['events'][0]['issued_before']) - del (events_response['links']) - - expected_response = {'events': [kwargs]} - self.assertEqual(expected_response, events_response) - - def test_revoke_token(self): - scoped_token = self.get_scoped_token() - headers = {'X-Subject-Token': scoped_token} - response = self.get('/auth/tokens', headers=headers).json_body['token'] - - self.delete('/auth/tokens', headers=headers) - self.head('/auth/tokens', headers=headers, - expected_status=http_client.NOT_FOUND) - events_response = self.get('/OS-REVOKE/events').json_body - 
self.assertValidRevokedTokenResponse(events_response, - audit_id=response['audit_ids'][0]) - - def test_revoke_v2_token(self): - token = self.get_v2_token() - headers = {'X-Subject-Token': token} - response = self.get('/auth/tokens', - headers=headers).json_body['token'] - self.delete('/auth/tokens', headers=headers) - self.head('/auth/tokens', headers=headers, - expected_status=http_client.NOT_FOUND) - events_response = self.get('/OS-REVOKE/events').json_body - - self.assertValidRevokedTokenResponse( - events_response, - audit_id=response['audit_ids'][0]) - - def test_revoke_by_id_false_returns_gone(self): - self.get('/auth/tokens/OS-PKI/revoked', - expected_status=http_client.GONE) - - def test_list_delete_project_shows_in_event_list(self): - self.role_data_fixtures() - events = self.get('/OS-REVOKE/events').json_body['events'] - self.assertEqual([], events) - self.delete( - '/projects/%(project_id)s' % {'project_id': self.projectA['id']}) - events_response = self.get('/OS-REVOKE/events').json_body - - self.assertValidDeletedProjectResponse(events_response, - self.projectA['id']) - - def test_disable_domain_shows_in_event_list(self): - events = self.get('/OS-REVOKE/events').json_body['events'] - self.assertEqual([], events) - disable_body = {'domain': {'enabled': False}} - self.patch( - '/domains/%(project_id)s' % {'project_id': self.domainA['id']}, - body=disable_body) - - events = self.get('/OS-REVOKE/events').json_body - - self.assertDomainAndProjectInList(events, self.domainA['id']) - - def assertEventDataInList(self, events, **kwargs): - found = False - for e in events: - for key, value in kwargs.items(): - try: - if e[key] != value: - break - except KeyError: - # Break the loop and present a nice error instead of - # KeyError - break - else: - # If the value of the event[key] matches the value of the kwarg - # for each item in kwargs, the event was fully matched and - # the assertTrue below should succeed. 
- found = True - self.assertTrue(found, - 'event with correct values not in list, expected to ' - 'find event with key-value pairs. Expected: ' - '"%(expected)s" Events: "%(events)s"' % - {'expected': ','.join( - ["'%s=%s'" % (k, v) for k, v in kwargs.items()]), - 'events': events}) - - def test_list_delete_token_shows_in_event_list(self): - self.role_data_fixtures() - events = self.get('/OS-REVOKE/events').json_body['events'] - self.assertEqual([], events) - - scoped_token = self.get_scoped_token() - headers = {'X-Subject-Token': scoped_token} - auth_req = self.build_authentication_request(token=scoped_token) - response = self.v3_create_token(auth_req) - token2 = response.json_body['token'] - headers2 = {'X-Subject-Token': response.headers['X-Subject-Token']} - - response = self.v3_create_token(auth_req) - response.json_body['token'] - headers3 = {'X-Subject-Token': response.headers['X-Subject-Token']} - - self.head('/auth/tokens', headers=headers, - expected_status=http_client.OK) - self.head('/auth/tokens', headers=headers2, - expected_status=http_client.OK) - self.head('/auth/tokens', headers=headers3, - expected_status=http_client.OK) - - self.delete('/auth/tokens', headers=headers) - # NOTE(ayoung): not deleting token3, as it should be deleted - # by previous - events_response = self.get('/OS-REVOKE/events').json_body - events = events_response['events'] - self.assertEqual(1, len(events)) - self.assertEventDataInList( - events, - audit_id=token2['audit_ids'][1]) - self.head('/auth/tokens', headers=headers, - expected_status=http_client.NOT_FOUND) - self.head('/auth/tokens', headers=headers2, - expected_status=http_client.OK) - self.head('/auth/tokens', headers=headers3, - expected_status=http_client.OK) - - def test_list_with_filter(self): - - self.role_data_fixtures() - events = self.get('/OS-REVOKE/events').json_body['events'] - self.assertEqual(0, len(events)) - - scoped_token = self.get_scoped_token() - headers = {'X-Subject-Token': scoped_token} - auth = 
self.build_authentication_request(token=scoped_token) - headers2 = {'X-Subject-Token': self.get_requested_token(auth)} - self.delete('/auth/tokens', headers=headers) - self.delete('/auth/tokens', headers=headers2) - - events = self.get('/OS-REVOKE/events').json_body['events'] - - self.assertEqual(2, len(events)) - future = utils.isotime(timeutils.utcnow() + - datetime.timedelta(seconds=1000)) - - events = self.get('/OS-REVOKE/events?since=%s' % (future) - ).json_body['events'] - self.assertEqual(0, len(events)) - - -class TestAuthExternalDisabled(test_v3.RestfulTestCase): - def config_overrides(self): - super(TestAuthExternalDisabled, self).config_overrides() - self.config_fixture.config( - group='auth', - methods=['password', 'token']) - - def test_remote_user_disabled(self): - api = auth.controllers.Auth() - remote_user = '%s@%s' % (self.user['name'], self.domain['name']) - context, auth_info, auth_context = self.build_external_auth_request( - remote_user) - self.assertRaises(exception.Unauthorized, - api.authenticate, - context, - auth_info, - auth_context) - - -class TestAuthExternalDomain(test_v3.RestfulTestCase): - content_type = 'json' - - def config_overrides(self): - super(TestAuthExternalDomain, self).config_overrides() - self.kerberos = False - self.auth_plugin_config_override(external='Domain') - - def test_remote_user_with_realm(self): - api = auth.controllers.Auth() - remote_user = self.user['name'] - remote_domain = self.domain['name'] - context, auth_info, auth_context = self.build_external_auth_request( - remote_user, remote_domain=remote_domain, kerberos=self.kerberos) - - api.authenticate(context, auth_info, auth_context) - self.assertEqual(self.user['id'], auth_context['user_id']) - - # Now test to make sure the user name can, itself, contain the - # '@' character. 
- user = {'name': 'myname@mydivision'} - self.identity_api.update_user(self.user['id'], user) - remote_user = user['name'] - context, auth_info, auth_context = self.build_external_auth_request( - remote_user, remote_domain=remote_domain, kerberos=self.kerberos) - - api.authenticate(context, auth_info, auth_context) - self.assertEqual(self.user['id'], auth_context['user_id']) - - def test_project_id_scoped_with_remote_user(self): - self.config_fixture.config(group='token', bind=['kerberos']) - auth_data = self.build_authentication_request( - project_id=self.project['id'], - kerberos=self.kerberos) - remote_user = self.user['name'] - remote_domain = self.domain['name'] - self.admin_app.extra_environ.update({'REMOTE_USER': remote_user, - 'REMOTE_DOMAIN': remote_domain, - 'AUTH_TYPE': 'Negotiate'}) - r = self.v3_create_token(auth_data) - token = self.assertValidProjectScopedTokenResponse(r) - self.assertEqual(self.user['name'], token['bind']['kerberos']) - - def test_unscoped_bind_with_remote_user(self): - self.config_fixture.config(group='token', bind=['kerberos']) - auth_data = self.build_authentication_request(kerberos=self.kerberos) - remote_user = self.user['name'] - remote_domain = self.domain['name'] - self.admin_app.extra_environ.update({'REMOTE_USER': remote_user, - 'REMOTE_DOMAIN': remote_domain, - 'AUTH_TYPE': 'Negotiate'}) - r = self.v3_create_token(auth_data) - token = self.assertValidUnscopedTokenResponse(r) - self.assertEqual(self.user['name'], token['bind']['kerberos']) - - -class TestAuthExternalDefaultDomain(test_v3.RestfulTestCase): - content_type = 'json' - - def config_overrides(self): - super(TestAuthExternalDefaultDomain, self).config_overrides() - self.kerberos = False - self.auth_plugin_config_override( - external='keystone.auth.plugins.external.DefaultDomain') - - def test_remote_user_with_default_domain(self): - api = auth.controllers.Auth() - remote_user = self.default_domain_user['name'] - context, auth_info, auth_context = 
self.build_external_auth_request( - remote_user, kerberos=self.kerberos) - - api.authenticate(context, auth_info, auth_context) - self.assertEqual(self.default_domain_user['id'], - auth_context['user_id']) - - # Now test to make sure the user name can, itself, contain the - # '@' character. - user = {'name': 'myname@mydivision'} - self.identity_api.update_user(self.default_domain_user['id'], user) - remote_user = user['name'] - context, auth_info, auth_context = self.build_external_auth_request( - remote_user, kerberos=self.kerberos) - - api.authenticate(context, auth_info, auth_context) - self.assertEqual(self.default_domain_user['id'], - auth_context['user_id']) - - def test_project_id_scoped_with_remote_user(self): - self.config_fixture.config(group='token', bind=['kerberos']) - auth_data = self.build_authentication_request( - project_id=self.default_domain_project['id'], - kerberos=self.kerberos) - remote_user = self.default_domain_user['name'] - self.admin_app.extra_environ.update({'REMOTE_USER': remote_user, - 'AUTH_TYPE': 'Negotiate'}) - r = self.v3_create_token(auth_data) - token = self.assertValidProjectScopedTokenResponse(r) - self.assertEqual(self.default_domain_user['name'], - token['bind']['kerberos']) - - def test_unscoped_bind_with_remote_user(self): - self.config_fixture.config(group='token', bind=['kerberos']) - auth_data = self.build_authentication_request(kerberos=self.kerberos) - remote_user = self.default_domain_user['name'] - self.admin_app.extra_environ.update({'REMOTE_USER': remote_user, - 'AUTH_TYPE': 'Negotiate'}) - r = self.v3_create_token(auth_data) - token = self.assertValidUnscopedTokenResponse(r) - self.assertEqual(self.default_domain_user['name'], - token['bind']['kerberos']) - - -class TestAuthKerberos(TestAuthExternalDomain): - - def config_overrides(self): - super(TestAuthKerberos, self).config_overrides() - self.kerberos = True - self.auth_plugin_config_override( - methods=['kerberos', 'password', 'token']) - - -class 
TestAuth(test_v3.RestfulTestCase): - - def test_unscoped_token_with_user_id(self): - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password']) - r = self.v3_create_token(auth_data) - self.assertValidUnscopedTokenResponse(r) - - def test_unscoped_token_with_user_domain_id(self): - auth_data = self.build_authentication_request( - username=self.user['name'], - user_domain_id=self.domain['id'], - password=self.user['password']) - r = self.v3_create_token(auth_data) - self.assertValidUnscopedTokenResponse(r) - - def test_unscoped_token_with_user_domain_name(self): - auth_data = self.build_authentication_request( - username=self.user['name'], - user_domain_name=self.domain['name'], - password=self.user['password']) - r = self.v3_create_token(auth_data) - self.assertValidUnscopedTokenResponse(r) - - def test_project_id_scoped_token_with_user_id(self): - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project['id']) - r = self.v3_create_token(auth_data) - self.assertValidProjectScopedTokenResponse(r) - - def _second_project_as_default(self): - ref = unit.new_project_ref(domain_id=self.domain_id) - r = self.post('/projects', body={'project': ref}) - project = self.assertValidProjectResponse(r, ref) - - # grant the user a role on the project - self.put( - '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % { - 'user_id': self.user['id'], - 'project_id': project['id'], - 'role_id': self.role['id']}) - - # set the user's preferred project - body = {'user': {'default_project_id': project['id']}} - r = self.patch('/users/%(user_id)s' % { - 'user_id': self.user['id']}, - body=body) - self.assertValidUserResponse(r) - - return project - - def test_default_project_id_scoped_token_with_user_id(self): - project = self._second_project_as_default() - - # attempt to authenticate without requesting a project - auth_data = 
self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password']) - r = self.v3_create_token(auth_data) - self.assertValidProjectScopedTokenResponse(r) - self.assertEqual(project['id'], r.result['token']['project']['id']) - - def test_default_project_id_scoped_token_with_user_id_no_catalog(self): - project = self._second_project_as_default() - - # attempt to authenticate without requesting a project - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password']) - r = self.post('/auth/tokens?nocatalog', body=auth_data, noauth=True) - self.assertValidProjectScopedTokenResponse(r, require_catalog=False) - self.assertEqual(project['id'], r.result['token']['project']['id']) - - def test_explicit_unscoped_token(self): - self._second_project_as_default() - - # attempt to authenticate without requesting a project - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - unscoped="unscoped") - r = self.post('/auth/tokens', body=auth_data, noauth=True) - - self.assertIsNone(r.result['token'].get('project')) - self.assertIsNone(r.result['token'].get('domain')) - self.assertIsNone(r.result['token'].get('scope')) - - def test_implicit_project_id_scoped_token_with_user_id_no_catalog(self): - # attempt to authenticate without requesting a project - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project['id']) - r = self.post('/auth/tokens?nocatalog', body=auth_data, noauth=True) - self.assertValidProjectScopedTokenResponse(r, require_catalog=False) - self.assertEqual(self.project['id'], - r.result['token']['project']['id']) - - def test_auth_catalog_attributes(self): - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project['id']) - r = self.v3_create_token(auth_data) - - catalog = 
r.result['token']['catalog'] - self.assertEqual(1, len(catalog)) - catalog = catalog[0] - - self.assertEqual(self.service['id'], catalog['id']) - self.assertEqual(self.service['name'], catalog['name']) - self.assertEqual(self.service['type'], catalog['type']) - - endpoint = catalog['endpoints'] - self.assertEqual(1, len(endpoint)) - endpoint = endpoint[0] - - self.assertEqual(self.endpoint['id'], endpoint['id']) - self.assertEqual(self.endpoint['interface'], endpoint['interface']) - self.assertEqual(self.endpoint['region_id'], endpoint['region_id']) - self.assertEqual(self.endpoint['url'], endpoint['url']) - - def _check_disabled_endpoint_result(self, catalog, disabled_endpoint_id): - endpoints = catalog[0]['endpoints'] - endpoint_ids = [ep['id'] for ep in endpoints] - self.assertEqual([self.endpoint_id], endpoint_ids) - - def test_auth_catalog_disabled_service(self): - """On authenticate, get a catalog that excludes disabled services.""" - # although the child endpoint is enabled, the service is disabled - self.assertTrue(self.endpoint['enabled']) - self.catalog_api.update_service( - self.endpoint['service_id'], {'enabled': False}) - service = self.catalog_api.get_service(self.endpoint['service_id']) - self.assertFalse(service['enabled']) - - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project['id']) - r = self.v3_create_token(auth_data) - - self.assertEqual([], r.result['token']['catalog']) - - def test_auth_catalog_disabled_endpoint(self): - """On authenticate, get a catalog that excludes disabled endpoints.""" - # Create a disabled endpoint that's like the enabled one. 
- disabled_endpoint_ref = copy.copy(self.endpoint) - disabled_endpoint_id = uuid.uuid4().hex - disabled_endpoint_ref.update({ - 'id': disabled_endpoint_id, - 'enabled': False, - 'interface': 'internal' - }) - self.catalog_api.create_endpoint(disabled_endpoint_id, - disabled_endpoint_ref) - - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project['id']) - r = self.v3_create_token(auth_data) - - self._check_disabled_endpoint_result(r.result['token']['catalog'], - disabled_endpoint_id) - - def test_project_id_scoped_token_with_user_id_unauthorized(self): - project = unit.new_project_ref(domain_id=self.domain_id) - self.resource_api.create_project(project['id'], project) - - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=project['id']) - self.v3_create_token(auth_data, - expected_status=http_client.UNAUTHORIZED) - - def test_user_and_group_roles_scoped_token(self): - """Test correct roles are returned in scoped token. 
- - Test Plan: - - - Create a domain, with 1 project, 2 users (user1 and user2) - and 2 groups (group1 and group2) - - Make user1 a member of group1, user2 a member of group2 - - Create 8 roles, assigning them to each of the 8 combinations - of users/groups on domain/project - - Get a project scoped token for user1, checking that the right - two roles are returned (one directly assigned, one by virtue - of group membership) - - Repeat this for a domain scoped token - - Make user1 also a member of group2 - - Get another scoped token making sure the additional role - shows up - - User2 is just here as a spoiler, to make sure we don't get - any roles uniquely assigned to it returned in any of our - tokens - - """ - domainA = unit.new_domain_ref() - self.resource_api.create_domain(domainA['id'], domainA) - projectA = unit.new_project_ref(domain_id=domainA['id']) - self.resource_api.create_project(projectA['id'], projectA) - - user1 = unit.create_user(self.identity_api, domain_id=domainA['id']) - - user2 = unit.create_user(self.identity_api, domain_id=domainA['id']) - - group1 = unit.new_group_ref(domain_id=domainA['id']) - group1 = self.identity_api.create_group(group1) - - group2 = unit.new_group_ref(domain_id=domainA['id']) - group2 = self.identity_api.create_group(group2) - - self.identity_api.add_user_to_group(user1['id'], - group1['id']) - self.identity_api.add_user_to_group(user2['id'], - group2['id']) - - # Now create all the roles and assign them - role_list = [] - for _ in range(8): - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - role_list.append(role) - - self.assignment_api.create_grant(role_list[0]['id'], - user_id=user1['id'], - domain_id=domainA['id']) - self.assignment_api.create_grant(role_list[1]['id'], - user_id=user1['id'], - project_id=projectA['id']) - self.assignment_api.create_grant(role_list[2]['id'], - user_id=user2['id'], - domain_id=domainA['id']) - self.assignment_api.create_grant(role_list[3]['id'], - 
user_id=user2['id'], - project_id=projectA['id']) - self.assignment_api.create_grant(role_list[4]['id'], - group_id=group1['id'], - domain_id=domainA['id']) - self.assignment_api.create_grant(role_list[5]['id'], - group_id=group1['id'], - project_id=projectA['id']) - self.assignment_api.create_grant(role_list[6]['id'], - group_id=group2['id'], - domain_id=domainA['id']) - self.assignment_api.create_grant(role_list[7]['id'], - group_id=group2['id'], - project_id=projectA['id']) - - # First, get a project scoped token - which should - # contain the direct user role and the one by virtue - # of group membership - auth_data = self.build_authentication_request( - user_id=user1['id'], - password=user1['password'], - project_id=projectA['id']) - r = self.v3_create_token(auth_data) - token = self.assertValidScopedTokenResponse(r) - roles_ids = [] - for ref in token['roles']: - roles_ids.append(ref['id']) - self.assertEqual(2, len(token['roles'])) - self.assertIn(role_list[1]['id'], roles_ids) - self.assertIn(role_list[5]['id'], roles_ids) - - # Now the same thing for a domain scoped token - auth_data = self.build_authentication_request( - user_id=user1['id'], - password=user1['password'], - domain_id=domainA['id']) - r = self.v3_create_token(auth_data) - token = self.assertValidScopedTokenResponse(r) - roles_ids = [] - for ref in token['roles']: - roles_ids.append(ref['id']) - self.assertEqual(2, len(token['roles'])) - self.assertIn(role_list[0]['id'], roles_ids) - self.assertIn(role_list[4]['id'], roles_ids) - - # Finally, add user1 to the 2nd group, and get a new - # scoped token - the extra role should now be included - # by virtue of the 2nd group - self.identity_api.add_user_to_group(user1['id'], - group2['id']) - auth_data = self.build_authentication_request( - user_id=user1['id'], - password=user1['password'], - project_id=projectA['id']) - r = self.v3_create_token(auth_data) - token = self.assertValidScopedTokenResponse(r) - roles_ids = [] - for ref in 
token['roles']: - roles_ids.append(ref['id']) - self.assertEqual(3, len(token['roles'])) - self.assertIn(role_list[1]['id'], roles_ids) - self.assertIn(role_list[5]['id'], roles_ids) - self.assertIn(role_list[7]['id'], roles_ids) - - def test_auth_token_cross_domain_group_and_project(self): - """Verify getting a token in cross domain group/project roles.""" - # create domain, project and group and grant roles to user - domain1 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - project1 = unit.new_project_ref(domain_id=domain1['id']) - self.resource_api.create_project(project1['id'], project1) - user_foo = unit.create_user(self.identity_api, - domain_id=test_v3.DEFAULT_DOMAIN_ID) - role_member = unit.new_role_ref() - self.role_api.create_role(role_member['id'], role_member) - role_admin = unit.new_role_ref() - self.role_api.create_role(role_admin['id'], role_admin) - role_foo_domain1 = unit.new_role_ref() - self.role_api.create_role(role_foo_domain1['id'], role_foo_domain1) - role_group_domain1 = unit.new_role_ref() - self.role_api.create_role(role_group_domain1['id'], role_group_domain1) - self.assignment_api.add_user_to_project(project1['id'], - user_foo['id']) - new_group = unit.new_group_ref(domain_id=domain1['id']) - new_group = self.identity_api.create_group(new_group) - self.identity_api.add_user_to_group(user_foo['id'], - new_group['id']) - self.assignment_api.create_grant( - user_id=user_foo['id'], - project_id=project1['id'], - role_id=role_member['id']) - self.assignment_api.create_grant( - group_id=new_group['id'], - project_id=project1['id'], - role_id=role_admin['id']) - self.assignment_api.create_grant( - user_id=user_foo['id'], - domain_id=domain1['id'], - role_id=role_foo_domain1['id']) - self.assignment_api.create_grant( - group_id=new_group['id'], - domain_id=domain1['id'], - role_id=role_group_domain1['id']) - - # Get a scoped token for the project - auth_data = self.build_authentication_request( - 
username=user_foo['name'], - user_domain_id=test_v3.DEFAULT_DOMAIN_ID, - password=user_foo['password'], - project_name=project1['name'], - project_domain_id=domain1['id']) - - r = self.v3_create_token(auth_data) - scoped_token = self.assertValidScopedTokenResponse(r) - project = scoped_token["project"] - roles_ids = [] - for ref in scoped_token['roles']: - roles_ids.append(ref['id']) - self.assertEqual(project1['id'], project["id"]) - self.assertIn(role_member['id'], roles_ids) - self.assertIn(role_admin['id'], roles_ids) - self.assertNotIn(role_foo_domain1['id'], roles_ids) - self.assertNotIn(role_group_domain1['id'], roles_ids) - - def test_project_id_scoped_token_with_user_domain_id(self): - auth_data = self.build_authentication_request( - username=self.user['name'], - user_domain_id=self.domain['id'], - password=self.user['password'], - project_id=self.project['id']) - r = self.v3_create_token(auth_data) - self.assertValidProjectScopedTokenResponse(r) - - def test_project_id_scoped_token_with_user_domain_name(self): - auth_data = self.build_authentication_request( - username=self.user['name'], - user_domain_name=self.domain['name'], - password=self.user['password'], - project_id=self.project['id']) - r = self.v3_create_token(auth_data) - self.assertValidProjectScopedTokenResponse(r) - - def test_domain_id_scoped_token_with_user_id(self): - path = '/domains/%s/users/%s/roles/%s' % ( - self.domain['id'], self.user['id'], self.role['id']) - self.put(path=path) - - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - domain_id=self.domain['id']) - r = self.v3_create_token(auth_data) - self.assertValidDomainScopedTokenResponse(r) - - def test_domain_id_scoped_token_with_user_domain_id(self): - path = '/domains/%s/users/%s/roles/%s' % ( - self.domain['id'], self.user['id'], self.role['id']) - self.put(path=path) - - auth_data = self.build_authentication_request( - username=self.user['name'], - 
user_domain_id=self.domain['id'], - password=self.user['password'], - domain_id=self.domain['id']) - r = self.v3_create_token(auth_data) - self.assertValidDomainScopedTokenResponse(r) - - def test_domain_id_scoped_token_with_user_domain_name(self): - path = '/domains/%s/users/%s/roles/%s' % ( - self.domain['id'], self.user['id'], self.role['id']) - self.put(path=path) - - auth_data = self.build_authentication_request( - username=self.user['name'], - user_domain_name=self.domain['name'], - password=self.user['password'], - domain_id=self.domain['id']) - r = self.v3_create_token(auth_data) - self.assertValidDomainScopedTokenResponse(r) - - def test_domain_name_scoped_token_with_user_id(self): - path = '/domains/%s/users/%s/roles/%s' % ( - self.domain['id'], self.user['id'], self.role['id']) - self.put(path=path) - - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - domain_name=self.domain['name']) - r = self.v3_create_token(auth_data) - self.assertValidDomainScopedTokenResponse(r) - - def test_domain_name_scoped_token_with_user_domain_id(self): - path = '/domains/%s/users/%s/roles/%s' % ( - self.domain['id'], self.user['id'], self.role['id']) - self.put(path=path) - - auth_data = self.build_authentication_request( - username=self.user['name'], - user_domain_id=self.domain['id'], - password=self.user['password'], - domain_name=self.domain['name']) - r = self.v3_create_token(auth_data) - self.assertValidDomainScopedTokenResponse(r) - - def test_domain_name_scoped_token_with_user_domain_name(self): - path = '/domains/%s/users/%s/roles/%s' % ( - self.domain['id'], self.user['id'], self.role['id']) - self.put(path=path) - - auth_data = self.build_authentication_request( - username=self.user['name'], - user_domain_name=self.domain['name'], - password=self.user['password'], - domain_name=self.domain['name']) - r = self.v3_create_token(auth_data) - self.assertValidDomainScopedTokenResponse(r) - - def 
test_domain_scope_token_with_group_role(self): - group = unit.new_group_ref(domain_id=self.domain_id) - group = self.identity_api.create_group(group) - - # add user to group - self.identity_api.add_user_to_group(self.user['id'], group['id']) - - # grant the domain role to group - path = '/domains/%s/groups/%s/roles/%s' % ( - self.domain['id'], group['id'], self.role['id']) - self.put(path=path) - - # now get a domain-scoped token - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - domain_id=self.domain['id']) - r = self.v3_create_token(auth_data) - self.assertValidDomainScopedTokenResponse(r) - - def test_domain_scope_token_with_name(self): - # grant the domain role to user - path = '/domains/%s/users/%s/roles/%s' % ( - self.domain['id'], self.user['id'], self.role['id']) - self.put(path=path) - # now get a domain-scoped token - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - domain_name=self.domain['name']) - r = self.v3_create_token(auth_data) - self.assertValidDomainScopedTokenResponse(r) - - def test_domain_scope_failed(self): - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - domain_id=self.domain['id']) - self.v3_create_token(auth_data, - expected_status=http_client.UNAUTHORIZED) - - def test_auth_with_id(self): - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password']) - r = self.v3_create_token(auth_data) - self.assertValidUnscopedTokenResponse(r) - - token = r.headers.get('X-Subject-Token') - - # test token auth - auth_data = self.build_authentication_request(token=token) - r = self.v3_create_token(auth_data) - self.assertValidUnscopedTokenResponse(r) - - def get_v2_token(self, tenant_id=None): - body = { - 'auth': { - 'passwordCredentials': { - 'username': self.default_domain_user['name'], - 'password': 
self.default_domain_user['password'], - }, - }, - } - r = self.admin_request(method='POST', path='/v2.0/tokens', body=body) - return r - - def test_validate_v2_unscoped_token_with_v3_api(self): - v2_token = self.get_v2_token().result['access']['token']['id'] - auth_data = self.build_authentication_request(token=v2_token) - r = self.v3_create_token(auth_data) - self.assertValidUnscopedTokenResponse(r) - - def test_validate_v2_scoped_token_with_v3_api(self): - v2_response = self.get_v2_token( - tenant_id=self.default_domain_project['id']) - result = v2_response.result - v2_token = result['access']['token']['id'] - auth_data = self.build_authentication_request( - token=v2_token, - project_id=self.default_domain_project['id']) - r = self.v3_create_token(auth_data) - self.assertValidScopedTokenResponse(r) - - def test_invalid_user_id(self): - auth_data = self.build_authentication_request( - user_id=uuid.uuid4().hex, - password=self.user['password']) - self.v3_create_token(auth_data, - expected_status=http_client.UNAUTHORIZED) - - def test_invalid_user_name(self): - auth_data = self.build_authentication_request( - username=uuid.uuid4().hex, - user_domain_id=self.domain['id'], - password=self.user['password']) - self.v3_create_token(auth_data, - expected_status=http_client.UNAUTHORIZED) - - def test_invalid_domain_id(self): - auth_data = self.build_authentication_request( - username=self.user['name'], - user_domain_id=uuid.uuid4().hex, - password=self.user['password']) - self.v3_create_token(auth_data, - expected_status=http_client.UNAUTHORIZED) - - def test_invalid_domain_name(self): - auth_data = self.build_authentication_request( - username=self.user['name'], - user_domain_name=uuid.uuid4().hex, - password=self.user['password']) - self.v3_create_token(auth_data, - expected_status=http_client.UNAUTHORIZED) - - def test_invalid_password(self): - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=uuid.uuid4().hex) - 
self.v3_create_token(auth_data, - expected_status=http_client.UNAUTHORIZED) - - def test_remote_user_no_realm(self): - api = auth.controllers.Auth() - context, auth_info, auth_context = self.build_external_auth_request( - self.default_domain_user['name']) - api.authenticate(context, auth_info, auth_context) - self.assertEqual(self.default_domain_user['id'], - auth_context['user_id']) - # Now test to make sure the user name can, itself, contain the - # '@' character. - user = {'name': 'myname@mydivision'} - self.identity_api.update_user(self.default_domain_user['id'], user) - context, auth_info, auth_context = self.build_external_auth_request( - user["name"]) - api.authenticate(context, auth_info, auth_context) - self.assertEqual(self.default_domain_user['id'], - auth_context['user_id']) - - def test_remote_user_no_domain(self): - api = auth.controllers.Auth() - context, auth_info, auth_context = self.build_external_auth_request( - self.user['name']) - self.assertRaises(exception.Unauthorized, - api.authenticate, - context, - auth_info, - auth_context) - - def test_remote_user_and_password(self): - # both REMOTE_USER and password methods must pass. - # note that they do not have to match - api = auth.controllers.Auth() - auth_data = self.build_authentication_request( - user_domain_id=self.default_domain_user['domain_id'], - username=self.default_domain_user['name'], - password=self.default_domain_user['password'])['auth'] - context, auth_info, auth_context = self.build_external_auth_request( - self.default_domain_user['name'], auth_data=auth_data) - - api.authenticate(context, auth_info, auth_context) - - def test_remote_user_and_explicit_external(self): - # both REMOTE_USER and password methods must pass. 
- # note that they do not have to match - auth_data = self.build_authentication_request( - user_domain_id=self.domain['id'], - username=self.user['name'], - password=self.user['password'])['auth'] - auth_data['identity']['methods'] = ["password", "external"] - auth_data['identity']['external'] = {} - api = auth.controllers.Auth() - auth_info = auth.controllers.AuthInfo(None, auth_data) - auth_context = {'extras': {}, 'method_names': []} - self.assertRaises(exception.Unauthorized, - api.authenticate, - self.empty_context, - auth_info, - auth_context) - - def test_remote_user_bad_password(self): - # both REMOTE_USER and password methods must pass. - api = auth.controllers.Auth() - auth_data = self.build_authentication_request( - user_domain_id=self.domain['id'], - username=self.user['name'], - password='badpassword')['auth'] - context, auth_info, auth_context = self.build_external_auth_request( - self.default_domain_user['name'], auth_data=auth_data) - self.assertRaises(exception.Unauthorized, - api.authenticate, - context, - auth_info, - auth_context) - - def test_bind_not_set_with_remote_user(self): - self.config_fixture.config(group='token', bind=[]) - auth_data = self.build_authentication_request() - remote_user = self.default_domain_user['name'] - self.admin_app.extra_environ.update({'REMOTE_USER': remote_user, - 'AUTH_TYPE': 'Negotiate'}) - r = self.v3_create_token(auth_data) - token = self.assertValidUnscopedTokenResponse(r) - self.assertNotIn('bind', token) - - # TODO(ayoung): move to TestPKITokenAPIs; it will be run for both formats - def test_verify_with_bound_token(self): - self.config_fixture.config(group='token', bind='kerberos') - auth_data = self.build_authentication_request( - project_id=self.project['id']) - remote_user = self.default_domain_user['name'] - self.admin_app.extra_environ.update({'REMOTE_USER': remote_user, - 'AUTH_TYPE': 'Negotiate'}) - - token = self.get_requested_token(auth_data) - headers = {'X-Subject-Token': token} - r = 
self.get('/auth/tokens', headers=headers, token=token) - token = self.assertValidProjectScopedTokenResponse(r) - self.assertEqual(self.default_domain_user['name'], - token['bind']['kerberos']) - - def test_auth_with_bind_token(self): - self.config_fixture.config(group='token', bind=['kerberos']) - - auth_data = self.build_authentication_request() - remote_user = self.default_domain_user['name'] - self.admin_app.extra_environ.update({'REMOTE_USER': remote_user, - 'AUTH_TYPE': 'Negotiate'}) - r = self.v3_create_token(auth_data) - - # the unscoped token should have bind information in it - token = self.assertValidUnscopedTokenResponse(r) - self.assertEqual(remote_user, token['bind']['kerberos']) - - token = r.headers.get('X-Subject-Token') - - # using unscoped token with remote user succeeds - auth_params = {'token': token, 'project_id': self.project_id} - auth_data = self.build_authentication_request(**auth_params) - r = self.v3_create_token(auth_data) - token = self.assertValidProjectScopedTokenResponse(r) - - # the bind information should be carried over from the original token - self.assertEqual(remote_user, token['bind']['kerberos']) - - def test_v2_v3_bind_token_intermix(self): - self.config_fixture.config(group='token', bind='kerberos') - - # we need our own user registered to the default domain because of - # the way external auth works. - remote_user = self.default_domain_user['name'] - self.admin_app.extra_environ.update({'REMOTE_USER': remote_user, - 'AUTH_TYPE': 'Negotiate'}) - body = {'auth': {}} - resp = self.admin_request(path='/v2.0/tokens', - method='POST', - body=body) - - v2_token_data = resp.result - - bind = v2_token_data['access']['token']['bind'] - self.assertEqual(self.default_domain_user['name'], bind['kerberos']) - - v2_token_id = v2_token_data['access']['token']['id'] - # NOTE(gyee): self.get() will try to obtain an auth token if one - # is not provided. 
When REMOTE_USER is present in the request - # environment, the external user auth plugin is used in conjunction - # with the password auth for the admin user. Therefore, we need to - # cleanup the REMOTE_USER information from the previous call. - del self.admin_app.extra_environ['REMOTE_USER'] - headers = {'X-Subject-Token': v2_token_id} - resp = self.get('/auth/tokens', headers=headers) - token_data = resp.result - - self.assertDictEqual(v2_token_data['access']['token']['bind'], - token_data['token']['bind']) - - def test_authenticating_a_user_with_no_password(self): - user = unit.new_user_ref(domain_id=self.domain['id']) - del user['password'] # can't have a password for this test - user = self.identity_api.create_user(user) - - auth_data = self.build_authentication_request( - user_id=user['id'], - password='password') - - self.v3_create_token(auth_data, - expected_status=http_client.UNAUTHORIZED) - - def test_disabled_default_project_result_in_unscoped_token(self): - # create a disabled project to work with - project = self.create_new_default_project_for_user( - self.user['id'], self.domain_id, enable_project=False) - - # assign a role to user for the new project - self.assignment_api.add_role_to_user_and_project(self.user['id'], - project['id'], - self.role_id) - - # attempt to authenticate without requesting a project - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password']) - r = self.v3_create_token(auth_data) - self.assertValidUnscopedTokenResponse(r) - - def test_disabled_default_project_domain_result_in_unscoped_token(self): - domain_ref = unit.new_domain_ref() - r = self.post('/domains', body={'domain': domain_ref}) - domain = self.assertValidDomainResponse(r, domain_ref) - - project = self.create_new_default_project_for_user( - self.user['id'], domain['id']) - - # assign a role to user for the new project - self.assignment_api.add_role_to_user_and_project(self.user['id'], - project['id'], - 
self.role_id) - - # now disable the project domain - body = {'domain': {'enabled': False}} - r = self.patch('/domains/%(domain_id)s' % {'domain_id': domain['id']}, - body=body) - self.assertValidDomainResponse(r) - - # attempt to authenticate without requesting a project - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password']) - r = self.v3_create_token(auth_data) - self.assertValidUnscopedTokenResponse(r) - - def test_no_access_to_default_project_result_in_unscoped_token(self): - # create a disabled project to work with - self.create_new_default_project_for_user(self.user['id'], - self.domain_id) - - # attempt to authenticate without requesting a project - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password']) - r = self.v3_create_token(auth_data) - self.assertValidUnscopedTokenResponse(r) - - def test_disabled_scope_project_domain_result_in_401(self): - # create a disabled domain - domain = unit.new_domain_ref() - domain = self.resource_api.create_domain(domain['id'], domain) - - # create a project in the domain - project = unit.new_project_ref(domain_id=domain['id']) - self.resource_api.create_project(project['id'], project) - - # assign some role to self.user for the project in the domain - self.assignment_api.add_role_to_user_and_project( - self.user['id'], - project['id'], - self.role_id) - - # Disable the domain - domain['enabled'] = False - self.resource_api.update_domain(domain['id'], domain) - - # user should not be able to auth with project_id - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=project['id']) - self.v3_create_token(auth_data, - expected_status=http_client.UNAUTHORIZED) - - # user should not be able to auth with project_name & domain - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - 
project_name=project['name'], - project_domain_id=domain['id']) - self.v3_create_token(auth_data, - expected_status=http_client.UNAUTHORIZED) - - def test_auth_methods_with_different_identities_fails(self): - # get the token for a user. This is self.user which is different from - # self.default_domain_user. - token = self.get_scoped_token() - # try both password and token methods with different identities and it - # should fail - auth_data = self.build_authentication_request( - token=token, - user_id=self.default_domain_user['id'], - password=self.default_domain_user['password']) - self.v3_create_token(auth_data, - expected_status=http_client.UNAUTHORIZED) - - def test_authenticate_fails_if_project_unsafe(self): - """Verify authenticate to a project with unsafe name fails.""" - # Start with url name restrictions off, so we can create the unsafe - # named project - self.config_fixture.config(group='resource', - project_name_url_safe='off') - unsafe_name = 'i am not / safe' - project = unit.new_project_ref(domain_id=test_v3.DEFAULT_DOMAIN_ID, - name=unsafe_name) - self.resource_api.create_project(project['id'], project) - role_member = unit.new_role_ref() - self.role_api.create_role(role_member['id'], role_member) - self.assignment_api.add_role_to_user_and_project( - self.user['id'], project['id'], role_member['id']) - - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_name=project['name'], - project_domain_id=test_v3.DEFAULT_DOMAIN_ID) - - # Since name url restriction is off, we should be able to autenticate - self.v3_create_token(auth_data) - - # Set the name url restriction to new, which should still allow us to - # authenticate - self.config_fixture.config(group='resource', - project_name_url_safe='new') - self.v3_create_token(auth_data) - - # Set the name url restriction to strict and we should fail to - # authenticate - self.config_fixture.config(group='resource', - 
project_name_url_safe='strict') - self.v3_create_token(auth_data, - expected_status=http_client.UNAUTHORIZED) - - def test_authenticate_fails_if_domain_unsafe(self): - """Verify authenticate to a domain with unsafe name fails.""" - # Start with url name restrictions off, so we can create the unsafe - # named domain - self.config_fixture.config(group='resource', - domain_name_url_safe='off') - unsafe_name = 'i am not / safe' - domain = unit.new_domain_ref(name=unsafe_name) - self.resource_api.create_domain(domain['id'], domain) - role_member = unit.new_role_ref() - self.role_api.create_role(role_member['id'], role_member) - self.assignment_api.create_grant( - role_member['id'], - user_id=self.user['id'], - domain_id=domain['id']) - - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - domain_name=domain['name']) - - # Since name url restriction is off, we should be able to autenticate - self.v3_create_token(auth_data) - - # Set the name url restriction to new, which should still allow us to - # authenticate - self.config_fixture.config(group='resource', - project_name_url_safe='new') - self.v3_create_token(auth_data) - - # Set the name url restriction to strict and we should fail to - # authenticate - self.config_fixture.config(group='resource', - domain_name_url_safe='strict') - self.v3_create_token(auth_data, - expected_status=http_client.UNAUTHORIZED) - - def test_authenticate_fails_to_project_if_domain_unsafe(self): - """Verify authenticate to a project using unsafe domain name fails.""" - # Start with url name restrictions off, so we can create the unsafe - # named domain - self.config_fixture.config(group='resource', - domain_name_url_safe='off') - unsafe_name = 'i am not / safe' - domain = unit.new_domain_ref(name=unsafe_name) - self.resource_api.create_domain(domain['id'], domain) - # Add a (safely named) project to that domain - project = unit.new_project_ref(domain_id=domain['id']) - 
self.resource_api.create_project(project['id'], project) - role_member = unit.new_role_ref() - self.role_api.create_role(role_member['id'], role_member) - self.assignment_api.create_grant( - role_member['id'], - user_id=self.user['id'], - project_id=project['id']) - - # An auth request via project ID, but specifying domain by name - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_name=project['name'], - project_domain_name=domain['name']) - - # Since name url restriction is off, we should be able to autenticate - self.v3_create_token(auth_data) - - # Set the name url restriction to new, which should still allow us to - # authenticate - self.config_fixture.config(group='resource', - project_name_url_safe='new') - self.v3_create_token(auth_data) - - # Set the name url restriction to strict and we should fail to - # authenticate - self.config_fixture.config(group='resource', - domain_name_url_safe='strict') - self.v3_create_token(auth_data, - expected_status=http_client.UNAUTHORIZED) - - -class TestAuthJSONExternal(test_v3.RestfulTestCase): - content_type = 'json' - - def auth_plugin_config_override(self, methods=None, **method_classes): - self.config_fixture.config(group='auth', methods=[]) - - def test_remote_user_no_method(self): - api = auth.controllers.Auth() - context, auth_info, auth_context = self.build_external_auth_request( - self.default_domain_user['name']) - self.assertRaises(exception.Unauthorized, - api.authenticate, - context, - auth_info, - auth_context) - - -class TestTrustOptional(test_v3.RestfulTestCase): - def config_overrides(self): - super(TestTrustOptional, self).config_overrides() - self.config_fixture.config(group='trust', enabled=False) - - def test_trusts_returns_not_found(self): - self.get('/OS-TRUST/trusts', body={'trust': {}}, - expected_status=http_client.NOT_FOUND) - self.post('/OS-TRUST/trusts', body={'trust': {}}, - expected_status=http_client.NOT_FOUND) - - def 
test_auth_with_scope_in_trust_forbidden(self): - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - trust_id=uuid.uuid4().hex) - self.v3_create_token(auth_data, - expected_status=http_client.FORBIDDEN) - - -class TrustAPIBehavior(test_v3.RestfulTestCase): - """Redelegation valid and secure - - Redelegation is a hierarchical structure of trusts between initial trustor - and a group of users allowed to impersonate trustor and act in his name. - Hierarchy is created in a process of trusting already trusted permissions - and organized as an adjacency list using 'redelegated_trust_id' field. - Redelegation is valid if each subsequent trust in a chain passes 'not more' - permissions than being redelegated. - - Trust constraints are: - * roles - set of roles trusted by trustor - * expiration_time - * allow_redelegation - a flag - * redelegation_count - decreasing value restricting length of trust chain - * remaining_uses - DISALLOWED when allow_redelegation == True - - Trust becomes invalid in case: - * trust roles were revoked from trustor - * one of the users in the delegation chain was disabled or deleted - * expiration time passed - * one of the parent trusts has become invalid - * one of the parent trusts was deleted - - """ - - def config_overrides(self): - super(TrustAPIBehavior, self).config_overrides() - self.config_fixture.config( - group='trust', - enabled=True, - allow_redelegation=True, - max_redelegation_count=10 - ) - - def setUp(self): - super(TrustAPIBehavior, self).setUp() - # Create a trustee to delegate stuff to - self.trustee_user = unit.create_user(self.identity_api, - domain_id=self.domain_id) - - # trustor->trustee - self.redelegated_trust_ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user['id'], - project_id=self.project_id, - impersonation=True, - expires=dict(minutes=1), - role_ids=[self.role_id], - allow_redelegation=True) - - # 
trustor->trustee (no redelegation) - self.chained_trust_ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user['id'], - project_id=self.project_id, - impersonation=True, - role_ids=[self.role_id], - allow_redelegation=True) - - def _get_trust_token(self, trust): - trust_id = trust['id'] - auth_data = self.build_authentication_request( - user_id=self.trustee_user['id'], - password=self.trustee_user['password'], - trust_id=trust_id) - trust_token = self.get_requested_token(auth_data) - return trust_token - - def test_depleted_redelegation_count_error(self): - self.redelegated_trust_ref['redelegation_count'] = 0 - r = self.post('/OS-TRUST/trusts', - body={'trust': self.redelegated_trust_ref}) - trust = self.assertValidTrustResponse(r) - trust_token = self._get_trust_token(trust) - - # Attempt to create a redelegated trust. - self.post('/OS-TRUST/trusts', - body={'trust': self.chained_trust_ref}, - token=trust_token, - expected_status=http_client.FORBIDDEN) - - def test_modified_redelegation_count_error(self): - r = self.post('/OS-TRUST/trusts', - body={'trust': self.redelegated_trust_ref}) - trust = self.assertValidTrustResponse(r) - trust_token = self._get_trust_token(trust) - - # Attempt to create a redelegated trust with incorrect - # redelegation_count. 
- correct = trust['redelegation_count'] - 1 - incorrect = correct - 1 - self.chained_trust_ref['redelegation_count'] = incorrect - self.post('/OS-TRUST/trusts', - body={'trust': self.chained_trust_ref}, - token=trust_token, - expected_status=http_client.FORBIDDEN) - - def test_max_redelegation_count_constraint(self): - incorrect = CONF.trust.max_redelegation_count + 1 - self.redelegated_trust_ref['redelegation_count'] = incorrect - self.post('/OS-TRUST/trusts', - body={'trust': self.redelegated_trust_ref}, - expected_status=http_client.FORBIDDEN) - - def test_redelegation_expiry(self): - r = self.post('/OS-TRUST/trusts', - body={'trust': self.redelegated_trust_ref}) - trust = self.assertValidTrustResponse(r) - trust_token = self._get_trust_token(trust) - - # Attempt to create a redelegated trust supposed to last longer - # than the parent trust: let's give it 10 minutes (>1 minute). - too_long_live_chained_trust_ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user['id'], - project_id=self.project_id, - impersonation=True, - expires=dict(minutes=10), - role_ids=[self.role_id]) - self.post('/OS-TRUST/trusts', - body={'trust': too_long_live_chained_trust_ref}, - token=trust_token, - expected_status=http_client.FORBIDDEN) - - def test_redelegation_remaining_uses(self): - r = self.post('/OS-TRUST/trusts', - body={'trust': self.redelegated_trust_ref}) - trust = self.assertValidTrustResponse(r) - trust_token = self._get_trust_token(trust) - - # Attempt to create a redelegated trust with remaining_uses defined. - # It must fail according to specification: remaining_uses must be - # omitted for trust redelegation. Any number here. 
- self.chained_trust_ref['remaining_uses'] = 5 - self.post('/OS-TRUST/trusts', - body={'trust': self.chained_trust_ref}, - token=trust_token, - expected_status=http_client.BAD_REQUEST) - - def test_roles_subset(self): - # Build second role - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - # assign a new role to the user - self.assignment_api.create_grant(role_id=role['id'], - user_id=self.user_id, - project_id=self.project_id) - - # Create first trust with extended set of roles - ref = self.redelegated_trust_ref - ref['expires_at'] = datetime.datetime.utcnow().replace( - year=2032).strftime(unit.TIME_FORMAT) - ref['roles'].append({'id': role['id']}) - r = self.post('/OS-TRUST/trusts', - body={'trust': ref}) - trust = self.assertValidTrustResponse(r) - # Trust created with exact set of roles (checked by role id) - role_id_set = set(r['id'] for r in ref['roles']) - trust_role_id_set = set(r['id'] for r in trust['roles']) - self.assertEqual(role_id_set, trust_role_id_set) - - trust_token = self._get_trust_token(trust) - - # Chain second trust with roles subset - self.chained_trust_ref['expires_at'] = ( - datetime.datetime.utcnow().replace(year=2028).strftime( - unit.TIME_FORMAT)) - r = self.post('/OS-TRUST/trusts', - body={'trust': self.chained_trust_ref}, - token=trust_token) - trust2 = self.assertValidTrustResponse(r) - # First trust contains roles superset - # Second trust contains roles subset - role_id_set1 = set(r['id'] for r in trust['roles']) - role_id_set2 = set(r['id'] for r in trust2['roles']) - self.assertThat(role_id_set1, matchers.GreaterThan(role_id_set2)) - - def test_redelegate_with_role_by_name(self): - # For role by name testing - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user['id'], - project_id=self.project_id, - impersonation=True, - expires=dict(minutes=1), - role_names=[self.role['name']], - allow_redelegation=True) - ref['expires_at'] = 
datetime.datetime.utcnow().replace( - year=2032).strftime(unit.TIME_FORMAT) - r = self.post('/OS-TRUST/trusts', - body={'trust': ref}) - trust = self.assertValidTrustResponse(r) - # Ensure we can get a token with this trust - trust_token = self._get_trust_token(trust) - # Chain second trust with roles subset - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user['id'], - project_id=self.project_id, - impersonation=True, - role_names=[self.role['name']], - allow_redelegation=True) - ref['expires_at'] = datetime.datetime.utcnow().replace( - year=2028).strftime(unit.TIME_FORMAT) - r = self.post('/OS-TRUST/trusts', - body={'trust': ref}, - token=trust_token) - trust = self.assertValidTrustResponse(r) - # Ensure we can get a token with this trust - self._get_trust_token(trust) - - def test_redelegate_new_role_fails(self): - r = self.post('/OS-TRUST/trusts', - body={'trust': self.redelegated_trust_ref}) - trust = self.assertValidTrustResponse(r) - trust_token = self._get_trust_token(trust) - - # Build second trust with a role not in parent's roles - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - # assign a new role to the user - self.assignment_api.create_grant(role_id=role['id'], - user_id=self.user_id, - project_id=self.project_id) - - # Try to chain a trust with the role not from parent trust - self.chained_trust_ref['roles'] = [{'id': role['id']}] - - # Bypass policy enforcement - with mock.patch.object(rules, 'enforce', return_value=True): - self.post('/OS-TRUST/trusts', - body={'trust': self.chained_trust_ref}, - token=trust_token, - expected_status=http_client.FORBIDDEN) - - def test_redelegation_terminator(self): - self.redelegated_trust_ref['expires_at'] = ( - datetime.datetime.utcnow().replace(year=2032).strftime( - unit.TIME_FORMAT)) - r = self.post('/OS-TRUST/trusts', - body={'trust': self.redelegated_trust_ref}) - trust = self.assertValidTrustResponse(r) - trust_token = 
self._get_trust_token(trust) - - # Build second trust - the terminator - self.chained_trust_ref['expires_at'] = ( - datetime.datetime.utcnow().replace(year=2028).strftime( - unit.TIME_FORMAT)) - ref = dict(self.chained_trust_ref, - redelegation_count=1, - allow_redelegation=False) - - r = self.post('/OS-TRUST/trusts', - body={'trust': ref}, - token=trust_token) - - trust = self.assertValidTrustResponse(r) - # Check that allow_redelegation == False caused redelegation_count - # to be set to 0, while allow_redelegation is removed - self.assertNotIn('allow_redelegation', trust) - self.assertEqual(0, trust['redelegation_count']) - trust_token = self._get_trust_token(trust) - - # Build third trust, same as second - self.post('/OS-TRUST/trusts', - body={'trust': ref}, - token=trust_token, - expected_status=http_client.FORBIDDEN) - - def test_redelegation_without_impersonation(self): - # Update trust to not allow impersonation - self.redelegated_trust_ref['impersonation'] = False - - # Create trust - resp = self.post('/OS-TRUST/trusts', - body={'trust': self.redelegated_trust_ref}, - expected_status=http_client.CREATED) - trust = self.assertValidTrustResponse(resp) - - # Get trusted token without impersonation - auth_data = self.build_authentication_request( - user_id=self.trustee_user['id'], - password=self.trustee_user['password'], - trust_id=trust['id']) - trust_token = self.get_requested_token(auth_data) - - # Create second user for redelegation - trustee_user_2 = unit.create_user(self.identity_api, - domain_id=self.domain_id) - - # Trust for redelegation - trust_ref_2 = unit.new_trust_ref( - trustor_user_id=self.trustee_user['id'], - trustee_user_id=trustee_user_2['id'], - project_id=self.project_id, - impersonation=False, - expires=dict(minutes=1), - role_ids=[self.role_id], - allow_redelegation=False) - - # Creating a second trust should not be allowed since trustor does not - # have the role to delegate thus returning 404 NOT FOUND. 
- resp = self.post('/OS-TRUST/trusts', - body={'trust': trust_ref_2}, - token=trust_token, - expected_status=http_client.NOT_FOUND) - - def test_create_unscoped_trust(self): - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user['id']) - r = self.post('/OS-TRUST/trusts', body={'trust': ref}) - self.assertValidTrustResponse(r, ref) - - def test_create_trust_no_roles(self): - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user['id'], - project_id=self.project_id) - self.post('/OS-TRUST/trusts', body={'trust': ref}, - expected_status=http_client.FORBIDDEN) - - def _initialize_test_consume_trust(self, count): - # Make sure remaining_uses is decremented as we consume the trust - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user['id'], - project_id=self.project_id, - remaining_uses=count, - role_ids=[self.role_id]) - r = self.post('/OS-TRUST/trusts', body={'trust': ref}) - # make sure the trust exists - trust = self.assertValidTrustResponse(r, ref) - r = self.get( - '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']}) - # get a token for the trustee - auth_data = self.build_authentication_request( - user_id=self.trustee_user['id'], - password=self.trustee_user['password']) - r = self.v3_create_token(auth_data) - token = r.headers.get('X-Subject-Token') - # get a trust token, consume one use - auth_data = self.build_authentication_request( - token=token, - trust_id=trust['id']) - r = self.v3_create_token(auth_data) - return trust - - def test_consume_trust_once(self): - trust = self._initialize_test_consume_trust(2) - # check decremented value - r = self.get( - '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']}) - trust = r.result.get('trust') - self.assertIsNotNone(trust) - self.assertEqual(1, trust['remaining_uses']) - # FIXME(lbragstad): Assert the role that is returned is the right role. 
- - def test_create_one_time_use_trust(self): - trust = self._initialize_test_consume_trust(1) - # No more uses, the trust is made unavailable - self.get( - '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']}, - expected_status=http_client.NOT_FOUND) - # this time we can't get a trust token - auth_data = self.build_authentication_request( - user_id=self.trustee_user['id'], - password=self.trustee_user['password'], - trust_id=trust['id']) - self.v3_create_token(auth_data, - expected_status=http_client.UNAUTHORIZED) - - def test_create_unlimited_use_trust(self): - # by default trusts are unlimited in terms of tokens that can be - # generated from them, this test creates such a trust explicitly - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user['id'], - project_id=self.project_id, - remaining_uses=None, - role_ids=[self.role_id]) - r = self.post('/OS-TRUST/trusts', body={'trust': ref}) - trust = self.assertValidTrustResponse(r, ref) - - r = self.get( - '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']}) - auth_data = self.build_authentication_request( - user_id=self.trustee_user['id'], - password=self.trustee_user['password']) - r = self.v3_create_token(auth_data) - token = r.headers.get('X-Subject-Token') - auth_data = self.build_authentication_request( - token=token, - trust_id=trust['id']) - r = self.v3_create_token(auth_data) - r = self.get( - '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']}) - trust = r.result.get('trust') - self.assertIsNone(trust['remaining_uses']) - - def test_impersonation_token_cannot_create_new_trust(self): - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user['id'], - project_id=self.project_id, - impersonation=True, - expires=dict(minutes=1), - role_ids=[self.role_id]) - - r = self.post('/OS-TRUST/trusts', body={'trust': ref}) - trust = self.assertValidTrustResponse(r) - - auth_data = self.build_authentication_request( - 
user_id=self.trustee_user['id'], - password=self.trustee_user['password'], - trust_id=trust['id']) - - trust_token = self.get_requested_token(auth_data) - - # Build second trust - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user['id'], - project_id=self.project_id, - impersonation=True, - expires=dict(minutes=1), - role_ids=[self.role_id]) - - self.post('/OS-TRUST/trusts', - body={'trust': ref}, - token=trust_token, - expected_status=http_client.FORBIDDEN) - - def test_trust_deleted_grant(self): - # create a new role - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - - grant_url = ( - '/projects/%(project_id)s/users/%(user_id)s/' - 'roles/%(role_id)s' % { - 'project_id': self.project_id, - 'user_id': self.user_id, - 'role_id': role['id']}) - - # assign a new role - self.put(grant_url) - - # create a trust that delegates the new role - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user['id'], - project_id=self.project_id, - impersonation=False, - expires=dict(minutes=1), - role_ids=[role['id']]) - - r = self.post('/OS-TRUST/trusts', body={'trust': ref}) - trust = self.assertValidTrustResponse(r) - - # delete the grant - self.delete(grant_url) - - # attempt to get a trust token with the deleted grant - # and ensure it's unauthorized - auth_data = self.build_authentication_request( - user_id=self.trustee_user['id'], - password=self.trustee_user['password'], - trust_id=trust['id']) - r = self.v3_create_token(auth_data, - expected_status=http_client.FORBIDDEN) - - def test_trust_chained(self): - """Test that a trust token can't be used to execute another trust. - - To do this, we create an A->B->C hierarchy of trusts, then attempt to - execute the trusts in series (C->B->A). 
- - """ - # create a sub-trustee user - sub_trustee_user = unit.create_user( - self.identity_api, - domain_id=test_v3.DEFAULT_DOMAIN_ID) - sub_trustee_user_id = sub_trustee_user['id'] - - # create a new role - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - - # assign the new role to trustee - self.put( - '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s' % { - 'project_id': self.project_id, - 'user_id': self.trustee_user['id'], - 'role_id': role['id']}) - - # create a trust from trustor -> trustee - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user['id'], - project_id=self.project_id, - impersonation=True, - expires=dict(minutes=1), - role_ids=[self.role_id]) - r = self.post('/OS-TRUST/trusts', body={'trust': ref}) - trust1 = self.assertValidTrustResponse(r) - - # authenticate as trustee so we can create a second trust - auth_data = self.build_authentication_request( - user_id=self.trustee_user['id'], - password=self.trustee_user['password'], - project_id=self.project_id) - token = self.get_requested_token(auth_data) - - # create a trust from trustee -> sub-trustee - ref = unit.new_trust_ref( - trustor_user_id=self.trustee_user['id'], - trustee_user_id=sub_trustee_user_id, - project_id=self.project_id, - impersonation=True, - expires=dict(minutes=1), - role_ids=[role['id']]) - r = self.post('/OS-TRUST/trusts', token=token, body={'trust': ref}) - trust2 = self.assertValidTrustResponse(r) - - # authenticate as sub-trustee and get a trust token - auth_data = self.build_authentication_request( - user_id=sub_trustee_user['id'], - password=sub_trustee_user['password'], - trust_id=trust2['id']) - trust_token = self.get_requested_token(auth_data) - - # attempt to get the second trust using a trust token - auth_data = self.build_authentication_request( - token=trust_token, - trust_id=trust1['id']) - r = self.v3_create_token(auth_data, - expected_status=http_client.FORBIDDEN) - - def 
assertTrustTokensRevoked(self, trust_id): - revocation_response = self.get('/OS-REVOKE/events') - revocation_events = revocation_response.json_body['events'] - found = False - for event in revocation_events: - if event.get('OS-TRUST:trust_id') == trust_id: - found = True - self.assertTrue(found, 'event with trust_id %s not found in list' % - trust_id) - - def test_delete_trust_revokes_tokens(self): - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user['id'], - project_id=self.project_id, - impersonation=False, - expires=dict(minutes=1), - role_ids=[self.role_id]) - r = self.post('/OS-TRUST/trusts', body={'trust': ref}) - trust = self.assertValidTrustResponse(r) - trust_id = trust['id'] - auth_data = self.build_authentication_request( - user_id=self.trustee_user['id'], - password=self.trustee_user['password'], - trust_id=trust_id) - r = self.v3_create_token(auth_data) - self.assertValidProjectScopedTokenResponse( - r, self.trustee_user) - trust_token = r.headers['X-Subject-Token'] - self.delete('/OS-TRUST/trusts/%(trust_id)s' % { - 'trust_id': trust_id}) - headers = {'X-Subject-Token': trust_token} - self.head('/auth/tokens', headers=headers, - expected_status=http_client.NOT_FOUND) - self.assertTrustTokensRevoked(trust_id) - - def disable_user(self, user): - user['enabled'] = False - self.identity_api.update_user(user['id'], user) - - def test_trust_get_token_fails_if_trustor_disabled(self): - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user['id'], - project_id=self.project_id, - impersonation=False, - expires=dict(minutes=1), - role_ids=[self.role_id]) - - r = self.post('/OS-TRUST/trusts', body={'trust': ref}) - - trust = self.assertValidTrustResponse(r, ref) - - auth_data = self.build_authentication_request( - user_id=self.trustee_user['id'], - password=self.trustee_user['password'], - trust_id=trust['id']) - self.v3_create_token(auth_data) - - self.disable_user(self.user) - - 
auth_data = self.build_authentication_request( - user_id=self.trustee_user['id'], - password=self.trustee_user['password'], - trust_id=trust['id']) - self.v3_create_token(auth_data, - expected_status=http_client.FORBIDDEN) - - def test_trust_get_token_fails_if_trustee_disabled(self): - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user['id'], - project_id=self.project_id, - impersonation=False, - expires=dict(minutes=1), - role_ids=[self.role_id]) - - r = self.post('/OS-TRUST/trusts', body={'trust': ref}) - - trust = self.assertValidTrustResponse(r, ref) - - auth_data = self.build_authentication_request( - user_id=self.trustee_user['id'], - password=self.trustee_user['password'], - trust_id=trust['id']) - self.v3_create_token(auth_data) - - self.disable_user(self.trustee_user) - - auth_data = self.build_authentication_request( - user_id=self.trustee_user['id'], - password=self.trustee_user['password'], - trust_id=trust['id']) - self.v3_create_token(auth_data, - expected_status=http_client.UNAUTHORIZED) - - def test_delete_trust(self): - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user['id'], - project_id=self.project_id, - impersonation=False, - expires=dict(minutes=1), - role_ids=[self.role_id]) - - r = self.post('/OS-TRUST/trusts', body={'trust': ref}) - - trust = self.assertValidTrustResponse(r, ref) - - self.delete('/OS-TRUST/trusts/%(trust_id)s' % { - 'trust_id': trust['id']}) - - auth_data = self.build_authentication_request( - user_id=self.trustee_user['id'], - password=self.trustee_user['password'], - trust_id=trust['id']) - self.v3_create_token(auth_data, - expected_status=http_client.UNAUTHORIZED) - - def test_change_password_invalidates_trust_tokens(self): - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user['id'], - project_id=self.project_id, - impersonation=True, - expires=dict(minutes=1), - role_ids=[self.role_id]) - - r 
= self.post('/OS-TRUST/trusts', body={'trust': ref}) - trust = self.assertValidTrustResponse(r) - - auth_data = self.build_authentication_request( - user_id=self.trustee_user['id'], - password=self.trustee_user['password'], - trust_id=trust['id']) - r = self.v3_create_token(auth_data) - - self.assertValidProjectScopedTokenResponse(r, self.user) - trust_token = r.headers.get('X-Subject-Token') - - self.get('/OS-TRUST/trusts?trustor_user_id=%s' % - self.user_id, token=trust_token) - - self.assertValidUserResponse( - self.patch('/users/%s' % self.trustee_user['id'], - body={'user': {'password': uuid.uuid4().hex}})) - - self.get('/OS-TRUST/trusts?trustor_user_id=%s' % - self.user_id, expected_status=http_client.UNAUTHORIZED, - token=trust_token) - - def test_trustee_can_do_role_ops(self): - resp = self.post('/OS-TRUST/trusts', - body={'trust': self.redelegated_trust_ref}) - trust = self.assertValidTrustResponse(resp) - trust_token = self._get_trust_token(trust) - - resp = self.get( - '/OS-TRUST/trusts/%(trust_id)s/roles' % { - 'trust_id': trust['id']}, - token=trust_token) - self.assertValidRoleListResponse(resp, self.role) - - self.head( - '/OS-TRUST/trusts/%(trust_id)s/roles/%(role_id)s' % { - 'trust_id': trust['id'], - 'role_id': self.role['id']}, - token=trust_token, - expected_status=http_client.OK) - - resp = self.get( - '/OS-TRUST/trusts/%(trust_id)s/roles/%(role_id)s' % { - 'trust_id': trust['id'], - 'role_id': self.role['id']}, - token=trust_token) - self.assertValidRoleResponse(resp, self.role) - - def test_do_not_consume_remaining_uses_when_get_token_fails(self): - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user['id'], - project_id=self.project_id, - impersonation=False, - expires=dict(minutes=1), - role_ids=[self.role_id], - remaining_uses=3) - r = self.post('/OS-TRUST/trusts', body={'trust': ref}) - - new_trust = r.result.get('trust') - trust_id = new_trust.get('id') - # Pass in another user's ID as the 
trustee, the result being a failed - # token authenticate and the remaining_uses of the trust should not be - # decremented. - auth_data = self.build_authentication_request( - user_id=self.default_domain_user['id'], - password=self.default_domain_user['password'], - trust_id=trust_id) - self.v3_create_token(auth_data, - expected_status=http_client.FORBIDDEN) - - r = self.get('/OS-TRUST/trusts/%s' % trust_id) - self.assertEqual(3, r.result.get('trust').get('remaining_uses')) - - -class TestTrustChain(test_v3.RestfulTestCase): - - def config_overrides(self): - super(TestTrustChain, self).config_overrides() - self.config_fixture.config( - group='trust', - enabled=True, - allow_redelegation=True, - max_redelegation_count=10 - ) - - def setUp(self): - super(TestTrustChain, self).setUp() - """Create a trust chain using redelegation. - - A trust chain is a series of trusts that are redelegated. For example, - self.user_list consists of userA, userB, and userC. The first trust in - the trust chain is going to be established between self.user and userA, - call it trustA. Then, userA is going to obtain a trust scoped token - using trustA, and with that token create a trust between userA and - userB called trustB. This pattern will continue with userB creating a - trust with userC. 
- So the trust chain should look something like: - trustA -> trustB -> trustC - Where: - self.user is trusting userA with trustA - userA is trusting userB with trustB - userB is trusting userC with trustC - - """ - self.user_list = list() - self.trust_chain = list() - for _ in range(3): - user = unit.create_user(self.identity_api, - domain_id=self.domain_id) - self.user_list.append(user) - - # trustor->trustee redelegation with impersonation - trustee = self.user_list[0] - trust_ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=trustee['id'], - project_id=self.project_id, - impersonation=True, - expires=dict(minutes=1), - role_ids=[self.role_id], - allow_redelegation=True, - redelegation_count=3) - - # Create a trust between self.user and the first user in the list - r = self.post('/OS-TRUST/trusts', - body={'trust': trust_ref}) - - trust = self.assertValidTrustResponse(r) - auth_data = self.build_authentication_request( - user_id=trustee['id'], - password=trustee['password'], - trust_id=trust['id']) - - # Generate a trusted token for the first user - trust_token = self.get_requested_token(auth_data) - self.trust_chain.append(trust) - - # Loop through the user to create a chain of redelegated trust. 
- for next_trustee in self.user_list[1:]: - trust_ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=next_trustee['id'], - project_id=self.project_id, - impersonation=True, - role_ids=[self.role_id], - allow_redelegation=True) - r = self.post('/OS-TRUST/trusts', - body={'trust': trust_ref}, - token=trust_token) - trust = self.assertValidTrustResponse(r) - auth_data = self.build_authentication_request( - user_id=next_trustee['id'], - password=next_trustee['password'], - trust_id=trust['id']) - trust_token = self.get_requested_token(auth_data) - self.trust_chain.append(trust) - - trustee = self.user_list[-1] - trust = self.trust_chain[-1] - auth_data = self.build_authentication_request( - user_id=trustee['id'], - password=trustee['password'], - trust_id=trust['id']) - - self.last_token = self.get_requested_token(auth_data) - - def assert_user_authenticate(self, user): - auth_data = self.build_authentication_request( - user_id=user['id'], - password=user['password'] - ) - r = self.v3_create_token(auth_data) - self.assertValidTokenResponse(r) - - def assert_trust_tokens_revoked(self, trust_id): - trustee = self.user_list[0] - auth_data = self.build_authentication_request( - user_id=trustee['id'], - password=trustee['password'] - ) - r = self.v3_create_token(auth_data) - self.assertValidTokenResponse(r) - - revocation_response = self.get('/OS-REVOKE/events') - revocation_events = revocation_response.json_body['events'] - found = False - for event in revocation_events: - if event.get('OS-TRUST:trust_id') == trust_id: - found = True - self.assertTrue(found, 'event with trust_id %s not found in list' % - trust_id) - - def test_delete_trust_cascade(self): - self.assert_user_authenticate(self.user_list[0]) - self.delete('/OS-TRUST/trusts/%(trust_id)s' % { - 'trust_id': self.trust_chain[0]['id']}) - - headers = {'X-Subject-Token': self.last_token} - self.head('/auth/tokens', headers=headers, - expected_status=http_client.NOT_FOUND) - 
self.assert_trust_tokens_revoked(self.trust_chain[0]['id']) - - def test_delete_broken_chain(self): - self.assert_user_authenticate(self.user_list[0]) - self.delete('/OS-TRUST/trusts/%(trust_id)s' % { - 'trust_id': self.trust_chain[0]['id']}) - - # Verify the two remaining trust have been deleted - for i in range(len(self.user_list) - 1): - auth_data = self.build_authentication_request( - user_id=self.user_list[i]['id'], - password=self.user_list[i]['password']) - - auth_token = self.get_requested_token(auth_data) - - # Assert chained trust have been deleted - self.get('/OS-TRUST/trusts/%(trust_id)s' % { - 'trust_id': self.trust_chain[i + 1]['id']}, - token=auth_token, - expected_status=http_client.NOT_FOUND) - - def test_trustor_roles_revoked(self): - self.assert_user_authenticate(self.user_list[0]) - - self.assignment_api.remove_role_from_user_and_project( - self.user_id, self.project_id, self.role_id - ) - - # Verify that users are not allowed to authenticate with trust - for i in range(len(self.user_list[1:])): - trustee = self.user_list[i] - auth_data = self.build_authentication_request( - user_id=trustee['id'], - password=trustee['password']) - - # Attempt to authenticate with trust - token = self.get_requested_token(auth_data) - auth_data = self.build_authentication_request( - token=token, - trust_id=self.trust_chain[i - 1]['id']) - - # Trustee has no delegated roles - self.v3_create_token(auth_data, - expected_status=http_client.FORBIDDEN) - - def test_intermediate_user_disabled(self): - self.assert_user_authenticate(self.user_list[0]) - - disabled = self.user_list[0] - disabled['enabled'] = False - self.identity_api.update_user(disabled['id'], disabled) - - # Bypass policy enforcement - with mock.patch.object(rules, 'enforce', return_value=True): - headers = {'X-Subject-Token': self.last_token} - self.head('/auth/tokens', headers=headers, - expected_status=http_client.FORBIDDEN) - - def test_intermediate_user_deleted(self): - 
self.assert_user_authenticate(self.user_list[0]) - - self.identity_api.delete_user(self.user_list[0]['id']) - - # Bypass policy enforcement - with mock.patch.object(rules, 'enforce', return_value=True): - headers = {'X-Subject-Token': self.last_token} - self.head('/auth/tokens', headers=headers, - expected_status=http_client.FORBIDDEN) - - -class TestAPIProtectionWithoutAuthContextMiddleware(test_v3.RestfulTestCase): - def test_api_protection_with_no_auth_context_in_env(self): - auth_data = self.build_authentication_request( - user_id=self.default_domain_user['id'], - password=self.default_domain_user['password'], - project_id=self.project['id']) - token = self.get_requested_token(auth_data) - auth_controller = auth.controllers.Auth() - # all we care is that auth context is not in the environment and - # 'token_id' is used to build the auth context instead - context = {'subject_token_id': token, - 'token_id': token, - 'query_string': {}, - 'environment': {}} - r = auth_controller.validate_token(context) - self.assertEqual(http_client.OK, r.status_code) - - -class TestAuthContext(unit.TestCase): - def setUp(self): - super(TestAuthContext, self).setUp() - self.auth_context = auth.controllers.AuthContext() - - def test_pick_lowest_expires_at(self): - expires_at_1 = utils.isotime(timeutils.utcnow()) - expires_at_2 = utils.isotime(timeutils.utcnow() + - datetime.timedelta(seconds=10)) - # make sure auth_context picks the lowest value - self.auth_context['expires_at'] = expires_at_1 - self.auth_context['expires_at'] = expires_at_2 - self.assertEqual(expires_at_1, self.auth_context['expires_at']) - - def test_identity_attribute_conflict(self): - for identity_attr in auth.controllers.AuthContext.IDENTITY_ATTRIBUTES: - self.auth_context[identity_attr] = uuid.uuid4().hex - if identity_attr == 'expires_at': - # 'expires_at' is a special case. Will test it in a separate - # test case. 
- continue - self.assertRaises(exception.Unauthorized, - operator.setitem, - self.auth_context, - identity_attr, - uuid.uuid4().hex) - - def test_identity_attribute_conflict_with_none_value(self): - for identity_attr in auth.controllers.AuthContext.IDENTITY_ATTRIBUTES: - self.auth_context[identity_attr] = None - - if identity_attr == 'expires_at': - # 'expires_at' is a special case and is tested above. - self.auth_context['expires_at'] = uuid.uuid4().hex - continue - - self.assertRaises(exception.Unauthorized, - operator.setitem, - self.auth_context, - identity_attr, - uuid.uuid4().hex) - - def test_non_identity_attribute_conflict_override(self): - # for attributes Keystone doesn't know about, make sure they can be - # freely manipulated - attr_name = uuid.uuid4().hex - attr_val_1 = uuid.uuid4().hex - attr_val_2 = uuid.uuid4().hex - self.auth_context[attr_name] = attr_val_1 - self.auth_context[attr_name] = attr_val_2 - self.assertEqual(attr_val_2, self.auth_context[attr_name]) - - -class TestAuthSpecificData(test_v3.RestfulTestCase): - - def test_get_catalog_project_scoped_token(self): - """Call ``GET /auth/catalog`` with a project-scoped token.""" - r = self.get('/auth/catalog') - self.assertValidCatalogResponse(r) - - def test_get_catalog_domain_scoped_token(self): - """Call ``GET /auth/catalog`` with a domain-scoped token.""" - # grant a domain role to a user - self.put(path='/domains/%s/users/%s/roles/%s' % ( - self.domain['id'], self.user['id'], self.role['id'])) - - self.get( - '/auth/catalog', - auth=self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - domain_id=self.domain['id']), - expected_status=http_client.FORBIDDEN) - - def test_get_catalog_unscoped_token(self): - """Call ``GET /auth/catalog`` with an unscoped token.""" - self.get( - '/auth/catalog', - auth=self.build_authentication_request( - user_id=self.default_domain_user['id'], - password=self.default_domain_user['password']), - 
expected_status=http_client.FORBIDDEN) - - def test_get_catalog_no_token(self): - """Call ``GET /auth/catalog`` without a token.""" - self.get( - '/auth/catalog', - noauth=True, - expected_status=http_client.UNAUTHORIZED) - - def test_get_projects_project_scoped_token(self): - r = self.get('/auth/projects') - self.assertThat(r.json['projects'], matchers.HasLength(1)) - self.assertValidProjectListResponse(r) - - def test_get_domains_project_scoped_token(self): - self.put(path='/domains/%s/users/%s/roles/%s' % ( - self.domain['id'], self.user['id'], self.role['id'])) - - r = self.get('/auth/domains') - self.assertThat(r.json['domains'], matchers.HasLength(1)) - self.assertValidDomainListResponse(r) - - -class TestTrustAuthPKITokenProvider(TrustAPIBehavior, TestTrustChain): - def config_overrides(self): - super(TestTrustAuthPKITokenProvider, self).config_overrides() - self.config_fixture.config(group='token', - provider='pki', - revoke_by_id=False) - self.config_fixture.config(group='trust', - enabled=True) - - -class TestTrustAuthPKIZTokenProvider(TrustAPIBehavior, TestTrustChain): - def config_overrides(self): - super(TestTrustAuthPKIZTokenProvider, self).config_overrides() - self.config_fixture.config(group='token', - provider='pkiz', - revoke_by_id=False) - self.config_fixture.config(group='trust', - enabled=True) - - -class TestTrustAuthFernetTokenProvider(TrustAPIBehavior, TestTrustChain): - def config_overrides(self): - super(TestTrustAuthFernetTokenProvider, self).config_overrides() - self.config_fixture.config(group='token', - provider='fernet', - revoke_by_id=False) - self.config_fixture.config(group='trust', - enabled=True) - self.useFixture(ksfixtures.KeyRepository(self.config_fixture)) - - -class TestAuthFernetTokenProvider(TestAuth): - def setUp(self): - super(TestAuthFernetTokenProvider, self).setUp() - self.useFixture(ksfixtures.KeyRepository(self.config_fixture)) - - def config_overrides(self): - super(TestAuthFernetTokenProvider, 
self).config_overrides() - self.config_fixture.config(group='token', provider='fernet') - - def test_verify_with_bound_token(self): - self.config_fixture.config(group='token', bind='kerberos') - auth_data = self.build_authentication_request( - project_id=self.project['id']) - remote_user = self.default_domain_user['name'] - self.admin_app.extra_environ.update({'REMOTE_USER': remote_user, - 'AUTH_TYPE': 'Negotiate'}) - # Bind not current supported by Fernet, see bug 1433311. - self.v3_create_token(auth_data, - expected_status=http_client.NOT_IMPLEMENTED) - - def test_v2_v3_bind_token_intermix(self): - self.config_fixture.config(group='token', bind='kerberos') - - # we need our own user registered to the default domain because of - # the way external auth works. - remote_user = self.default_domain_user['name'] - self.admin_app.extra_environ.update({'REMOTE_USER': remote_user, - 'AUTH_TYPE': 'Negotiate'}) - body = {'auth': {}} - # Bind not current supported by Fernet, see bug 1433311. - self.admin_request(path='/v2.0/tokens', - method='POST', - body=body, - expected_status=http_client.NOT_IMPLEMENTED) - - def test_auth_with_bind_token(self): - self.config_fixture.config(group='token', bind=['kerberos']) - - auth_data = self.build_authentication_request() - remote_user = self.default_domain_user['name'] - self.admin_app.extra_environ.update({'REMOTE_USER': remote_user, - 'AUTH_TYPE': 'Negotiate'}) - # Bind not current supported by Fernet, see bug 1433311. 
- self.v3_create_token(auth_data, - expected_status=http_client.NOT_IMPLEMENTED) - - -class TestAuthTOTP(test_v3.RestfulTestCase): - - def setUp(self): - super(TestAuthTOTP, self).setUp() - - ref = unit.new_totp_credential( - user_id=self.default_domain_user['id'], - project_id=self.default_domain_project['id']) - - self.secret = ref['blob'] - - r = self.post('/credentials', body={'credential': ref}) - self.assertValidCredentialResponse(r, ref) - - self.addCleanup(self.cleanup) - - def auth_plugin_config_override(self): - methods = ['totp', 'token', 'password'] - super(TestAuthTOTP, self).auth_plugin_config_override(methods) - - def _make_credentials(self, cred_type, count=1, user_id=None, - project_id=None, blob=None): - user_id = user_id or self.default_domain_user['id'] - project_id = project_id or self.default_domain_project['id'] - - creds = [] - for __ in range(count): - if cred_type == 'totp': - ref = unit.new_totp_credential( - user_id=user_id, project_id=project_id, blob=blob) - else: - ref = unit.new_credential_ref( - user_id=user_id, project_id=project_id) - resp = self.post('/credentials', body={'credential': ref}) - creds.append(resp.json['credential']) - return creds - - def _make_auth_data_by_id(self, passcode, user_id=None): - return self.build_authentication_request( - user_id=user_id or self.default_domain_user['id'], - passcode=passcode, - project_id=self.project['id']) - - def _make_auth_data_by_name(self, passcode, username, user_domain_id): - return self.build_authentication_request( - username=username, - user_domain_id=user_domain_id, - passcode=passcode, - project_id=self.project['id']) - - def cleanup(self): - totp_creds = self.credential_api.list_credentials_for_user( - self.default_domain_user['id'], type='totp') - - other_creds = self.credential_api.list_credentials_for_user( - self.default_domain_user['id'], type='other') - - for cred in itertools.chain(other_creds, totp_creds): - self.delete('/credentials/%s' % cred['id'], - 
expected_status=http_client.NO_CONTENT) - - def test_with_a_valid_passcode(self): - creds = self._make_credentials('totp') - secret = creds[-1]['blob'] - auth_data = self._make_auth_data_by_id( - totp._generate_totp_passcode(secret)) - - self.v3_create_token(auth_data, expected_status=http_client.CREATED) - - def test_with_an_invalid_passcode_and_user_credentials(self): - self._make_credentials('totp') - auth_data = self._make_auth_data_by_id('000000') - self.v3_create_token(auth_data, - expected_status=http_client.UNAUTHORIZED) - - def test_with_an_invalid_passcode_with_no_user_credentials(self): - auth_data = self._make_auth_data_by_id('000000') - self.v3_create_token(auth_data, - expected_status=http_client.UNAUTHORIZED) - - def test_with_a_corrupt_totp_credential(self): - self._make_credentials('totp', count=1, blob='0') - auth_data = self._make_auth_data_by_id('000000') - self.v3_create_token(auth_data, - expected_status=http_client.UNAUTHORIZED) - - def test_with_multiple_credentials(self): - self._make_credentials('other', 3) - creds = self._make_credentials('totp', count=3) - secret = creds[-1]['blob'] - - auth_data = self._make_auth_data_by_id( - totp._generate_totp_passcode(secret)) - self.v3_create_token(auth_data, expected_status=http_client.CREATED) - - def test_with_multiple_users(self): - # make some credentials for the existing user - self._make_credentials('totp', count=3) - - # create a new user and their credentials - user = unit.create_user(self.identity_api, domain_id=self.domain_id) - self.assignment_api.create_grant(self.role['id'], - user_id=user['id'], - project_id=self.project['id']) - creds = self._make_credentials('totp', count=1, user_id=user['id']) - secret = creds[-1]['blob'] - - # Stop the clock otherwise there is a chance of auth failure due to - # getting a different TOTP between the call here and the call in the - # auth plugin. 
- self.useFixture(fixture.TimeFixture()) - - auth_data = self._make_auth_data_by_id( - totp._generate_totp_passcode(secret), user_id=user['id']) - self.v3_create_token(auth_data, expected_status=http_client.CREATED) - - def test_with_multiple_users_and_invalid_credentials(self): - """Prevent logging in with someone else's credentials. - - It's very easy to forget to limit the credentials query by user. - Let's just test it for a sanity check. - """ - # make some credentials for the existing user - self._make_credentials('totp', count=3) - - # create a new user and their credentials - new_user = unit.create_user(self.identity_api, - domain_id=self.domain_id) - self.assignment_api.create_grant(self.role['id'], - user_id=new_user['id'], - project_id=self.project['id']) - user2_creds = self._make_credentials( - 'totp', count=1, user_id=new_user['id']) - - user_id = self.default_domain_user['id'] # user1 - secret = user2_creds[-1]['blob'] - - auth_data = self._make_auth_data_by_id( - totp._generate_totp_passcode(secret), user_id=user_id) - self.v3_create_token(auth_data, - expected_status=http_client.UNAUTHORIZED) - - def test_with_username_and_domain_id(self): - creds = self._make_credentials('totp') - secret = creds[-1]['blob'] - auth_data = self._make_auth_data_by_name( - totp._generate_totp_passcode(secret), - username=self.default_domain_user['name'], - user_domain_id=self.default_domain_user['domain_id']) - - self.v3_create_token(auth_data, expected_status=http_client.CREATED) - - -class TestFetchRevocationList(test_v3.RestfulTestCase): - """Test fetch token revocation list on the v3 Identity API.""" - - def config_overrides(self): - super(TestFetchRevocationList, self).config_overrides() - self.config_fixture.config(group='token', revoke_by_id=True) - - def test_ids_no_tokens(self): - # When there's no revoked tokens the response is an empty list, and - # the response is signed. 
- res = self.get('/auth/tokens/OS-PKI/revoked') - signed = res.json['signed'] - clear = cms.cms_verify(signed, CONF.signing.certfile, - CONF.signing.ca_certs) - payload = json.loads(clear) - self.assertEqual({'revoked': []}, payload) - - def test_ids_token(self): - # When there's a revoked token, it's in the response, and the response - # is signed. - token_res = self.v3_create_token( - self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project['id'])) - - token_id = token_res.headers.get('X-Subject-Token') - token_data = token_res.json['token'] - - self.delete('/auth/tokens', headers={'X-Subject-Token': token_id}) - - res = self.get('/auth/tokens/OS-PKI/revoked') - signed = res.json['signed'] - clear = cms.cms_verify(signed, CONF.signing.certfile, - CONF.signing.ca_certs) - payload = json.loads(clear) - - def truncate(ts_str): - return ts_str[:19] + 'Z' # 2016-01-21T15:53:52 == 19 chars. - - exp_token_revoke_data = { - 'id': token_id, - 'audit_id': token_data['audit_ids'][0], - 'expires': truncate(token_data['expires_at']), - } - - self.assertEqual({'revoked': [exp_token_revoke_data]}, payload) - - def test_audit_id_only_no_tokens(self): - # When there's no revoked tokens and ?audit_id_only is used, the - # response is an empty list and is not signed. - res = self.get('/auth/tokens/OS-PKI/revoked?audit_id_only') - self.assertEqual({'revoked': []}, res.json) - - def test_audit_id_only_token(self): - # When there's a revoked token and ?audit_id_only is used, the - # response contains the audit_id of the token and is not signed. 
- token_res = self.v3_create_token( - self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project['id'])) - - token_id = token_res.headers.get('X-Subject-Token') - token_data = token_res.json['token'] - - self.delete('/auth/tokens', headers={'X-Subject-Token': token_id}) - - res = self.get('/auth/tokens/OS-PKI/revoked?audit_id_only') - - def truncate(ts_str): - return ts_str[:19] + 'Z' # 2016-01-21T15:53:52 == 19 chars. - - exp_token_revoke_data = { - 'audit_id': token_data['audit_ids'][0], - 'expires': truncate(token_data['expires_at']), - } - - self.assertEqual({'revoked': [exp_token_revoke_data]}, res.json) diff --git a/keystone-moon/keystone/tests/unit/test_v3_catalog.py b/keystone-moon/keystone/tests/unit/test_v3_catalog.py deleted file mode 100644 index 2eb9db14..00000000 --- a/keystone-moon/keystone/tests/unit/test_v3_catalog.py +++ /dev/null @@ -1,924 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import copy -import uuid - -from six.moves import http_client -from testtools import matchers - -from keystone import catalog -from keystone.tests import unit -from keystone.tests.unit.ksfixtures import database -from keystone.tests.unit import test_v3 - - -class CatalogTestCase(test_v3.RestfulTestCase): - """Test service & endpoint CRUD.""" - - # region crud tests - - def test_create_region_with_id(self): - """Call ``PUT /regions/{region_id}`` w/o an ID in the request body.""" - ref = unit.new_region_ref() - region_id = ref.pop('id') - r = self.put( - '/regions/%s' % region_id, - body={'region': ref}, - expected_status=http_client.CREATED) - self.assertValidRegionResponse(r, ref) - # Double-check that the region ID was kept as-is and not - # populated with a UUID, as is the case with POST /v3/regions - self.assertEqual(region_id, r.json['region']['id']) - - def test_create_region_with_matching_ids(self): - """Call ``PUT /regions/{region_id}`` with an ID in the request body.""" - ref = unit.new_region_ref() - region_id = ref['id'] - r = self.put( - '/regions/%s' % region_id, - body={'region': ref}, - expected_status=http_client.CREATED) - self.assertValidRegionResponse(r, ref) - # Double-check that the region ID was kept as-is and not - # populated with a UUID, as is the case with POST /v3/regions - self.assertEqual(region_id, r.json['region']['id']) - - def test_create_region_with_duplicate_id(self): - """Call ``PUT /regions/{region_id}``.""" - ref = dict(description="my region") - self.put( - '/regions/myregion', - body={'region': ref}, expected_status=http_client.CREATED) - # Create region again with duplicate id - self.put( - '/regions/myregion', - body={'region': ref}, expected_status=http_client.CONFLICT) - - def test_create_region(self): - """Call ``POST /regions`` with an ID in the request body.""" - # the ref will have an ID defined on it - ref = unit.new_region_ref() - r = self.post( - '/regions', - body={'region': ref}) - 
self.assertValidRegionResponse(r, ref) - - # we should be able to get the region, having defined the ID ourselves - r = self.get( - '/regions/%(region_id)s' % { - 'region_id': ref['id']}) - self.assertValidRegionResponse(r, ref) - - def test_create_region_with_empty_id(self): - """Call ``POST /regions`` with an empty ID in the request body.""" - ref = unit.new_region_ref(id='') - - r = self.post('/regions', body={'region': ref}) - self.assertValidRegionResponse(r, ref) - self.assertNotEmpty(r.result['region'].get('id')) - - def test_create_region_without_id(self): - """Call ``POST /regions`` without an ID in the request body.""" - ref = unit.new_region_ref() - - # instead of defining the ID ourselves... - del ref['id'] - - # let the service define the ID - r = self.post('/regions', body={'region': ref}) - self.assertValidRegionResponse(r, ref) - - def test_create_region_without_description(self): - """Call ``POST /regions`` without description in the request body.""" - ref = unit.new_region_ref(description=None) - - del ref['description'] - - r = self.post('/regions', body={'region': ref}) - # Create the description in the reference to compare to since the - # response should now have a description, even though we didn't send - # it with the original reference. - ref['description'] = '' - self.assertValidRegionResponse(r, ref) - - def test_create_regions_with_same_description_string(self): - """Call ``POST /regions`` with duplicate descriptions.""" - # NOTE(lbragstad): Make sure we can create two regions that have the - # same description. 
- region_desc = 'Some Region Description' - - ref1 = unit.new_region_ref(description=region_desc) - ref2 = unit.new_region_ref(description=region_desc) - - resp1 = self.post('/regions', body={'region': ref1}) - self.assertValidRegionResponse(resp1, ref1) - - resp2 = self.post('/regions', body={'region': ref2}) - self.assertValidRegionResponse(resp2, ref2) - - def test_create_regions_without_descriptions(self): - """Call ``POST /regions`` with no description.""" - # NOTE(lbragstad): Make sure we can create two regions that have - # no description in the request body. The description should be - # populated by Catalog Manager. - ref1 = unit.new_region_ref() - ref2 = unit.new_region_ref() - - del ref1['description'] - ref2['description'] = None - - resp1 = self.post('/regions', body={'region': ref1}) - - resp2 = self.post('/regions', body={'region': ref2}) - # Create the descriptions in the references to compare to since the - # responses should now have descriptions, even though we didn't send - # a description with the original references. 
- ref1['description'] = '' - ref2['description'] = '' - self.assertValidRegionResponse(resp1, ref1) - self.assertValidRegionResponse(resp2, ref2) - - def test_create_region_with_conflicting_ids(self): - """Call ``PUT /regions/{region_id}`` with conflicting region IDs.""" - # the region ref is created with an ID - ref = unit.new_region_ref() - - # but instead of using that ID, make up a new, conflicting one - self.put( - '/regions/%s' % uuid.uuid4().hex, - body={'region': ref}, - expected_status=http_client.BAD_REQUEST) - - def test_list_regions(self): - """Call ``GET /regions``.""" - r = self.get('/regions') - self.assertValidRegionListResponse(r, ref=self.region) - - def _create_region_with_parent_id(self, parent_id=None): - ref = unit.new_region_ref(parent_region_id=parent_id) - return self.post( - '/regions', - body={'region': ref}) - - def test_list_regions_filtered_by_parent_region_id(self): - """Call ``GET /regions?parent_region_id={parent_region_id}``.""" - new_region = self._create_region_with_parent_id() - parent_id = new_region.result['region']['id'] - - new_region = self._create_region_with_parent_id(parent_id) - new_region = self._create_region_with_parent_id(parent_id) - - r = self.get('/regions?parent_region_id=%s' % parent_id) - - for region in r.result['regions']: - self.assertEqual(parent_id, region['parent_region_id']) - - def test_get_region(self): - """Call ``GET /regions/{region_id}``.""" - r = self.get('/regions/%(region_id)s' % { - 'region_id': self.region_id}) - self.assertValidRegionResponse(r, self.region) - - def test_update_region(self): - """Call ``PATCH /regions/{region_id}``.""" - region = unit.new_region_ref() - del region['id'] - r = self.patch('/regions/%(region_id)s' % { - 'region_id': self.region_id}, - body={'region': region}) - self.assertValidRegionResponse(r, region) - - def test_update_region_without_description_keeps_original(self): - """Call ``PATCH /regions/{region_id}``.""" - region_ref = unit.new_region_ref() - - resp = 
self.post('/regions', body={'region': region_ref}) - - region_updates = { - # update with something that's not the description - 'parent_region_id': self.region_id, - } - resp = self.patch('/regions/%s' % region_ref['id'], - body={'region': region_updates}) - - # NOTE(dstanek): Keystone should keep the original description. - self.assertEqual(region_ref['description'], - resp.result['region']['description']) - - def test_update_region_with_null_description(self): - """Call ``PATCH /regions/{region_id}``.""" - region = unit.new_region_ref(description=None) - del region['id'] - r = self.patch('/regions/%(region_id)s' % { - 'region_id': self.region_id}, - body={'region': region}) - - # NOTE(dstanek): Keystone should turn the provided None value into - # an empty string before storing in the backend. - region['description'] = '' - self.assertValidRegionResponse(r, region) - - def test_delete_region(self): - """Call ``DELETE /regions/{region_id}``.""" - ref = unit.new_region_ref() - r = self.post( - '/regions', - body={'region': ref}) - self.assertValidRegionResponse(r, ref) - - self.delete('/regions/%(region_id)s' % { - 'region_id': ref['id']}) - - # service crud tests - - def test_create_service(self): - """Call ``POST /services``.""" - ref = unit.new_service_ref() - r = self.post( - '/services', - body={'service': ref}) - self.assertValidServiceResponse(r, ref) - - def test_create_service_no_name(self): - """Call ``POST /services``.""" - ref = unit.new_service_ref() - del ref['name'] - r = self.post( - '/services', - body={'service': ref}) - ref['name'] = '' - self.assertValidServiceResponse(r, ref) - - def test_create_service_no_enabled(self): - """Call ``POST /services``.""" - ref = unit.new_service_ref() - del ref['enabled'] - r = self.post( - '/services', - body={'service': ref}) - ref['enabled'] = True - self.assertValidServiceResponse(r, ref) - self.assertIs(True, r.result['service']['enabled']) - - def test_create_service_enabled_false(self): - """Call ``POST 
/services``.""" - ref = unit.new_service_ref(enabled=False) - r = self.post( - '/services', - body={'service': ref}) - self.assertValidServiceResponse(r, ref) - self.assertIs(False, r.result['service']['enabled']) - - def test_create_service_enabled_true(self): - """Call ``POST /services``.""" - ref = unit.new_service_ref(enabled=True) - r = self.post( - '/services', - body={'service': ref}) - self.assertValidServiceResponse(r, ref) - self.assertIs(True, r.result['service']['enabled']) - - def test_create_service_enabled_str_true(self): - """Call ``POST /services``.""" - ref = unit.new_service_ref(enabled='True') - self.post('/services', body={'service': ref}, - expected_status=http_client.BAD_REQUEST) - - def test_create_service_enabled_str_false(self): - """Call ``POST /services``.""" - ref = unit.new_service_ref(enabled='False') - self.post('/services', body={'service': ref}, - expected_status=http_client.BAD_REQUEST) - - def test_create_service_enabled_str_random(self): - """Call ``POST /services``.""" - ref = unit.new_service_ref(enabled='puppies') - self.post('/services', body={'service': ref}, - expected_status=http_client.BAD_REQUEST) - - def test_list_services(self): - """Call ``GET /services``.""" - r = self.get('/services') - self.assertValidServiceListResponse(r, ref=self.service) - - def _create_random_service(self): - ref = unit.new_service_ref() - response = self.post( - '/services', - body={'service': ref}) - return response.json['service'] - - def test_filter_list_services_by_type(self): - """Call ``GET /services?type=``.""" - target_ref = self._create_random_service() - - # create unrelated services - self._create_random_service() - self._create_random_service() - - response = self.get('/services?type=' + target_ref['type']) - self.assertValidServiceListResponse(response, ref=target_ref) - - filtered_service_list = response.json['services'] - self.assertEqual(1, len(filtered_service_list)) - - filtered_service = filtered_service_list[0] - 
self.assertEqual(target_ref['type'], filtered_service['type']) - - def test_filter_list_services_by_name(self): - """Call ``GET /services?name=``.""" - target_ref = self._create_random_service() - - # create unrelated services - self._create_random_service() - self._create_random_service() - - response = self.get('/services?name=' + target_ref['name']) - self.assertValidServiceListResponse(response, ref=target_ref) - - filtered_service_list = response.json['services'] - self.assertEqual(1, len(filtered_service_list)) - - filtered_service = filtered_service_list[0] - self.assertEqual(target_ref['name'], filtered_service['name']) - - def test_get_service(self): - """Call ``GET /services/{service_id}``.""" - r = self.get('/services/%(service_id)s' % { - 'service_id': self.service_id}) - self.assertValidServiceResponse(r, self.service) - - def test_update_service(self): - """Call ``PATCH /services/{service_id}``.""" - service = unit.new_service_ref() - del service['id'] - r = self.patch('/services/%(service_id)s' % { - 'service_id': self.service_id}, - body={'service': service}) - self.assertValidServiceResponse(r, service) - - def test_delete_service(self): - """Call ``DELETE /services/{service_id}``.""" - self.delete('/services/%(service_id)s' % { - 'service_id': self.service_id}) - - # endpoint crud tests - - def test_list_endpoints(self): - """Call ``GET /endpoints``.""" - r = self.get('/endpoints') - self.assertValidEndpointListResponse(r, ref=self.endpoint) - - def _create_random_endpoint(self, interface='public', - parent_region_id=None): - region = self._create_region_with_parent_id( - parent_id=parent_region_id) - service = self._create_random_service() - ref = unit.new_endpoint_ref( - service_id=service['id'], - interface=interface, - region_id=region.result['region']['id']) - - response = self.post( - '/endpoints', - body={'endpoint': ref}) - return response.json['endpoint'] - - def test_list_endpoints_filtered_by_interface(self): - """Call ``GET 
/endpoints?interface={interface}``.""" - ref = self._create_random_endpoint(interface='internal') - - response = self.get('/endpoints?interface=%s' % ref['interface']) - self.assertValidEndpointListResponse(response, ref=ref) - - for endpoint in response.json['endpoints']: - self.assertEqual(ref['interface'], endpoint['interface']) - - def test_list_endpoints_filtered_by_service_id(self): - """Call ``GET /endpoints?service_id={service_id}``.""" - ref = self._create_random_endpoint() - - response = self.get('/endpoints?service_id=%s' % ref['service_id']) - self.assertValidEndpointListResponse(response, ref=ref) - - for endpoint in response.json['endpoints']: - self.assertEqual(ref['service_id'], endpoint['service_id']) - - def test_list_endpoints_filtered_by_region_id(self): - """Call ``GET /endpoints?region_id={region_id}``.""" - ref = self._create_random_endpoint() - - response = self.get('/endpoints?region_id=%s' % ref['region_id']) - self.assertValidEndpointListResponse(response, ref=ref) - - for endpoint in response.json['endpoints']: - self.assertEqual(ref['region_id'], endpoint['region_id']) - - def test_list_endpoints_filtered_by_parent_region_id(self): - """Call ``GET /endpoints?region_id={region_id}``. - - Ensure passing the parent_region_id as filter returns an - empty list. - - """ - parent_region = self._create_region_with_parent_id() - parent_region_id = parent_region.result['region']['id'] - self._create_random_endpoint(parent_region_id=parent_region_id) - - response = self.get('/endpoints?region_id=%s' % parent_region_id) - self.assertEqual(0, len(response.json['endpoints'])) - - def test_list_endpoints_with_multiple_filters(self): - """Call ``GET /endpoints?interface={interface}...``. - - Ensure passing different combinations of interface, region_id and - service_id as filters will return the correct result. 
- - """ - # interface and region_id specified - ref = self._create_random_endpoint(interface='internal') - response = self.get('/endpoints?interface=%s®ion_id=%s' % - (ref['interface'], ref['region_id'])) - self.assertValidEndpointListResponse(response, ref=ref) - - for endpoint in response.json['endpoints']: - self.assertEqual(ref['interface'], endpoint['interface']) - self.assertEqual(ref['region_id'], endpoint['region_id']) - - # interface and service_id specified - ref = self._create_random_endpoint(interface='internal') - response = self.get('/endpoints?interface=%s&service_id=%s' % - (ref['interface'], ref['service_id'])) - self.assertValidEndpointListResponse(response, ref=ref) - - for endpoint in response.json['endpoints']: - self.assertEqual(ref['interface'], endpoint['interface']) - self.assertEqual(ref['service_id'], endpoint['service_id']) - - # region_id and service_id specified - ref = self._create_random_endpoint(interface='internal') - response = self.get('/endpoints?region_id=%s&service_id=%s' % - (ref['region_id'], ref['service_id'])) - self.assertValidEndpointListResponse(response, ref=ref) - - for endpoint in response.json['endpoints']: - self.assertEqual(ref['region_id'], endpoint['region_id']) - self.assertEqual(ref['service_id'], endpoint['service_id']) - - # interface, region_id and service_id specified - ref = self._create_random_endpoint(interface='internal') - response = self.get(('/endpoints?interface=%s®ion_id=%s' - '&service_id=%s') % - (ref['interface'], ref['region_id'], - ref['service_id'])) - self.assertValidEndpointListResponse(response, ref=ref) - - for endpoint in response.json['endpoints']: - self.assertEqual(ref['interface'], endpoint['interface']) - self.assertEqual(ref['region_id'], endpoint['region_id']) - self.assertEqual(ref['service_id'], endpoint['service_id']) - - def test_list_endpoints_with_random_filter_values(self): - """Call ``GET /endpoints?interface={interface}...``. 
- - Ensure passing random values for: interface, region_id and - service_id will return an empty list. - - """ - self._create_random_endpoint(interface='internal') - - response = self.get('/endpoints?interface=%s' % uuid.uuid4().hex) - self.assertEqual(0, len(response.json['endpoints'])) - - response = self.get('/endpoints?region_id=%s' % uuid.uuid4().hex) - self.assertEqual(0, len(response.json['endpoints'])) - - response = self.get('/endpoints?service_id=%s' % uuid.uuid4().hex) - self.assertEqual(0, len(response.json['endpoints'])) - - def test_create_endpoint_no_enabled(self): - """Call ``POST /endpoints``.""" - ref = unit.new_endpoint_ref(service_id=self.service_id, - interface='public', - region_id=self.region_id) - r = self.post('/endpoints', body={'endpoint': ref}) - ref['enabled'] = True - self.assertValidEndpointResponse(r, ref) - - def test_create_endpoint_enabled_true(self): - """Call ``POST /endpoints`` with enabled: true.""" - ref = unit.new_endpoint_ref(service_id=self.service_id, - interface='public', - region_id=self.region_id, - enabled=True) - r = self.post('/endpoints', body={'endpoint': ref}) - self.assertValidEndpointResponse(r, ref) - - def test_create_endpoint_enabled_false(self): - """Call ``POST /endpoints`` with enabled: false.""" - ref = unit.new_endpoint_ref(service_id=self.service_id, - interface='public', - region_id=self.region_id, - enabled=False) - r = self.post('/endpoints', body={'endpoint': ref}) - self.assertValidEndpointResponse(r, ref) - - def test_create_endpoint_enabled_str_true(self): - """Call ``POST /endpoints`` with enabled: 'True'.""" - ref = unit.new_endpoint_ref(service_id=self.service_id, - interface='public', - region_id=self.region_id, - enabled='True') - self.post('/endpoints', body={'endpoint': ref}, - expected_status=http_client.BAD_REQUEST) - - def test_create_endpoint_enabled_str_false(self): - """Call ``POST /endpoints`` with enabled: 'False'.""" - ref = unit.new_endpoint_ref(service_id=self.service_id, - 
interface='public', - region_id=self.region_id, - enabled='False') - self.post('/endpoints', body={'endpoint': ref}, - expected_status=http_client.BAD_REQUEST) - - def test_create_endpoint_enabled_str_random(self): - """Call ``POST /endpoints`` with enabled: 'puppies'.""" - ref = unit.new_endpoint_ref(service_id=self.service_id, - interface='public', - region_id=self.region_id, - enabled='puppies') - self.post('/endpoints', body={'endpoint': ref}, - expected_status=http_client.BAD_REQUEST) - - def test_create_endpoint_with_invalid_region_id(self): - """Call ``POST /endpoints``.""" - ref = unit.new_endpoint_ref(service_id=self.service_id) - self.post('/endpoints', body={'endpoint': ref}, - expected_status=http_client.BAD_REQUEST) - - def test_create_endpoint_with_region(self): - """EndpointV3 creates the region before creating the endpoint. - - This occurs when endpoint is provided with 'region' and no 'region_id'. - """ - ref = unit.new_endpoint_ref_with_region(service_id=self.service_id, - region=uuid.uuid4().hex) - self.post('/endpoints', body={'endpoint': ref}) - # Make sure the region is created - self.get('/regions/%(region_id)s' % {'region_id': ref["region"]}) - - def test_create_endpoint_with_no_region(self): - """EndpointV3 allows to creates the endpoint without region.""" - ref = unit.new_endpoint_ref(service_id=self.service_id, region_id=None) - del ref['region_id'] # cannot just be None, it needs to not exist - self.post('/endpoints', body={'endpoint': ref}) - - def test_create_endpoint_with_empty_url(self): - """Call ``POST /endpoints``.""" - ref = unit.new_endpoint_ref(service_id=self.service_id, url='') - self.post('/endpoints', body={'endpoint': ref}, - expected_status=http_client.BAD_REQUEST) - - def test_get_endpoint(self): - """Call ``GET /endpoints/{endpoint_id}``.""" - r = self.get( - '/endpoints/%(endpoint_id)s' % { - 'endpoint_id': self.endpoint_id}) - self.assertValidEndpointResponse(r, self.endpoint) - - def test_update_endpoint(self): - 
"""Call ``PATCH /endpoints/{endpoint_id}``.""" - ref = unit.new_endpoint_ref(service_id=self.service_id, - interface='public', - region_id=self.region_id) - del ref['id'] - r = self.patch( - '/endpoints/%(endpoint_id)s' % { - 'endpoint_id': self.endpoint_id}, - body={'endpoint': ref}) - ref['enabled'] = True - self.assertValidEndpointResponse(r, ref) - - def test_update_endpoint_enabled_true(self): - """Call ``PATCH /endpoints/{endpoint_id}`` with enabled: True.""" - r = self.patch( - '/endpoints/%(endpoint_id)s' % { - 'endpoint_id': self.endpoint_id}, - body={'endpoint': {'enabled': True}}) - self.assertValidEndpointResponse(r, self.endpoint) - - def test_update_endpoint_enabled_false(self): - """Call ``PATCH /endpoints/{endpoint_id}`` with enabled: False.""" - r = self.patch( - '/endpoints/%(endpoint_id)s' % { - 'endpoint_id': self.endpoint_id}, - body={'endpoint': {'enabled': False}}) - exp_endpoint = copy.copy(self.endpoint) - exp_endpoint['enabled'] = False - self.assertValidEndpointResponse(r, exp_endpoint) - - def test_update_endpoint_enabled_str_true(self): - """Call ``PATCH /endpoints/{endpoint_id}`` with enabled: 'True'.""" - self.patch( - '/endpoints/%(endpoint_id)s' % { - 'endpoint_id': self.endpoint_id}, - body={'endpoint': {'enabled': 'True'}}, - expected_status=http_client.BAD_REQUEST) - - def test_update_endpoint_enabled_str_false(self): - """Call ``PATCH /endpoints/{endpoint_id}`` with enabled: 'False'.""" - self.patch( - '/endpoints/%(endpoint_id)s' % { - 'endpoint_id': self.endpoint_id}, - body={'endpoint': {'enabled': 'False'}}, - expected_status=http_client.BAD_REQUEST) - - def test_update_endpoint_enabled_str_random(self): - """Call ``PATCH /endpoints/{endpoint_id}`` with enabled: 'kitties'.""" - self.patch( - '/endpoints/%(endpoint_id)s' % { - 'endpoint_id': self.endpoint_id}, - body={'endpoint': {'enabled': 'kitties'}}, - expected_status=http_client.BAD_REQUEST) - - def test_delete_endpoint(self): - """Call ``DELETE 
/endpoints/{endpoint_id}``.""" - self.delete( - '/endpoints/%(endpoint_id)s' % { - 'endpoint_id': self.endpoint_id}) - - def test_create_endpoint_on_v2(self): - # clear the v3 endpoint so we only have endpoints created on v2 - self.delete( - '/endpoints/%(endpoint_id)s' % { - 'endpoint_id': self.endpoint_id}) - - # create a v3 endpoint ref, and then tweak it back to a v2-style ref - ref = unit.new_endpoint_ref_with_region(service_id=self.service['id'], - region=uuid.uuid4().hex, - internalurl=None) - del ref['id'] - del ref['interface'] - ref['publicurl'] = ref.pop('url') - # don't set adminurl to ensure it's absence is handled like internalurl - - # create the endpoint on v2 (using a v3 token) - r = self.admin_request( - method='POST', - path='/v2.0/endpoints', - token=self.get_scoped_token(), - body={'endpoint': ref}) - endpoint_v2 = r.result['endpoint'] - - # test the endpoint on v3 - r = self.get('/endpoints') - endpoints = self.assertValidEndpointListResponse(r) - self.assertEqual(1, len(endpoints)) - endpoint_v3 = endpoints.pop() - - # these attributes are identical between both APIs - self.assertEqual(ref['region'], endpoint_v3['region_id']) - self.assertEqual(ref['service_id'], endpoint_v3['service_id']) - self.assertEqual(ref['description'], endpoint_v3['description']) - - # a v2 endpoint is not quite the same concept as a v3 endpoint, so they - # receive different identifiers - self.assertNotEqual(endpoint_v2['id'], endpoint_v3['id']) - - # v2 has a publicurl; v3 has a url + interface type - self.assertEqual(ref['publicurl'], endpoint_v3['url']) - self.assertEqual('public', endpoint_v3['interface']) - - # tests for bug 1152632 -- these attributes were being returned by v3 - self.assertNotIn('publicurl', endpoint_v3) - self.assertNotIn('adminurl', endpoint_v3) - self.assertNotIn('internalurl', endpoint_v3) - - # test for bug 1152635 -- this attribute was being returned by v3 - self.assertNotIn('legacy_endpoint_id', endpoint_v3) - - 
self.assertEqual(endpoint_v2['region'], endpoint_v3['region_id']) - - def test_deleting_endpoint_with_space_in_url(self): - # add a space to all urls (intentional "i d" to test bug) - url_with_space = "http://127.0.0.1:8774 /v1.1/\$(tenant_i d)s" - - # create a v3 endpoint ref - ref = unit.new_endpoint_ref(service_id=self.service['id'], - region_id=None, - publicurl=url_with_space, - internalurl=url_with_space, - adminurl=url_with_space, - url=url_with_space) - - # add the endpoint to the database - self.catalog_api.create_endpoint(ref['id'], ref) - - # delete the endpoint - self.delete('/endpoints/%s' % ref['id']) - - # make sure it's deleted (GET should return Not Found) - self.get('/endpoints/%s' % ref['id'], - expected_status=http_client.NOT_FOUND) - - def test_endpoint_create_with_valid_url(self): - """Create endpoint with valid url should be tested,too.""" - # list one valid url is enough, no need to list too much - valid_url = 'http://127.0.0.1:8774/v1.1/$(tenant_id)s' - - ref = unit.new_endpoint_ref(self.service_id, - interface='public', - region_id=self.region_id, - url=valid_url) - self.post('/endpoints', body={'endpoint': ref}) - - def test_endpoint_create_with_valid_url_project_id(self): - """Create endpoint with valid url should be tested,too.""" - valid_url = 'http://127.0.0.1:8774/v1.1/$(project_id)s' - - ref = unit.new_endpoint_ref(self.service_id, - interface='public', - region_id=self.region_id, - url=valid_url) - self.post('/endpoints', body={'endpoint': ref}) - - def test_endpoint_create_with_invalid_url(self): - """Test the invalid cases: substitutions is not exactly right.""" - invalid_urls = [ - # using a substitution that is not whitelisted - KeyError - 'http://127.0.0.1:8774/v1.1/$(nonexistent)s', - - # invalid formatting - ValueError - 'http://127.0.0.1:8774/v1.1/$(tenant_id)', - 'http://127.0.0.1:8774/v1.1/$(tenant_id)t', - 'http://127.0.0.1:8774/v1.1/$(tenant_id', - - # invalid type specifier - TypeError - # admin_url is a string not an 
int - 'http://127.0.0.1:8774/v1.1/$(admin_url)d', - ] - - ref = unit.new_endpoint_ref(self.service_id) - - for invalid_url in invalid_urls: - ref['url'] = invalid_url - self.post('/endpoints', - body={'endpoint': ref}, - expected_status=http_client.BAD_REQUEST) - - -class TestCatalogAPISQL(unit.TestCase): - """Tests for the catalog Manager against the SQL backend.""" - - def setUp(self): - super(TestCatalogAPISQL, self).setUp() - self.useFixture(database.Database()) - self.catalog_api = catalog.Manager() - - service = unit.new_service_ref() - self.service_id = service['id'] - self.catalog_api.create_service(self.service_id, service) - - self.create_endpoint(service_id=self.service_id) - - def create_endpoint(self, service_id, **kwargs): - endpoint = unit.new_endpoint_ref(service_id=service_id, - region_id=None, **kwargs) - - self.catalog_api.create_endpoint(endpoint['id'], endpoint) - return endpoint - - def config_overrides(self): - super(TestCatalogAPISQL, self).config_overrides() - self.config_fixture.config(group='catalog', driver='sql') - - def test_get_catalog_ignores_endpoints_with_invalid_urls(self): - user_id = uuid.uuid4().hex - tenant_id = uuid.uuid4().hex - - # the only endpoint in the catalog is the one created in setUp - catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id) - self.assertEqual(1, len(catalog[0]['endpoints'])) - # it's also the only endpoint in the backend - self.assertEqual(1, len(self.catalog_api.list_endpoints())) - - # create a new, invalid endpoint - malformed type declaration - self.create_endpoint(self.service_id, - url='http://keystone/%(tenant_id)') - - # create a new, invalid endpoint - nonexistent key - self.create_endpoint(self.service_id, - url='http://keystone/%(you_wont_find_me)s') - - # verify that the invalid endpoints don't appear in the catalog - catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id) - self.assertEqual(1, len(catalog[0]['endpoints'])) - # all three appear in the backend - 
self.assertEqual(3, len(self.catalog_api.list_endpoints())) - - # create another valid endpoint - tenant_id will be replaced - self.create_endpoint(self.service_id, - url='http://keystone/%(tenant_id)s') - - # there are two valid endpoints, positive check - catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id) - self.assertThat(catalog[0]['endpoints'], matchers.HasLength(2)) - - # If the URL has no 'tenant_id' to substitute, we will skip the - # endpoint which contains this kind of URL, negative check. - tenant_id = None - catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id) - self.assertThat(catalog[0]['endpoints'], matchers.HasLength(1)) - - def test_get_catalog_always_returns_service_name(self): - user_id = uuid.uuid4().hex - tenant_id = uuid.uuid4().hex - - # create a service, with a name - named_svc = unit.new_service_ref() - self.catalog_api.create_service(named_svc['id'], named_svc) - self.create_endpoint(service_id=named_svc['id']) - - # create a service, with no name - unnamed_svc = unit.new_service_ref(name=None) - del unnamed_svc['name'] - self.catalog_api.create_service(unnamed_svc['id'], unnamed_svc) - self.create_endpoint(service_id=unnamed_svc['id']) - - catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id) - - named_endpoint = [ep for ep in catalog - if ep['type'] == named_svc['type']][0] - self.assertEqual(named_svc['name'], named_endpoint['name']) - - unnamed_endpoint = [ep for ep in catalog - if ep['type'] == unnamed_svc['type']][0] - self.assertEqual('', unnamed_endpoint['name']) - - -# TODO(dstanek): this needs refactoring with the test above, but we are in a -# crunch so that will happen in a future patch. 
-class TestCatalogAPISQLRegions(unit.TestCase): - """Tests for the catalog Manager against the SQL backend.""" - - def setUp(self): - super(TestCatalogAPISQLRegions, self).setUp() - self.useFixture(database.Database()) - self.catalog_api = catalog.Manager() - - def config_overrides(self): - super(TestCatalogAPISQLRegions, self).config_overrides() - self.config_fixture.config(group='catalog', driver='sql') - - def test_get_catalog_returns_proper_endpoints_with_no_region(self): - service = unit.new_service_ref() - service_id = service['id'] - self.catalog_api.create_service(service_id, service) - - endpoint = unit.new_endpoint_ref(service_id=service_id, - region_id=None) - del endpoint['region_id'] - self.catalog_api.create_endpoint(endpoint['id'], endpoint) - - user_id = uuid.uuid4().hex - tenant_id = uuid.uuid4().hex - - catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id) - self.assertValidCatalogEndpoint( - catalog[0]['endpoints'][0], ref=endpoint) - - def test_get_catalog_returns_proper_endpoints_with_region(self): - service = unit.new_service_ref() - service_id = service['id'] - self.catalog_api.create_service(service_id, service) - - endpoint = unit.new_endpoint_ref(service_id=service_id) - region = unit.new_region_ref(id=endpoint['region_id']) - self.catalog_api.create_region(region) - self.catalog_api.create_endpoint(endpoint['id'], endpoint) - - endpoint = self.catalog_api.get_endpoint(endpoint['id']) - user_id = uuid.uuid4().hex - tenant_id = uuid.uuid4().hex - - catalog = self.catalog_api.get_v3_catalog(user_id, tenant_id) - self.assertValidCatalogEndpoint( - catalog[0]['endpoints'][0], ref=endpoint) - - def assertValidCatalogEndpoint(self, entity, ref=None): - keys = ['description', 'id', 'interface', 'name', 'region_id', 'url'] - for k in keys: - self.assertEqual(ref.get(k), entity[k], k) - self.assertEqual(entity['region_id'], entity['region']) diff --git a/keystone-moon/keystone/tests/unit/test_v3_controller.py 
b/keystone-moon/keystone/tests/unit/test_v3_controller.py deleted file mode 100644 index 563e656e..00000000 --- a/keystone-moon/keystone/tests/unit/test_v3_controller.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright 2014 CERN. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import uuid - -import six -from six.moves import range -from testtools import matchers - -from keystone.common import controller -from keystone import exception -from keystone.tests import unit - - -class V3ControllerTestCase(unit.TestCase): - """Tests for the V3Controller class.""" - def setUp(self): - super(V3ControllerTestCase, self).setUp() - - class ControllerUnderTest(controller.V3Controller): - _mutable_parameters = frozenset(['hello', 'world']) - - self.api = ControllerUnderTest() - - def test_check_immutable_params(self): - """Pass valid parameters to the method and expect no failure.""" - ref = { - 'hello': uuid.uuid4().hex, - 'world': uuid.uuid4().hex - } - self.api.check_immutable_params(ref) - - def test_check_immutable_params_fail(self): - """Pass invalid parameter to the method and expect failure.""" - ref = {uuid.uuid4().hex: uuid.uuid4().hex for _ in range(3)} - - ex = self.assertRaises(exception.ImmutableAttributeError, - self.api.check_immutable_params, ref) - ex_msg = six.text_type(ex) - self.assertThat(ex_msg, matchers.Contains(self.api.__class__.__name__)) - for key in ref.keys(): - self.assertThat(ex_msg, matchers.Contains(key)) diff --git 
a/keystone-moon/keystone/tests/unit/test_v3_credential.py b/keystone-moon/keystone/tests/unit/test_v3_credential.py deleted file mode 100644 index 07995f19..00000000 --- a/keystone-moon/keystone/tests/unit/test_v3_credential.py +++ /dev/null @@ -1,478 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import hashlib -import json -import uuid - -from keystoneclient.contrib.ec2 import utils as ec2_utils -from oslo_config import cfg -from six.moves import http_client -from testtools import matchers - -from keystone.common import utils -from keystone.contrib.ec2 import controllers -from keystone import exception -from keystone.tests import unit -from keystone.tests.unit import test_v3 - - -CONF = cfg.CONF -CRED_TYPE_EC2 = controllers.CRED_TYPE_EC2 - - -class CredentialBaseTestCase(test_v3.RestfulTestCase): - def _create_dict_blob_credential(self): - blob, credential = unit.new_ec2_credential(user_id=self.user['id'], - project_id=self.project_id) - - # Store the blob as a dict *not* JSON ref bug #1259584 - # This means we can test the dict->json workaround, added - # as part of the bugfix for backwards compatibility works. 
- credential['blob'] = blob - credential_id = credential['id'] - - # Create direct via the DB API to avoid validation failure - self.credential_api.create_credential(credential_id, credential) - - return json.dumps(blob), credential_id - - -class CredentialTestCase(CredentialBaseTestCase): - """Test credential CRUD.""" - - def setUp(self): - - super(CredentialTestCase, self).setUp() - - self.credential = unit.new_credential_ref(user_id=self.user['id'], - project_id=self.project_id) - - self.credential_api.create_credential( - self.credential['id'], - self.credential) - - def test_credential_api_delete_credentials_for_project(self): - self.credential_api.delete_credentials_for_project(self.project_id) - # Test that the credential that we created in .setUp no longer exists - # once we delete all credentials for self.project_id - self.assertRaises(exception.CredentialNotFound, - self.credential_api.get_credential, - credential_id=self.credential['id']) - - def test_credential_api_delete_credentials_for_user(self): - self.credential_api.delete_credentials_for_user(self.user_id) - # Test that the credential that we created in .setUp no longer exists - # once we delete all credentials for self.user_id - self.assertRaises(exception.CredentialNotFound, - self.credential_api.get_credential, - credential_id=self.credential['id']) - - def test_list_credentials(self): - """Call ``GET /credentials``.""" - r = self.get('/credentials') - self.assertValidCredentialListResponse(r, ref=self.credential) - - def test_list_credentials_filtered_by_user_id(self): - """Call ``GET /credentials?user_id={user_id}``.""" - credential = unit.new_credential_ref(user_id=uuid.uuid4().hex) - self.credential_api.create_credential(credential['id'], credential) - - r = self.get('/credentials?user_id=%s' % self.user['id']) - self.assertValidCredentialListResponse(r, ref=self.credential) - for cred in r.result['credentials']: - self.assertEqual(self.user['id'], cred['user_id']) - - def 
test_list_credentials_filtered_by_type(self): - """Call ``GET /credentials?type={type}``.""" - # The type ec2 was chosen, instead of a random string, - # because the type must be in the list of supported types - ec2_credential = unit.new_credential_ref(user_id=uuid.uuid4().hex, - project_id=self.project_id, - type=CRED_TYPE_EC2) - - ec2_resp = self.credential_api.create_credential( - ec2_credential['id'], ec2_credential) - - # The type cert was chosen for the same reason as ec2 - r = self.get('/credentials?type=cert') - - # Testing the filter for two different types - self.assertValidCredentialListResponse(r, ref=self.credential) - for cred in r.result['credentials']: - self.assertEqual('cert', cred['type']) - - r_ec2 = self.get('/credentials?type=ec2') - self.assertThat(r_ec2.result['credentials'], matchers.HasLength(1)) - cred_ec2 = r_ec2.result['credentials'][0] - - self.assertValidCredentialListResponse(r_ec2, ref=ec2_resp) - self.assertEqual(CRED_TYPE_EC2, cred_ec2['type']) - self.assertEqual(ec2_credential['id'], cred_ec2['id']) - - def test_list_credentials_filtered_by_type_and_user_id(self): - """Call ``GET /credentials?user_id={user_id}&type={type}``.""" - user1_id = uuid.uuid4().hex - user2_id = uuid.uuid4().hex - - # Creating credentials for two different users - credential_user1_ec2 = unit.new_credential_ref(user_id=user1_id, - type=CRED_TYPE_EC2) - credential_user1_cert = unit.new_credential_ref(user_id=user1_id) - credential_user2_cert = unit.new_credential_ref(user_id=user2_id) - - self.credential_api.create_credential( - credential_user1_ec2['id'], credential_user1_ec2) - self.credential_api.create_credential( - credential_user1_cert['id'], credential_user1_cert) - self.credential_api.create_credential( - credential_user2_cert['id'], credential_user2_cert) - - r = self.get('/credentials?user_id=%s&type=ec2' % user1_id) - self.assertValidCredentialListResponse(r, ref=credential_user1_ec2) - self.assertThat(r.result['credentials'], 
matchers.HasLength(1)) - cred = r.result['credentials'][0] - self.assertEqual(CRED_TYPE_EC2, cred['type']) - self.assertEqual(user1_id, cred['user_id']) - - def test_create_credential(self): - """Call ``POST /credentials``.""" - ref = unit.new_credential_ref(user_id=self.user['id']) - r = self.post( - '/credentials', - body={'credential': ref}) - self.assertValidCredentialResponse(r, ref) - - def test_get_credential(self): - """Call ``GET /credentials/{credential_id}``.""" - r = self.get( - '/credentials/%(credential_id)s' % { - 'credential_id': self.credential['id']}) - self.assertValidCredentialResponse(r, self.credential) - - def test_update_credential(self): - """Call ``PATCH /credentials/{credential_id}``.""" - ref = unit.new_credential_ref(user_id=self.user['id'], - project_id=self.project_id) - del ref['id'] - r = self.patch( - '/credentials/%(credential_id)s' % { - 'credential_id': self.credential['id']}, - body={'credential': ref}) - self.assertValidCredentialResponse(r, ref) - - def test_delete_credential(self): - """Call ``DELETE /credentials/{credential_id}``.""" - self.delete( - '/credentials/%(credential_id)s' % { - 'credential_id': self.credential['id']}) - - def test_create_ec2_credential(self): - """Call ``POST /credentials`` for creating ec2 credential.""" - blob, ref = unit.new_ec2_credential(user_id=self.user['id'], - project_id=self.project_id) - r = self.post('/credentials', body={'credential': ref}) - self.assertValidCredentialResponse(r, ref) - # Assert credential id is same as hash of access key id for - # ec2 credentials - access = blob['access'].encode('utf-8') - self.assertEqual(hashlib.sha256(access).hexdigest(), - r.result['credential']['id']) - # Create second ec2 credential with the same access key id and check - # for conflict. 
- self.post( - '/credentials', - body={'credential': ref}, expected_status=http_client.CONFLICT) - - def test_get_ec2_dict_blob(self): - """Ensure non-JSON blob data is correctly converted.""" - expected_blob, credential_id = self._create_dict_blob_credential() - - r = self.get( - '/credentials/%(credential_id)s' % { - 'credential_id': credential_id}) - - # use json.loads to transform the blobs back into Python dictionaries - # to avoid problems with the keys being in different orders. - self.assertEqual(json.loads(expected_blob), - json.loads(r.result['credential']['blob'])) - - def test_list_ec2_dict_blob(self): - """Ensure non-JSON blob data is correctly converted.""" - expected_blob, credential_id = self._create_dict_blob_credential() - - list_r = self.get('/credentials') - list_creds = list_r.result['credentials'] - list_ids = [r['id'] for r in list_creds] - self.assertIn(credential_id, list_ids) - # use json.loads to transform the blobs back into Python dictionaries - # to avoid problems with the keys being in different orders. - for r in list_creds: - if r['id'] == credential_id: - self.assertEqual(json.loads(expected_blob), - json.loads(r['blob'])) - - def test_create_non_ec2_credential(self): - """Test creating non-ec2 credential. - - Call ``POST /credentials``. - """ - blob, ref = unit.new_cert_credential(user_id=self.user['id']) - - r = self.post('/credentials', body={'credential': ref}) - self.assertValidCredentialResponse(r, ref) - # Assert credential id is not same as hash of access key id for - # non-ec2 credentials - access = blob['access'].encode('utf-8') - self.assertNotEqual(hashlib.sha256(access).hexdigest(), - r.result['credential']['id']) - - def test_create_ec2_credential_with_missing_project_id(self): - """Test Creating ec2 credential with missing project_id. - - Call ``POST /credentials``. 
- """ - _, ref = unit.new_ec2_credential(user_id=self.user['id'], - project_id=None) - # Assert bad request status when missing project_id - self.post( - '/credentials', - body={'credential': ref}, expected_status=http_client.BAD_REQUEST) - - def test_create_ec2_credential_with_invalid_blob(self): - """Test creating ec2 credential with invalid blob. - - Call ``POST /credentials``. - """ - ref = unit.new_credential_ref(user_id=self.user['id'], - project_id=self.project_id, - blob='{"abc":"def"d}', - type=CRED_TYPE_EC2) - # Assert bad request status when request contains invalid blob - response = self.post( - '/credentials', - body={'credential': ref}, expected_status=http_client.BAD_REQUEST) - self.assertValidErrorResponse(response) - - def test_create_credential_with_admin_token(self): - # Make sure we can create credential with the static admin token - ref = unit.new_credential_ref(user_id=self.user['id']) - r = self.post( - '/credentials', - body={'credential': ref}, - token=self.get_admin_token()) - self.assertValidCredentialResponse(r, ref) - - -class TestCredentialTrustScoped(test_v3.RestfulTestCase): - """Test credential with trust scoped token.""" - - def setUp(self): - super(TestCredentialTrustScoped, self).setUp() - - self.trustee_user = unit.new_user_ref(domain_id=self.domain_id) - password = self.trustee_user['password'] - self.trustee_user = self.identity_api.create_user(self.trustee_user) - self.trustee_user['password'] = password - self.trustee_user_id = self.trustee_user['id'] - - def config_overrides(self): - super(TestCredentialTrustScoped, self).config_overrides() - self.config_fixture.config(group='trust', enabled=True) - - def test_trust_scoped_ec2_credential(self): - """Test creating trust scoped ec2 credential. - - Call ``POST /credentials``. 
- """ - # Create the trust - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user_id, - project_id=self.project_id, - impersonation=True, - expires=dict(minutes=1), - role_ids=[self.role_id]) - del ref['id'] - r = self.post('/OS-TRUST/trusts', body={'trust': ref}) - trust = self.assertValidTrustResponse(r) - - # Get a trust scoped token - auth_data = self.build_authentication_request( - user_id=self.trustee_user['id'], - password=self.trustee_user['password'], - trust_id=trust['id']) - r = self.v3_create_token(auth_data) - self.assertValidProjectScopedTokenResponse(r, self.user) - trust_id = r.result['token']['OS-TRUST:trust']['id'] - token_id = r.headers.get('X-Subject-Token') - - # Create the credential with the trust scoped token - blob, ref = unit.new_ec2_credential(user_id=self.user['id'], - project_id=self.project_id) - r = self.post('/credentials', body={'credential': ref}, token=token_id) - - # We expect the response blob to contain the trust_id - ret_ref = ref.copy() - ret_blob = blob.copy() - ret_blob['trust_id'] = trust_id - ret_ref['blob'] = json.dumps(ret_blob) - self.assertValidCredentialResponse(r, ref=ret_ref) - - # Assert credential id is same as hash of access key id for - # ec2 credentials - access = blob['access'].encode('utf-8') - self.assertEqual(hashlib.sha256(access).hexdigest(), - r.result['credential']['id']) - - # Create second ec2 credential with the same access key id and check - # for conflict. 
- self.post( - '/credentials', - body={'credential': ref}, - token=token_id, - expected_status=http_client.CONFLICT) - - -class TestCredentialEc2(CredentialBaseTestCase): - """Test v3 credential compatibility with ec2tokens.""" - - def setUp(self): - super(TestCredentialEc2, self).setUp() - - def _validate_signature(self, access, secret): - """Test signature validation with the access/secret provided.""" - signer = ec2_utils.Ec2Signer(secret) - params = {'SignatureMethod': 'HmacSHA256', - 'SignatureVersion': '2', - 'AWSAccessKeyId': access} - request = {'host': 'foo', - 'verb': 'GET', - 'path': '/bar', - 'params': params} - signature = signer.generate(request) - - # Now make a request to validate the signed dummy request via the - # ec2tokens API. This proves the v3 ec2 credentials actually work. - sig_ref = {'access': access, - 'signature': signature, - 'host': 'foo', - 'verb': 'GET', - 'path': '/bar', - 'params': params} - r = self.post( - '/ec2tokens', - body={'ec2Credentials': sig_ref}, - expected_status=http_client.OK) - self.assertValidTokenResponse(r) - - def test_ec2_credential_signature_validate(self): - """Test signature validation with a v3 ec2 credential.""" - blob, ref = unit.new_ec2_credential(user_id=self.user['id'], - project_id=self.project_id) - r = self.post('/credentials', body={'credential': ref}) - self.assertValidCredentialResponse(r, ref) - # Assert credential id is same as hash of access key id - access = blob['access'].encode('utf-8') - self.assertEqual(hashlib.sha256(access).hexdigest(), - r.result['credential']['id']) - - cred_blob = json.loads(r.result['credential']['blob']) - self.assertEqual(blob, cred_blob) - self._validate_signature(access=cred_blob['access'], - secret=cred_blob['secret']) - - def test_ec2_credential_signature_validate_legacy(self): - """Test signature validation with a legacy v3 ec2 credential.""" - cred_json, _ = self._create_dict_blob_credential() - cred_blob = json.loads(cred_json) - 
self._validate_signature(access=cred_blob['access'], - secret=cred_blob['secret']) - - def _get_ec2_cred_uri(self): - return '/users/%s/credentials/OS-EC2' % self.user_id - - def _get_ec2_cred(self): - uri = self._get_ec2_cred_uri() - r = self.post(uri, body={'tenant_id': self.project_id}) - return r.result['credential'] - - def test_ec2_create_credential(self): - """Test ec2 credential creation.""" - ec2_cred = self._get_ec2_cred() - self.assertEqual(self.user_id, ec2_cred['user_id']) - self.assertEqual(self.project_id, ec2_cred['tenant_id']) - self.assertIsNone(ec2_cred['trust_id']) - self._validate_signature(access=ec2_cred['access'], - secret=ec2_cred['secret']) - uri = '/'.join([self._get_ec2_cred_uri(), ec2_cred['access']]) - self.assertThat(ec2_cred['links']['self'], - matchers.EndsWith(uri)) - - def test_ec2_get_credential(self): - ec2_cred = self._get_ec2_cred() - uri = '/'.join([self._get_ec2_cred_uri(), ec2_cred['access']]) - r = self.get(uri) - self.assertDictEqual(ec2_cred, r.result['credential']) - self.assertThat(ec2_cred['links']['self'], - matchers.EndsWith(uri)) - - def test_ec2_cannot_get_non_ec2_credential(self): - access_key = uuid.uuid4().hex - cred_id = utils.hash_access_key(access_key) - non_ec2_cred = unit.new_credential_ref( - user_id=self.user_id, - project_id=self.project_id) - non_ec2_cred['id'] = cred_id - self.credential_api.create_credential(cred_id, non_ec2_cred) - uri = '/'.join([self._get_ec2_cred_uri(), access_key]) - # if access_key is not found, ec2 controller raises Unauthorized - # exception - self.get(uri, expected_status=http_client.UNAUTHORIZED) - - def test_ec2_list_credentials(self): - """Test ec2 credential listing.""" - self._get_ec2_cred() - uri = self._get_ec2_cred_uri() - r = self.get(uri) - cred_list = r.result['credentials'] - self.assertEqual(1, len(cred_list)) - self.assertThat(r.result['links']['self'], - matchers.EndsWith(uri)) - - # non-EC2 credentials won't be fetched - non_ec2_cred = 
unit.new_credential_ref( - user_id=self.user_id, - project_id=self.project_id) - non_ec2_cred['type'] = uuid.uuid4().hex - self.credential_api.create_credential(non_ec2_cred['id'], - non_ec2_cred) - r = self.get(uri) - cred_list_2 = r.result['credentials'] - # still one element because non-EC2 credentials are not returned. - self.assertEqual(1, len(cred_list_2)) - self.assertEqual(cred_list[0], cred_list_2[0]) - - def test_ec2_delete_credential(self): - """Test ec2 credential deletion.""" - ec2_cred = self._get_ec2_cred() - uri = '/'.join([self._get_ec2_cred_uri(), ec2_cred['access']]) - cred_from_credential_api = ( - self.credential_api - .list_credentials_for_user(self.user_id, type=CRED_TYPE_EC2)) - self.assertEqual(1, len(cred_from_credential_api)) - self.delete(uri) - self.assertRaises(exception.CredentialNotFound, - self.credential_api.get_credential, - cred_from_credential_api[0]['id']) diff --git a/keystone-moon/keystone/tests/unit/test_v3_domain_config.py b/keystone-moon/keystone/tests/unit/test_v3_domain_config.py deleted file mode 100644 index ee716081..00000000 --- a/keystone-moon/keystone/tests/unit/test_v3_domain_config.py +++ /dev/null @@ -1,459 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import copy -import uuid - -from oslo_config import cfg -from six.moves import http_client - -from keystone import exception -from keystone.tests import unit -from keystone.tests.unit import test_v3 - - -CONF = cfg.CONF - - -class DomainConfigTestCase(test_v3.RestfulTestCase): - """Test domain config support.""" - - def setUp(self): - super(DomainConfigTestCase, self).setUp() - - self.domain = unit.new_domain_ref() - self.resource_api.create_domain(self.domain['id'], self.domain) - self.config = {'ldap': {'url': uuid.uuid4().hex, - 'user_tree_dn': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} - - def test_create_config(self): - """Call ``PUT /domains/{domain_id}/config``.""" - url = '/domains/%(domain_id)s/config' % { - 'domain_id': self.domain['id']} - r = self.put(url, body={'config': self.config}, - expected_status=http_client.CREATED) - res = self.domain_config_api.get_config(self.domain['id']) - self.assertEqual(self.config, r.result['config']) - self.assertEqual(self.config, res) - - def test_create_config_invalid_domain(self): - """Call ``PUT /domains/{domain_id}/config`` - - While creating Identity API-based domain config with an invalid domain - id provided, the request shall be rejected with a response, 404 domain - not found. 
- """ - invalid_domain_id = uuid.uuid4().hex - url = '/domains/%(domain_id)s/config' % { - 'domain_id': invalid_domain_id} - self.put(url, body={'config': self.config}, - expected_status=exception.DomainNotFound.code) - - def test_create_config_twice(self): - """Check multiple creates don't throw error""" - self.put('/domains/%(domain_id)s/config' % { - 'domain_id': self.domain['id']}, - body={'config': self.config}, - expected_status=http_client.CREATED) - self.put('/domains/%(domain_id)s/config' % { - 'domain_id': self.domain['id']}, - body={'config': self.config}, - expected_status=http_client.OK) - - def test_delete_config(self): - """Call ``DELETE /domains{domain_id}/config``.""" - self.domain_config_api.create_config(self.domain['id'], self.config) - self.delete('/domains/%(domain_id)s/config' % { - 'domain_id': self.domain['id']}) - self.get('/domains/%(domain_id)s/config' % { - 'domain_id': self.domain['id']}, - expected_status=exception.DomainConfigNotFound.code) - - def test_delete_config_invalid_domain(self): - """Call ``DELETE /domains{domain_id}/config`` - - While deleting Identity API-based domain config with an invalid domain - id provided, the request shall be rejected with a response, 404 domain - not found. 
- """ - self.domain_config_api.create_config(self.domain['id'], self.config) - invalid_domain_id = uuid.uuid4().hex - self.delete('/domains/%(domain_id)s/config' % { - 'domain_id': invalid_domain_id}, - expected_status=exception.DomainNotFound.code) - - def test_delete_config_by_group(self): - """Call ``DELETE /domains{domain_id}/config/{group}``.""" - self.domain_config_api.create_config(self.domain['id'], self.config) - self.delete('/domains/%(domain_id)s/config/ldap' % { - 'domain_id': self.domain['id']}) - res = self.domain_config_api.get_config(self.domain['id']) - self.assertNotIn('ldap', res) - - def test_delete_config_by_group_invalid_domain(self): - """Call ``DELETE /domains{domain_id}/config/{group}`` - - While deleting Identity API-based domain config by group with an - invalid domain id provided, the request shall be rejected with a - response 404 domain not found. - """ - self.domain_config_api.create_config(self.domain['id'], self.config) - invalid_domain_id = uuid.uuid4().hex - self.delete('/domains/%(domain_id)s/config/ldap' % { - 'domain_id': invalid_domain_id}, - expected_status=exception.DomainNotFound.code) - - def test_get_head_config(self): - """Call ``GET & HEAD for /domains{domain_id}/config``.""" - self.domain_config_api.create_config(self.domain['id'], self.config) - url = '/domains/%(domain_id)s/config' % { - 'domain_id': self.domain['id']} - r = self.get(url) - self.assertEqual(self.config, r.result['config']) - self.head(url, expected_status=http_client.OK) - - def test_get_config_by_group(self): - """Call ``GET & HEAD /domains{domain_id}/config/{group}``.""" - self.domain_config_api.create_config(self.domain['id'], self.config) - url = '/domains/%(domain_id)s/config/ldap' % { - 'domain_id': self.domain['id']} - r = self.get(url) - self.assertEqual({'ldap': self.config['ldap']}, r.result['config']) - self.head(url, expected_status=http_client.OK) - - def test_get_config_by_group_invalid_domain(self): - """Call ``GET & HEAD 
/domains{domain_id}/config/{group}`` - - While retrieving Identity API-based domain config by group with an - invalid domain id provided, the request shall be rejected with a - response 404 domain not found. - """ - self.domain_config_api.create_config(self.domain['id'], self.config) - invalid_domain_id = uuid.uuid4().hex - self.get('/domains/%(domain_id)s/config/ldap' % { - 'domain_id': invalid_domain_id}, - expected_status=exception.DomainNotFound.code) - - def test_get_config_by_option(self): - """Call ``GET & HEAD /domains{domain_id}/config/{group}/{option}``.""" - self.domain_config_api.create_config(self.domain['id'], self.config) - url = '/domains/%(domain_id)s/config/ldap/url' % { - 'domain_id': self.domain['id']} - r = self.get(url) - self.assertEqual({'url': self.config['ldap']['url']}, - r.result['config']) - self.head(url, expected_status=http_client.OK) - - def test_get_config_by_option_invalid_domain(self): - """Call ``GET & HEAD /domains{domain_id}/config/{group}/{option}`` - - While retrieving Identity API-based domain config by option with an - invalid domain id provided, the request shall be rejected with a - response 404 domain not found. - """ - self.domain_config_api.create_config(self.domain['id'], self.config) - invalid_domain_id = uuid.uuid4().hex - self.get('/domains/%(domain_id)s/config/ldap/url' % { - 'domain_id': invalid_domain_id}, - expected_status=exception.DomainNotFound.code) - - def test_get_non_existant_config(self): - """Call ``GET /domains{domain_id}/config when no config defined``.""" - self.get('/domains/%(domain_id)s/config' % { - 'domain_id': self.domain['id']}, - expected_status=http_client.NOT_FOUND) - - def test_get_non_existant_config_invalid_domain(self): - """Call ``GET /domains{domain_id}/config when no config defined`` - - While retrieving non-existent Identity API-based domain config with an - invalid domain id provided, the request shall be rejected with a - response 404 domain not found. 
- """ - invalid_domain_id = uuid.uuid4().hex - self.get('/domains/%(domain_id)s/config' % { - 'domain_id': invalid_domain_id}, - expected_status=exception.DomainNotFound.code) - - def test_get_non_existant_config_group(self): - """Call ``GET /domains{domain_id}/config/{group_not_exist}``.""" - config = {'ldap': {'url': uuid.uuid4().hex}} - self.domain_config_api.create_config(self.domain['id'], config) - self.get('/domains/%(domain_id)s/config/identity' % { - 'domain_id': self.domain['id']}, - expected_status=http_client.NOT_FOUND) - - def test_get_non_existant_config_group_invalid_domain(self): - """Call ``GET /domains{domain_id}/config/{group_not_exist}`` - - While retrieving non-existent Identity API-based domain config group - with an invalid domain id provided, the request shall be rejected with - a response, 404 domain not found. - """ - config = {'ldap': {'url': uuid.uuid4().hex}} - self.domain_config_api.create_config(self.domain['id'], config) - invalid_domain_id = uuid.uuid4().hex - self.get('/domains/%(domain_id)s/config/identity' % { - 'domain_id': invalid_domain_id}, - expected_status=exception.DomainNotFound.code) - - def test_get_non_existant_config_option(self): - """Call ``GET /domains{domain_id}/config/group/{option_not_exist}``.""" - config = {'ldap': {'url': uuid.uuid4().hex}} - self.domain_config_api.create_config(self.domain['id'], config) - self.get('/domains/%(domain_id)s/config/ldap/user_tree_dn' % { - 'domain_id': self.domain['id']}, - expected_status=http_client.NOT_FOUND) - - def test_get_non_existant_config_option_invalid_domain(self): - """Call ``GET /domains{domain_id}/config/group/{option_not_exist}`` - - While retrieving non-existent Identity API-based domain config option - with an invalid domain id provided, the request shall be rejected with - a response, 404 domain not found. 
- """ - config = {'ldap': {'url': uuid.uuid4().hex}} - self.domain_config_api.create_config(self.domain['id'], config) - invalid_domain_id = uuid.uuid4().hex - self.get('/domains/%(domain_id)s/config/ldap/user_tree_dn' % { - 'domain_id': invalid_domain_id}, - expected_status=exception.DomainNotFound.code) - - def test_update_config(self): - """Call ``PATCH /domains/{domain_id}/config``.""" - self.domain_config_api.create_config(self.domain['id'], self.config) - new_config = {'ldap': {'url': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} - r = self.patch('/domains/%(domain_id)s/config' % { - 'domain_id': self.domain['id']}, - body={'config': new_config}) - res = self.domain_config_api.get_config(self.domain['id']) - expected_config = copy.deepcopy(self.config) - expected_config['ldap']['url'] = new_config['ldap']['url'] - expected_config['identity']['driver'] = ( - new_config['identity']['driver']) - self.assertEqual(expected_config, r.result['config']) - self.assertEqual(expected_config, res) - - def test_update_config_invalid_domain(self): - """Call ``PATCH /domains/{domain_id}/config`` - - While updating Identity API-based domain config with an invalid domain - id provided, the request shall be rejected with a response, 404 domain - not found. 
- """ - self.domain_config_api.create_config(self.domain['id'], self.config) - new_config = {'ldap': {'url': uuid.uuid4().hex}, - 'identity': {'driver': uuid.uuid4().hex}} - invalid_domain_id = uuid.uuid4().hex - self.patch('/domains/%(domain_id)s/config' % { - 'domain_id': invalid_domain_id}, - body={'config': new_config}, - expected_status=exception.DomainNotFound.code) - - def test_update_config_group(self): - """Call ``PATCH /domains/{domain_id}/config/{group}``.""" - self.domain_config_api.create_config(self.domain['id'], self.config) - new_config = {'ldap': {'url': uuid.uuid4().hex, - 'user_filter': uuid.uuid4().hex}} - r = self.patch('/domains/%(domain_id)s/config/ldap' % { - 'domain_id': self.domain['id']}, - body={'config': new_config}) - res = self.domain_config_api.get_config(self.domain['id']) - expected_config = copy.deepcopy(self.config) - expected_config['ldap']['url'] = new_config['ldap']['url'] - expected_config['ldap']['user_filter'] = ( - new_config['ldap']['user_filter']) - self.assertEqual(expected_config, r.result['config']) - self.assertEqual(expected_config, res) - - def test_update_config_group_invalid_domain(self): - """Call ``PATCH /domains/{domain_id}/config/{group}`` - - While updating Identity API-based domain config group with an invalid - domain id provided, the request shall be rejected with a response, - 404 domain not found. 
- """ - self.domain_config_api.create_config(self.domain['id'], self.config) - new_config = {'ldap': {'url': uuid.uuid4().hex, - 'user_filter': uuid.uuid4().hex}} - invalid_domain_id = uuid.uuid4().hex - self.patch('/domains/%(domain_id)s/config/ldap' % { - 'domain_id': invalid_domain_id}, - body={'config': new_config}, - expected_status=exception.DomainNotFound.code) - - def test_update_config_invalid_group(self): - """Call ``PATCH /domains/{domain_id}/config/{invalid_group}``.""" - self.domain_config_api.create_config(self.domain['id'], self.config) - - # Trying to update a group that is neither whitelisted or sensitive - # should result in Forbidden. - invalid_group = uuid.uuid4().hex - new_config = {invalid_group: {'url': uuid.uuid4().hex, - 'user_filter': uuid.uuid4().hex}} - self.patch('/domains/%(domain_id)s/config/%(invalid_group)s' % { - 'domain_id': self.domain['id'], 'invalid_group': invalid_group}, - body={'config': new_config}, - expected_status=http_client.FORBIDDEN) - # Trying to update a valid group, but one that is not in the current - # config should result in NotFound - config = {'ldap': {'suffix': uuid.uuid4().hex}} - self.domain_config_api.create_config(self.domain['id'], config) - new_config = {'identity': {'driver': uuid.uuid4().hex}} - self.patch('/domains/%(domain_id)s/config/identity' % { - 'domain_id': self.domain['id']}, - body={'config': new_config}, - expected_status=http_client.NOT_FOUND) - - def test_update_config_invalid_group_invalid_domain(self): - """Call ``PATCH /domains/{domain_id}/config/{invalid_group}`` - - While updating Identity API-based domain config with an invalid group - and an invalid domain id provided, the request shall be rejected - with a response, 404 domain not found. 
- """ - self.domain_config_api.create_config(self.domain['id'], self.config) - invalid_group = uuid.uuid4().hex - new_config = {invalid_group: {'url': uuid.uuid4().hex, - 'user_filter': uuid.uuid4().hex}} - invalid_domain_id = uuid.uuid4().hex - self.patch('/domains/%(domain_id)s/config/%(invalid_group)s' % { - 'domain_id': invalid_domain_id, - 'invalid_group': invalid_group}, - body={'config': new_config}, - expected_status=exception.DomainNotFound.code) - - def test_update_config_option(self): - """Call ``PATCH /domains/{domain_id}/config/{group}/{option}``.""" - self.domain_config_api.create_config(self.domain['id'], self.config) - new_config = {'url': uuid.uuid4().hex} - r = self.patch('/domains/%(domain_id)s/config/ldap/url' % { - 'domain_id': self.domain['id']}, - body={'config': new_config}) - res = self.domain_config_api.get_config(self.domain['id']) - expected_config = copy.deepcopy(self.config) - expected_config['ldap']['url'] = new_config['url'] - self.assertEqual(expected_config, r.result['config']) - self.assertEqual(expected_config, res) - - def test_update_config_option_invalid_domain(self): - """Call ``PATCH /domains/{domain_id}/config/{group}/{option}`` - - While updating Identity API-based domain config option with an invalid - domain id provided, the request shall be rejected with a response, 404 - domain not found. 
- """ - self.domain_config_api.create_config(self.domain['id'], self.config) - new_config = {'url': uuid.uuid4().hex} - invalid_domain_id = uuid.uuid4().hex - self.patch('/domains/%(domain_id)s/config/ldap/url' % { - 'domain_id': invalid_domain_id}, - body={'config': new_config}, - expected_status=exception.DomainNotFound.code) - - def test_update_config_invalid_option(self): - """Call ``PATCH /domains/{domain_id}/config/{group}/{invalid}``.""" - self.domain_config_api.create_config(self.domain['id'], self.config) - invalid_option = uuid.uuid4().hex - new_config = {'ldap': {invalid_option: uuid.uuid4().hex}} - # Trying to update an option that is neither whitelisted or sensitive - # should result in Forbidden. - self.patch( - '/domains/%(domain_id)s/config/ldap/%(invalid_option)s' % { - 'domain_id': self.domain['id'], - 'invalid_option': invalid_option}, - body={'config': new_config}, - expected_status=http_client.FORBIDDEN) - # Trying to update a valid option, but one that is not in the current - # config should result in NotFound - new_config = {'suffix': uuid.uuid4().hex} - self.patch( - '/domains/%(domain_id)s/config/ldap/suffix' % { - 'domain_id': self.domain['id']}, - body={'config': new_config}, - expected_status=http_client.NOT_FOUND) - - def test_update_config_invalid_option_invalid_domain(self): - """Call ``PATCH /domains/{domain_id}/config/{group}/{invalid}`` - - While updating Identity API-based domain config with an invalid option - and an invalid domain id provided, the request shall be rejected - with a response, 404 domain not found. 
- """ - self.domain_config_api.create_config(self.domain['id'], self.config) - invalid_option = uuid.uuid4().hex - new_config = {'ldap': {invalid_option: uuid.uuid4().hex}} - invalid_domain_id = uuid.uuid4().hex - self.patch( - '/domains/%(domain_id)s/config/ldap/%(invalid_option)s' % { - 'domain_id': invalid_domain_id, - 'invalid_option': invalid_option}, - body={'config': new_config}, - expected_status=exception.DomainNotFound.code) - - def test_get_config_default(self): - """Call ``GET /domains/config/default``.""" - # Create a config that overrides a few of the options so that we can - # check that only the defaults are returned. - self.domain_config_api.create_config(self.domain['id'], self.config) - url = '/domains/config/default' - r = self.get(url) - default_config = r.result['config'] - for group in default_config: - for option in default_config[group]: - self.assertEqual(getattr(getattr(CONF, group), option), - default_config[group][option]) - - def test_get_config_default_by_group(self): - """Call ``GET /domains/config/{group}/default``.""" - # Create a config that overrides a few of the options so that we can - # check that only the defaults are returned. - self.domain_config_api.create_config(self.domain['id'], self.config) - url = '/domains/config/ldap/default' - r = self.get(url) - default_config = r.result['config'] - for option in default_config['ldap']: - self.assertEqual(getattr(CONF.ldap, option), - default_config['ldap'][option]) - - def test_get_config_default_by_option(self): - """Call ``GET /domains/config/{group}/{option}/default``.""" - # Create a config that overrides a few of the options so that we can - # check that only the defaults are returned. 
- self.domain_config_api.create_config(self.domain['id'], self.config) - url = '/domains/config/ldap/url/default' - r = self.get(url) - default_config = r.result['config'] - self.assertEqual(CONF.ldap.url, default_config['url']) - - def test_get_config_default_by_invalid_group(self): - """Call ``GET for /domains/config/{bad-group}/default``.""" - # First try a valid group, but one we don't support for domain config - self.get('/domains/config/resouce/default', - expected_status=http_client.FORBIDDEN) - - # Now try a totally invalid group - url = '/domains/config/%s/default' % uuid.uuid4().hex - self.get(url, expected_status=http_client.FORBIDDEN) - - def test_get_config_default_by_invalid_option(self): - """Call ``GET for /domains/config/{group}/{bad-option}/default``.""" - # First try a valid option, but one we don't support for domain config, - # i.e. one that is in the sensitive options list - self.get('/domains/config/ldap/password/default', - expected_status=http_client.FORBIDDEN) - - # Now try a totally invalid option - url = '/domains/config/ldap/%s/default' % uuid.uuid4().hex - self.get(url, expected_status=http_client.FORBIDDEN) diff --git a/keystone-moon/keystone/tests/unit/test_v3_endpoint_policy.py b/keystone-moon/keystone/tests/unit/test_v3_endpoint_policy.py deleted file mode 100644 index 9fee8d2b..00000000 --- a/keystone-moon/keystone/tests/unit/test_v3_endpoint_policy.py +++ /dev/null @@ -1,246 +0,0 @@ -# Copyright 2014 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -from six.moves import http_client -from testtools import matchers - -from keystone.tests import unit -from keystone.tests.unit import test_v3 - - -class EndpointPolicyTestCase(test_v3.RestfulTestCase): - """Test endpoint policy CRUD. - - In general, the controller layer of the endpoint policy extension is really - just marshalling the data around the underlying manager calls. Given that - the manager layer is tested in depth by the backend tests, the tests we - execute here concentrate on ensuring we are correctly passing and - presenting the data. - - """ - - def setUp(self): - super(EndpointPolicyTestCase, self).setUp() - self.policy = unit.new_policy_ref() - self.policy_api.create_policy(self.policy['id'], self.policy) - self.service = unit.new_service_ref() - self.catalog_api.create_service(self.service['id'], self.service) - self.endpoint = unit.new_endpoint_ref(self.service['id'], enabled=True, - interface='public', - region_id=self.region_id) - self.catalog_api.create_endpoint(self.endpoint['id'], self.endpoint) - self.region = unit.new_region_ref() - self.catalog_api.create_region(self.region) - - def assert_head_and_get_return_same_response(self, url, expected_status): - self.get(url, expected_status=expected_status) - self.head(url, expected_status=expected_status) - - # endpoint policy crud tests - def _crud_test(self, url): - # Test when the resource does not exist also ensures - # that there is not a false negative after creation. - - self.assert_head_and_get_return_same_response( - url, - expected_status=http_client.NOT_FOUND) - - self.put(url) - - # test that the new resource is accessible. 
- self.assert_head_and_get_return_same_response( - url, - expected_status=http_client.NO_CONTENT) - - self.delete(url) - - # test that the deleted resource is no longer accessible - self.assert_head_and_get_return_same_response( - url, - expected_status=http_client.NOT_FOUND) - - def test_crud_for_policy_for_explicit_endpoint(self): - """PUT, HEAD and DELETE for explicit endpoint policy.""" - url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY' - '/endpoints/%(endpoint_id)s') % { - 'policy_id': self.policy['id'], - 'endpoint_id': self.endpoint['id']} - self._crud_test(url) - - def test_crud_for_policy_for_service(self): - """PUT, HEAD and DELETE for service endpoint policy.""" - url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY' - '/services/%(service_id)s') % { - 'policy_id': self.policy['id'], - 'service_id': self.service['id']} - self._crud_test(url) - - def test_crud_for_policy_for_region_and_service(self): - """PUT, HEAD and DELETE for region and service endpoint policy.""" - url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY' - '/services/%(service_id)s/regions/%(region_id)s') % { - 'policy_id': self.policy['id'], - 'service_id': self.service['id'], - 'region_id': self.region['id']} - self._crud_test(url) - - def test_get_policy_for_endpoint(self): - """GET /endpoints/{endpoint_id}/policy.""" - self.put('/policies/%(policy_id)s/OS-ENDPOINT-POLICY' - '/endpoints/%(endpoint_id)s' % { - 'policy_id': self.policy['id'], - 'endpoint_id': self.endpoint['id']}) - - self.head('/endpoints/%(endpoint_id)s/OS-ENDPOINT-POLICY' - '/policy' % { - 'endpoint_id': self.endpoint['id']}, - expected_status=http_client.OK) - - r = self.get('/endpoints/%(endpoint_id)s/OS-ENDPOINT-POLICY' - '/policy' % { - 'endpoint_id': self.endpoint['id']}) - self.assertValidPolicyResponse(r, ref=self.policy) - - def test_list_endpoints_for_policy(self): - """GET /policies/%(policy_id}/endpoints.""" - self.put('/policies/%(policy_id)s/OS-ENDPOINT-POLICY' - '/endpoints/%(endpoint_id)s' % { - 
'policy_id': self.policy['id'], - 'endpoint_id': self.endpoint['id']}) - - r = self.get('/policies/%(policy_id)s/OS-ENDPOINT-POLICY' - '/endpoints' % { - 'policy_id': self.policy['id']}) - self.assertValidEndpointListResponse(r, ref=self.endpoint) - self.assertThat(r.result.get('endpoints'), matchers.HasLength(1)) - - def test_endpoint_association_cleanup_when_endpoint_deleted(self): - url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY' - '/endpoints/%(endpoint_id)s') % { - 'policy_id': self.policy['id'], - 'endpoint_id': self.endpoint['id']} - - self.put(url) - self.head(url) - - self.delete('/endpoints/%(endpoint_id)s' % { - 'endpoint_id': self.endpoint['id']}) - - self.head(url, expected_status=http_client.NOT_FOUND) - - def test_region_service_association_cleanup_when_region_deleted(self): - url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY' - '/services/%(service_id)s/regions/%(region_id)s') % { - 'policy_id': self.policy['id'], - 'service_id': self.service['id'], - 'region_id': self.region['id']} - - self.put(url) - self.head(url) - - self.delete('/regions/%(region_id)s' % { - 'region_id': self.region['id']}) - - self.head(url, expected_status=http_client.NOT_FOUND) - - def test_region_service_association_cleanup_when_service_deleted(self): - url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY' - '/services/%(service_id)s/regions/%(region_id)s') % { - 'policy_id': self.policy['id'], - 'service_id': self.service['id'], - 'region_id': self.region['id']} - - self.put(url) - self.head(url) - - self.delete('/services/%(service_id)s' % { - 'service_id': self.service['id']}) - - self.head(url, expected_status=http_client.NOT_FOUND) - - def test_service_association_cleanup_when_service_deleted(self): - url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY' - '/services/%(service_id)s') % { - 'policy_id': self.policy['id'], - 'service_id': self.service['id']} - - self.put(url) - self.get(url, expected_status=http_client.NO_CONTENT) - - 
self.delete('/policies/%(policy_id)s' % { - 'policy_id': self.policy['id']}) - - self.head(url, expected_status=http_client.NOT_FOUND) - - def test_service_association_cleanup_when_policy_deleted(self): - url = ('/policies/%(policy_id)s/OS-ENDPOINT-POLICY' - '/services/%(service_id)s') % { - 'policy_id': self.policy['id'], - 'service_id': self.service['id']} - - self.put(url) - self.get(url, expected_status=http_client.NO_CONTENT) - - self.delete('/services/%(service_id)s' % { - 'service_id': self.service['id']}) - - self.head(url, expected_status=http_client.NOT_FOUND) - - -class JsonHomeTests(test_v3.JsonHomeTestMixin): - EXTENSION_LOCATION = ('http://docs.openstack.org/api/openstack-identity/3/' - 'ext/OS-ENDPOINT-POLICY/1.0/rel') - PARAM_LOCATION = 'http://docs.openstack.org/api/openstack-identity/3/param' - - JSON_HOME_DATA = { - EXTENSION_LOCATION + '/endpoint_policy': { - 'href-template': '/endpoints/{endpoint_id}/OS-ENDPOINT-POLICY/' - 'policy', - 'href-vars': { - 'endpoint_id': PARAM_LOCATION + '/endpoint_id', - }, - }, - EXTENSION_LOCATION + '/policy_endpoints': { - 'href-template': '/policies/{policy_id}/OS-ENDPOINT-POLICY/' - 'endpoints', - 'href-vars': { - 'policy_id': PARAM_LOCATION + '/policy_id', - }, - }, - EXTENSION_LOCATION + '/endpoint_policy_association': { - 'href-template': '/policies/{policy_id}/OS-ENDPOINT-POLICY/' - 'endpoints/{endpoint_id}', - 'href-vars': { - 'policy_id': PARAM_LOCATION + '/policy_id', - 'endpoint_id': PARAM_LOCATION + '/endpoint_id', - }, - }, - EXTENSION_LOCATION + '/service_policy_association': { - 'href-template': '/policies/{policy_id}/OS-ENDPOINT-POLICY/' - 'services/{service_id}', - 'href-vars': { - 'policy_id': PARAM_LOCATION + '/policy_id', - 'service_id': PARAM_LOCATION + '/service_id', - }, - }, - EXTENSION_LOCATION + '/region_and_service_policy_association': { - 'href-template': '/policies/{policy_id}/OS-ENDPOINT-POLICY/' - 'services/{service_id}/regions/{region_id}', - 'href-vars': { - 'policy_id': 
PARAM_LOCATION + '/policy_id', - 'service_id': PARAM_LOCATION + '/service_id', - 'region_id': PARAM_LOCATION + '/region_id', - }, - }, - } diff --git a/keystone-moon/keystone/tests/unit/test_v3_federation.py b/keystone-moon/keystone/tests/unit/test_v3_federation.py deleted file mode 100644 index f4ec8e51..00000000 --- a/keystone-moon/keystone/tests/unit/test_v3_federation.py +++ /dev/null @@ -1,3722 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import copy -import os -import random -from testtools import matchers -import uuid - -import fixtures -from lxml import etree -import mock -from oslo_config import cfg -from oslo_log import versionutils -from oslo_serialization import jsonutils -from oslo_utils import importutils -from oslotest import mockpatch -import saml2 -from saml2 import saml -from saml2 import sigver -from six.moves import http_client -from six.moves import range, urllib, zip -xmldsig = importutils.try_import("saml2.xmldsig") -if not xmldsig: - xmldsig = importutils.try_import("xmldsig") - -from keystone.auth import controllers as auth_controllers -from keystone.common import environment -from keystone.contrib.federation import routers -from keystone import exception -from keystone.federation import controllers as federation_controllers -from keystone.federation import idp as keystone_idp -from keystone import notifications -from keystone.tests import unit -from keystone.tests.unit import core -from keystone.tests.unit import federation_fixtures 
-from keystone.tests.unit import ksfixtures -from keystone.tests.unit import mapping_fixtures -from keystone.tests.unit import test_v3 -from keystone.tests.unit import utils -from keystone.token.providers import common as token_common - - -subprocess = environment.subprocess - -CONF = cfg.CONF -ROOTDIR = os.path.dirname(os.path.abspath(__file__)) -XMLDIR = os.path.join(ROOTDIR, 'saml2/') - - -def dummy_validator(*args, **kwargs): - pass - - -class FederationTests(test_v3.RestfulTestCase): - - @mock.patch.object(versionutils, 'report_deprecated_feature') - def test_exception_happens(self, mock_deprecator): - routers.FederationExtension(mock.ANY) - mock_deprecator.assert_called_once_with(mock.ANY, mock.ANY) - args, _kwargs = mock_deprecator.call_args - self.assertIn("Remove federation_extension from", args[1]) - - -class FederatedSetupMixin(object): - - ACTION = 'authenticate' - IDP = 'ORG_IDP' - PROTOCOL = 'saml2' - AUTH_METHOD = 'saml2' - USER = 'user@ORGANIZATION' - ASSERTION_PREFIX = 'PREFIX_' - IDP_WITH_REMOTE = 'ORG_IDP_REMOTE' - REMOTE_IDS = ['entityID_IDP1', 'entityID_IDP2'] - REMOTE_ID_ATTR = uuid.uuid4().hex - - UNSCOPED_V3_SAML2_REQ = { - "identity": { - "methods": [AUTH_METHOD], - AUTH_METHOD: { - "identity_provider": IDP, - "protocol": PROTOCOL - } - } - } - - def _check_domains_are_valid(self, token): - self.assertEqual('Federated', token['user']['domain']['id']) - self.assertEqual('Federated', token['user']['domain']['name']) - - def _project(self, project): - return (project['id'], project['name']) - - def _roles(self, roles): - return set([(r['id'], r['name']) for r in roles]) - - def _check_projects_and_roles(self, token, roles, projects): - """Check whether the projects and the roles match.""" - token_roles = token.get('roles') - if token_roles is None: - raise AssertionError('Roles not found in the token') - token_roles = self._roles(token_roles) - roles_ref = self._roles(roles) - self.assertEqual(token_roles, roles_ref) - - token_projects = 
token.get('project') - if token_projects is None: - raise AssertionError('Projects not found in the token') - token_projects = self._project(token_projects) - projects_ref = self._project(projects) - self.assertEqual(token_projects, projects_ref) - - def _check_scoped_token_attributes(self, token): - - for obj in ('user', 'catalog', 'expires_at', 'issued_at', - 'methods', 'roles'): - self.assertIn(obj, token) - - os_federation = token['user']['OS-FEDERATION'] - - self.assertIn('groups', os_federation) - self.assertIn('identity_provider', os_federation) - self.assertIn('protocol', os_federation) - self.assertThat(os_federation, matchers.HasLength(3)) - - self.assertEqual(self.IDP, os_federation['identity_provider']['id']) - self.assertEqual(self.PROTOCOL, os_federation['protocol']['id']) - - def _check_project_scoped_token_attributes(self, token, project_id): - self.assertEqual(project_id, token['project']['id']) - self._check_scoped_token_attributes(token) - - def _check_domain_scoped_token_attributes(self, token, domain_id): - self.assertEqual(domain_id, token['domain']['id']) - self._check_scoped_token_attributes(token) - - def assertValidMappedUser(self, token): - """Check if user object meets all the criteria.""" - user = token['user'] - self.assertIn('id', user) - self.assertIn('name', user) - self.assertIn('domain', user) - - self.assertIn('groups', user['OS-FEDERATION']) - self.assertIn('identity_provider', user['OS-FEDERATION']) - self.assertIn('protocol', user['OS-FEDERATION']) - - # Make sure user_id is url safe - self.assertEqual(urllib.parse.quote(user['name']), user['id']) - - def _issue_unscoped_token(self, - idp=None, - assertion='EMPLOYEE_ASSERTION', - environment=None): - api = federation_controllers.Auth() - context = {'environment': environment or {}} - self._inject_assertion(context, assertion) - if idp is None: - idp = self.IDP - r = api.federated_authentication(context, idp, self.PROTOCOL) - return r - - def idp_ref(self, id=None): - idp = { - 
'id': id or uuid.uuid4().hex, - 'enabled': True, - 'description': uuid.uuid4().hex - } - return idp - - def proto_ref(self, mapping_id=None): - proto = { - 'id': uuid.uuid4().hex, - 'mapping_id': mapping_id or uuid.uuid4().hex - } - return proto - - def mapping_ref(self, rules=None): - return { - 'id': uuid.uuid4().hex, - 'rules': rules or self.rules['rules'] - } - - def _scope_request(self, unscoped_token_id, scope, scope_id): - return { - 'auth': { - 'identity': { - 'methods': [ - self.AUTH_METHOD - ], - self.AUTH_METHOD: { - 'id': unscoped_token_id - } - }, - 'scope': { - scope: { - 'id': scope_id - } - } - } - } - - def _inject_assertion(self, context, variant, query_string=None): - assertion = getattr(mapping_fixtures, variant) - context['environment'].update(assertion) - context['query_string'] = query_string or [] - - def load_federation_sample_data(self): - """Inject additional data.""" - # Create and add domains - self.domainA = unit.new_domain_ref() - self.resource_api.create_domain(self.domainA['id'], - self.domainA) - - self.domainB = unit.new_domain_ref() - self.resource_api.create_domain(self.domainB['id'], - self.domainB) - - self.domainC = unit.new_domain_ref() - self.resource_api.create_domain(self.domainC['id'], - self.domainC) - - self.domainD = unit.new_domain_ref() - self.resource_api.create_domain(self.domainD['id'], - self.domainD) - - # Create and add projects - self.proj_employees = unit.new_project_ref( - domain_id=self.domainA['id']) - self.resource_api.create_project(self.proj_employees['id'], - self.proj_employees) - self.proj_customers = unit.new_project_ref( - domain_id=self.domainA['id']) - self.resource_api.create_project(self.proj_customers['id'], - self.proj_customers) - - self.project_all = unit.new_project_ref( - domain_id=self.domainA['id']) - self.resource_api.create_project(self.project_all['id'], - self.project_all) - - self.project_inherited = unit.new_project_ref( - domain_id=self.domainD['id']) - 
self.resource_api.create_project(self.project_inherited['id'], - self.project_inherited) - - # Create and add groups - self.group_employees = unit.new_group_ref(domain_id=self.domainA['id']) - self.group_employees = ( - self.identity_api.create_group(self.group_employees)) - - self.group_customers = unit.new_group_ref(domain_id=self.domainA['id']) - self.group_customers = ( - self.identity_api.create_group(self.group_customers)) - - self.group_admins = unit.new_group_ref(domain_id=self.domainA['id']) - self.group_admins = self.identity_api.create_group(self.group_admins) - - # Create and add roles - self.role_employee = unit.new_role_ref() - self.role_api.create_role(self.role_employee['id'], self.role_employee) - self.role_customer = unit.new_role_ref() - self.role_api.create_role(self.role_customer['id'], self.role_customer) - - self.role_admin = unit.new_role_ref() - self.role_api.create_role(self.role_admin['id'], self.role_admin) - - # Employees can access - # * proj_employees - # * project_all - self.assignment_api.create_grant(self.role_employee['id'], - group_id=self.group_employees['id'], - project_id=self.proj_employees['id']) - self.assignment_api.create_grant(self.role_employee['id'], - group_id=self.group_employees['id'], - project_id=self.project_all['id']) - # Customers can access - # * proj_customers - self.assignment_api.create_grant(self.role_customer['id'], - group_id=self.group_customers['id'], - project_id=self.proj_customers['id']) - - # Admins can access: - # * proj_customers - # * proj_employees - # * project_all - self.assignment_api.create_grant(self.role_admin['id'], - group_id=self.group_admins['id'], - project_id=self.proj_customers['id']) - self.assignment_api.create_grant(self.role_admin['id'], - group_id=self.group_admins['id'], - project_id=self.proj_employees['id']) - self.assignment_api.create_grant(self.role_admin['id'], - group_id=self.group_admins['id'], - project_id=self.project_all['id']) - - 
self.assignment_api.create_grant(self.role_customer['id'], - group_id=self.group_customers['id'], - domain_id=self.domainA['id']) - - # Customers can access: - # * domain A - self.assignment_api.create_grant(self.role_customer['id'], - group_id=self.group_customers['id'], - domain_id=self.domainA['id']) - - # Customers can access projects via inheritance: - # * domain D - self.assignment_api.create_grant(self.role_customer['id'], - group_id=self.group_customers['id'], - domain_id=self.domainD['id'], - inherited_to_projects=True) - - # Employees can access: - # * domain A - # * domain B - - self.assignment_api.create_grant(self.role_employee['id'], - group_id=self.group_employees['id'], - domain_id=self.domainA['id']) - self.assignment_api.create_grant(self.role_employee['id'], - group_id=self.group_employees['id'], - domain_id=self.domainB['id']) - - # Admins can access: - # * domain A - # * domain B - # * domain C - self.assignment_api.create_grant(self.role_admin['id'], - group_id=self.group_admins['id'], - domain_id=self.domainA['id']) - self.assignment_api.create_grant(self.role_admin['id'], - group_id=self.group_admins['id'], - domain_id=self.domainB['id']) - - self.assignment_api.create_grant(self.role_admin['id'], - group_id=self.group_admins['id'], - domain_id=self.domainC['id']) - self.rules = { - 'rules': [ - { - 'local': [ - { - 'group': { - 'id': self.group_employees['id'] - } - }, - { - 'user': { - 'name': '{0}', - 'id': '{1}' - } - } - ], - 'remote': [ - { - 'type': 'UserName' - }, - { - 'type': 'Email', - }, - { - 'type': 'orgPersonType', - 'any_one_of': [ - 'Employee' - ] - } - ] - }, - { - 'local': [ - { - 'group': { - 'id': self.group_employees['id'] - } - }, - { - 'user': { - 'name': '{0}', - 'id': '{1}' - } - } - ], - 'remote': [ - { - 'type': self.ASSERTION_PREFIX + 'UserName' - }, - { - 'type': self.ASSERTION_PREFIX + 'Email', - }, - { - 'type': self.ASSERTION_PREFIX + 'orgPersonType', - 'any_one_of': [ - 'SuperEmployee' - ] - } - ] - }, - { - 
'local': [ - { - 'group': { - 'id': self.group_customers['id'] - } - }, - { - 'user': { - 'name': '{0}', - 'id': '{1}' - } - } - ], - 'remote': [ - { - 'type': 'UserName' - }, - { - 'type': 'Email' - }, - { - 'type': 'orgPersonType', - 'any_one_of': [ - 'Customer' - ] - } - ] - }, - { - 'local': [ - { - 'group': { - 'id': self.group_admins['id'] - } - }, - { - 'group': { - 'id': self.group_employees['id'] - } - }, - { - 'group': { - 'id': self.group_customers['id'] - } - }, - - { - 'user': { - 'name': '{0}', - 'id': '{1}' - } - } - ], - 'remote': [ - { - 'type': 'UserName' - }, - { - 'type': 'Email' - }, - { - 'type': 'orgPersonType', - 'any_one_of': [ - 'Admin', - 'Chief' - ] - } - ] - }, - { - 'local': [ - { - 'group': { - 'id': uuid.uuid4().hex - } - }, - { - 'group': { - 'id': self.group_customers['id'] - } - }, - { - 'user': { - 'name': '{0}', - 'id': '{1}' - } - } - ], - 'remote': [ - { - 'type': 'UserName', - }, - { - 'type': 'Email', - }, - { - 'type': 'FirstName', - 'any_one_of': [ - 'Jill' - ] - }, - { - 'type': 'LastName', - 'any_one_of': [ - 'Smith' - ] - } - ] - }, - { - 'local': [ - { - 'group': { - 'id': 'this_group_no_longer_exists' - } - }, - { - 'user': { - 'name': '{0}', - 'id': '{1}' - } - } - ], - 'remote': [ - { - 'type': 'UserName', - }, - { - 'type': 'Email', - }, - { - 'type': 'Email', - 'any_one_of': [ - 'testacct@example.com' - ] - }, - { - 'type': 'orgPersonType', - 'any_one_of': [ - 'Tester' - ] - } - ] - }, - # rules with local group names - { - "local": [ - { - 'user': { - 'name': '{0}', - 'id': '{1}' - } - }, - { - "group": { - "name": self.group_customers['name'], - "domain": { - "name": self.domainA['name'] - } - } - } - ], - "remote": [ - { - 'type': 'UserName', - }, - { - 'type': 'Email', - }, - { - "type": "orgPersonType", - "any_one_of": [ - "CEO", - "CTO" - ], - } - ] - }, - { - "local": [ - { - 'user': { - 'name': '{0}', - 'id': '{1}' - } - }, - { - "group": { - "name": self.group_admins['name'], - "domain": { - "id": 
self.domainA['id'] - } - } - } - ], - "remote": [ - { - "type": "UserName", - }, - { - "type": "Email", - }, - { - "type": "orgPersonType", - "any_one_of": [ - "Managers" - ] - } - ] - }, - { - "local": [ - { - "user": { - "name": "{0}", - "id": "{1}" - } - }, - { - "group": { - "name": "NON_EXISTING", - "domain": { - "id": self.domainA['id'] - } - } - } - ], - "remote": [ - { - "type": "UserName", - }, - { - "type": "Email", - }, - { - "type": "UserName", - "any_one_of": [ - "IamTester" - ] - } - ] - }, - { - "local": [ - { - "user": { - "type": "local", - "name": self.user['name'], - "domain": { - "id": self.user['domain_id'] - } - } - }, - { - "group": { - "id": self.group_customers['id'] - } - } - ], - "remote": [ - { - "type": "UserType", - "any_one_of": [ - "random" - ] - } - ] - }, - { - "local": [ - { - "user": { - "type": "local", - "name": self.user['name'], - "domain": { - "id": uuid.uuid4().hex - } - } - } - ], - "remote": [ - { - "type": "Position", - "any_one_of": [ - "DirectorGeneral" - ] - } - ] - } - ] - } - - # Add IDP - self.idp = self.idp_ref(id=self.IDP) - self.federation_api.create_idp(self.idp['id'], - self.idp) - # Add IDP with remote - self.idp_with_remote = self.idp_ref(id=self.IDP_WITH_REMOTE) - self.idp_with_remote['remote_ids'] = self.REMOTE_IDS - self.federation_api.create_idp(self.idp_with_remote['id'], - self.idp_with_remote) - # Add a mapping - self.mapping = self.mapping_ref() - self.federation_api.create_mapping(self.mapping['id'], - self.mapping) - # Add protocols - self.proto_saml = self.proto_ref(mapping_id=self.mapping['id']) - self.proto_saml['id'] = self.PROTOCOL - self.federation_api.create_protocol(self.idp['id'], - self.proto_saml['id'], - self.proto_saml) - # Add protocols IDP with remote - self.federation_api.create_protocol(self.idp_with_remote['id'], - self.proto_saml['id'], - self.proto_saml) - # Generate fake tokens - context = {'environment': {}} - - self.tokens = {} - VARIANTS = ('EMPLOYEE_ASSERTION', 
'CUSTOMER_ASSERTION', - 'ADMIN_ASSERTION') - api = auth_controllers.Auth() - for variant in VARIANTS: - self._inject_assertion(context, variant) - r = api.authenticate_for_token(context, self.UNSCOPED_V3_SAML2_REQ) - self.tokens[variant] = r.headers.get('X-Subject-Token') - - self.TOKEN_SCOPE_PROJECT_FROM_NONEXISTENT_TOKEN = self._scope_request( - uuid.uuid4().hex, 'project', self.proj_customers['id']) - - self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_EMPLOYEE = self._scope_request( - self.tokens['EMPLOYEE_ASSERTION'], 'project', - self.proj_employees['id']) - - self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_ADMIN = self._scope_request( - self.tokens['ADMIN_ASSERTION'], 'project', - self.proj_employees['id']) - - self.TOKEN_SCOPE_PROJECT_CUSTOMER_FROM_ADMIN = self._scope_request( - self.tokens['ADMIN_ASSERTION'], 'project', - self.proj_customers['id']) - - self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_CUSTOMER = self._scope_request( - self.tokens['CUSTOMER_ASSERTION'], 'project', - self.proj_employees['id']) - - self.TOKEN_SCOPE_PROJECT_INHERITED_FROM_CUSTOMER = self._scope_request( - self.tokens['CUSTOMER_ASSERTION'], 'project', - self.project_inherited['id']) - - self.TOKEN_SCOPE_DOMAIN_A_FROM_CUSTOMER = self._scope_request( - self.tokens['CUSTOMER_ASSERTION'], 'domain', self.domainA['id']) - - self.TOKEN_SCOPE_DOMAIN_B_FROM_CUSTOMER = self._scope_request( - self.tokens['CUSTOMER_ASSERTION'], 'domain', - self.domainB['id']) - - self.TOKEN_SCOPE_DOMAIN_D_FROM_CUSTOMER = self._scope_request( - self.tokens['CUSTOMER_ASSERTION'], 'domain', self.domainD['id']) - - self.TOKEN_SCOPE_DOMAIN_A_FROM_ADMIN = self._scope_request( - self.tokens['ADMIN_ASSERTION'], 'domain', self.domainA['id']) - - self.TOKEN_SCOPE_DOMAIN_B_FROM_ADMIN = self._scope_request( - self.tokens['ADMIN_ASSERTION'], 'domain', self.domainB['id']) - - self.TOKEN_SCOPE_DOMAIN_C_FROM_ADMIN = self._scope_request( - self.tokens['ADMIN_ASSERTION'], 'domain', - self.domainC['id']) - - -class 
FederatedIdentityProviderTests(test_v3.RestfulTestCase): - """A test class for Identity Providers.""" - - idp_keys = ['description', 'enabled'] - - default_body = {'description': None, 'enabled': True} - - def base_url(self, suffix=None): - if suffix is not None: - return '/OS-FEDERATION/identity_providers/' + str(suffix) - return '/OS-FEDERATION/identity_providers' - - def _fetch_attribute_from_response(self, resp, parameter, - assert_is_not_none=True): - """Fetch single attribute from TestResponse object.""" - result = resp.result.get(parameter) - if assert_is_not_none: - self.assertIsNotNone(result) - return result - - def _create_and_decapsulate_response(self, body=None): - """Create IdP and fetch it's random id along with entity.""" - default_resp = self._create_default_idp(body=body) - idp = self._fetch_attribute_from_response(default_resp, - 'identity_provider') - self.assertIsNotNone(idp) - idp_id = idp.get('id') - return (idp_id, idp) - - def _get_idp(self, idp_id): - """Fetch IdP entity based on its id.""" - url = self.base_url(suffix=idp_id) - resp = self.get(url) - return resp - - def _create_default_idp(self, body=None): - """Create default IdP.""" - url = self.base_url(suffix=uuid.uuid4().hex) - if body is None: - body = self._http_idp_input() - resp = self.put(url, body={'identity_provider': body}, - expected_status=http_client.CREATED) - return resp - - def _http_idp_input(self, **kwargs): - """Create default input for IdP data.""" - body = None - if 'body' not in kwargs: - body = self.default_body.copy() - body['description'] = uuid.uuid4().hex - else: - body = kwargs['body'] - return body - - def _assign_protocol_to_idp(self, idp_id=None, proto=None, url=None, - mapping_id=None, validate=True, **kwargs): - if url is None: - url = self.base_url(suffix='%(idp_id)s/protocols/%(protocol_id)s') - if idp_id is None: - idp_id, _ = self._create_and_decapsulate_response() - if proto is None: - proto = uuid.uuid4().hex - if mapping_id is None: - mapping_id 
= uuid.uuid4().hex - body = {'mapping_id': mapping_id} - url = url % {'idp_id': idp_id, 'protocol_id': proto} - resp = self.put(url, body={'protocol': body}, **kwargs) - if validate: - self.assertValidResponse(resp, 'protocol', dummy_validator, - keys_to_check=['id', 'mapping_id'], - ref={'id': proto, - 'mapping_id': mapping_id}) - return (resp, idp_id, proto) - - def _get_protocol(self, idp_id, protocol_id): - url = "%s/protocols/%s" % (idp_id, protocol_id) - url = self.base_url(suffix=url) - r = self.get(url) - return r - - def test_create_idp(self): - """Creates the IdentityProvider entity associated to remote_ids.""" - keys_to_check = list(self.idp_keys) - body = self.default_body.copy() - body['description'] = uuid.uuid4().hex - resp = self._create_default_idp(body=body) - self.assertValidResponse(resp, 'identity_provider', dummy_validator, - keys_to_check=keys_to_check, - ref=body) - - def test_create_idp_remote(self): - """Creates the IdentityProvider entity associated to remote_ids.""" - keys_to_check = list(self.idp_keys) - keys_to_check.append('remote_ids') - body = self.default_body.copy() - body['description'] = uuid.uuid4().hex - body['remote_ids'] = [uuid.uuid4().hex, - uuid.uuid4().hex, - uuid.uuid4().hex] - resp = self._create_default_idp(body=body) - self.assertValidResponse(resp, 'identity_provider', dummy_validator, - keys_to_check=keys_to_check, - ref=body) - - def test_create_idp_remote_repeated(self): - """Creates two IdentityProvider entities with some remote_ids - - A remote_id is the same for both so the second IdP is not - created because of the uniqueness of the remote_ids - - Expect HTTP 409 Conflict code for the latter call. 
- - """ - body = self.default_body.copy() - repeated_remote_id = uuid.uuid4().hex - body['remote_ids'] = [uuid.uuid4().hex, - uuid.uuid4().hex, - uuid.uuid4().hex, - repeated_remote_id] - self._create_default_idp(body=body) - - url = self.base_url(suffix=uuid.uuid4().hex) - body['remote_ids'] = [uuid.uuid4().hex, - repeated_remote_id] - resp = self.put(url, body={'identity_provider': body}, - expected_status=http_client.CONFLICT) - - resp_data = jsonutils.loads(resp.body) - self.assertIn('Duplicate remote ID', - resp_data.get('error', {}).get('message')) - - def test_create_idp_remote_empty(self): - """Creates an IdP with empty remote_ids.""" - keys_to_check = list(self.idp_keys) - keys_to_check.append('remote_ids') - body = self.default_body.copy() - body['description'] = uuid.uuid4().hex - body['remote_ids'] = [] - resp = self._create_default_idp(body=body) - self.assertValidResponse(resp, 'identity_provider', dummy_validator, - keys_to_check=keys_to_check, - ref=body) - - def test_create_idp_remote_none(self): - """Creates an IdP with a None remote_ids.""" - keys_to_check = list(self.idp_keys) - keys_to_check.append('remote_ids') - body = self.default_body.copy() - body['description'] = uuid.uuid4().hex - body['remote_ids'] = None - resp = self._create_default_idp(body=body) - expected = body.copy() - expected['remote_ids'] = [] - self.assertValidResponse(resp, 'identity_provider', dummy_validator, - keys_to_check=keys_to_check, - ref=expected) - - def test_update_idp_remote_ids(self): - """Update IdP's remote_ids parameter.""" - body = self.default_body.copy() - body['remote_ids'] = [uuid.uuid4().hex] - default_resp = self._create_default_idp(body=body) - default_idp = self._fetch_attribute_from_response(default_resp, - 'identity_provider') - idp_id = default_idp.get('id') - url = self.base_url(suffix=idp_id) - self.assertIsNotNone(idp_id) - - body['remote_ids'] = [uuid.uuid4().hex, uuid.uuid4().hex] - - body = {'identity_provider': body} - resp = 
self.patch(url, body=body) - updated_idp = self._fetch_attribute_from_response(resp, - 'identity_provider') - body = body['identity_provider'] - self.assertEqual(sorted(body['remote_ids']), - sorted(updated_idp.get('remote_ids'))) - - resp = self.get(url) - returned_idp = self._fetch_attribute_from_response(resp, - 'identity_provider') - self.assertEqual(sorted(body['remote_ids']), - sorted(returned_idp.get('remote_ids'))) - - def test_update_idp_clean_remote_ids(self): - """Update IdP's remote_ids parameter with an empty list.""" - body = self.default_body.copy() - body['remote_ids'] = [uuid.uuid4().hex] - default_resp = self._create_default_idp(body=body) - default_idp = self._fetch_attribute_from_response(default_resp, - 'identity_provider') - idp_id = default_idp.get('id') - url = self.base_url(suffix=idp_id) - self.assertIsNotNone(idp_id) - - body['remote_ids'] = [] - - body = {'identity_provider': body} - resp = self.patch(url, body=body) - updated_idp = self._fetch_attribute_from_response(resp, - 'identity_provider') - body = body['identity_provider'] - self.assertEqual(sorted(body['remote_ids']), - sorted(updated_idp.get('remote_ids'))) - - resp = self.get(url) - returned_idp = self._fetch_attribute_from_response(resp, - 'identity_provider') - self.assertEqual(sorted(body['remote_ids']), - sorted(returned_idp.get('remote_ids'))) - - def test_update_idp_remote_repeated(self): - """Update an IdentityProvider entity reusing a remote_id. - - A remote_id is the same for both so the second IdP is not - updated because of the uniqueness of the remote_ids. - - Expect HTTP 409 Conflict code for the latter call. 
- - """ - # Create first identity provider - body = self.default_body.copy() - repeated_remote_id = uuid.uuid4().hex - body['remote_ids'] = [uuid.uuid4().hex, - repeated_remote_id] - self._create_default_idp(body=body) - - # Create second identity provider (without remote_ids) - body = self.default_body.copy() - default_resp = self._create_default_idp(body=body) - default_idp = self._fetch_attribute_from_response(default_resp, - 'identity_provider') - idp_id = default_idp.get('id') - url = self.base_url(suffix=idp_id) - - body['remote_ids'] = [repeated_remote_id] - resp = self.patch(url, body={'identity_provider': body}, - expected_status=http_client.CONFLICT) - resp_data = jsonutils.loads(resp.body) - self.assertIn('Duplicate remote ID', - resp_data['error']['message']) - - def test_list_idps(self, iterations=5): - """Lists all available IdentityProviders. - - This test collects ids of created IdPs and - intersects it with the list of all available IdPs. - List of all IdPs can be a superset of IdPs created in this test, - because other tests also create IdPs. 
- - """ - def get_id(resp): - r = self._fetch_attribute_from_response(resp, - 'identity_provider') - return r.get('id') - - ids = [] - for _ in range(iterations): - id = get_id(self._create_default_idp()) - ids.append(id) - ids = set(ids) - - keys_to_check = self.idp_keys - url = self.base_url() - resp = self.get(url) - self.assertValidListResponse(resp, 'identity_providers', - dummy_validator, - keys_to_check=keys_to_check) - entities = self._fetch_attribute_from_response(resp, - 'identity_providers') - entities_ids = set([e['id'] for e in entities]) - ids_intersection = entities_ids.intersection(ids) - self.assertEqual(ids_intersection, ids) - - def test_filter_list_idp_by_id(self): - def get_id(resp): - r = self._fetch_attribute_from_response(resp, - 'identity_provider') - return r.get('id') - - idp1_id = get_id(self._create_default_idp()) - idp2_id = get_id(self._create_default_idp()) - - # list the IdP, should get two IdP. - url = self.base_url() - resp = self.get(url) - entities = self._fetch_attribute_from_response(resp, - 'identity_providers') - entities_ids = [e['id'] for e in entities] - self.assertItemsEqual(entities_ids, [idp1_id, idp2_id]) - - # filter the IdP by ID. - url = self.base_url() + '?id=' + idp1_id - resp = self.get(url) - filtered_service_list = resp.json['identity_providers'] - self.assertThat(filtered_service_list, matchers.HasLength(1)) - self.assertEqual(idp1_id, filtered_service_list[0].get('id')) - - def test_filter_list_idp_by_enabled(self): - def get_id(resp): - r = self._fetch_attribute_from_response(resp, - 'identity_provider') - return r.get('id') - - idp1_id = get_id(self._create_default_idp()) - - body = self.default_body.copy() - body['enabled'] = False - idp2_id = get_id(self._create_default_idp(body=body)) - - # list the IdP, should get two IdP. 
- url = self.base_url() - resp = self.get(url) - entities = self._fetch_attribute_from_response(resp, - 'identity_providers') - entities_ids = [e['id'] for e in entities] - self.assertItemsEqual(entities_ids, [idp1_id, idp2_id]) - - # filter the IdP by 'enabled'. - url = self.base_url() + '?enabled=True' - resp = self.get(url) - filtered_service_list = resp.json['identity_providers'] - self.assertThat(filtered_service_list, matchers.HasLength(1)) - self.assertEqual(idp1_id, filtered_service_list[0].get('id')) - - def test_check_idp_uniqueness(self): - """Add same IdP twice. - - Expect HTTP 409 Conflict code for the latter call. - - """ - url = self.base_url(suffix=uuid.uuid4().hex) - body = self._http_idp_input() - self.put(url, body={'identity_provider': body}, - expected_status=http_client.CREATED) - resp = self.put(url, body={'identity_provider': body}, - expected_status=http_client.CONFLICT) - - resp_data = jsonutils.loads(resp.body) - self.assertIn('Duplicate entry', - resp_data.get('error', {}).get('message')) - - def test_get_idp(self): - """Create and later fetch IdP.""" - body = self._http_idp_input() - default_resp = self._create_default_idp(body=body) - default_idp = self._fetch_attribute_from_response(default_resp, - 'identity_provider') - idp_id = default_idp.get('id') - url = self.base_url(suffix=idp_id) - resp = self.get(url) - self.assertValidResponse(resp, 'identity_provider', - dummy_validator, keys_to_check=body.keys(), - ref=body) - - def test_get_nonexisting_idp(self): - """Fetch nonexisting IdP entity. - - Expected HTTP 404 Not Found status code. - - """ - idp_id = uuid.uuid4().hex - self.assertIsNotNone(idp_id) - - url = self.base_url(suffix=idp_id) - self.get(url, expected_status=http_client.NOT_FOUND) - - def test_delete_existing_idp(self): - """Create and later delete IdP. - - Expect HTTP 404 Not Found for the GET IdP call. 
- """ - default_resp = self._create_default_idp() - default_idp = self._fetch_attribute_from_response(default_resp, - 'identity_provider') - idp_id = default_idp.get('id') - self.assertIsNotNone(idp_id) - url = self.base_url(suffix=idp_id) - self.delete(url) - self.get(url, expected_status=http_client.NOT_FOUND) - - def test_delete_idp_also_deletes_assigned_protocols(self): - """Deleting an IdP will delete its assigned protocol.""" - # create default IdP - default_resp = self._create_default_idp() - default_idp = self._fetch_attribute_from_response(default_resp, - 'identity_provider') - idp_id = default_idp['id'] - protocol_id = uuid.uuid4().hex - - url = self.base_url(suffix='%(idp_id)s/protocols/%(protocol_id)s') - idp_url = self.base_url(suffix=idp_id) - - # assign protocol to IdP - kwargs = {'expected_status': http_client.CREATED} - resp, idp_id, proto = self._assign_protocol_to_idp( - url=url, - idp_id=idp_id, - proto=protocol_id, - **kwargs) - - # removing IdP will remove the assigned protocol as well - self.assertEqual(1, len(self.federation_api.list_protocols(idp_id))) - self.delete(idp_url) - self.get(idp_url, expected_status=http_client.NOT_FOUND) - self.assertEqual(0, len(self.federation_api.list_protocols(idp_id))) - - def test_delete_nonexisting_idp(self): - """Delete nonexisting IdP. - - Expect HTTP 404 Not Found for the GET IdP call. 
- """ - idp_id = uuid.uuid4().hex - url = self.base_url(suffix=idp_id) - self.delete(url, expected_status=http_client.NOT_FOUND) - - def test_update_idp_mutable_attributes(self): - """Update IdP's mutable parameters.""" - default_resp = self._create_default_idp() - default_idp = self._fetch_attribute_from_response(default_resp, - 'identity_provider') - idp_id = default_idp.get('id') - url = self.base_url(suffix=idp_id) - self.assertIsNotNone(idp_id) - - _enabled = not default_idp.get('enabled') - body = {'remote_ids': [uuid.uuid4().hex, uuid.uuid4().hex], - 'description': uuid.uuid4().hex, - 'enabled': _enabled} - - body = {'identity_provider': body} - resp = self.patch(url, body=body) - updated_idp = self._fetch_attribute_from_response(resp, - 'identity_provider') - body = body['identity_provider'] - for key in body.keys(): - if isinstance(body[key], list): - self.assertEqual(sorted(body[key]), - sorted(updated_idp.get(key))) - else: - self.assertEqual(body[key], updated_idp.get(key)) - - resp = self.get(url) - updated_idp = self._fetch_attribute_from_response(resp, - 'identity_provider') - for key in body.keys(): - if isinstance(body[key], list): - self.assertEqual(sorted(body[key]), - sorted(updated_idp.get(key))) - else: - self.assertEqual(body[key], updated_idp.get(key)) - - def test_update_idp_immutable_attributes(self): - """Update IdP's immutable parameters. - - Expect HTTP BAD REQUEST. - - """ - default_resp = self._create_default_idp() - default_idp = self._fetch_attribute_from_response(default_resp, - 'identity_provider') - idp_id = default_idp.get('id') - self.assertIsNotNone(idp_id) - - body = self._http_idp_input() - body['id'] = uuid.uuid4().hex - body['protocols'] = [uuid.uuid4().hex, uuid.uuid4().hex] - - url = self.base_url(suffix=idp_id) - self.patch(url, body={'identity_provider': body}, - expected_status=http_client.BAD_REQUEST) - - def test_update_nonexistent_idp(self): - """Update nonexistent IdP - - Expect HTTP 404 Not Found code. 
- - """ - idp_id = uuid.uuid4().hex - url = self.base_url(suffix=idp_id) - body = self._http_idp_input() - body['enabled'] = False - body = {'identity_provider': body} - - self.patch(url, body=body, expected_status=http_client.NOT_FOUND) - - def test_assign_protocol_to_idp(self): - """Assign a protocol to existing IdP.""" - self._assign_protocol_to_idp(expected_status=http_client.CREATED) - - def test_protocol_composite_pk(self): - """Test that Keystone can add two entities. - - The entities have identical names, however, attached to different - IdPs. - - 1. Add IdP and assign it protocol with predefined name - 2. Add another IdP and assign it a protocol with same name. - - Expect HTTP 201 code - - """ - url = self.base_url(suffix='%(idp_id)s/protocols/%(protocol_id)s') - - kwargs = {'expected_status': http_client.CREATED} - self._assign_protocol_to_idp(proto='saml2', - url=url, **kwargs) - - self._assign_protocol_to_idp(proto='saml2', - url=url, **kwargs) - - def test_protocol_idp_pk_uniqueness(self): - """Test whether Keystone checks for unique idp/protocol values. - - Add same protocol twice, expect Keystone to reject a latter call and - return HTTP 409 Conflict code. - - """ - url = self.base_url(suffix='%(idp_id)s/protocols/%(protocol_id)s') - - kwargs = {'expected_status': http_client.CREATED} - resp, idp_id, proto = self._assign_protocol_to_idp(proto='saml2', - url=url, **kwargs) - kwargs = {'expected_status': http_client.CONFLICT} - resp, idp_id, proto = self._assign_protocol_to_idp(idp_id=idp_id, - proto='saml2', - validate=False, - url=url, **kwargs) - - def test_assign_protocol_to_nonexistent_idp(self): - """Assign protocol to IdP that doesn't exist. - - Expect HTTP 404 Not Found code. 
- - """ - idp_id = uuid.uuid4().hex - kwargs = {'expected_status': http_client.NOT_FOUND} - self._assign_protocol_to_idp(proto='saml2', - idp_id=idp_id, - validate=False, - **kwargs) - - def test_get_protocol(self): - """Create and later fetch protocol tied to IdP.""" - resp, idp_id, proto = self._assign_protocol_to_idp( - expected_status=http_client.CREATED) - proto_id = self._fetch_attribute_from_response(resp, 'protocol')['id'] - url = "%s/protocols/%s" % (idp_id, proto_id) - url = self.base_url(suffix=url) - - resp = self.get(url) - - reference = {'id': proto_id} - self.assertValidResponse(resp, 'protocol', - dummy_validator, - keys_to_check=reference.keys(), - ref=reference) - - def test_list_protocols(self): - """Create set of protocols and later list them. - - Compare input and output id sets. - - """ - resp, idp_id, proto = self._assign_protocol_to_idp( - expected_status=http_client.CREATED) - iterations = random.randint(0, 16) - protocol_ids = [] - for _ in range(iterations): - resp, _, proto = self._assign_protocol_to_idp( - idp_id=idp_id, - expected_status=http_client.CREATED) - proto_id = self._fetch_attribute_from_response(resp, 'protocol') - proto_id = proto_id['id'] - protocol_ids.append(proto_id) - - url = "%s/protocols" % idp_id - url = self.base_url(suffix=url) - resp = self.get(url) - self.assertValidListResponse(resp, 'protocols', - dummy_validator, - keys_to_check=['id']) - entities = self._fetch_attribute_from_response(resp, 'protocols') - entities = set([entity['id'] for entity in entities]) - protocols_intersection = entities.intersection(protocol_ids) - self.assertEqual(protocols_intersection, set(protocol_ids)) - - def test_update_protocols_attribute(self): - """Update protocol's attribute.""" - resp, idp_id, proto = self._assign_protocol_to_idp( - expected_status=http_client.CREATED) - new_mapping_id = uuid.uuid4().hex - - url = "%s/protocols/%s" % (idp_id, proto) - url = self.base_url(suffix=url) - body = {'mapping_id': new_mapping_id} - 
resp = self.patch(url, body={'protocol': body}) - self.assertValidResponse(resp, 'protocol', dummy_validator, - keys_to_check=['id', 'mapping_id'], - ref={'id': proto, - 'mapping_id': new_mapping_id} - ) - - def test_delete_protocol(self): - """Delete protocol. - - Expect HTTP 404 Not Found code for the GET call after the protocol is - deleted. - - """ - url = self.base_url(suffix='/%(idp_id)s/' - 'protocols/%(protocol_id)s') - resp, idp_id, proto = self._assign_protocol_to_idp( - expected_status=http_client.CREATED) - url = url % {'idp_id': idp_id, - 'protocol_id': proto} - self.delete(url) - self.get(url, expected_status=http_client.NOT_FOUND) - - -class MappingCRUDTests(test_v3.RestfulTestCase): - """A class for testing CRUD operations for Mappings.""" - - MAPPING_URL = '/OS-FEDERATION/mappings/' - - def assertValidMappingListResponse(self, resp, *args, **kwargs): - return self.assertValidListResponse( - resp, - 'mappings', - self.assertValidMapping, - keys_to_check=[], - *args, - **kwargs) - - def assertValidMappingResponse(self, resp, *args, **kwargs): - return self.assertValidResponse( - resp, - 'mapping', - self.assertValidMapping, - keys_to_check=[], - *args, - **kwargs) - - def assertValidMapping(self, entity, ref=None): - self.assertIsNotNone(entity.get('id')) - self.assertIsNotNone(entity.get('rules')) - if ref: - self.assertEqual(entity['rules'], ref['rules']) - return entity - - def _create_default_mapping_entry(self): - url = self.MAPPING_URL + uuid.uuid4().hex - resp = self.put(url, - body={'mapping': mapping_fixtures.MAPPING_LARGE}, - expected_status=http_client.CREATED) - return resp - - def _get_id_from_response(self, resp): - r = resp.result.get('mapping') - return r.get('id') - - def test_mapping_create(self): - resp = self._create_default_mapping_entry() - self.assertValidMappingResponse(resp, mapping_fixtures.MAPPING_LARGE) - - def test_mapping_list(self): - url = self.MAPPING_URL - self._create_default_mapping_entry() - resp = self.get(url) - 
entities = resp.result.get('mappings') - self.assertIsNotNone(entities) - self.assertResponseStatus(resp, http_client.OK) - self.assertValidListLinks(resp.result.get('links')) - self.assertEqual(1, len(entities)) - - def test_mapping_delete(self): - url = self.MAPPING_URL + '%(mapping_id)s' - resp = self._create_default_mapping_entry() - mapping_id = self._get_id_from_response(resp) - url = url % {'mapping_id': str(mapping_id)} - resp = self.delete(url) - self.assertResponseStatus(resp, http_client.NO_CONTENT) - self.get(url, expected_status=http_client.NOT_FOUND) - - def test_mapping_get(self): - url = self.MAPPING_URL + '%(mapping_id)s' - resp = self._create_default_mapping_entry() - mapping_id = self._get_id_from_response(resp) - url = url % {'mapping_id': mapping_id} - resp = self.get(url) - self.assertValidMappingResponse(resp, mapping_fixtures.MAPPING_LARGE) - - def test_mapping_update(self): - url = self.MAPPING_URL + '%(mapping_id)s' - resp = self._create_default_mapping_entry() - mapping_id = self._get_id_from_response(resp) - url = url % {'mapping_id': mapping_id} - resp = self.patch(url, - body={'mapping': mapping_fixtures.MAPPING_SMALL}) - self.assertValidMappingResponse(resp, mapping_fixtures.MAPPING_SMALL) - resp = self.get(url) - self.assertValidMappingResponse(resp, mapping_fixtures.MAPPING_SMALL) - - def test_delete_mapping_dne(self): - url = self.MAPPING_URL + uuid.uuid4().hex - self.delete(url, expected_status=http_client.NOT_FOUND) - - def test_get_mapping_dne(self): - url = self.MAPPING_URL + uuid.uuid4().hex - self.get(url, expected_status=http_client.NOT_FOUND) - - def test_create_mapping_bad_requirements(self): - url = self.MAPPING_URL + uuid.uuid4().hex - self.put(url, expected_status=http_client.BAD_REQUEST, - body={'mapping': mapping_fixtures.MAPPING_BAD_REQ}) - - def test_create_mapping_no_rules(self): - url = self.MAPPING_URL + uuid.uuid4().hex - self.put(url, expected_status=http_client.BAD_REQUEST, - body={'mapping': 
mapping_fixtures.MAPPING_NO_RULES}) - - def test_create_mapping_no_remote_objects(self): - url = self.MAPPING_URL + uuid.uuid4().hex - self.put(url, expected_status=http_client.BAD_REQUEST, - body={'mapping': mapping_fixtures.MAPPING_NO_REMOTE}) - - def test_create_mapping_bad_value(self): - url = self.MAPPING_URL + uuid.uuid4().hex - self.put(url, expected_status=http_client.BAD_REQUEST, - body={'mapping': mapping_fixtures.MAPPING_BAD_VALUE}) - - def test_create_mapping_missing_local(self): - url = self.MAPPING_URL + uuid.uuid4().hex - self.put(url, expected_status=http_client.BAD_REQUEST, - body={'mapping': mapping_fixtures.MAPPING_MISSING_LOCAL}) - - def test_create_mapping_missing_type(self): - url = self.MAPPING_URL + uuid.uuid4().hex - self.put(url, expected_status=http_client.BAD_REQUEST, - body={'mapping': mapping_fixtures.MAPPING_MISSING_TYPE}) - - def test_create_mapping_wrong_type(self): - url = self.MAPPING_URL + uuid.uuid4().hex - self.put(url, expected_status=http_client.BAD_REQUEST, - body={'mapping': mapping_fixtures.MAPPING_WRONG_TYPE}) - - def test_create_mapping_extra_remote_properties_not_any_of(self): - url = self.MAPPING_URL + uuid.uuid4().hex - mapping = mapping_fixtures.MAPPING_EXTRA_REMOTE_PROPS_NOT_ANY_OF - self.put(url, expected_status=http_client.BAD_REQUEST, - body={'mapping': mapping}) - - def test_create_mapping_extra_remote_properties_any_one_of(self): - url = self.MAPPING_URL + uuid.uuid4().hex - mapping = mapping_fixtures.MAPPING_EXTRA_REMOTE_PROPS_ANY_ONE_OF - self.put(url, expected_status=http_client.BAD_REQUEST, - body={'mapping': mapping}) - - def test_create_mapping_extra_remote_properties_just_type(self): - url = self.MAPPING_URL + uuid.uuid4().hex - mapping = mapping_fixtures.MAPPING_EXTRA_REMOTE_PROPS_JUST_TYPE - self.put(url, expected_status=http_client.BAD_REQUEST, - body={'mapping': mapping}) - - def test_create_mapping_empty_map(self): - url = self.MAPPING_URL + uuid.uuid4().hex - self.put(url, 
expected_status=http_client.BAD_REQUEST, - body={'mapping': {}}) - - def test_create_mapping_extra_rules_properties(self): - url = self.MAPPING_URL + uuid.uuid4().hex - self.put(url, expected_status=http_client.BAD_REQUEST, - body={'mapping': mapping_fixtures.MAPPING_EXTRA_RULES_PROPS}) - - def test_create_mapping_with_blacklist_and_whitelist(self): - """Test for adding whitelist and blacklist in the rule - - Server should respond with HTTP 400 Bad Request error upon discovering - both ``whitelist`` and ``blacklist`` keywords in the same rule. - - """ - url = self.MAPPING_URL + uuid.uuid4().hex - mapping = mapping_fixtures.MAPPING_GROUPS_WHITELIST_AND_BLACKLIST - self.put(url, expected_status=http_client.BAD_REQUEST, - body={'mapping': mapping}) - - def test_create_mapping_with_local_user_and_local_domain(self): - url = self.MAPPING_URL + uuid.uuid4().hex - resp = self.put( - url, - body={ - 'mapping': mapping_fixtures.MAPPING_LOCAL_USER_LOCAL_DOMAIN - }, - expected_status=http_client.CREATED) - self.assertValidMappingResponse( - resp, mapping_fixtures.MAPPING_LOCAL_USER_LOCAL_DOMAIN) - - def test_create_mapping_with_ephemeral(self): - url = self.MAPPING_URL + uuid.uuid4().hex - resp = self.put( - url, - body={'mapping': mapping_fixtures.MAPPING_EPHEMERAL_USER}, - expected_status=http_client.CREATED) - self.assertValidMappingResponse( - resp, mapping_fixtures.MAPPING_EPHEMERAL_USER) - - def test_create_mapping_with_bad_user_type(self): - url = self.MAPPING_URL + uuid.uuid4().hex - # get a copy of a known good map - bad_mapping = copy.deepcopy(mapping_fixtures.MAPPING_EPHEMERAL_USER) - # now sabotage the user type - bad_mapping['rules'][0]['local'][0]['user']['type'] = uuid.uuid4().hex - self.put(url, expected_status=http_client.BAD_REQUEST, - body={'mapping': bad_mapping}) - - -class FederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): - - def auth_plugin_config_override(self): - methods = ['saml2'] - super(FederatedTokenTests, 
self).auth_plugin_config_override(methods) - - def setUp(self): - super(FederatedTokenTests, self).setUp() - self._notifications = [] - - def fake_saml_notify(action, context, user_id, group_ids, - identity_provider, protocol, token_id, outcome): - note = { - 'action': action, - 'user_id': user_id, - 'identity_provider': identity_provider, - 'protocol': protocol, - 'send_notification_called': True} - self._notifications.append(note) - - self.useFixture(mockpatch.PatchObject( - notifications, - 'send_saml_audit_notification', - fake_saml_notify)) - - def _assert_last_notify(self, action, identity_provider, protocol, - user_id=None): - self.assertTrue(self._notifications) - note = self._notifications[-1] - if user_id: - self.assertEqual(note['user_id'], user_id) - self.assertEqual(note['action'], action) - self.assertEqual(note['identity_provider'], identity_provider) - self.assertEqual(note['protocol'], protocol) - self.assertTrue(note['send_notification_called']) - - def load_fixtures(self, fixtures): - super(FederatedTokenTests, self).load_fixtures(fixtures) - self.load_federation_sample_data() - - def test_issue_unscoped_token_notify(self): - self._issue_unscoped_token() - self._assert_last_notify(self.ACTION, self.IDP, self.PROTOCOL) - - def test_issue_unscoped_token(self): - r = self._issue_unscoped_token() - self.assertIsNotNone(r.headers.get('X-Subject-Token')) - self.assertValidMappedUser(r.json['token']) - - def test_issue_unscoped_token_disabled_idp(self): - """Checks if authentication works with disabled identity providers. 
- - Test plan: - 1) Disable default IdP - 2) Try issuing unscoped token for that IdP - 3) Expect server to forbid authentication - - """ - enabled_false = {'enabled': False} - self.federation_api.update_idp(self.IDP, enabled_false) - self.assertRaises(exception.Forbidden, - self._issue_unscoped_token) - - def test_issue_unscoped_token_group_names_in_mapping(self): - r = self._issue_unscoped_token(assertion='ANOTHER_CUSTOMER_ASSERTION') - ref_groups = set([self.group_customers['id'], self.group_admins['id']]) - token_resp = r.json_body - token_groups = token_resp['token']['user']['OS-FEDERATION']['groups'] - token_groups = set([group['id'] for group in token_groups]) - self.assertEqual(ref_groups, token_groups) - - def test_issue_unscoped_tokens_nonexisting_group(self): - self.assertRaises(exception.MissingGroups, - self._issue_unscoped_token, - assertion='ANOTHER_TESTER_ASSERTION') - - def test_issue_unscoped_token_with_remote_no_attribute(self): - r = self._issue_unscoped_token(idp=self.IDP_WITH_REMOTE, - environment={ - self.REMOTE_ID_ATTR: - self.REMOTE_IDS[0] - }) - self.assertIsNotNone(r.headers.get('X-Subject-Token')) - - def test_issue_unscoped_token_with_remote(self): - self.config_fixture.config(group='federation', - remote_id_attribute=self.REMOTE_ID_ATTR) - r = self._issue_unscoped_token(idp=self.IDP_WITH_REMOTE, - environment={ - self.REMOTE_ID_ATTR: - self.REMOTE_IDS[0] - }) - self.assertIsNotNone(r.headers.get('X-Subject-Token')) - - def test_issue_unscoped_token_with_saml2_remote(self): - self.config_fixture.config(group='saml2', - remote_id_attribute=self.REMOTE_ID_ATTR) - r = self._issue_unscoped_token(idp=self.IDP_WITH_REMOTE, - environment={ - self.REMOTE_ID_ATTR: - self.REMOTE_IDS[0] - }) - self.assertIsNotNone(r.headers.get('X-Subject-Token')) - - def test_issue_unscoped_token_with_remote_different(self): - self.config_fixture.config(group='federation', - remote_id_attribute=self.REMOTE_ID_ATTR) - self.assertRaises(exception.Forbidden, - 
self._issue_unscoped_token, - idp=self.IDP_WITH_REMOTE, - environment={ - self.REMOTE_ID_ATTR: uuid.uuid4().hex - }) - - def test_issue_unscoped_token_with_remote_default_overwritten(self): - """Test that protocol remote_id_attribute has higher priority. - - Make sure the parameter stored under ``protocol`` section has higher - priority over parameter from default ``federation`` configuration - section. - - """ - self.config_fixture.config(group='saml2', - remote_id_attribute=self.REMOTE_ID_ATTR) - self.config_fixture.config(group='federation', - remote_id_attribute=uuid.uuid4().hex) - r = self._issue_unscoped_token(idp=self.IDP_WITH_REMOTE, - environment={ - self.REMOTE_ID_ATTR: - self.REMOTE_IDS[0] - }) - self.assertIsNotNone(r.headers.get('X-Subject-Token')) - - def test_issue_unscoped_token_with_remote_unavailable(self): - self.config_fixture.config(group='federation', - remote_id_attribute=self.REMOTE_ID_ATTR) - self.assertRaises(exception.Unauthorized, - self._issue_unscoped_token, - idp=self.IDP_WITH_REMOTE, - environment={ - uuid.uuid4().hex: uuid.uuid4().hex - }) - - def test_issue_unscoped_token_with_remote_user_as_empty_string(self): - # make sure that REMOTE_USER set as the empty string won't interfere - r = self._issue_unscoped_token(environment={'REMOTE_USER': ''}) - self.assertIsNotNone(r.headers.get('X-Subject-Token')) - - def test_issue_unscoped_token_no_groups(self): - self.assertRaises(exception.Unauthorized, - self._issue_unscoped_token, - assertion='BAD_TESTER_ASSERTION') - - def test_issue_unscoped_token_malformed_environment(self): - """Test whether non string objects are filtered out. - - Put non string objects into the environment, inject - correct assertion and try to get an unscoped token. - Expect server not to fail on using split() method on - non string objects and return token id in the HTTP header. 
- - """ - api = auth_controllers.Auth() - context = { - 'environment': { - 'malformed_object': object(), - 'another_bad_idea': tuple(range(10)), - 'yet_another_bad_param': dict(zip(uuid.uuid4().hex, - range(32))) - } - } - self._inject_assertion(context, 'EMPLOYEE_ASSERTION') - r = api.authenticate_for_token(context, self.UNSCOPED_V3_SAML2_REQ) - self.assertIsNotNone(r.headers.get('X-Subject-Token')) - - def test_scope_to_project_once_notify(self): - r = self.v3_create_token( - self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_EMPLOYEE) - user_id = r.json['token']['user']['id'] - self._assert_last_notify(self.ACTION, self.IDP, self.PROTOCOL, user_id) - - def test_scope_to_project_once(self): - r = self.v3_create_token( - self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_EMPLOYEE) - token_resp = r.result['token'] - project_id = token_resp['project']['id'] - self._check_project_scoped_token_attributes(token_resp, project_id) - roles_ref = [self.role_employee] - - projects_ref = self.proj_employees - self._check_projects_and_roles(token_resp, roles_ref, projects_ref) - self.assertValidMappedUser(token_resp) - - def test_scope_token_with_idp_disabled(self): - """Scope token issued by disabled IdP. - - Try scoping the token issued by an IdP which is disabled now. Expect - server to refuse scoping operation. - - This test confirms correct behaviour when IdP was enabled and unscoped - token was issued, but disabled before user tries to scope the token. - Here we assume the unscoped token was already issued and start from - the moment where IdP is being disabled and unscoped token is being - used. 
- - Test plan: - 1) Disable IdP - 2) Try scoping unscoped token - - """ - enabled_false = {'enabled': False} - self.federation_api.update_idp(self.IDP, enabled_false) - self.v3_create_token( - self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_CUSTOMER, - expected_status=http_client.FORBIDDEN) - - def test_scope_to_bad_project(self): - """Scope unscoped token with a project we don't have access to.""" - self.v3_create_token( - self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_CUSTOMER, - expected_status=http_client.UNAUTHORIZED) - - def test_scope_to_project_multiple_times(self): - """Try to scope the unscoped token multiple times. - - The new tokens should be scoped to: - - * Customers' project - * Employees' project - - """ - bodies = (self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_ADMIN, - self.TOKEN_SCOPE_PROJECT_CUSTOMER_FROM_ADMIN) - project_ids = (self.proj_employees['id'], - self.proj_customers['id']) - for body, project_id_ref in zip(bodies, project_ids): - r = self.v3_create_token(body) - token_resp = r.result['token'] - self._check_project_scoped_token_attributes(token_resp, - project_id_ref) - - def test_scope_to_project_with_only_inherited_roles(self): - """Try to scope token whose only roles are inherited.""" - self.config_fixture.config(group='os_inherit', enabled=True) - r = self.v3_create_token( - self.TOKEN_SCOPE_PROJECT_INHERITED_FROM_CUSTOMER) - token_resp = r.result['token'] - self._check_project_scoped_token_attributes( - token_resp, self.project_inherited['id']) - roles_ref = [self.role_customer] - projects_ref = self.project_inherited - self._check_projects_and_roles(token_resp, roles_ref, projects_ref) - self.assertValidMappedUser(token_resp) - - def test_scope_token_from_nonexistent_unscoped_token(self): - """Try to scope token from non-existent unscoped token.""" - self.v3_create_token( - self.TOKEN_SCOPE_PROJECT_FROM_NONEXISTENT_TOKEN, - expected_status=http_client.NOT_FOUND) - - def test_issue_token_from_rules_without_user(self): - api = auth_controllers.Auth() - 
context = {'environment': {}} - self._inject_assertion(context, 'BAD_TESTER_ASSERTION') - self.assertRaises(exception.Unauthorized, - api.authenticate_for_token, - context, self.UNSCOPED_V3_SAML2_REQ) - - def test_issue_token_with_nonexistent_group(self): - """Inject assertion that matches rule issuing bad group id. - - Expect server to find out that some groups are missing in the - backend and raise exception.MappedGroupNotFound exception. - - """ - self.assertRaises(exception.MappedGroupNotFound, - self._issue_unscoped_token, - assertion='CONTRACTOR_ASSERTION') - - def test_scope_to_domain_once(self): - r = self.v3_create_token(self.TOKEN_SCOPE_DOMAIN_A_FROM_CUSTOMER) - token_resp = r.result['token'] - self._check_domain_scoped_token_attributes(token_resp, - self.domainA['id']) - - def test_scope_to_domain_multiple_tokens(self): - """Issue multiple tokens scoping to different domains. - - The new tokens should be scoped to: - - * domainA - * domainB - * domainC - - """ - bodies = (self.TOKEN_SCOPE_DOMAIN_A_FROM_ADMIN, - self.TOKEN_SCOPE_DOMAIN_B_FROM_ADMIN, - self.TOKEN_SCOPE_DOMAIN_C_FROM_ADMIN) - domain_ids = (self.domainA['id'], - self.domainB['id'], - self.domainC['id']) - - for body, domain_id_ref in zip(bodies, domain_ids): - r = self.v3_create_token(body) - token_resp = r.result['token'] - self._check_domain_scoped_token_attributes(token_resp, - domain_id_ref) - - def test_scope_to_domain_with_only_inherited_roles_fails(self): - """Try to scope to a domain that has no direct roles.""" - self.v3_create_token( - self.TOKEN_SCOPE_DOMAIN_D_FROM_CUSTOMER, - expected_status=http_client.UNAUTHORIZED) - - def test_list_projects(self): - urls = ('/OS-FEDERATION/projects', '/auth/projects') - - token = (self.tokens['CUSTOMER_ASSERTION'], - self.tokens['EMPLOYEE_ASSERTION'], - self.tokens['ADMIN_ASSERTION']) - - self.config_fixture.config(group='os_inherit', enabled=True) - projects_refs = (set([self.proj_customers['id'], - self.project_inherited['id']]), - 
set([self.proj_employees['id'], - self.project_all['id']]), - set([self.proj_employees['id'], - self.project_all['id'], - self.proj_customers['id'], - self.project_inherited['id']])) - - for token, projects_ref in zip(token, projects_refs): - for url in urls: - r = self.get(url, token=token) - projects_resp = r.result['projects'] - projects = set(p['id'] for p in projects_resp) - self.assertEqual(projects_ref, projects, - 'match failed for url %s' % url) - - # TODO(samueldmq): Create another test class for role inheritance tests. - # The advantage would be to reduce the complexity of this test class and - # have tests specific to this functionality grouped, easing readability and - # maintenability. - def test_list_projects_for_inherited_project_assignment(self): - # Enable os_inherit extension - self.config_fixture.config(group='os_inherit', enabled=True) - - # Create a subproject - subproject_inherited = unit.new_project_ref( - domain_id=self.domainD['id'], - parent_id=self.project_inherited['id']) - self.resource_api.create_project(subproject_inherited['id'], - subproject_inherited) - - # Create an inherited role assignment - self.assignment_api.create_grant( - role_id=self.role_employee['id'], - group_id=self.group_employees['id'], - project_id=self.project_inherited['id'], - inherited_to_projects=True) - - # Define expected projects from employee assertion, which contain - # the created subproject - expected_project_ids = [self.project_all['id'], - self.proj_employees['id'], - subproject_inherited['id']] - - # Assert expected projects for both available URLs - for url in ('/OS-FEDERATION/projects', '/auth/projects'): - r = self.get(url, token=self.tokens['EMPLOYEE_ASSERTION']) - project_ids = [project['id'] for project in r.result['projects']] - - self.assertEqual(len(expected_project_ids), len(project_ids)) - for expected_project_id in expected_project_ids: - self.assertIn(expected_project_id, project_ids, - 'Projects match failed for url %s' % url) - - def 
test_list_domains(self): - urls = ('/OS-FEDERATION/domains', '/auth/domains') - - tokens = (self.tokens['CUSTOMER_ASSERTION'], - self.tokens['EMPLOYEE_ASSERTION'], - self.tokens['ADMIN_ASSERTION']) - - # NOTE(henry-nash): domain D does not appear in the expected results - # since it only had inherited roles (which only apply to projects - # within the domain) - - domain_refs = (set([self.domainA['id']]), - set([self.domainA['id'], - self.domainB['id']]), - set([self.domainA['id'], - self.domainB['id'], - self.domainC['id']])) - - for token, domains_ref in zip(tokens, domain_refs): - for url in urls: - r = self.get(url, token=token) - domains_resp = r.result['domains'] - domains = set(p['id'] for p in domains_resp) - self.assertEqual(domains_ref, domains, - 'match failed for url %s' % url) - - @utils.wip('This will fail because of bug #1501032. The returned method' - 'list should contain "saml2". This is documented in bug ' - '1501032.') - def test_full_workflow(self): - """Test 'standard' workflow for granting access tokens. - - * Issue unscoped token - * List available projects based on groups - * Scope token to one of available projects - - """ - r = self._issue_unscoped_token() - token_resp = r.json_body['token'] - # NOTE(lbragstad): Ensure only 'saml2' is in the method list. - self.assertListEqual(['saml2'], token_resp['methods']) - self.assertValidMappedUser(token_resp) - employee_unscoped_token_id = r.headers.get('X-Subject-Token') - r = self.get('/auth/projects', token=employee_unscoped_token_id) - projects = r.result['projects'] - random_project = random.randint(0, len(projects)) - 1 - project = projects[random_project] - - v3_scope_request = self._scope_request(employee_unscoped_token_id, - 'project', project['id']) - - r = self.v3_create_token(v3_scope_request) - token_resp = r.result['token'] - # FIXME(lbragstad): 'token' should be in the list of methods returned - # but it isn't. This is documented in bug 1501032. 
- self.assertIn('token', token_resp['methods']) - self.assertIn('saml2', token_resp['methods']) - self._check_project_scoped_token_attributes(token_resp, project['id']) - - def test_workflow_with_groups_deletion(self): - """Test full workflow with groups deletion before token scoping. - - The test scenario is as follows: - - Create group ``group`` - - Create and assign roles to ``group`` and ``project_all`` - - Patch mapping rules for existing IdP so it issues group id - - Issue unscoped token with ``group``'s id - - Delete group ``group`` - - Scope token to ``project_all`` - - Expect HTTP 500 response - - """ - # create group and role - group = unit.new_group_ref(domain_id=self.domainA['id']) - group = self.identity_api.create_group(group) - role = unit.new_role_ref() - self.role_api.create_role(role['id'], role) - - # assign role to group and project_admins - self.assignment_api.create_grant(role['id'], - group_id=group['id'], - project_id=self.project_all['id']) - - rules = { - 'rules': [ - { - 'local': [ - { - 'group': { - 'id': group['id'] - } - }, - { - 'user': { - 'name': '{0}' - } - } - ], - 'remote': [ - { - 'type': 'UserName' - }, - { - 'type': 'LastName', - 'any_one_of': [ - 'Account' - ] - } - ] - } - ] - } - - self.federation_api.update_mapping(self.mapping['id'], rules) - - r = self._issue_unscoped_token(assertion='TESTER_ASSERTION') - token_id = r.headers.get('X-Subject-Token') - - # delete group - self.identity_api.delete_group(group['id']) - - # scope token to project_all, expect HTTP 500 - scoped_token = self._scope_request( - token_id, 'project', - self.project_all['id']) - - self.v3_create_token( - scoped_token, expected_status=http_client.INTERNAL_SERVER_ERROR) - - def test_lists_with_missing_group_in_backend(self): - """Test a mapping that points to a group that does not exist - - For explicit mappings, we expect the group to exist in the backend, - but for lists, specifically blacklists, a missing group is expected - as many groups will be 
specified by the IdP that are not Keystone - groups. - - The test scenario is as follows: - - Create group ``EXISTS`` - - Set mapping rules for existing IdP with a blacklist - that passes through as REMOTE_USER_GROUPS - - Issue unscoped token with on group ``EXISTS`` id in it - - """ - domain_id = self.domainA['id'] - domain_name = self.domainA['name'] - group = unit.new_group_ref(domain_id=domain_id, name='EXISTS') - group = self.identity_api.create_group(group) - rules = { - 'rules': [ - { - "local": [ - { - "user": { - "name": "{0}", - "id": "{0}" - } - } - ], - "remote": [ - { - "type": "REMOTE_USER" - } - ] - }, - { - "local": [ - { - "groups": "{0}", - "domain": {"name": domain_name} - } - ], - "remote": [ - { - "type": "REMOTE_USER_GROUPS", - } - ] - } - ] - } - self.federation_api.update_mapping(self.mapping['id'], rules) - - def test_empty_blacklist_passess_all_values(self): - """Test a mapping with empty blacklist specified - - Not adding a ``blacklist`` keyword to the mapping rules has the same - effect as adding an empty ``blacklist``. - In both cases, the mapping engine will not discard any groups that are - associated with apache environment variables. - - This test checks scenario where an empty blacklist was specified. - Expected result is to allow any value. 
- - The test scenario is as follows: - - Create group ``EXISTS`` - - Create group ``NO_EXISTS`` - - Set mapping rules for existing IdP with a blacklist - that passes through as REMOTE_USER_GROUPS - - Issue unscoped token with groups ``EXISTS`` and ``NO_EXISTS`` - assigned - - """ - domain_id = self.domainA['id'] - domain_name = self.domainA['name'] - - # Add a group "EXISTS" - group_exists = unit.new_group_ref(domain_id=domain_id, name='EXISTS') - group_exists = self.identity_api.create_group(group_exists) - - # Add a group "NO_EXISTS" - group_no_exists = unit.new_group_ref(domain_id=domain_id, - name='NO_EXISTS') - group_no_exists = self.identity_api.create_group(group_no_exists) - - group_ids = set([group_exists['id'], group_no_exists['id']]) - - rules = { - 'rules': [ - { - "local": [ - { - "user": { - "name": "{0}", - "id": "{0}" - } - } - ], - "remote": [ - { - "type": "REMOTE_USER" - } - ] - }, - { - "local": [ - { - "groups": "{0}", - "domain": {"name": domain_name} - } - ], - "remote": [ - { - "type": "REMOTE_USER_GROUPS", - "blacklist": [] - } - ] - } - ] - } - self.federation_api.update_mapping(self.mapping['id'], rules) - r = self._issue_unscoped_token(assertion='UNMATCHED_GROUP_ASSERTION') - assigned_group_ids = r.json['token']['user']['OS-FEDERATION']['groups'] - self.assertEqual(len(group_ids), len(assigned_group_ids)) - for group in assigned_group_ids: - self.assertIn(group['id'], group_ids) - - def test_not_adding_blacklist_passess_all_values(self): - """Test a mapping without blacklist specified. - - Not adding a ``blacklist`` keyword to the mapping rules has the same - effect as adding an empty ``blacklist``. In both cases all values will - be accepted and passed. - - This test checks scenario where an blacklist was not specified. - Expected result is to allow any value. 
- - The test scenario is as follows: - - Create group ``EXISTS`` - - Create group ``NO_EXISTS`` - - Set mapping rules for existing IdP with a blacklist - that passes through as REMOTE_USER_GROUPS - - Issue unscoped token with on groups ``EXISTS`` and ``NO_EXISTS`` - assigned - - """ - domain_id = self.domainA['id'] - domain_name = self.domainA['name'] - - # Add a group "EXISTS" - group_exists = unit.new_group_ref(domain_id=domain_id, - name='EXISTS') - group_exists = self.identity_api.create_group(group_exists) - - # Add a group "NO_EXISTS" - group_no_exists = unit.new_group_ref(domain_id=domain_id, - name='NO_EXISTS') - group_no_exists = self.identity_api.create_group(group_no_exists) - - group_ids = set([group_exists['id'], group_no_exists['id']]) - - rules = { - 'rules': [ - { - "local": [ - { - "user": { - "name": "{0}", - "id": "{0}" - } - } - ], - "remote": [ - { - "type": "REMOTE_USER" - } - ] - }, - { - "local": [ - { - "groups": "{0}", - "domain": {"name": domain_name} - } - ], - "remote": [ - { - "type": "REMOTE_USER_GROUPS", - } - ] - } - ] - } - self.federation_api.update_mapping(self.mapping['id'], rules) - r = self._issue_unscoped_token(assertion='UNMATCHED_GROUP_ASSERTION') - assigned_group_ids = r.json['token']['user']['OS-FEDERATION']['groups'] - self.assertEqual(len(group_ids), len(assigned_group_ids)) - for group in assigned_group_ids: - self.assertIn(group['id'], group_ids) - - def test_empty_whitelist_discards_all_values(self): - """Test that empty whitelist blocks all the values - - Not adding a ``whitelist`` keyword to the mapping value is different - than adding empty whitelist. The former case will simply pass all the - values, whereas the latter would discard all the values. - - This test checks scenario where an empty whitelist was specified. - The expected result is that no groups are matched. 
- - The test scenario is as follows: - - Create group ``EXISTS`` - - Set mapping rules for existing IdP with an empty whitelist - that whould discard any values from the assertion - - Try issuing unscoped token, expect server to raise - ``exception.MissingGroups`` as no groups were matched and ephemeral - user does not have any group assigned. - - """ - domain_id = self.domainA['id'] - domain_name = self.domainA['name'] - group = unit.new_group_ref(domain_id=domain_id, name='EXISTS') - group = self.identity_api.create_group(group) - rules = { - 'rules': [ - { - "local": [ - { - "user": { - "name": "{0}", - "id": "{0}" - } - } - ], - "remote": [ - { - "type": "REMOTE_USER" - } - ] - }, - { - "local": [ - { - "groups": "{0}", - "domain": {"name": domain_name} - } - ], - "remote": [ - { - "type": "REMOTE_USER_GROUPS", - "whitelist": [] - } - ] - } - ] - } - self.federation_api.update_mapping(self.mapping['id'], rules) - - self.assertRaises(exception.MissingGroups, - self._issue_unscoped_token, - assertion='UNMATCHED_GROUP_ASSERTION') - - def test_not_setting_whitelist_accepts_all_values(self): - """Test that not setting whitelist passes - - Not adding a ``whitelist`` keyword to the mapping value is different - than adding empty whitelist. The former case will simply pass all the - values, whereas the latter would discard all the values. - - This test checks a scenario where a ``whitelist`` was not specified. - Expected result is that no groups are ignored. - - The test scenario is as follows: - - Create group ``EXISTS`` - - Set mapping rules for existing IdP with an empty whitelist - that whould discard any values from the assertion - - Issue an unscoped token and make sure ephemeral user is a member of - two groups. 
- - """ - domain_id = self.domainA['id'] - domain_name = self.domainA['name'] - - # Add a group "EXISTS" - group_exists = unit.new_group_ref(domain_id=domain_id, - name='EXISTS') - group_exists = self.identity_api.create_group(group_exists) - - # Add a group "NO_EXISTS" - group_no_exists = unit.new_group_ref(domain_id=domain_id, - name='NO_EXISTS') - group_no_exists = self.identity_api.create_group(group_no_exists) - - group_ids = set([group_exists['id'], group_no_exists['id']]) - - rules = { - 'rules': [ - { - "local": [ - { - "user": { - "name": "{0}", - "id": "{0}" - } - } - ], - "remote": [ - { - "type": "REMOTE_USER" - } - ] - }, - { - "local": [ - { - "groups": "{0}", - "domain": {"name": domain_name} - } - ], - "remote": [ - { - "type": "REMOTE_USER_GROUPS", - } - ] - } - ] - } - self.federation_api.update_mapping(self.mapping['id'], rules) - r = self._issue_unscoped_token(assertion='UNMATCHED_GROUP_ASSERTION') - assigned_group_ids = r.json['token']['user']['OS-FEDERATION']['groups'] - self.assertEqual(len(group_ids), len(assigned_group_ids)) - for group in assigned_group_ids: - self.assertIn(group['id'], group_ids) - - def test_assertion_prefix_parameter(self): - """Test parameters filtering based on the prefix. - - With ``assertion_prefix`` set to fixed, non default value, - issue an unscoped token from assertion EMPLOYEE_ASSERTION_PREFIXED. - Expect server to return unscoped token. - - """ - self.config_fixture.config(group='federation', - assertion_prefix=self.ASSERTION_PREFIX) - r = self._issue_unscoped_token(assertion='EMPLOYEE_ASSERTION_PREFIXED') - self.assertIsNotNone(r.headers.get('X-Subject-Token')) - - def test_assertion_prefix_parameter_expect_fail(self): - """Test parameters filtering based on the prefix. - - With ``assertion_prefix`` default value set to empty string - issue an unscoped token from assertion EMPLOYEE_ASSERTION. - Next, configure ``assertion_prefix`` to value ``UserName``. - Try issuing unscoped token with EMPLOYEE_ASSERTION. 
- Expect server to raise exception.Unathorized exception. - - """ - r = self._issue_unscoped_token() - self.assertIsNotNone(r.headers.get('X-Subject-Token')) - self.config_fixture.config(group='federation', - assertion_prefix='UserName') - - self.assertRaises(exception.Unauthorized, - self._issue_unscoped_token) - - def test_v2_auth_with_federation_token_fails(self): - """Test that using a federation token with v2 auth fails. - - If an admin sets up a federated Keystone environment, and a user - incorrectly configures a service (like Nova) to only use v2 auth, the - returned message should be informative. - - """ - r = self._issue_unscoped_token() - token_id = r.headers.get('X-Subject-Token') - self.assertRaises(exception.Unauthorized, - self.token_provider_api.validate_v2_token, - token_id=token_id) - - def test_unscoped_token_has_user_domain(self): - r = self._issue_unscoped_token() - self._check_domains_are_valid(r.json_body['token']) - - def test_scoped_token_has_user_domain(self): - r = self.v3_create_token( - self.TOKEN_SCOPE_PROJECT_EMPLOYEE_FROM_EMPLOYEE) - self._check_domains_are_valid(r.result['token']) - - def test_issue_unscoped_token_for_local_user(self): - r = self._issue_unscoped_token(assertion='LOCAL_USER_ASSERTION') - token_resp = r.json_body['token'] - self.assertListEqual(['saml2'], token_resp['methods']) - self.assertEqual(self.user['id'], token_resp['user']['id']) - self.assertEqual(self.user['name'], token_resp['user']['name']) - self.assertEqual(self.domain['id'], token_resp['user']['domain']['id']) - # Make sure the token is not scoped - self.assertNotIn('project', token_resp) - self.assertNotIn('domain', token_resp) - - def test_issue_token_for_local_user_user_not_found(self): - self.assertRaises(exception.Unauthorized, - self._issue_unscoped_token, - assertion='ANOTHER_LOCAL_USER_ASSERTION') - - -class FernetFederatedTokenTests(test_v3.RestfulTestCase, FederatedSetupMixin): - AUTH_METHOD = 'token' - - def load_fixtures(self, fixtures): - 
super(FernetFederatedTokenTests, self).load_fixtures(fixtures) - self.load_federation_sample_data() - - def config_overrides(self): - super(FernetFederatedTokenTests, self).config_overrides() - self.config_fixture.config(group='token', provider='fernet') - self.useFixture(ksfixtures.KeyRepository(self.config_fixture)) - - def auth_plugin_config_override(self): - methods = ['saml2', 'token', 'password'] - super(FernetFederatedTokenTests, - self).auth_plugin_config_override(methods) - - def test_federated_unscoped_token(self): - resp = self._issue_unscoped_token() - self.assertEqual(204, len(resp.headers['X-Subject-Token'])) - self.assertValidMappedUser(resp.json_body['token']) - - def test_federated_unscoped_token_with_multiple_groups(self): - assertion = 'ANOTHER_CUSTOMER_ASSERTION' - resp = self._issue_unscoped_token(assertion=assertion) - self.assertEqual(226, len(resp.headers['X-Subject-Token'])) - self.assertValidMappedUser(resp.json_body['token']) - - def test_validate_federated_unscoped_token(self): - resp = self._issue_unscoped_token() - unscoped_token = resp.headers.get('X-Subject-Token') - # assert that the token we received is valid - self.get('/auth/tokens/', headers={'X-Subject-Token': unscoped_token}) - - def test_fernet_full_workflow(self): - """Test 'standard' workflow for granting Fernet access tokens. 
- - * Issue unscoped token - * List available projects based on groups - * Scope token to one of available projects - - """ - resp = self._issue_unscoped_token() - self.assertValidMappedUser(resp.json_body['token']) - unscoped_token = resp.headers.get('X-Subject-Token') - resp = self.get('/auth/projects', token=unscoped_token) - projects = resp.result['projects'] - random_project = random.randint(0, len(projects)) - 1 - project = projects[random_project] - - v3_scope_request = self._scope_request(unscoped_token, - 'project', project['id']) - - resp = self.v3_create_token(v3_scope_request) - token_resp = resp.result['token'] - self._check_project_scoped_token_attributes(token_resp, project['id']) - - -class FederatedTokenTestsMethodToken(FederatedTokenTests): - """Test federation operation with unified scoping auth method. - - Test all the operations with auth method set to ``token`` as a new, unified - way for scoping all the tokens. - - """ - - AUTH_METHOD = 'token' - - def auth_plugin_config_override(self): - methods = ['saml2', 'token'] - super(FederatedTokenTests, - self).auth_plugin_config_override(methods) - - @utils.wip('This will fail because of bug #1501032. The returned method' - 'list should contain "saml2". This is documented in bug ' - '1501032.') - def test_full_workflow(self): - """Test 'standard' workflow for granting access tokens. - - * Issue unscoped token - * List available projects based on groups - * Scope token to one of available projects - - """ - r = self._issue_unscoped_token() - token_resp = r.json_body['token'] - # NOTE(lbragstad): Ensure only 'saml2' is in the method list. 
- self.assertListEqual(['saml2'], token_resp['methods']) - self.assertValidMappedUser(token_resp) - employee_unscoped_token_id = r.headers.get('X-Subject-Token') - r = self.get('/auth/projects', token=employee_unscoped_token_id) - projects = r.result['projects'] - random_project = random.randint(0, len(projects)) - 1 - project = projects[random_project] - - v3_scope_request = self._scope_request(employee_unscoped_token_id, - 'project', project['id']) - - r = self.v3_authenticate_token(v3_scope_request) - token_resp = r.result['token'] - self.assertIn('token', token_resp['methods']) - self.assertIn('saml2', token_resp['methods']) - self._check_project_scoped_token_attributes(token_resp, project['id']) - - -class FederatedUserTests(test_v3.RestfulTestCase, FederatedSetupMixin): - """Tests for federated users - - Tests new shadow users functionality - - """ - - def auth_plugin_config_override(self): - methods = ['saml2'] - super(FederatedUserTests, self).auth_plugin_config_override(methods) - - def setUp(self): - super(FederatedUserTests, self).setUp() - - def load_fixtures(self, fixtures): - super(FederatedUserTests, self).load_fixtures(fixtures) - self.load_federation_sample_data() - - def test_user_id_persistense(self): - """Ensure user_id is persistend for multiple federated authn calls.""" - r = self._issue_unscoped_token() - user_id = r.json_body['token']['user']['id'] - - r = self._issue_unscoped_token() - user_id2 = r.json_body['token']['user']['id'] - self.assertEqual(user_id, user_id2) - - -class JsonHomeTests(test_v3.RestfulTestCase, test_v3.JsonHomeTestMixin): - JSON_HOME_DATA = { - 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-FEDERATION/' - '1.0/rel/identity_provider': { - 'href-template': '/OS-FEDERATION/identity_providers/{idp_id}', - 'href-vars': { - 'idp_id': 'http://docs.openstack.org/api/openstack-identity/3/' - 'ext/OS-FEDERATION/1.0/param/idp_id' - }, - }, - } - - -def _is_xmlsec1_installed(): - p = subprocess.Popen( - ['which', 
'xmlsec1'], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - - # invert the return code - return not bool(p.wait()) - - -def _load_xml(filename): - with open(os.path.join(XMLDIR, filename), 'r') as xml: - return xml.read() - - -class SAMLGenerationTests(test_v3.RestfulTestCase): - - SP_AUTH_URL = ('http://beta.com:5000/v3/OS-FEDERATION/identity_providers' - '/BETA/protocols/saml2/auth') - - ASSERTION_FILE = 'signed_saml2_assertion.xml' - - # The values of the following variables match the attributes values found - # in ASSERTION_FILE - ISSUER = 'https://acme.com/FIM/sps/openstack/saml20' - RECIPIENT = 'http://beta.com/Shibboleth.sso/SAML2/POST' - SUBJECT = 'test_user' - SUBJECT_DOMAIN = 'user_domain' - ROLES = ['admin', 'member'] - PROJECT = 'development' - PROJECT_DOMAIN = 'project_domain' - SAML_GENERATION_ROUTE = '/auth/OS-FEDERATION/saml2' - ECP_GENERATION_ROUTE = '/auth/OS-FEDERATION/saml2/ecp' - ASSERTION_VERSION = "2.0" - SERVICE_PROVDIER_ID = 'ACME' - - def sp_ref(self): - ref = { - 'auth_url': self.SP_AUTH_URL, - 'enabled': True, - 'description': uuid.uuid4().hex, - 'sp_url': self.RECIPIENT, - 'relay_state_prefix': CONF.saml.relay_state_prefix, - - } - return ref - - def setUp(self): - super(SAMLGenerationTests, self).setUp() - self.signed_assertion = saml2.create_class_from_xml_string( - saml.Assertion, _load_xml(self.ASSERTION_FILE)) - self.sp = self.sp_ref() - url = '/OS-FEDERATION/service_providers/' + self.SERVICE_PROVDIER_ID - self.put(url, body={'service_provider': self.sp}, - expected_status=http_client.CREATED) - - def test_samlize_token_values(self): - """Test the SAML generator produces a SAML object. - - Test the SAML generator directly by passing known arguments, the result - should be a SAML object that consistently includes attributes based on - the known arguments that were passed in. 
- - """ - with mock.patch.object(keystone_idp, '_sign_assertion', - return_value=self.signed_assertion): - generator = keystone_idp.SAMLGenerator() - response = generator.samlize_token(self.ISSUER, self.RECIPIENT, - self.SUBJECT, - self.SUBJECT_DOMAIN, - self.ROLES, self.PROJECT, - self.PROJECT_DOMAIN) - - assertion = response.assertion - self.assertIsNotNone(assertion) - self.assertIsInstance(assertion, saml.Assertion) - issuer = response.issuer - self.assertEqual(self.RECIPIENT, response.destination) - self.assertEqual(self.ISSUER, issuer.text) - - user_attribute = assertion.attribute_statement[0].attribute[0] - self.assertEqual(self.SUBJECT, user_attribute.attribute_value[0].text) - - user_domain_attribute = ( - assertion.attribute_statement[0].attribute[1]) - self.assertEqual(self.SUBJECT_DOMAIN, - user_domain_attribute.attribute_value[0].text) - - role_attribute = assertion.attribute_statement[0].attribute[2] - for attribute_value in role_attribute.attribute_value: - self.assertIn(attribute_value.text, self.ROLES) - - project_attribute = assertion.attribute_statement[0].attribute[3] - self.assertEqual(self.PROJECT, - project_attribute.attribute_value[0].text) - - project_domain_attribute = ( - assertion.attribute_statement[0].attribute[4]) - self.assertEqual(self.PROJECT_DOMAIN, - project_domain_attribute.attribute_value[0].text) - - def test_verify_assertion_object(self): - """Test that the Assertion object is built properly. - - The Assertion doesn't need to be signed in this test, so - _sign_assertion method is patched and doesn't alter the assertion. 
- - """ - with mock.patch.object(keystone_idp, '_sign_assertion', - side_effect=lambda x: x): - generator = keystone_idp.SAMLGenerator() - response = generator.samlize_token(self.ISSUER, self.RECIPIENT, - self.SUBJECT, - self.SUBJECT_DOMAIN, - self.ROLES, self.PROJECT, - self.PROJECT_DOMAIN) - assertion = response.assertion - self.assertEqual(self.ASSERTION_VERSION, assertion.version) - - def test_valid_saml_xml(self): - """Test the generated SAML object can become valid XML. - - Test the generator directly by passing known arguments, the result - should be a SAML object that consistently includes attributes based on - the known arguments that were passed in. - - """ - with mock.patch.object(keystone_idp, '_sign_assertion', - return_value=self.signed_assertion): - generator = keystone_idp.SAMLGenerator() - response = generator.samlize_token(self.ISSUER, self.RECIPIENT, - self.SUBJECT, - self.SUBJECT_DOMAIN, - self.ROLES, self.PROJECT, - self.PROJECT_DOMAIN) - - saml_str = response.to_string() - response = etree.fromstring(saml_str) - issuer = response[0] - assertion = response[2] - - self.assertEqual(self.RECIPIENT, response.get('Destination')) - self.assertEqual(self.ISSUER, issuer.text) - - user_attribute = assertion[4][0] - self.assertEqual(self.SUBJECT, user_attribute[0].text) - - user_domain_attribute = assertion[4][1] - self.assertEqual(self.SUBJECT_DOMAIN, user_domain_attribute[0].text) - - role_attribute = assertion[4][2] - for attribute_value in role_attribute: - self.assertIn(attribute_value.text, self.ROLES) - - project_attribute = assertion[4][3] - self.assertEqual(self.PROJECT, project_attribute[0].text) - - project_domain_attribute = assertion[4][4] - self.assertEqual(self.PROJECT_DOMAIN, project_domain_attribute[0].text) - - def test_assertion_using_explicit_namespace_prefixes(self): - def mocked_subprocess_check_output(*popenargs, **kwargs): - # the last option is the assertion file to be signed - filename = popenargs[0][-1] - with open(filename, 
'r') as f: - assertion_content = f.read() - # since we are not testing the signature itself, we can return - # the assertion as is without signing it - return assertion_content - - with mock.patch.object(subprocess, 'check_output', - side_effect=mocked_subprocess_check_output): - generator = keystone_idp.SAMLGenerator() - response = generator.samlize_token(self.ISSUER, self.RECIPIENT, - self.SUBJECT, - self.SUBJECT_DOMAIN, - self.ROLES, self.PROJECT, - self.PROJECT_DOMAIN) - assertion_xml = response.assertion.to_string() - # make sure we have the proper tag and prefix for the assertion - # namespace - self.assertIn(' - - Test Plan: - - - Attempt to get all entities back by passing a two-term attribute - - Attempt to piggyback filter to damage DB (e.g. drop table) - - """ - self._set_policy({"identity:list_users": [], - "identity:list_groups": [], - "identity:create_group": []}) - - url_by_name = "/users?name=anything' or 'x'='x" - r = self.get(url_by_name, auth=self.auth) - - self.assertEqual(0, len(r.result.get('users'))) - - # See if we can add a SQL command...use the group table instead of the - # user table since 'user' is reserved word for SQLAlchemy. - group = unit.new_group_ref(domain_id=self.domainB['id']) - group = self.identity_api.create_group(group) - - url_by_name = "/users?name=x'; drop table group" - r = self.get(url_by_name, auth=self.auth) - - # Check group table is still there... 
- url_by_name = "/groups" - r = self.get(url_by_name, auth=self.auth) - self.assertTrue(len(r.result.get('groups')) > 0) - - -class IdentityTestListLimitCase(IdentityTestFilteredCase): - """Test list limiting enforcement on the v3 Identity API.""" - - content_type = 'json' - - def setUp(self): - """Setup for Identity Limit Test Cases.""" - super(IdentityTestListLimitCase, self).setUp() - - # Create 10 entries for each of the entities we are going to test - self.ENTITY_TYPES = ['user', 'group', 'project'] - self.entity_lists = {} - for entity in self.ENTITY_TYPES: - self.entity_lists[entity] = self._create_test_data(entity, 10) - # Make sure we clean up when finished - self.addCleanup(self.clean_up_entity, entity) - - self.service_list = [] - self.addCleanup(self.clean_up_service) - for _ in range(10): - new_entity = unit.new_service_ref() - service = self.catalog_api.create_service(new_entity['id'], - new_entity) - self.service_list.append(service) - - self.policy_list = [] - self.addCleanup(self.clean_up_policy) - for _ in range(10): - new_entity = unit.new_policy_ref() - policy = self.policy_api.create_policy(new_entity['id'], - new_entity) - self.policy_list.append(policy) - - def clean_up_entity(self, entity): - """Clean up entity test data from Identity Limit Test Cases.""" - self._delete_test_data(entity, self.entity_lists[entity]) - - def clean_up_service(self): - """Clean up service test data from Identity Limit Test Cases.""" - for service in self.service_list: - self.catalog_api.delete_service(service['id']) - - def clean_up_policy(self): - """Clean up policy test data from Identity Limit Test Cases.""" - for policy in self.policy_list: - self.policy_api.delete_policy(policy['id']) - - def _test_entity_list_limit(self, entity, driver): - """GET / (limited) - - Test Plan: - - - For the specified type of entity: - - Update policy for no protection on api - - Add a bunch of entities - - Set the global list limit to 5, and check that getting all - - entities 
only returns 5 - - Set the driver list_limit to 4, and check that now only 4 are - - returned - - """ - if entity == 'policy': - plural = 'policies' - else: - plural = '%ss' % entity - - self._set_policy({"identity:list_%s" % plural: []}) - self.config_fixture.config(list_limit=5) - self.config_fixture.config(group=driver, list_limit=None) - r = self.get('/%s' % plural, auth=self.auth) - self.assertEqual(5, len(r.result.get(plural))) - self.assertIs(r.result.get('truncated'), True) - - self.config_fixture.config(group=driver, list_limit=4) - r = self.get('/%s' % plural, auth=self.auth) - self.assertEqual(4, len(r.result.get(plural))) - self.assertIs(r.result.get('truncated'), True) - - def test_users_list_limit(self): - self._test_entity_list_limit('user', 'identity') - - def test_groups_list_limit(self): - self._test_entity_list_limit('group', 'identity') - - def test_projects_list_limit(self): - self._test_entity_list_limit('project', 'resource') - - def test_services_list_limit(self): - self._test_entity_list_limit('service', 'catalog') - - def test_non_driver_list_limit(self): - """Check list can be limited without driver level support. - - Policy limiting is not done at the driver level (since it - really isn't worth doing it there). So use this as a test - for ensuring the controller level will successfully limit - in this case. 
- - """ - self._test_entity_list_limit('policy', 'policy') - - def test_no_limit(self): - """Check truncated attribute not set when list not limited.""" - self._set_policy({"identity:list_services": []}) - r = self.get('/services', auth=self.auth) - self.assertEqual(10, len(r.result.get('services'))) - self.assertIsNone(r.result.get('truncated')) - - def test_at_limit(self): - """Check truncated attribute not set when list at max size.""" - # Test this by overriding the general limit with a higher - # driver-specific limit (allowing all entities to be returned - # in the collection), which should result in a non truncated list - self._set_policy({"identity:list_services": []}) - self.config_fixture.config(list_limit=5) - self.config_fixture.config(group='catalog', list_limit=10) - r = self.get('/services', auth=self.auth) - self.assertEqual(10, len(r.result.get('services'))) - self.assertIsNone(r.result.get('truncated')) diff --git a/keystone-moon/keystone/tests/unit/test_v3_identity.py b/keystone-moon/keystone/tests/unit/test_v3_identity.py deleted file mode 100644 index 7d3f6cad..00000000 --- a/keystone-moon/keystone/tests/unit/test_v3_identity.py +++ /dev/null @@ -1,795 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import logging -import uuid - -import fixtures -import mock -from oslo_config import cfg -from six.moves import http_client -from testtools import matchers - -from keystone.common import controller -from keystone import exception -from keystone.tests import unit -from keystone.tests.unit import test_v3 - - -CONF = cfg.CONF - - -# NOTE(morganfainberg): To be removed when admin_token_auth middleware is -# removed. This was moved to it's own testcase so it can setup the -# admin_token_auth pipeline without impacting other tests. -class IdentityTestCaseStaticAdminToken(test_v3.RestfulTestCase): - EXTENSION_TO_ADD = 'admin_token_auth' - - def config_overrides(self): - super(IdentityTestCaseStaticAdminToken, self).config_overrides() - self.config_fixture.config( - admin_token='ADMIN') - - def test_list_users_with_static_admin_token_and_multiple_backends(self): - # domain-specific operations with the bootstrap ADMIN token is - # disallowed when domain-specific drivers are enabled - self.config_fixture.config(group='identity', - domain_specific_drivers_enabled=True) - self.get('/users', token=CONF.admin_token, - expected_status=exception.Unauthorized.code) - - def test_create_user_with_admin_token_and_no_domain(self): - """Call ``POST /users`` with admin token but no domain id. - - It should not be possible to use the admin token to create a user - while not explicitly passing the domain in the request body. - - """ - # Passing a valid domain id to new_user_ref() since domain_id is - # not an optional parameter. - ref = unit.new_user_ref(domain_id=self.domain_id) - # Delete the domain id before sending the request. 
- del ref['domain_id'] - self.post('/users', body={'user': ref}, token=CONF.admin_token, - expected_status=http_client.BAD_REQUEST) - - -class IdentityTestCase(test_v3.RestfulTestCase): - """Test users and groups.""" - - def setUp(self): - super(IdentityTestCase, self).setUp() - - self.group = unit.new_group_ref(domain_id=self.domain_id) - self.group = self.identity_api.create_group(self.group) - self.group_id = self.group['id'] - - self.credential = unit.new_credential_ref( - user_id=self.user['id'], - project_id=self.project_id) - - self.credential_api.create_credential(self.credential['id'], - self.credential) - - # user crud tests - - def test_create_user(self): - """Call ``POST /users``.""" - ref = unit.new_user_ref(domain_id=self.domain_id) - r = self.post( - '/users', - body={'user': ref}) - return self.assertValidUserResponse(r, ref) - - def test_create_user_without_domain(self): - """Call ``POST /users`` without specifying domain. - - According to the identity-api specification, if you do not - explicitly specific the domain_id in the entity, it should - take the domain scope of the token as the domain_id. 
- - """ - # Create a user with a role on the domain so we can get a - # domain scoped token - domain = unit.new_domain_ref() - self.resource_api.create_domain(domain['id'], domain) - user = unit.create_user(self.identity_api, domain_id=domain['id']) - self.assignment_api.create_grant( - role_id=self.role_id, user_id=user['id'], - domain_id=domain['id']) - - ref = unit.new_user_ref(domain_id=domain['id']) - ref_nd = ref.copy() - ref_nd.pop('domain_id') - auth = self.build_authentication_request( - user_id=user['id'], - password=user['password'], - domain_id=domain['id']) - r = self.post('/users', body={'user': ref_nd}, auth=auth) - self.assertValidUserResponse(r, ref) - - # Now try the same thing without a domain token - which should fail - ref = unit.new_user_ref(domain_id=domain['id']) - ref_nd = ref.copy() - ref_nd.pop('domain_id') - auth = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project['id']) - - # TODO(henry-nash): Due to bug #1283539 we currently automatically - # use the default domain_id if a domain scoped token is not being - # used. For now we just check that a deprecation warning has been - # issued. Change the code below to expect a failure once this bug is - # fixed. - with mock.patch( - 'oslo_log.versionutils.report_deprecated_feature') as mock_dep: - r = self.post('/users', body={'user': ref_nd}, auth=auth) - self.assertTrue(mock_dep.called) - - ref['domain_id'] = CONF.identity.default_domain_id - return self.assertValidUserResponse(r, ref) - - def test_create_user_with_admin_token_and_domain(self): - """Call ``POST /users`` with admin token and domain id.""" - ref = unit.new_user_ref(domain_id=self.domain_id) - self.post('/users', body={'user': ref}, token=self.get_admin_token(), - expected_status=http_client.CREATED) - - def test_user_management_normalized_keys(self): - """Illustrate the inconsistent handling of hyphens in keys. 
- - To quote Morgan in bug 1526244: - - the reason this is converted from "domain-id" to "domain_id" is - because of how we process/normalize data. The way we have to handle - specific data types for known columns requires avoiding "-" in the - actual python code since "-" is not valid for attributes in python - w/o significant use of "getattr" etc. - - In short, historically we handle some things in conversions. The - use of "extras" has long been a poor design choice that leads to - odd/strange inconsistent behaviors because of other choices made in - handling data from within the body. (In many cases we convert from - "-" to "_" throughout openstack) - - Source: https://bugs.launchpad.net/keystone/+bug/1526244/comments/9 - - """ - # Create two domains to work with. - domain1 = unit.new_domain_ref() - self.resource_api.create_domain(domain1['id'], domain1) - domain2 = unit.new_domain_ref() - self.resource_api.create_domain(domain2['id'], domain2) - - # We can successfully create a normal user without any surprises. - user = unit.new_user_ref(domain_id=domain1['id']) - r = self.post( - '/users', - body={'user': user}) - self.assertValidUserResponse(r, user) - user['id'] = r.json['user']['id'] - - # Query strings are not normalized: so we get all users back (like - # self.user), not just the ones in the specified domain. - r = self.get( - '/users?domain-id=%s' % domain1['id']) - self.assertValidUserListResponse(r, ref=self.user) - self.assertNotEqual(domain1['id'], self.user['domain_id']) - - # When creating a new user, if we move the 'domain_id' into the - # 'domain-id' attribute, the server will normalize the request - # attribute, and effectively "move it back" for us. 
- user = unit.new_user_ref(domain_id=domain1['id']) - user['domain-id'] = user.pop('domain_id') - r = self.post( - '/users', - body={'user': user}) - self.assertNotIn('domain-id', r.json['user']) - self.assertEqual(domain1['id'], r.json['user']['domain_id']) - # (move this attribute back so we can use assertValidUserResponse) - user['domain_id'] = user.pop('domain-id') - self.assertValidUserResponse(r, user) - user['id'] = r.json['user']['id'] - - # If we try updating the user's 'domain_id' by specifying a - # 'domain-id', then it'll be stored into extras rather than normalized, - # and the user's actual 'domain_id' is not affected. - r = self.patch( - '/users/%s' % user['id'], - body={'user': {'domain-id': domain2['id']}}) - self.assertEqual(domain2['id'], r.json['user']['domain-id']) - self.assertEqual(user['domain_id'], r.json['user']['domain_id']) - self.assertNotEqual(domain2['id'], user['domain_id']) - self.assertValidUserResponse(r, user) - - def test_create_user_bad_request(self): - """Call ``POST /users``.""" - self.post('/users', body={'user': {}}, - expected_status=http_client.BAD_REQUEST) - - def test_list_users(self): - """Call ``GET /users``.""" - resource_url = '/users' - r = self.get(resource_url) - self.assertValidUserListResponse(r, ref=self.user, - resource_url=resource_url) - - def test_list_users_with_multiple_backends(self): - """Call ``GET /users`` when multiple backends is enabled. - - In this scenario, the controller requires a domain to be specified - either as a filter or by using a domain scoped token. 
- - """ - self.config_fixture.config(group='identity', - domain_specific_drivers_enabled=True) - - # Create a new domain with a new project and user - domain = unit.new_domain_ref() - self.resource_api.create_domain(domain['id'], domain) - - project = unit.new_project_ref(domain_id=domain['id']) - self.resource_api.create_project(project['id'], project) - - user = unit.create_user(self.identity_api, domain_id=domain['id']) - - # Create both project and domain role grants for the user so we - # can get both project and domain scoped tokens - self.assignment_api.create_grant( - role_id=self.role_id, user_id=user['id'], - domain_id=domain['id']) - self.assignment_api.create_grant( - role_id=self.role_id, user_id=user['id'], - project_id=project['id']) - - dom_auth = self.build_authentication_request( - user_id=user['id'], - password=user['password'], - domain_id=domain['id']) - project_auth = self.build_authentication_request( - user_id=user['id'], - password=user['password'], - project_id=project['id']) - - # First try using a domain scoped token - resource_url = '/users' - r = self.get(resource_url, auth=dom_auth) - self.assertValidUserListResponse(r, ref=user, - resource_url=resource_url) - - # Now try using a project scoped token - resource_url = '/users' - r = self.get(resource_url, auth=project_auth) - self.assertValidUserListResponse(r, ref=user, - resource_url=resource_url) - - # Now try with an explicit filter - resource_url = ('/users?domain_id=%(domain_id)s' % - {'domain_id': domain['id']}) - r = self.get(resource_url) - self.assertValidUserListResponse(r, ref=user, - resource_url=resource_url) - - def test_list_users_no_default_project(self): - """Call ``GET /users`` making sure no default_project_id.""" - user = unit.new_user_ref(self.domain_id) - user = self.identity_api.create_user(user) - resource_url = '/users' - r = self.get(resource_url) - self.assertValidUserListResponse(r, ref=user, - resource_url=resource_url) - - def test_get_user(self): - 
"""Call ``GET /users/{user_id}``.""" - r = self.get('/users/%(user_id)s' % { - 'user_id': self.user['id']}) - self.assertValidUserResponse(r, self.user) - - def test_get_user_with_default_project(self): - """Call ``GET /users/{user_id}`` making sure of default_project_id.""" - user = unit.new_user_ref(domain_id=self.domain_id, - project_id=self.project_id) - user = self.identity_api.create_user(user) - r = self.get('/users/%(user_id)s' % {'user_id': user['id']}) - self.assertValidUserResponse(r, user) - - def test_add_user_to_group(self): - """Call ``PUT /groups/{group_id}/users/{user_id}``.""" - self.put('/groups/%(group_id)s/users/%(user_id)s' % { - 'group_id': self.group_id, 'user_id': self.user['id']}) - - def test_list_groups_for_user(self): - """Call ``GET /users/{user_id}/groups``.""" - user1 = unit.create_user(self.identity_api, - domain_id=self.domain['id']) - user2 = unit.create_user(self.identity_api, - domain_id=self.domain['id']) - - self.put('/groups/%(group_id)s/users/%(user_id)s' % { - 'group_id': self.group_id, 'user_id': user1['id']}) - - # Scenarios below are written to test the default policy configuration - - # One should be allowed to list one's own groups - auth = self.build_authentication_request( - user_id=user1['id'], - password=user1['password']) - resource_url = ('/users/%(user_id)s/groups' % - {'user_id': user1['id']}) - r = self.get(resource_url, auth=auth) - self.assertValidGroupListResponse(r, ref=self.group, - resource_url=resource_url) - - # Administrator is allowed to list others' groups - resource_url = ('/users/%(user_id)s/groups' % - {'user_id': user1['id']}) - r = self.get(resource_url) - self.assertValidGroupListResponse(r, ref=self.group, - resource_url=resource_url) - - # Ordinary users should not be allowed to list other's groups - auth = self.build_authentication_request( - user_id=user2['id'], - password=user2['password']) - r = self.get('/users/%(user_id)s/groups' % { - 'user_id': user1['id']}, auth=auth, - 
expected_status=exception.ForbiddenAction.code) - - def test_check_user_in_group(self): - """Call ``HEAD /groups/{group_id}/users/{user_id}``.""" - self.put('/groups/%(group_id)s/users/%(user_id)s' % { - 'group_id': self.group_id, 'user_id': self.user['id']}) - self.head('/groups/%(group_id)s/users/%(user_id)s' % { - 'group_id': self.group_id, 'user_id': self.user['id']}) - - def test_list_users_in_group(self): - """Call ``GET /groups/{group_id}/users``.""" - self.put('/groups/%(group_id)s/users/%(user_id)s' % { - 'group_id': self.group_id, 'user_id': self.user['id']}) - resource_url = ('/groups/%(group_id)s/users' % - {'group_id': self.group_id}) - r = self.get(resource_url) - self.assertValidUserListResponse(r, ref=self.user, - resource_url=resource_url) - self.assertIn('/groups/%(group_id)s/users' % { - 'group_id': self.group_id}, r.result['links']['self']) - - def test_remove_user_from_group(self): - """Call ``DELETE /groups/{group_id}/users/{user_id}``.""" - self.put('/groups/%(group_id)s/users/%(user_id)s' % { - 'group_id': self.group_id, 'user_id': self.user['id']}) - self.delete('/groups/%(group_id)s/users/%(user_id)s' % { - 'group_id': self.group_id, 'user_id': self.user['id']}) - - def test_update_user(self): - """Call ``PATCH /users/{user_id}``.""" - user = unit.new_user_ref(domain_id=self.domain_id) - del user['id'] - r = self.patch('/users/%(user_id)s' % { - 'user_id': self.user['id']}, - body={'user': user}) - self.assertValidUserResponse(r, user) - - def test_admin_password_reset(self): - # bootstrap a user as admin - user_ref = unit.create_user(self.identity_api, - domain_id=self.domain['id']) - - # auth as user should work before a password change - old_password_auth = self.build_authentication_request( - user_id=user_ref['id'], - password=user_ref['password']) - r = self.v3_create_token(old_password_auth) - old_token = r.headers.get('X-Subject-Token') - - # auth as user with a token should work before a password change - old_token_auth = 
self.build_authentication_request(token=old_token) - self.v3_create_token(old_token_auth) - - # administrative password reset - new_password = uuid.uuid4().hex - self.patch('/users/%s' % user_ref['id'], - body={'user': {'password': new_password}}) - - # auth as user with original password should not work after change - self.v3_create_token(old_password_auth, - expected_status=http_client.UNAUTHORIZED) - - # auth as user with an old token should not work after change - self.v3_create_token(old_token_auth, - expected_status=http_client.NOT_FOUND) - - # new password should work - new_password_auth = self.build_authentication_request( - user_id=user_ref['id'], - password=new_password) - self.v3_create_token(new_password_auth) - - def test_update_user_domain_id(self): - """Call ``PATCH /users/{user_id}`` with domain_id.""" - user = unit.new_user_ref(domain_id=self.domain['id']) - user = self.identity_api.create_user(user) - user['domain_id'] = CONF.identity.default_domain_id - r = self.patch('/users/%(user_id)s' % { - 'user_id': user['id']}, - body={'user': user}, - expected_status=exception.ValidationError.code) - self.config_fixture.config(domain_id_immutable=False) - user['domain_id'] = self.domain['id'] - r = self.patch('/users/%(user_id)s' % { - 'user_id': user['id']}, - body={'user': user}) - self.assertValidUserResponse(r, user) - - def test_delete_user(self): - """Call ``DELETE /users/{user_id}``. - - As well as making sure the delete succeeds, we ensure - that any credentials that reference this user are - also deleted, while other credentials are unaffected. - In addition, no tokens should remain valid for this user. 
- - """ - # First check the credential for this user is present - r = self.credential_api.get_credential(self.credential['id']) - self.assertDictEqual(self.credential, r) - # Create a second credential with a different user - - user2 = unit.new_user_ref(domain_id=self.domain['id'], - project_id=self.project['id']) - user2 = self.identity_api.create_user(user2) - credential2 = unit.new_credential_ref(user_id=user2['id'], - project_id=self.project['id']) - self.credential_api.create_credential(credential2['id'], credential2) - - # Create a token for this user which we can check later - # gets deleted - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - project_id=self.project['id']) - token = self.get_requested_token(auth_data) - # Confirm token is valid for now - self.head('/auth/tokens', - headers={'X-Subject-Token': token}, - expected_status=http_client.OK) - - # Now delete the user - self.delete('/users/%(user_id)s' % { - 'user_id': self.user['id']}) - - # Deleting the user should have deleted any credentials - # that reference this project - self.assertRaises(exception.CredentialNotFound, - self.credential_api.get_credential, - self.credential['id']) - # And the no tokens we remain valid - tokens = self.token_provider_api._persistence._list_tokens( - self.user['id']) - self.assertEqual(0, len(tokens)) - # But the credential for user2 is unaffected - r = self.credential_api.get_credential(credential2['id']) - self.assertDictEqual(credential2, r) - - # shadow user tests - def test_shadow_federated_user(self): - fed_user = unit.new_federated_user_ref() - user = ( - self.identity_api.shadow_federated_user(fed_user["idp_id"], - fed_user["protocol_id"], - fed_user["unique_id"], - fed_user["display_name"]) - ) - self.assertIsNotNone(user["id"]) - self.assertEqual(len(user.keys()), 4) - self.assertIsNotNone(user['id']) - self.assertIsNotNone(user['name']) - self.assertIsNone(user['domain_id']) - 
self.assertEqual(user['enabled'], True) - - def test_shadow_existing_federated_user(self): - fed_user = unit.new_federated_user_ref() - - # introduce the user to keystone for the first time - shadow_user1 = self.identity_api.shadow_federated_user( - fed_user["idp_id"], - fed_user["protocol_id"], - fed_user["unique_id"], - fed_user["display_name"]) - self.assertEqual(fed_user['display_name'], shadow_user1['name']) - - # shadow the user again, with another name to invalidate the cache - # internally, this operation causes request to the driver. It should - # not fail. - fed_user['display_name'] = uuid.uuid4().hex - shadow_user2 = self.identity_api.shadow_federated_user( - fed_user["idp_id"], - fed_user["protocol_id"], - fed_user["unique_id"], - fed_user["display_name"]) - self.assertEqual(fed_user['display_name'], shadow_user2['name']) - self.assertNotEqual(shadow_user1['name'], shadow_user2['name']) - - # The shadowed users still share the same unique ID. - self.assertEqual(shadow_user1['id'], shadow_user2['id']) - - # group crud tests - - def test_create_group(self): - """Call ``POST /groups``.""" - # Create a new group to avoid a duplicate check failure - ref = unit.new_group_ref(domain_id=self.domain_id) - r = self.post( - '/groups', - body={'group': ref}) - return self.assertValidGroupResponse(r, ref) - - def test_create_group_bad_request(self): - """Call ``POST /groups``.""" - self.post('/groups', body={'group': {}}, - expected_status=http_client.BAD_REQUEST) - - def test_list_groups(self): - """Call ``GET /groups``.""" - resource_url = '/groups' - r = self.get(resource_url) - self.assertValidGroupListResponse(r, ref=self.group, - resource_url=resource_url) - - def test_get_group(self): - """Call ``GET /groups/{group_id}``.""" - r = self.get('/groups/%(group_id)s' % { - 'group_id': self.group_id}) - self.assertValidGroupResponse(r, self.group) - - def test_update_group(self): - """Call ``PATCH /groups/{group_id}``.""" - group = 
unit.new_group_ref(domain_id=self.domain_id) - del group['id'] - r = self.patch('/groups/%(group_id)s' % { - 'group_id': self.group_id}, - body={'group': group}) - self.assertValidGroupResponse(r, group) - - def test_update_group_domain_id(self): - """Call ``PATCH /groups/{group_id}`` with domain_id.""" - self.group['domain_id'] = CONF.identity.default_domain_id - r = self.patch('/groups/%(group_id)s' % { - 'group_id': self.group['id']}, - body={'group': self.group}, - expected_status=exception.ValidationError.code) - self.config_fixture.config(domain_id_immutable=False) - self.group['domain_id'] = self.domain['id'] - r = self.patch('/groups/%(group_id)s' % { - 'group_id': self.group['id']}, - body={'group': self.group}) - self.assertValidGroupResponse(r, self.group) - - def test_delete_group(self): - """Call ``DELETE /groups/{group_id}``.""" - self.delete('/groups/%(group_id)s' % { - 'group_id': self.group_id}) - - def test_create_user_password_not_logged(self): - # When a user is created, the password isn't logged at any level. - - log_fix = self.useFixture(fixtures.FakeLogger(level=logging.DEBUG)) - - ref = unit.new_user_ref(domain_id=self.domain_id) - self.post( - '/users', - body={'user': ref}) - - self.assertNotIn(ref['password'], log_fix.output) - - def test_update_password_not_logged(self): - # When admin modifies user password, the password isn't logged at any - # level. 
- - log_fix = self.useFixture(fixtures.FakeLogger(level=logging.DEBUG)) - - # bootstrap a user as admin - user_ref = unit.create_user(self.identity_api, - domain_id=self.domain['id']) - - self.assertNotIn(user_ref['password'], log_fix.output) - - # administrative password reset - new_password = uuid.uuid4().hex - self.patch('/users/%s' % user_ref['id'], - body={'user': {'password': new_password}}) - - self.assertNotIn(new_password, log_fix.output) - - -class IdentityV3toV2MethodsTestCase(unit.TestCase): - """Test users V3 to V2 conversion methods.""" - - def new_user_ref(self, **kwargs): - """Construct a bare bones user ref. - - Omits all optional components. - """ - ref = unit.new_user_ref(**kwargs) - # description is already omitted - del ref['email'] - del ref['enabled'] - del ref['password'] - return ref - - def setUp(self): - super(IdentityV3toV2MethodsTestCase, self).setUp() - self.load_backends() - user_id = uuid.uuid4().hex - project_id = uuid.uuid4().hex - - # User with only default_project_id in ref - self.user1 = self.new_user_ref( - id=user_id, - name=user_id, - project_id=project_id, - domain_id=CONF.identity.default_domain_id) - # User without default_project_id or tenantId in ref - self.user2 = self.new_user_ref( - id=user_id, - name=user_id, - domain_id=CONF.identity.default_domain_id) - # User with both tenantId and default_project_id in ref - self.user3 = self.new_user_ref( - id=user_id, - name=user_id, - project_id=project_id, - tenantId=project_id, - domain_id=CONF.identity.default_domain_id) - # User with only tenantId in ref - self.user4 = self.new_user_ref( - id=user_id, - name=user_id, - tenantId=project_id, - domain_id=CONF.identity.default_domain_id) - - # Expected result if the user is meant to have a tenantId element - self.expected_user = {'id': user_id, - 'name': user_id, - 'username': user_id, - 'tenantId': project_id} - - # Expected result if the user is not meant to have a tenantId element - self.expected_user_no_tenant_id = {'id': 
user_id, - 'name': user_id, - 'username': user_id} - - def test_v3_to_v2_user_method(self): - - updated_user1 = controller.V2Controller.v3_to_v2_user(self.user1) - self.assertIs(self.user1, updated_user1) - self.assertDictEqual(self.expected_user, self.user1) - updated_user2 = controller.V2Controller.v3_to_v2_user(self.user2) - self.assertIs(self.user2, updated_user2) - self.assertDictEqual(self.expected_user_no_tenant_id, self.user2) - updated_user3 = controller.V2Controller.v3_to_v2_user(self.user3) - self.assertIs(self.user3, updated_user3) - self.assertDictEqual(self.expected_user, self.user3) - updated_user4 = controller.V2Controller.v3_to_v2_user(self.user4) - self.assertIs(self.user4, updated_user4) - self.assertDictEqual(self.expected_user_no_tenant_id, self.user4) - - def test_v3_to_v2_user_method_list(self): - user_list = [self.user1, self.user2, self.user3, self.user4] - updated_list = controller.V2Controller.v3_to_v2_user(user_list) - - self.assertEqual(len(user_list), len(updated_list)) - - for i, ref in enumerate(updated_list): - # Order should not change. 
- self.assertIs(ref, user_list[i]) - - self.assertDictEqual(self.expected_user, self.user1) - self.assertDictEqual(self.expected_user_no_tenant_id, self.user2) - self.assertDictEqual(self.expected_user, self.user3) - self.assertDictEqual(self.expected_user_no_tenant_id, self.user4) - - -class UserSelfServiceChangingPasswordsTestCase(test_v3.RestfulTestCase): - - def setUp(self): - super(UserSelfServiceChangingPasswordsTestCase, self).setUp() - self.user_ref = unit.create_user(self.identity_api, - domain_id=self.domain['id']) - self.token = self.get_request_token(self.user_ref['password'], - http_client.CREATED) - - def get_request_token(self, password, expected_status): - auth_data = self.build_authentication_request( - user_id=self.user_ref['id'], - password=password) - r = self.v3_create_token(auth_data, - expected_status=expected_status) - return r.headers.get('X-Subject-Token') - - def change_password(self, expected_status, **kwargs): - """Returns a test response for a change password request.""" - return self.post('/users/%s/password' % self.user_ref['id'], - body={'user': kwargs}, - token=self.token, - expected_status=expected_status) - - def test_changing_password(self): - # original password works - token_id = self.get_request_token(self.user_ref['password'], - expected_status=http_client.CREATED) - # original token works - old_token_auth = self.build_authentication_request(token=token_id) - self.v3_create_token(old_token_auth) - - # change password - new_password = uuid.uuid4().hex - self.change_password(password=new_password, - original_password=self.user_ref['password'], - expected_status=http_client.NO_CONTENT) - - # old password fails - self.get_request_token(self.user_ref['password'], - expected_status=http_client.UNAUTHORIZED) - - # old token fails - self.v3_create_token(old_token_auth, - expected_status=http_client.NOT_FOUND) - - # new password works - self.get_request_token(new_password, - expected_status=http_client.CREATED) - - def 
test_changing_password_with_missing_original_password_fails(self): - r = self.change_password(password=uuid.uuid4().hex, - expected_status=http_client.BAD_REQUEST) - self.assertThat(r.result['error']['message'], - matchers.Contains('original_password')) - - def test_changing_password_with_missing_password_fails(self): - r = self.change_password(original_password=self.user_ref['password'], - expected_status=http_client.BAD_REQUEST) - self.assertThat(r.result['error']['message'], - matchers.Contains('password')) - - def test_changing_password_with_incorrect_password_fails(self): - self.change_password(password=uuid.uuid4().hex, - original_password=uuid.uuid4().hex, - expected_status=http_client.UNAUTHORIZED) - - def test_changing_password_with_disabled_user_fails(self): - # disable the user account - self.user_ref['enabled'] = False - self.patch('/users/%s' % self.user_ref['id'], - body={'user': self.user_ref}) - - self.change_password(password=uuid.uuid4().hex, - original_password=self.user_ref['password'], - expected_status=http_client.UNAUTHORIZED) - - def test_changing_password_not_logged(self): - # When a user changes their password, the password isn't logged at any - # level. 
- - log_fix = self.useFixture(fixtures.FakeLogger(level=logging.DEBUG)) - - # change password - new_password = uuid.uuid4().hex - self.change_password(password=new_password, - original_password=self.user_ref['password'], - expected_status=http_client.NO_CONTENT) - - self.assertNotIn(self.user_ref['password'], log_fix.output) - self.assertNotIn(new_password, log_fix.output) diff --git a/keystone-moon/keystone/tests/unit/test_v3_oauth1.py b/keystone-moon/keystone/tests/unit/test_v3_oauth1.py deleted file mode 100644 index 198dffb8..00000000 --- a/keystone-moon/keystone/tests/unit/test_v3_oauth1.py +++ /dev/null @@ -1,907 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import copy -import uuid - -import mock -from oslo_log import versionutils -from oslo_serialization import jsonutils -from pycadf import cadftaxonomy -from six.moves import http_client -from six.moves import urllib - -from keystone.contrib.oauth1 import routers -from keystone import exception -from keystone import oauth1 -from keystone.oauth1 import controllers -from keystone.oauth1 import core -from keystone.tests import unit -from keystone.tests.unit.common import test_notifications -from keystone.tests.unit import ksfixtures -from keystone.tests.unit.ksfixtures import temporaryfile -from keystone.tests.unit import test_v3 - - -class OAuth1ContribTests(test_v3.RestfulTestCase): - - @mock.patch.object(versionutils, 'report_deprecated_feature') - def test_exception_happens(self, mock_deprecator): - routers.OAuth1Extension(mock.ANY) - mock_deprecator.assert_called_once_with(mock.ANY, mock.ANY) - args, _kwargs = mock_deprecator.call_args - self.assertIn("Remove oauth1_extension from", args[1]) - - -class OAuth1Tests(test_v3.RestfulTestCase): - - CONSUMER_URL = '/OS-OAUTH1/consumers' - - def setUp(self): - super(OAuth1Tests, self).setUp() - - # Now that the app has been served, we can query CONF values - self.base_url = 'http://localhost/v3' - self.controller = controllers.OAuthControllerV3() - - def _create_single_consumer(self): - ref = {'description': uuid.uuid4().hex} - resp = self.post( - self.CONSUMER_URL, - body={'consumer': ref}) - return resp.result['consumer'] - - def _create_request_token(self, consumer, project_id): - endpoint = '/OS-OAUTH1/request_token' - client = oauth1.Client(consumer['key'], - client_secret=consumer['secret'], - signature_method=oauth1.SIG_HMAC, - callback_uri="oob") - headers = {'requested_project_id': project_id} - url, headers, body = client.sign(self.base_url + endpoint, - http_method='POST', - headers=headers) - return endpoint, headers - - def _create_access_token(self, consumer, token): - endpoint = '/OS-OAUTH1/access_token' 
- client = oauth1.Client(consumer['key'], - client_secret=consumer['secret'], - resource_owner_key=token.key, - resource_owner_secret=token.secret, - signature_method=oauth1.SIG_HMAC, - verifier=token.verifier) - url, headers, body = client.sign(self.base_url + endpoint, - http_method='POST') - headers.update({'Content-Type': 'application/json'}) - return endpoint, headers - - def _get_oauth_token(self, consumer, token): - client = oauth1.Client(consumer['key'], - client_secret=consumer['secret'], - resource_owner_key=token.key, - resource_owner_secret=token.secret, - signature_method=oauth1.SIG_HMAC) - endpoint = '/auth/tokens' - url, headers, body = client.sign(self.base_url + endpoint, - http_method='POST') - headers.update({'Content-Type': 'application/json'}) - ref = {'auth': {'identity': {'oauth1': {}, 'methods': ['oauth1']}}} - return endpoint, headers, ref - - def _authorize_request_token(self, request_id): - return '/OS-OAUTH1/authorize/%s' % (request_id) - - -class ConsumerCRUDTests(OAuth1Tests): - - def _consumer_create(self, description=None, description_flag=True, - **kwargs): - if description_flag: - ref = {'description': description} - else: - ref = {} - if kwargs: - ref.update(kwargs) - resp = self.post( - self.CONSUMER_URL, - body={'consumer': ref}) - consumer = resp.result['consumer'] - consumer_id = consumer['id'] - self.assertEqual(description, consumer['description']) - self.assertIsNotNone(consumer_id) - self.assertIsNotNone(consumer['secret']) - return consumer - - def test_consumer_create(self): - description = uuid.uuid4().hex - self._consumer_create(description=description) - - def test_consumer_create_none_desc_1(self): - self._consumer_create() - - def test_consumer_create_none_desc_2(self): - self._consumer_create(description_flag=False) - - def test_consumer_create_normalize_field(self): - # If create a consumer with a field with : or - in the name, - # the name is normalized by converting those chars to _. 
- field_name = 'some:weird-field' - field_value = uuid.uuid4().hex - extra_fields = {field_name: field_value} - consumer = self._consumer_create(**extra_fields) - normalized_field_name = 'some_weird_field' - self.assertEqual(field_value, consumer[normalized_field_name]) - - def test_consumer_delete(self): - consumer = self._create_single_consumer() - consumer_id = consumer['id'] - resp = self.delete(self.CONSUMER_URL + '/%s' % consumer_id) - self.assertResponseStatus(resp, http_client.NO_CONTENT) - - def test_consumer_get(self): - consumer = self._create_single_consumer() - consumer_id = consumer['id'] - resp = self.get(self.CONSUMER_URL + '/%s' % consumer_id) - self_url = ['http://localhost/v3', self.CONSUMER_URL, - '/', consumer_id] - self_url = ''.join(self_url) - self.assertEqual(self_url, resp.result['consumer']['links']['self']) - self.assertEqual(consumer_id, resp.result['consumer']['id']) - - def test_consumer_list(self): - self._consumer_create() - resp = self.get(self.CONSUMER_URL) - entities = resp.result['consumers'] - self.assertIsNotNone(entities) - self_url = ['http://localhost/v3', self.CONSUMER_URL] - self_url = ''.join(self_url) - self.assertEqual(self_url, resp.result['links']['self']) - self.assertValidListLinks(resp.result['links']) - - def test_consumer_update(self): - consumer = self._create_single_consumer() - original_id = consumer['id'] - original_description = consumer['description'] - update_description = original_description + '_new' - - update_ref = {'description': update_description} - update_resp = self.patch(self.CONSUMER_URL + '/%s' % original_id, - body={'consumer': update_ref}) - consumer = update_resp.result['consumer'] - self.assertEqual(update_description, consumer['description']) - self.assertEqual(original_id, consumer['id']) - - def test_consumer_update_bad_secret(self): - consumer = self._create_single_consumer() - original_id = consumer['id'] - update_ref = copy.deepcopy(consumer) - update_ref['description'] = 
uuid.uuid4().hex - update_ref['secret'] = uuid.uuid4().hex - self.patch(self.CONSUMER_URL + '/%s' % original_id, - body={'consumer': update_ref}, - expected_status=http_client.BAD_REQUEST) - - def test_consumer_update_bad_id(self): - consumer = self._create_single_consumer() - original_id = consumer['id'] - original_description = consumer['description'] - update_description = original_description + "_new" - - update_ref = copy.deepcopy(consumer) - update_ref['description'] = update_description - update_ref['id'] = update_description - self.patch(self.CONSUMER_URL + '/%s' % original_id, - body={'consumer': update_ref}, - expected_status=http_client.BAD_REQUEST) - - def test_consumer_update_normalize_field(self): - # If update a consumer with a field with : or - in the name, - # the name is normalized by converting those chars to _. - field1_name = 'some:weird-field' - field1_orig_value = uuid.uuid4().hex - - extra_fields = {field1_name: field1_orig_value} - consumer = self._consumer_create(**extra_fields) - consumer_id = consumer['id'] - - field1_new_value = uuid.uuid4().hex - - field2_name = 'weird:some-field' - field2_value = uuid.uuid4().hex - - update_ref = {field1_name: field1_new_value, - field2_name: field2_value} - - update_resp = self.patch(self.CONSUMER_URL + '/%s' % consumer_id, - body={'consumer': update_ref}) - consumer = update_resp.result['consumer'] - - normalized_field1_name = 'some_weird_field' - self.assertEqual(field1_new_value, consumer[normalized_field1_name]) - - normalized_field2_name = 'weird_some_field' - self.assertEqual(field2_value, consumer[normalized_field2_name]) - - def test_consumer_create_no_description(self): - resp = self.post(self.CONSUMER_URL, body={'consumer': {}}) - consumer = resp.result['consumer'] - consumer_id = consumer['id'] - self.assertIsNone(consumer['description']) - self.assertIsNotNone(consumer_id) - self.assertIsNotNone(consumer['secret']) - - def test_consumer_get_bad_id(self): - self.get(self.CONSUMER_URL + 
'/%(consumer_id)s' - % {'consumer_id': uuid.uuid4().hex}, - expected_status=http_client.NOT_FOUND) - - -class OAuthFlowTests(OAuth1Tests): - - def test_oauth_flow(self): - consumer = self._create_single_consumer() - consumer_id = consumer['id'] - consumer_secret = consumer['secret'] - self.consumer = {'key': consumer_id, 'secret': consumer_secret} - self.assertIsNotNone(self.consumer['secret']) - - url, headers = self._create_request_token(self.consumer, - self.project_id) - content = self.post( - url, headers=headers, - response_content_type='application/x-www-urlformencoded') - credentials = urllib.parse.parse_qs(content.result) - request_key = credentials['oauth_token'][0] - request_secret = credentials['oauth_token_secret'][0] - self.request_token = oauth1.Token(request_key, request_secret) - self.assertIsNotNone(self.request_token.key) - - url = self._authorize_request_token(request_key) - body = {'roles': [{'id': self.role_id}]} - resp = self.put(url, body=body, expected_status=http_client.OK) - self.verifier = resp.result['token']['oauth_verifier'] - self.assertTrue(all(i in core.VERIFIER_CHARS for i in self.verifier)) - self.assertEqual(8, len(self.verifier)) - - self.request_token.set_verifier(self.verifier) - url, headers = self._create_access_token(self.consumer, - self.request_token) - content = self.post( - url, headers=headers, - response_content_type='application/x-www-urlformencoded') - credentials = urllib.parse.parse_qs(content.result) - access_key = credentials['oauth_token'][0] - access_secret = credentials['oauth_token_secret'][0] - self.access_token = oauth1.Token(access_key, access_secret) - self.assertIsNotNone(self.access_token.key) - - url, headers, body = self._get_oauth_token(self.consumer, - self.access_token) - content = self.post(url, headers=headers, body=body) - self.keystone_token_id = content.headers['X-Subject-Token'] - self.keystone_token = content.result['token'] - self.assertIsNotNone(self.keystone_token_id) - - -class 
AccessTokenCRUDTests(OAuthFlowTests): - def test_delete_access_token_dne(self): - self.delete('/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s' - % {'user': self.user_id, - 'auth': uuid.uuid4().hex}, - expected_status=http_client.NOT_FOUND) - - def test_list_no_access_tokens(self): - resp = self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens' - % {'user_id': self.user_id}) - entities = resp.result['access_tokens'] - self.assertEqual([], entities) - self.assertValidListLinks(resp.result['links']) - - def test_get_single_access_token(self): - self.test_oauth_flow() - url = '/users/%(user_id)s/OS-OAUTH1/access_tokens/%(key)s' % { - 'user_id': self.user_id, - 'key': self.access_token.key - } - resp = self.get(url) - entity = resp.result['access_token'] - self.assertEqual(self.access_token.key, entity['id']) - self.assertEqual(self.consumer['key'], entity['consumer_id']) - self.assertEqual('http://localhost/v3' + url, entity['links']['self']) - - def test_get_access_token_dne(self): - self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens/%(key)s' - % {'user_id': self.user_id, - 'key': uuid.uuid4().hex}, - expected_status=http_client.NOT_FOUND) - - def test_list_all_roles_in_access_token(self): - self.test_oauth_flow() - resp = self.get('/users/%(id)s/OS-OAUTH1/access_tokens/%(key)s/roles' - % {'id': self.user_id, - 'key': self.access_token.key}) - entities = resp.result['roles'] - self.assertTrue(entities) - self.assertValidListLinks(resp.result['links']) - - def test_get_role_in_access_token(self): - self.test_oauth_flow() - url = ('/users/%(id)s/OS-OAUTH1/access_tokens/%(key)s/roles/%(role)s' - % {'id': self.user_id, 'key': self.access_token.key, - 'role': self.role_id}) - resp = self.get(url) - entity = resp.result['role'] - self.assertEqual(self.role_id, entity['id']) - - def test_get_role_in_access_token_dne(self): - self.test_oauth_flow() - url = ('/users/%(id)s/OS-OAUTH1/access_tokens/%(key)s/roles/%(role)s' - % {'id': self.user_id, 'key': self.access_token.key, 
- 'role': uuid.uuid4().hex}) - self.get(url, expected_status=http_client.NOT_FOUND) - - def test_list_and_delete_access_tokens(self): - self.test_oauth_flow() - # List access_tokens should be > 0 - resp = self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens' - % {'user_id': self.user_id}) - entities = resp.result['access_tokens'] - self.assertTrue(entities) - self.assertValidListLinks(resp.result['links']) - - # Delete access_token - resp = self.delete('/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s' - % {'user': self.user_id, - 'auth': self.access_token.key}) - self.assertResponseStatus(resp, http_client.NO_CONTENT) - - # List access_token should be 0 - resp = self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens' - % {'user_id': self.user_id}) - entities = resp.result['access_tokens'] - self.assertEqual([], entities) - self.assertValidListLinks(resp.result['links']) - - -class AuthTokenTests(OAuthFlowTests): - - def test_keystone_token_is_valid(self): - self.test_oauth_flow() - headers = {'X-Subject-Token': self.keystone_token_id, - 'X-Auth-Token': self.keystone_token_id} - r = self.get('/auth/tokens', headers=headers) - self.assertValidTokenResponse(r, self.user) - - # now verify the oauth section - oauth_section = r.result['token']['OS-OAUTH1'] - self.assertEqual(self.access_token.key, - oauth_section['access_token_id']) - self.assertEqual(self.consumer['key'], oauth_section['consumer_id']) - - # verify the roles section - roles_list = r.result['token']['roles'] - # we can just verify the 0th role since we are only assigning one role - self.assertEqual(self.role_id, roles_list[0]['id']) - - # verify that the token can perform delegated tasks - ref = unit.new_user_ref(domain_id=self.domain_id) - r = self.admin_request(path='/v3/users', headers=headers, - method='POST', body={'user': ref}) - self.assertValidUserResponse(r, ref) - - def test_delete_access_token_also_revokes_token(self): - self.test_oauth_flow() - - # Delete access token - resp = 
self.delete('/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s' - % {'user': self.user_id, - 'auth': self.access_token.key}) - self.assertResponseStatus(resp, http_client.NO_CONTENT) - - # Check Keystone Token no longer exists - headers = {'X-Subject-Token': self.keystone_token_id, - 'X-Auth-Token': self.keystone_token_id} - self.get('/auth/tokens', headers=headers, - expected_status=http_client.NOT_FOUND) - - def test_deleting_consumer_also_deletes_tokens(self): - self.test_oauth_flow() - - # Delete consumer - consumer_id = self.consumer['key'] - resp = self.delete('/OS-OAUTH1/consumers/%(consumer_id)s' - % {'consumer_id': consumer_id}) - self.assertResponseStatus(resp, http_client.NO_CONTENT) - - # List access_token should be 0 - resp = self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens' - % {'user_id': self.user_id}) - entities = resp.result['access_tokens'] - self.assertEqual([], entities) - - # Check Keystone Token no longer exists - headers = {'X-Subject-Token': self.keystone_token_id, - 'X-Auth-Token': self.keystone_token_id} - self.head('/auth/tokens', headers=headers, - expected_status=http_client.NOT_FOUND) - - def test_change_user_password_also_deletes_tokens(self): - self.test_oauth_flow() - - # delegated keystone token exists - headers = {'X-Subject-Token': self.keystone_token_id, - 'X-Auth-Token': self.keystone_token_id} - r = self.get('/auth/tokens', headers=headers) - self.assertValidTokenResponse(r, self.user) - - user = {'password': uuid.uuid4().hex} - r = self.patch('/users/%(user_id)s' % { - 'user_id': self.user['id']}, - body={'user': user}) - - headers = {'X-Subject-Token': self.keystone_token_id, - 'X-Auth-Token': self.keystone_token_id} - self.admin_request(path='/auth/tokens', headers=headers, - method='GET', expected_status=http_client.NOT_FOUND) - - def test_deleting_project_also_invalidates_tokens(self): - self.test_oauth_flow() - - # delegated keystone token exists - headers = {'X-Subject-Token': self.keystone_token_id, - 
'X-Auth-Token': self.keystone_token_id} - r = self.get('/auth/tokens', headers=headers) - self.assertValidTokenResponse(r, self.user) - - r = self.delete('/projects/%(project_id)s' % { - 'project_id': self.project_id}) - - headers = {'X-Subject-Token': self.keystone_token_id, - 'X-Auth-Token': self.keystone_token_id} - self.admin_request(path='/auth/tokens', headers=headers, - method='GET', expected_status=http_client.NOT_FOUND) - - def test_token_chaining_is_not_allowed(self): - self.test_oauth_flow() - - # attempt to re-authenticate (token chain) with the given token - path = '/v3/auth/tokens/' - auth_data = self.build_authentication_request( - token=self.keystone_token_id) - - self.admin_request( - path=path, - body=auth_data, - token=self.keystone_token_id, - method='POST', - expected_status=http_client.FORBIDDEN) - - def test_delete_keystone_tokens_by_consumer_id(self): - self.test_oauth_flow() - self.token_provider_api._persistence.get_token(self.keystone_token_id) - self.token_provider_api._persistence.delete_tokens( - self.user_id, - consumer_id=self.consumer['key']) - self.assertRaises(exception.TokenNotFound, - self.token_provider_api._persistence.get_token, - self.keystone_token_id) - - def _create_trust_get_token(self): - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.user_id, - project_id=self.project_id, - impersonation=True, - expires=dict(minutes=1), - role_ids=[self.role_id]) - del ref['id'] - - r = self.post('/OS-TRUST/trusts', body={'trust': ref}) - trust = self.assertValidTrustResponse(r) - - auth_data = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - trust_id=trust['id']) - - return self.get_requested_token(auth_data) - - def _approve_request_token_url(self): - consumer = self._create_single_consumer() - consumer_id = consumer['id'] - consumer_secret = consumer['secret'] - self.consumer = {'key': consumer_id, 'secret': consumer_secret} - 
self.assertIsNotNone(self.consumer['secret']) - - url, headers = self._create_request_token(self.consumer, - self.project_id) - content = self.post( - url, headers=headers, - response_content_type='application/x-www-urlformencoded') - credentials = urllib.parse.parse_qs(content.result) - request_key = credentials['oauth_token'][0] - request_secret = credentials['oauth_token_secret'][0] - self.request_token = oauth1.Token(request_key, request_secret) - self.assertIsNotNone(self.request_token.key) - - url = self._authorize_request_token(request_key) - - return url - - def test_oauth_token_cannot_create_new_trust(self): - self.test_oauth_flow() - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.user_id, - project_id=self.project_id, - impersonation=True, - expires=dict(minutes=1), - role_ids=[self.role_id]) - del ref['id'] - - self.post('/OS-TRUST/trusts', - body={'trust': ref}, - token=self.keystone_token_id, - expected_status=http_client.FORBIDDEN) - - def test_oauth_token_cannot_authorize_request_token(self): - self.test_oauth_flow() - url = self._approve_request_token_url() - body = {'roles': [{'id': self.role_id}]} - self.put(url, body=body, token=self.keystone_token_id, - expected_status=http_client.FORBIDDEN) - - def test_oauth_token_cannot_list_request_tokens(self): - self._set_policy({"identity:list_access_tokens": [], - "identity:create_consumer": [], - "identity:authorize_request_token": []}) - self.test_oauth_flow() - url = '/users/%s/OS-OAUTH1/access_tokens' % self.user_id - self.get(url, token=self.keystone_token_id, - expected_status=http_client.FORBIDDEN) - - def _set_policy(self, new_policy): - self.tempfile = self.useFixture(temporaryfile.SecureTempFile()) - self.tmpfilename = self.tempfile.file_name - self.config_fixture.config(group='oslo_policy', - policy_file=self.tmpfilename) - with open(self.tmpfilename, "w") as policyfile: - policyfile.write(jsonutils.dumps(new_policy)) - - def 
test_trust_token_cannot_authorize_request_token(self): - trust_token = self._create_trust_get_token() - url = self._approve_request_token_url() - body = {'roles': [{'id': self.role_id}]} - self.put(url, body=body, token=trust_token, - expected_status=http_client.FORBIDDEN) - - def test_trust_token_cannot_list_request_tokens(self): - self._set_policy({"identity:list_access_tokens": [], - "identity:create_trust": []}) - trust_token = self._create_trust_get_token() - url = '/users/%s/OS-OAUTH1/access_tokens' % self.user_id - self.get(url, token=trust_token, - expected_status=http_client.FORBIDDEN) - - -class FernetAuthTokenTests(AuthTokenTests): - - def config_overrides(self): - super(FernetAuthTokenTests, self).config_overrides() - self.config_fixture.config(group='token', provider='fernet') - self.useFixture(ksfixtures.KeyRepository(self.config_fixture)) - - def test_delete_keystone_tokens_by_consumer_id(self): - # NOTE(lbragstad): Fernet tokens are never persisted in the backend. - pass - - -class MaliciousOAuth1Tests(OAuth1Tests): - - def test_bad_consumer_secret(self): - consumer = self._create_single_consumer() - consumer_id = consumer['id'] - consumer = {'key': consumer_id, 'secret': uuid.uuid4().hex} - url, headers = self._create_request_token(consumer, self.project_id) - self.post(url, headers=headers, - expected_status=http_client.UNAUTHORIZED) - - def test_bad_request_token_key(self): - consumer = self._create_single_consumer() - consumer_id = consumer['id'] - consumer_secret = consumer['secret'] - consumer = {'key': consumer_id, 'secret': consumer_secret} - url, headers = self._create_request_token(consumer, self.project_id) - self.post( - url, headers=headers, - response_content_type='application/x-www-urlformencoded') - url = self._authorize_request_token(uuid.uuid4().hex) - body = {'roles': [{'id': self.role_id}]} - self.put(url, body=body, expected_status=http_client.NOT_FOUND) - - def test_bad_consumer_id(self): - consumer = 
self._create_single_consumer() - consumer_id = uuid.uuid4().hex - consumer_secret = consumer['secret'] - consumer = {'key': consumer_id, 'secret': consumer_secret} - url, headers = self._create_request_token(consumer, self.project_id) - self.post(url, headers=headers, expected_status=http_client.NOT_FOUND) - - def test_bad_requested_project_id(self): - consumer = self._create_single_consumer() - consumer_id = consumer['id'] - consumer_secret = consumer['secret'] - consumer = {'key': consumer_id, 'secret': consumer_secret} - project_id = uuid.uuid4().hex - url, headers = self._create_request_token(consumer, project_id) - self.post(url, headers=headers, expected_status=http_client.NOT_FOUND) - - def test_bad_verifier(self): - consumer = self._create_single_consumer() - consumer_id = consumer['id'] - consumer_secret = consumer['secret'] - consumer = {'key': consumer_id, 'secret': consumer_secret} - - url, headers = self._create_request_token(consumer, self.project_id) - content = self.post( - url, headers=headers, - response_content_type='application/x-www-urlformencoded') - credentials = urllib.parse.parse_qs(content.result) - request_key = credentials['oauth_token'][0] - request_secret = credentials['oauth_token_secret'][0] - request_token = oauth1.Token(request_key, request_secret) - - url = self._authorize_request_token(request_key) - body = {'roles': [{'id': self.role_id}]} - resp = self.put(url, body=body, expected_status=http_client.OK) - verifier = resp.result['token']['oauth_verifier'] - self.assertIsNotNone(verifier) - - request_token.set_verifier(uuid.uuid4().hex) - url, headers = self._create_access_token(consumer, request_token) - self.post(url, headers=headers, - expected_status=http_client.UNAUTHORIZED) - - def test_bad_authorizing_roles(self): - consumer = self._create_single_consumer() - consumer_id = consumer['id'] - consumer_secret = consumer['secret'] - consumer = {'key': consumer_id, 'secret': consumer_secret} - - url, headers = 
self._create_request_token(consumer, self.project_id) - content = self.post( - url, headers=headers, - response_content_type='application/x-www-urlformencoded') - credentials = urllib.parse.parse_qs(content.result) - request_key = credentials['oauth_token'][0] - - self.assignment_api.remove_role_from_user_and_project( - self.user_id, self.project_id, self.role_id) - url = self._authorize_request_token(request_key) - body = {'roles': [{'id': self.role_id}]} - self.admin_request(path=url, method='PUT', - body=body, expected_status=http_client.NOT_FOUND) - - def test_expired_authorizing_request_token(self): - self.config_fixture.config(group='oauth1', request_token_duration=-1) - - consumer = self._create_single_consumer() - consumer_id = consumer['id'] - consumer_secret = consumer['secret'] - self.consumer = {'key': consumer_id, 'secret': consumer_secret} - self.assertIsNotNone(self.consumer['key']) - - url, headers = self._create_request_token(self.consumer, - self.project_id) - content = self.post( - url, headers=headers, - response_content_type='application/x-www-urlformencoded') - credentials = urllib.parse.parse_qs(content.result) - request_key = credentials['oauth_token'][0] - request_secret = credentials['oauth_token_secret'][0] - self.request_token = oauth1.Token(request_key, request_secret) - self.assertIsNotNone(self.request_token.key) - - url = self._authorize_request_token(request_key) - body = {'roles': [{'id': self.role_id}]} - self.put(url, body=body, expected_status=http_client.UNAUTHORIZED) - - def test_expired_creating_keystone_token(self): - self.config_fixture.config(group='oauth1', access_token_duration=-1) - consumer = self._create_single_consumer() - consumer_id = consumer['id'] - consumer_secret = consumer['secret'] - self.consumer = {'key': consumer_id, 'secret': consumer_secret} - self.assertIsNotNone(self.consumer['key']) - - url, headers = self._create_request_token(self.consumer, - self.project_id) - content = self.post( - url, 
headers=headers, - response_content_type='application/x-www-urlformencoded') - credentials = urllib.parse.parse_qs(content.result) - request_key = credentials['oauth_token'][0] - request_secret = credentials['oauth_token_secret'][0] - self.request_token = oauth1.Token(request_key, request_secret) - self.assertIsNotNone(self.request_token.key) - - url = self._authorize_request_token(request_key) - body = {'roles': [{'id': self.role_id}]} - resp = self.put(url, body=body, expected_status=http_client.OK) - self.verifier = resp.result['token']['oauth_verifier'] - - self.request_token.set_verifier(self.verifier) - url, headers = self._create_access_token(self.consumer, - self.request_token) - content = self.post( - url, headers=headers, - response_content_type='application/x-www-urlformencoded') - credentials = urllib.parse.parse_qs(content.result) - access_key = credentials['oauth_token'][0] - access_secret = credentials['oauth_token_secret'][0] - self.access_token = oauth1.Token(access_key, access_secret) - self.assertIsNotNone(self.access_token.key) - - url, headers, body = self._get_oauth_token(self.consumer, - self.access_token) - self.post(url, headers=headers, body=body, - expected_status=http_client.UNAUTHORIZED) - - def test_missing_oauth_headers(self): - endpoint = '/OS-OAUTH1/request_token' - client = oauth1.Client(uuid.uuid4().hex, - client_secret=uuid.uuid4().hex, - signature_method=oauth1.SIG_HMAC, - callback_uri="oob") - headers = {'requested_project_id': uuid.uuid4().hex} - _url, headers, _body = client.sign(self.base_url + endpoint, - http_method='POST', - headers=headers) - - # NOTE(stevemar): To simulate this error, we remove the Authorization - # header from the post request. 
- del headers['Authorization'] - self.post(endpoint, headers=headers, - expected_status=http_client.INTERNAL_SERVER_ERROR) - - -class OAuthNotificationTests(OAuth1Tests, - test_notifications.BaseNotificationTest): - - def test_create_consumer(self): - consumer_ref = self._create_single_consumer() - self._assert_notify_sent(consumer_ref['id'], - test_notifications.CREATED_OPERATION, - 'OS-OAUTH1:consumer') - self._assert_last_audit(consumer_ref['id'], - test_notifications.CREATED_OPERATION, - 'OS-OAUTH1:consumer', - cadftaxonomy.SECURITY_ACCOUNT) - - def test_update_consumer(self): - consumer_ref = self._create_single_consumer() - update_ref = {'consumer': {'description': uuid.uuid4().hex}} - self.oauth_api.update_consumer(consumer_ref['id'], update_ref) - self._assert_notify_sent(consumer_ref['id'], - test_notifications.UPDATED_OPERATION, - 'OS-OAUTH1:consumer') - self._assert_last_audit(consumer_ref['id'], - test_notifications.UPDATED_OPERATION, - 'OS-OAUTH1:consumer', - cadftaxonomy.SECURITY_ACCOUNT) - - def test_delete_consumer(self): - consumer_ref = self._create_single_consumer() - self.oauth_api.delete_consumer(consumer_ref['id']) - self._assert_notify_sent(consumer_ref['id'], - test_notifications.DELETED_OPERATION, - 'OS-OAUTH1:consumer') - self._assert_last_audit(consumer_ref['id'], - test_notifications.DELETED_OPERATION, - 'OS-OAUTH1:consumer', - cadftaxonomy.SECURITY_ACCOUNT) - - def test_oauth_flow_notifications(self): - """Test to ensure notifications are sent for oauth tokens - - This test is very similar to test_oauth_flow, however - there are additional checks in this test for ensuring that - notifications for request token creation, and access token - creation/deletion are emitted. 
- """ - consumer = self._create_single_consumer() - consumer_id = consumer['id'] - consumer_secret = consumer['secret'] - self.consumer = {'key': consumer_id, 'secret': consumer_secret} - self.assertIsNotNone(self.consumer['secret']) - - url, headers = self._create_request_token(self.consumer, - self.project_id) - content = self.post( - url, headers=headers, - response_content_type='application/x-www-urlformencoded') - credentials = urllib.parse.parse_qs(content.result) - request_key = credentials['oauth_token'][0] - request_secret = credentials['oauth_token_secret'][0] - self.request_token = oauth1.Token(request_key, request_secret) - self.assertIsNotNone(self.request_token.key) - - # Test to ensure the create request token notification is sent - self._assert_notify_sent(request_key, - test_notifications.CREATED_OPERATION, - 'OS-OAUTH1:request_token') - self._assert_last_audit(request_key, - test_notifications.CREATED_OPERATION, - 'OS-OAUTH1:request_token', - cadftaxonomy.SECURITY_CREDENTIAL) - - url = self._authorize_request_token(request_key) - body = {'roles': [{'id': self.role_id}]} - resp = self.put(url, body=body, expected_status=http_client.OK) - self.verifier = resp.result['token']['oauth_verifier'] - self.assertTrue(all(i in core.VERIFIER_CHARS for i in self.verifier)) - self.assertEqual(8, len(self.verifier)) - - self.request_token.set_verifier(self.verifier) - url, headers = self._create_access_token(self.consumer, - self.request_token) - content = self.post( - url, headers=headers, - response_content_type='application/x-www-urlformencoded') - credentials = urllib.parse.parse_qs(content.result) - access_key = credentials['oauth_token'][0] - access_secret = credentials['oauth_token_secret'][0] - self.access_token = oauth1.Token(access_key, access_secret) - self.assertIsNotNone(self.access_token.key) - - # Test to ensure the create access token notification is sent - self._assert_notify_sent(access_key, - test_notifications.CREATED_OPERATION, - 
'OS-OAUTH1:access_token') - self._assert_last_audit(access_key, - test_notifications.CREATED_OPERATION, - 'OS-OAUTH1:access_token', - cadftaxonomy.SECURITY_CREDENTIAL) - - resp = self.delete('/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s' - % {'user': self.user_id, - 'auth': self.access_token.key}) - self.assertResponseStatus(resp, http_client.NO_CONTENT) - - # Test to ensure the delete access token notification is sent - self._assert_notify_sent(access_key, - test_notifications.DELETED_OPERATION, - 'OS-OAUTH1:access_token') - self._assert_last_audit(access_key, - test_notifications.DELETED_OPERATION, - 'OS-OAUTH1:access_token', - cadftaxonomy.SECURITY_CREDENTIAL) - - -class OAuthCADFNotificationTests(OAuthNotificationTests): - - def setUp(self): - """Repeat the tests for CADF notifications.""" - super(OAuthCADFNotificationTests, self).setUp() - self.config_fixture.config(notification_format='cadf') - - -class JsonHomeTests(OAuth1Tests, test_v3.JsonHomeTestMixin): - JSON_HOME_DATA = { - 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-OAUTH1/1.0/' - 'rel/consumers': { - 'href': '/OS-OAUTH1/consumers', - }, - } diff --git a/keystone-moon/keystone/tests/unit/test_v3_os_revoke.py b/keystone-moon/keystone/tests/unit/test_v3_os_revoke.py deleted file mode 100644 index 5fb5387a..00000000 --- a/keystone-moon/keystone/tests/unit/test_v3_os_revoke.py +++ /dev/null @@ -1,136 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import datetime -import uuid - -from oslo_utils import timeutils -import six -from six.moves import http_client -from testtools import matchers - -from keystone.common import utils -from keystone.models import revoke_model -from keystone.tests.unit import test_v3 -from keystone.token import provider - - -def _future_time_string(): - expire_delta = datetime.timedelta(seconds=1000) - future_time = timeutils.utcnow() + expire_delta - return utils.isotime(future_time) - - -class OSRevokeTests(test_v3.RestfulTestCase, test_v3.JsonHomeTestMixin): - - JSON_HOME_DATA = { - 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-REVOKE/1.0/' - 'rel/events': { - 'href': '/OS-REVOKE/events', - }, - } - - def test_get_empty_list(self): - resp = self.get('/OS-REVOKE/events') - self.assertEqual([], resp.json_body['events']) - - def _blank_event(self): - return {} - - # The two values will be the same with the exception of - # 'issued_before' which is set when the event is recorded. - def assertReportedEventMatchesRecorded(self, event, sample, before_time): - after_time = timeutils.utcnow() - event_issued_before = timeutils.normalize_time( - timeutils.parse_isotime(event['issued_before'])) - self.assertTrue( - before_time <= event_issued_before, - 'invalid event issued_before time; %s is not later than %s.' % ( - utils.isotime(event_issued_before, subsecond=True), - utils.isotime(before_time, subsecond=True))) - self.assertTrue( - event_issued_before <= after_time, - 'invalid event issued_before time; %s is not earlier than %s.' 
% ( - utils.isotime(event_issued_before, subsecond=True), - utils.isotime(after_time, subsecond=True))) - del (event['issued_before']) - self.assertEqual(sample, event) - - def test_revoked_list_self_url(self): - revoked_list_url = '/OS-REVOKE/events' - resp = self.get(revoked_list_url) - links = resp.json_body['links'] - self.assertThat(links['self'], matchers.EndsWith(revoked_list_url)) - - def test_revoked_token_in_list(self): - user_id = uuid.uuid4().hex - expires_at = provider.default_expire_time() - sample = self._blank_event() - sample['user_id'] = six.text_type(user_id) - sample['expires_at'] = six.text_type(utils.isotime(expires_at)) - before_time = timeutils.utcnow() - self.revoke_api.revoke_by_expiration(user_id, expires_at) - resp = self.get('/OS-REVOKE/events') - events = resp.json_body['events'] - self.assertEqual(1, len(events)) - self.assertReportedEventMatchesRecorded(events[0], sample, before_time) - - def test_disabled_project_in_list(self): - project_id = uuid.uuid4().hex - sample = dict() - sample['project_id'] = six.text_type(project_id) - before_time = timeutils.utcnow() - self.revoke_api.revoke( - revoke_model.RevokeEvent(project_id=project_id)) - - resp = self.get('/OS-REVOKE/events') - events = resp.json_body['events'] - self.assertEqual(1, len(events)) - self.assertReportedEventMatchesRecorded(events[0], sample, before_time) - - def test_disabled_domain_in_list(self): - domain_id = uuid.uuid4().hex - sample = dict() - sample['domain_id'] = six.text_type(domain_id) - before_time = timeutils.utcnow() - self.revoke_api.revoke( - revoke_model.RevokeEvent(domain_id=domain_id)) - - resp = self.get('/OS-REVOKE/events') - events = resp.json_body['events'] - self.assertEqual(1, len(events)) - self.assertReportedEventMatchesRecorded(events[0], sample, before_time) - - def test_list_since_invalid(self): - self.get('/OS-REVOKE/events?since=blah', - expected_status=http_client.BAD_REQUEST) - - def test_list_since_valid(self): - resp = 
self.get('/OS-REVOKE/events?since=2013-02-27T18:30:59.999999Z') - events = resp.json_body['events'] - self.assertEqual(0, len(events)) - - def test_since_future_time_no_events(self): - domain_id = uuid.uuid4().hex - sample = dict() - sample['domain_id'] = six.text_type(domain_id) - - self.revoke_api.revoke( - revoke_model.RevokeEvent(domain_id=domain_id)) - - resp = self.get('/OS-REVOKE/events') - events = resp.json_body['events'] - self.assertEqual(1, len(events)) - - resp = self.get('/OS-REVOKE/events?since=%s' % _future_time_string()) - events = resp.json_body['events'] - self.assertEqual([], events) diff --git a/keystone-moon/keystone/tests/unit/test_v3_policy.py b/keystone-moon/keystone/tests/unit/test_v3_policy.py deleted file mode 100644 index 76a52088..00000000 --- a/keystone-moon/keystone/tests/unit/test_v3_policy.py +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import json -import uuid - -from keystone.tests import unit -from keystone.tests.unit import test_v3 - - -class PolicyTestCase(test_v3.RestfulTestCase): - """Test policy CRUD.""" - - def setUp(self): - super(PolicyTestCase, self).setUp() - self.policy = unit.new_policy_ref() - self.policy_id = self.policy['id'] - self.policy_api.create_policy( - self.policy_id, - self.policy.copy()) - - # policy crud tests - - def test_create_policy(self): - """Call ``POST /policies``.""" - ref = unit.new_policy_ref() - r = self.post('/policies', body={'policy': ref}) - return self.assertValidPolicyResponse(r, ref) - - def test_list_policies(self): - """Call ``GET /policies``.""" - r = self.get('/policies') - self.assertValidPolicyListResponse(r, ref=self.policy) - - def test_get_policy(self): - """Call ``GET /policies/{policy_id}``.""" - r = self.get( - '/policies/%(policy_id)s' % {'policy_id': self.policy_id}) - self.assertValidPolicyResponse(r, self.policy) - - def test_update_policy(self): - """Call ``PATCH /policies/{policy_id}``.""" - self.policy['blob'] = json.dumps({'data': uuid.uuid4().hex, }) - r = self.patch( - '/policies/%(policy_id)s' % {'policy_id': self.policy_id}, - body={'policy': self.policy}) - self.assertValidPolicyResponse(r, self.policy) - - def test_delete_policy(self): - """Call ``DELETE /policies/{policy_id}``.""" - self.delete( - '/policies/%(policy_id)s' % {'policy_id': self.policy_id}) diff --git a/keystone-moon/keystone/tests/unit/test_v3_protection.py b/keystone-moon/keystone/tests/unit/test_v3_protection.py deleted file mode 100644 index f77a1528..00000000 --- a/keystone-moon/keystone/tests/unit/test_v3_protection.py +++ /dev/null @@ -1,1777 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# Copyright 2013 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import uuid - -from oslo_config import cfg -from oslo_serialization import jsonutils -from six.moves import http_client - -from keystone import exception -from keystone.tests import unit -from keystone.tests.unit import ksfixtures -from keystone.tests.unit.ksfixtures import temporaryfile -from keystone.tests.unit import test_v3 -from keystone.tests.unit import utils - - -CONF = cfg.CONF - - -class IdentityTestProtectedCase(test_v3.RestfulTestCase): - """Test policy enforcement on the v3 Identity API.""" - - def _policy_fixture(self): - return ksfixtures.Policy(self.tmpfilename, self.config_fixture) - - def setUp(self): - """Setup for Identity Protection Test Cases. - - As well as the usual housekeeping, create a set of domains, - users, roles and projects for the subsequent tests: - - - Three domains: A,B & C. C is disabled. - - DomainA has user1, DomainB has user2 and user3 - - DomainA has group1 and group2, DomainB has group3 - - User1 has two roles on DomainA - - User2 has one role on DomainA - - Remember that there will also be a fourth domain in existence, - the default domain. 
- - """ - self.tempfile = self.useFixture(temporaryfile.SecureTempFile()) - self.tmpfilename = self.tempfile.file_name - super(IdentityTestProtectedCase, self).setUp() - - # A default auth request we can use - un-scoped user token - self.auth = self.build_authentication_request( - user_id=self.user1['id'], - password=self.user1['password']) - - def load_sample_data(self): - self._populate_default_domain() - # Start by creating a couple of domains - self.domainA = unit.new_domain_ref() - self.resource_api.create_domain(self.domainA['id'], self.domainA) - self.domainB = unit.new_domain_ref() - self.resource_api.create_domain(self.domainB['id'], self.domainB) - self.domainC = unit.new_domain_ref(enabled=False) - self.resource_api.create_domain(self.domainC['id'], self.domainC) - - # Now create some users, one in domainA and two of them in domainB - self.user1 = unit.create_user(self.identity_api, - domain_id=self.domainA['id']) - self.user2 = unit.create_user(self.identity_api, - domain_id=self.domainB['id']) - self.user3 = unit.create_user(self.identity_api, - domain_id=self.domainB['id']) - - self.group1 = unit.new_group_ref(domain_id=self.domainA['id']) - self.group1 = self.identity_api.create_group(self.group1) - - self.group2 = unit.new_group_ref(domain_id=self.domainA['id']) - self.group2 = self.identity_api.create_group(self.group2) - - self.group3 = unit.new_group_ref(domain_id=self.domainB['id']) - self.group3 = self.identity_api.create_group(self.group3) - - self.role = unit.new_role_ref() - self.role_api.create_role(self.role['id'], self.role) - self.role1 = unit.new_role_ref() - self.role_api.create_role(self.role1['id'], self.role1) - self.assignment_api.create_grant(self.role['id'], - user_id=self.user1['id'], - domain_id=self.domainA['id']) - self.assignment_api.create_grant(self.role['id'], - user_id=self.user2['id'], - domain_id=self.domainA['id']) - self.assignment_api.create_grant(self.role1['id'], - user_id=self.user1['id'], - 
domain_id=self.domainA['id']) - - def _get_id_list_from_ref_list(self, ref_list): - result_list = [] - for x in ref_list: - result_list.append(x['id']) - return result_list - - def _set_policy(self, new_policy): - with open(self.tmpfilename, "w") as policyfile: - policyfile.write(jsonutils.dumps(new_policy)) - - def test_list_users_unprotected(self): - """GET /users (unprotected) - - Test Plan: - - - Update policy so api is unprotected - - Use an un-scoped token to make sure we can get back all - the users independent of domain - - """ - self._set_policy({"identity:list_users": []}) - r = self.get('/users', auth=self.auth) - id_list = self._get_id_list_from_ref_list(r.result.get('users')) - self.assertIn(self.user1['id'], id_list) - self.assertIn(self.user2['id'], id_list) - self.assertIn(self.user3['id'], id_list) - - def test_list_users_filtered_by_domain(self): - """GET /users?domain_id=mydomain (filtered) - - Test Plan: - - - Update policy so api is unprotected - - Use an un-scoped token to make sure we can filter the - users by domainB, getting back the 2 users in that domain - - """ - self._set_policy({"identity:list_users": []}) - url_by_name = '/users?domain_id=%s' % self.domainB['id'] - r = self.get(url_by_name, auth=self.auth) - # We should get back two users, those in DomainB - id_list = self._get_id_list_from_ref_list(r.result.get('users')) - self.assertIn(self.user2['id'], id_list) - self.assertIn(self.user3['id'], id_list) - - def test_get_user_protected_match_id(self): - """GET /users/{id} (match payload) - - Test Plan: - - - Update policy to protect api by user_id - - List users with user_id of user1 as filter, to check that - this will correctly match user_id in the flattened - payload - - """ - # TODO(henry-nash, ayoung): It would be good to expand this - # test for further test flattening, e.g. 
protect on, say, an - # attribute of an object being created - new_policy = {"identity:get_user": [["user_id:%(user_id)s"]]} - self._set_policy(new_policy) - url_by_name = '/users/%s' % self.user1['id'] - r = self.get(url_by_name, auth=self.auth) - self.assertEqual(self.user1['id'], r.result['user']['id']) - - def test_get_user_protected_match_target(self): - """GET /users/{id} (match target) - - Test Plan: - - - Update policy to protect api by domain_id - - Try and read a user who is in DomainB with a token scoped - to Domain A - this should fail - - Retry this for a user who is in Domain A, which should succeed. - - Finally, try getting a user that does not exist, which should - still return UserNotFound - - """ - new_policy = {'identity:get_user': - [["domain_id:%(target.user.domain_id)s"]]} - self._set_policy(new_policy) - self.auth = self.build_authentication_request( - user_id=self.user1['id'], - password=self.user1['password'], - domain_id=self.domainA['id']) - url_by_name = '/users/%s' % self.user2['id'] - r = self.get(url_by_name, auth=self.auth, - expected_status=exception.ForbiddenAction.code) - - url_by_name = '/users/%s' % self.user1['id'] - r = self.get(url_by_name, auth=self.auth) - self.assertEqual(self.user1['id'], r.result['user']['id']) - - url_by_name = '/users/%s' % uuid.uuid4().hex - r = self.get(url_by_name, auth=self.auth, - expected_status=exception.UserNotFound.code) - - def test_revoke_grant_protected_match_target(self): - """DELETE /domains/{id}/users/{id}/roles/{id} (match target) - - Test Plan: - - - Update policy to protect api by domain_id of entities in - the grant - - Try and delete the existing grant that has a user who is - from a different domain - this should fail. - - Retry this for a user who is in Domain A, which should succeed. 
- - """ - new_policy = {'identity:revoke_grant': - [["domain_id:%(target.user.domain_id)s"]]} - self._set_policy(new_policy) - collection_url = ( - '/domains/%(domain_id)s/users/%(user_id)s/roles' % { - 'domain_id': self.domainA['id'], - 'user_id': self.user2['id']}) - member_url = '%(collection_url)s/%(role_id)s' % { - 'collection_url': collection_url, - 'role_id': self.role['id']} - - self.auth = self.build_authentication_request( - user_id=self.user1['id'], - password=self.user1['password'], - domain_id=self.domainA['id']) - self.delete(member_url, auth=self.auth, - expected_status=exception.ForbiddenAction.code) - - collection_url = ( - '/domains/%(domain_id)s/users/%(user_id)s/roles' % { - 'domain_id': self.domainA['id'], - 'user_id': self.user1['id']}) - member_url = '%(collection_url)s/%(role_id)s' % { - 'collection_url': collection_url, - 'role_id': self.role1['id']} - self.delete(member_url, auth=self.auth) - - def test_list_users_protected_by_domain(self): - """GET /users?domain_id=mydomain (protected) - - Test Plan: - - - Update policy to protect api by domain_id - - List groups using a token scoped to domainA with a filter - specifying domainA - we should only get back the one user - that is in domainA. 
- - Try and read the users from domainB - this should fail since - we don't have a token scoped for domainB - - """ - new_policy = {"identity:list_users": ["domain_id:%(domain_id)s"]} - self._set_policy(new_policy) - self.auth = self.build_authentication_request( - user_id=self.user1['id'], - password=self.user1['password'], - domain_id=self.domainA['id']) - url_by_name = '/users?domain_id=%s' % self.domainA['id'] - r = self.get(url_by_name, auth=self.auth) - # We should only get back one user, the one in DomainA - id_list = self._get_id_list_from_ref_list(r.result.get('users')) - self.assertEqual(1, len(id_list)) - self.assertIn(self.user1['id'], id_list) - - # Now try for domainB, which should fail - url_by_name = '/users?domain_id=%s' % self.domainB['id'] - r = self.get(url_by_name, auth=self.auth, - expected_status=exception.ForbiddenAction.code) - - def test_list_groups_protected_by_domain(self): - """GET /groups?domain_id=mydomain (protected) - - Test Plan: - - - Update policy to protect api by domain_id - - List groups using a token scoped to domainA and make sure - we only get back the two groups that are in domainA - - Try and read the groups from domainB - this should fail since - we don't have a token scoped for domainB - - """ - new_policy = {"identity:list_groups": ["domain_id:%(domain_id)s"]} - self._set_policy(new_policy) - self.auth = self.build_authentication_request( - user_id=self.user1['id'], - password=self.user1['password'], - domain_id=self.domainA['id']) - url_by_name = '/groups?domain_id=%s' % self.domainA['id'] - r = self.get(url_by_name, auth=self.auth) - # We should only get back two groups, the ones in DomainA - id_list = self._get_id_list_from_ref_list(r.result.get('groups')) - self.assertEqual(2, len(id_list)) - self.assertIn(self.group1['id'], id_list) - self.assertIn(self.group2['id'], id_list) - - # Now try for domainB, which should fail - url_by_name = '/groups?domain_id=%s' % self.domainB['id'] - r = self.get(url_by_name, 
auth=self.auth, - expected_status=exception.ForbiddenAction.code) - - def test_list_groups_protected_by_domain_and_filtered(self): - """GET /groups?domain_id=mydomain&name=myname (protected) - - Test Plan: - - - Update policy to protect api by domain_id - - List groups using a token scoped to domainA with a filter - specifying both domainA and the name of group. - - We should only get back the group in domainA that matches - the name - - """ - new_policy = {"identity:list_groups": ["domain_id:%(domain_id)s"]} - self._set_policy(new_policy) - self.auth = self.build_authentication_request( - user_id=self.user1['id'], - password=self.user1['password'], - domain_id=self.domainA['id']) - url_by_name = '/groups?domain_id=%s&name=%s' % ( - self.domainA['id'], self.group2['name']) - r = self.get(url_by_name, auth=self.auth) - # We should only get back one user, the one in DomainA that matches - # the name supplied - id_list = self._get_id_list_from_ref_list(r.result.get('groups')) - self.assertEqual(1, len(id_list)) - self.assertIn(self.group2['id'], id_list) - - -class IdentityTestPolicySample(test_v3.RestfulTestCase): - """Test policy enforcement of the policy.json file.""" - - def load_sample_data(self): - self._populate_default_domain() - - self.just_a_user = unit.create_user( - self.identity_api, - domain_id=CONF.identity.default_domain_id) - self.another_user = unit.create_user( - self.identity_api, - domain_id=CONF.identity.default_domain_id) - self.admin_user = unit.create_user( - self.identity_api, - domain_id=CONF.identity.default_domain_id) - - self.role = unit.new_role_ref() - self.role_api.create_role(self.role['id'], self.role) - self.admin_role = unit.new_role_ref(name='admin') - self.role_api.create_role(self.admin_role['id'], self.admin_role) - - # Create and assign roles to the project - self.project = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - self.resource_api.create_project(self.project['id'], self.project) - 
self.assignment_api.create_grant(self.role['id'], - user_id=self.just_a_user['id'], - project_id=self.project['id']) - self.assignment_api.create_grant(self.role['id'], - user_id=self.another_user['id'], - project_id=self.project['id']) - self.assignment_api.create_grant(self.admin_role['id'], - user_id=self.admin_user['id'], - project_id=self.project['id']) - - def test_user_validate_same_token(self): - # Given a non-admin user token, the token can be used to validate - # itself. - # This is GET /v3/auth/tokens, with X-Auth-Token == X-Subject-Token - - auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password']) - token = self.get_requested_token(auth) - - self.get('/auth/tokens', token=token, - headers={'X-Subject-Token': token}) - - def test_user_validate_user_token(self): - # A user can validate one of their own tokens. - # This is GET /v3/auth/tokens - - auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password']) - token1 = self.get_requested_token(auth) - token2 = self.get_requested_token(auth) - - self.get('/auth/tokens', token=token1, - headers={'X-Subject-Token': token2}) - - def test_user_validate_other_user_token_rejected(self): - # A user cannot validate another user's token. - # This is GET /v3/auth/tokens - - user1_auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password']) - user1_token = self.get_requested_token(user1_auth) - - user2_auth = self.build_authentication_request( - user_id=self.another_user['id'], - password=self.another_user['password']) - user2_token = self.get_requested_token(user2_auth) - - self.get('/auth/tokens', token=user1_token, - headers={'X-Subject-Token': user2_token}, - expected_status=http_client.FORBIDDEN) - - def test_admin_validate_user_token(self): - # An admin can validate a user's token. 
- # This is GET /v3/auth/tokens - - admin_auth = self.build_authentication_request( - user_id=self.admin_user['id'], - password=self.admin_user['password'], - project_id=self.project['id']) - admin_token = self.get_requested_token(admin_auth) - - user_auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password']) - user_token = self.get_requested_token(user_auth) - - self.get('/auth/tokens', token=admin_token, - headers={'X-Subject-Token': user_token}) - - def test_user_check_same_token(self): - # Given a non-admin user token, the token can be used to check - # itself. - # This is HEAD /v3/auth/tokens, with X-Auth-Token == X-Subject-Token - - auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password']) - token = self.get_requested_token(auth) - - self.head('/auth/tokens', token=token, - headers={'X-Subject-Token': token}, - expected_status=http_client.OK) - - def test_user_check_user_token(self): - # A user can check one of their own tokens. - # This is HEAD /v3/auth/tokens - - auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password']) - token1 = self.get_requested_token(auth) - token2 = self.get_requested_token(auth) - - self.head('/auth/tokens', token=token1, - headers={'X-Subject-Token': token2}, - expected_status=http_client.OK) - - def test_user_check_other_user_token_rejected(self): - # A user cannot check another user's token. 
- # This is HEAD /v3/auth/tokens - - user1_auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password']) - user1_token = self.get_requested_token(user1_auth) - - user2_auth = self.build_authentication_request( - user_id=self.another_user['id'], - password=self.another_user['password']) - user2_token = self.get_requested_token(user2_auth) - - self.head('/auth/tokens', token=user1_token, - headers={'X-Subject-Token': user2_token}, - expected_status=http_client.FORBIDDEN) - - def test_admin_check_user_token(self): - # An admin can check a user's token. - # This is HEAD /v3/auth/tokens - - admin_auth = self.build_authentication_request( - user_id=self.admin_user['id'], - password=self.admin_user['password'], - project_id=self.project['id']) - admin_token = self.get_requested_token(admin_auth) - - user_auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password']) - user_token = self.get_requested_token(user_auth) - - self.head('/auth/tokens', token=admin_token, - headers={'X-Subject-Token': user_token}, - expected_status=http_client.OK) - - def test_user_revoke_same_token(self): - # Given a non-admin user token, the token can be used to revoke - # itself. - # This is DELETE /v3/auth/tokens, with X-Auth-Token == X-Subject-Token - - auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password']) - token = self.get_requested_token(auth) - - self.delete('/auth/tokens', token=token, - headers={'X-Subject-Token': token}) - - def test_user_revoke_user_token(self): - # A user can revoke one of their own tokens. 
- # This is DELETE /v3/auth/tokens - - auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password']) - token1 = self.get_requested_token(auth) - token2 = self.get_requested_token(auth) - - self.delete('/auth/tokens', token=token1, - headers={'X-Subject-Token': token2}) - - def test_user_revoke_other_user_token_rejected(self): - # A user cannot revoke another user's token. - # This is DELETE /v3/auth/tokens - - user1_auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password']) - user1_token = self.get_requested_token(user1_auth) - - user2_auth = self.build_authentication_request( - user_id=self.another_user['id'], - password=self.another_user['password']) - user2_token = self.get_requested_token(user2_auth) - - self.delete('/auth/tokens', token=user1_token, - headers={'X-Subject-Token': user2_token}, - expected_status=http_client.FORBIDDEN) - - def test_admin_revoke_user_token(self): - # An admin can revoke a user's token. - # This is DELETE /v3/auth/tokens - - admin_auth = self.build_authentication_request( - user_id=self.admin_user['id'], - password=self.admin_user['password'], - project_id=self.project['id']) - admin_token = self.get_requested_token(admin_auth) - - user_auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password']) - user_token = self.get_requested_token(user_auth) - - self.delete('/auth/tokens', token=admin_token, - headers={'X-Subject-Token': user_token}) - - -class IdentityTestv3CloudPolicySample(test_v3.RestfulTestCase, - test_v3.AssignmentTestMixin): - """Test policy enforcement of the sample v3 cloud policy file.""" - - def _policy_fixture(self): - return ksfixtures.Policy(unit.dirs.etc('policy.v3cloudsample.json'), - self.config_fixture) - - def setUp(self): - """Setup for v3 Cloud Policy Sample Test Cases. 
- - The following data is created: - - - Three domains: domainA, domainB and admin_domain - - One project, which name is 'project' - - domainA has three users: domain_admin_user, project_admin_user and - just_a_user: - - - domain_admin_user has role 'admin' on domainA, - - project_admin_user has role 'admin' on the project, - - just_a_user has a non-admin role on both domainA and the project. - - admin_domain has admin_project, and user cloud_admin_user, with an - 'admin' role on admin_project. - - We test various api protection rules from the cloud sample policy - file to make sure the sample is valid and that we correctly enforce it. - - """ - # Ensure that test_v3.RestfulTestCase doesn't load its own - # sample data, which would make checking the results of our - # tests harder - super(IdentityTestv3CloudPolicySample, self).setUp() - - self.config_fixture.config( - group='resource', - admin_project_name=self.admin_project['name']) - self.config_fixture.config( - group='resource', - admin_project_domain_name=self.admin_domain['name']) - - def load_sample_data(self): - # Start by creating a couple of domains - self._populate_default_domain() - self.domainA = unit.new_domain_ref() - self.resource_api.create_domain(self.domainA['id'], self.domainA) - self.domainB = unit.new_domain_ref() - self.resource_api.create_domain(self.domainB['id'], self.domainB) - self.admin_domain = unit.new_domain_ref() - self.resource_api.create_domain(self.admin_domain['id'], - self.admin_domain) - - self.admin_project = unit.new_project_ref( - domain_id=self.admin_domain['id']) - self.resource_api.create_project(self.admin_project['id'], - self.admin_project) - - # And our users - self.cloud_admin_user = unit.create_user( - self.identity_api, - domain_id=self.admin_domain['id']) - self.just_a_user = unit.create_user( - self.identity_api, - domain_id=self.domainA['id']) - self.domain_admin_user = unit.create_user( - self.identity_api, - domain_id=self.domainA['id']) - 
self.domainB_admin_user = unit.create_user( - self.identity_api, - domain_id=self.domainB['id']) - self.project_admin_user = unit.create_user( - self.identity_api, - domain_id=self.domainA['id']) - self.project_adminB_user = unit.create_user( - self.identity_api, - domain_id=self.domainB['id']) - - # The admin role, a domain specific role and another plain role - self.admin_role = unit.new_role_ref(name='admin') - self.role_api.create_role(self.admin_role['id'], self.admin_role) - self.roleA = unit.new_role_ref(domain_id=self.domainA['id']) - self.role_api.create_role(self.roleA['id'], self.roleA) - self.role = unit.new_role_ref() - self.role_api.create_role(self.role['id'], self.role) - - # The cloud admin just gets the admin role on the special admin project - self.assignment_api.create_grant(self.admin_role['id'], - user_id=self.cloud_admin_user['id'], - project_id=self.admin_project['id']) - - # Assign roles to the domain - self.assignment_api.create_grant(self.admin_role['id'], - user_id=self.domain_admin_user['id'], - domain_id=self.domainA['id']) - self.assignment_api.create_grant(self.role['id'], - user_id=self.just_a_user['id'], - domain_id=self.domainA['id']) - self.assignment_api.create_grant(self.admin_role['id'], - user_id=self.domainB_admin_user['id'], - domain_id=self.domainB['id']) - - # Create and assign roles to the project - self.project = unit.new_project_ref(domain_id=self.domainA['id']) - self.resource_api.create_project(self.project['id'], self.project) - self.projectB = unit.new_project_ref(domain_id=self.domainB['id']) - self.resource_api.create_project(self.projectB['id'], self.projectB) - self.assignment_api.create_grant(self.admin_role['id'], - user_id=self.project_admin_user['id'], - project_id=self.project['id']) - self.assignment_api.create_grant( - self.admin_role['id'], user_id=self.project_adminB_user['id'], - project_id=self.projectB['id']) - self.assignment_api.create_grant(self.role['id'], - user_id=self.just_a_user['id'], - 
project_id=self.project['id']) - - def _stati(self, expected_status): - # Return the expected return codes for APIs with and without data - # with any specified status overriding the normal values - if expected_status is None: - return (http_client.OK, http_client.CREATED, - http_client.NO_CONTENT) - else: - return (expected_status, expected_status, expected_status) - - def _test_user_management(self, domain_id, expected=None): - status_OK, status_created, status_no_data = self._stati(expected) - entity_url = '/users/%s' % self.just_a_user['id'] - list_url = '/users?domain_id=%s' % domain_id - - self.get(entity_url, auth=self.auth, - expected_status=status_OK) - self.get(list_url, auth=self.auth, - expected_status=status_OK) - user = {'description': 'Updated'} - self.patch(entity_url, auth=self.auth, body={'user': user}, - expected_status=status_OK) - self.delete(entity_url, auth=self.auth, - expected_status=status_no_data) - - user_ref = unit.new_user_ref(domain_id=domain_id) - self.post('/users', auth=self.auth, body={'user': user_ref}, - expected_status=status_created) - - def _test_project_management(self, domain_id, expected=None): - status_OK, status_created, status_no_data = self._stati(expected) - entity_url = '/projects/%s' % self.project['id'] - list_url = '/projects?domain_id=%s' % domain_id - - self.get(entity_url, auth=self.auth, - expected_status=status_OK) - self.get(list_url, auth=self.auth, - expected_status=status_OK) - project = {'description': 'Updated'} - self.patch(entity_url, auth=self.auth, body={'project': project}, - expected_status=status_OK) - self.delete(entity_url, auth=self.auth, - expected_status=status_no_data) - - proj_ref = unit.new_project_ref(domain_id=domain_id) - self.post('/projects', auth=self.auth, body={'project': proj_ref}, - expected_status=status_created) - - def _test_domain_management(self, expected=None): - status_OK, status_created, status_no_data = self._stati(expected) - entity_url = '/domains/%s' % 
self.domainB['id'] - list_url = '/domains' - - self.get(entity_url, auth=self.auth, - expected_status=status_OK) - self.get(list_url, auth=self.auth, - expected_status=status_OK) - domain = {'description': 'Updated', 'enabled': False} - self.patch(entity_url, auth=self.auth, body={'domain': domain}, - expected_status=status_OK) - self.delete(entity_url, auth=self.auth, - expected_status=status_no_data) - - domain_ref = unit.new_domain_ref() - self.post('/domains', auth=self.auth, body={'domain': domain_ref}, - expected_status=status_created) - - def _test_grants(self, target, entity_id, role_domain_id=None, - list_status_OK=False, expected=None): - status_OK, status_created, status_no_data = self._stati(expected) - a_role = unit.new_role_ref(domain_id=role_domain_id) - self.role_api.create_role(a_role['id'], a_role) - - collection_url = ( - '/%(target)s/%(target_id)s/users/%(user_id)s/roles' % { - 'target': target, - 'target_id': entity_id, - 'user_id': self.just_a_user['id']}) - member_url = '%(collection_url)s/%(role_id)s' % { - 'collection_url': collection_url, - 'role_id': a_role['id']} - - self.put(member_url, auth=self.auth, - expected_status=status_no_data) - self.head(member_url, auth=self.auth, - expected_status=status_no_data) - if list_status_OK: - self.get(collection_url, auth=self.auth) - else: - self.get(collection_url, auth=self.auth, - expected_status=status_OK) - self.delete(member_url, auth=self.auth, - expected_status=status_no_data) - - def _role_management_cases(self, read_status_OK=False, expected=None): - # Set the different status values for different types of call depending - # on whether we expect the calls to fail or not. 
- status_OK, status_created, status_no_data = self._stati(expected) - entity_url = '/roles/%s' % self.role['id'] - list_url = '/roles' - - if read_status_OK: - self.get(entity_url, auth=self.auth) - self.get(list_url, auth=self.auth) - else: - self.get(entity_url, auth=self.auth, - expected_status=status_OK) - self.get(list_url, auth=self.auth, - expected_status=status_OK) - - role = {'name': 'Updated'} - self.patch(entity_url, auth=self.auth, body={'role': role}, - expected_status=status_OK) - self.delete(entity_url, auth=self.auth, - expected_status=status_no_data) - - role_ref = unit.new_role_ref() - self.post('/roles', auth=self.auth, body={'role': role_ref}, - expected_status=status_created) - - def _domain_role_management_cases(self, domain_id, read_status_OK=False, - expected=None): - # Set the different status values for different types of call depending - # on whether we expect the calls to fail or not. - status_OK, status_created, status_no_data = self._stati(expected) - entity_url = '/roles/%s' % self.roleA['id'] - list_url = '/roles?domain_id=%s' % domain_id - - if read_status_OK: - self.get(entity_url, auth=self.auth) - self.get(list_url, auth=self.auth) - else: - self.get(entity_url, auth=self.auth, - expected_status=status_OK) - self.get(list_url, auth=self.auth, - expected_status=status_OK) - - role = {'name': 'Updated'} - self.patch(entity_url, auth=self.auth, body={'role': role}, - expected_status=status_OK) - self.delete(entity_url, auth=self.auth, - expected_status=status_no_data) - - role_ref = unit.new_role_ref(domain_id=domain_id) - self.post('/roles', auth=self.auth, body={'role': role_ref}, - expected_status=status_created) - - def test_user_management(self): - # First, authenticate with a user that does not have the domain - # admin role - shouldn't be able to do much. 
- self.auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password'], - domain_id=self.domainA['id']) - - self._test_user_management( - self.domainA['id'], expected=exception.ForbiddenAction.code) - - # Now, authenticate with a user that does have the domain admin role - self.auth = self.build_authentication_request( - user_id=self.domain_admin_user['id'], - password=self.domain_admin_user['password'], - domain_id=self.domainA['id']) - - self._test_user_management(self.domainA['id']) - - def test_user_management_normalized_keys(self): - """Illustrate the inconsistent handling of hyphens in keys. - - To quote Morgan in bug 1526244: - - the reason this is converted from "domain-id" to "domain_id" is - because of how we process/normalize data. The way we have to handle - specific data types for known columns requires avoiding "-" in the - actual python code since "-" is not valid for attributes in python - w/o significant use of "getattr" etc. - - In short, historically we handle some things in conversions. The - use of "extras" has long been a poor design choice that leads to - odd/strange inconsistent behaviors because of other choices made in - handling data from within the body. (In many cases we convert from - "-" to "_" throughout openstack) - - Source: https://bugs.launchpad.net/keystone/+bug/1526244/comments/9 - - """ - # Authenticate with a user that has the domain admin role - self.auth = self.build_authentication_request( - user_id=self.domain_admin_user['id'], - password=self.domain_admin_user['password'], - domain_id=self.domainA['id']) - - # Show that we can read a normal user without any surprises. 
- r = self.get( - '/users/%s' % self.just_a_user['id'], - auth=self.auth, - expected_status=http_client.OK) - self.assertValidUserResponse(r) - - # We don't normalize query string keys, so both of these result in a - # 403, because we didn't specify a domain_id query string in either - # case, and we explicitly require one (it doesn't matter what - # 'domain-id' value you use). - self.get( - '/users?domain-id=%s' % self.domainA['id'], - auth=self.auth, - expected_status=exception.ForbiddenAction.code) - self.get( - '/users?domain-id=%s' % self.domainB['id'], - auth=self.auth, - expected_status=exception.ForbiddenAction.code) - - # If we try updating the user's 'domain_id' by specifying a - # 'domain-id', then it'll be stored into extras rather than normalized, - # and the user's actual 'domain_id' is not affected. - r = self.patch( - '/users/%s' % self.just_a_user['id'], - auth=self.auth, - body={'user': {'domain-id': self.domainB['id']}}, - expected_status=http_client.OK) - self.assertEqual(self.domainB['id'], r.json['user']['domain-id']) - self.assertEqual(self.domainA['id'], r.json['user']['domain_id']) - self.assertNotEqual(self.domainB['id'], self.just_a_user['domain_id']) - self.assertValidUserResponse(r, self.just_a_user) - - # Finally, show that we can create a new user without any surprises. - # But if we specify a 'domain-id' instead of a 'domain_id', we get a - # Forbidden response because we fail a policy check before - # normalization occurs. - user_ref = unit.new_user_ref(domain_id=self.domainA['id']) - r = self.post( - '/users', - auth=self.auth, - body={'user': user_ref}, - expected_status=http_client.CREATED) - self.assertValidUserResponse(r, ref=user_ref) - user_ref['domain-id'] = user_ref.pop('domain_id') - self.post( - '/users', - auth=self.auth, - body={'user': user_ref}, - expected_status=exception.ForbiddenAction.code) - - def test_user_management_by_cloud_admin(self): - # Test users management with a cloud admin. 
This user should - # be able to manage users in any domain. - self.auth = self.build_authentication_request( - user_id=self.cloud_admin_user['id'], - password=self.cloud_admin_user['password'], - project_id=self.admin_project['id']) - - self._test_user_management(self.domainA['id']) - - def test_project_management(self): - # First, authenticate with a user that does not have the project - # admin role - shouldn't be able to do much. - self.auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password'], - domain_id=self.domainA['id']) - - self._test_project_management( - self.domainA['id'], expected=exception.ForbiddenAction.code) - - # ...but should still be able to list projects of which they are - # a member - url = '/users/%s/projects' % self.just_a_user['id'] - self.get(url, auth=self.auth) - - # Now, authenticate with a user that does have the domain admin role - self.auth = self.build_authentication_request( - user_id=self.domain_admin_user['id'], - password=self.domain_admin_user['password'], - domain_id=self.domainA['id']) - - self._test_project_management(self.domainA['id']) - - def test_project_management_by_cloud_admin(self): - self.auth = self.build_authentication_request( - user_id=self.cloud_admin_user['id'], - password=self.cloud_admin_user['password'], - project_id=self.admin_project['id']) - - # Check whether cloud admin can operate a domain - # other than its own domain or not - self._test_project_management(self.domainA['id']) - - def test_domain_grants(self): - self.auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password'], - domain_id=self.domainA['id']) - - self._test_grants('domains', self.domainA['id'], - expected=exception.ForbiddenAction.code) - - # Now, authenticate with a user that does have the domain admin role - self.auth = self.build_authentication_request( - user_id=self.domain_admin_user['id'], - 
password=self.domain_admin_user['password'], - domain_id=self.domainA['id']) - - self._test_grants('domains', self.domainA['id']) - - # Check that with such a token we cannot modify grants on a - # different domain - self._test_grants('domains', self.domainB['id'], - expected=exception.ForbiddenAction.code) - - def test_domain_grants_by_cloud_admin(self): - # Test domain grants with a cloud admin. This user should be - # able to manage roles on any domain. - self.auth = self.build_authentication_request( - user_id=self.cloud_admin_user['id'], - password=self.cloud_admin_user['password'], - project_id=self.admin_project['id']) - - self._test_grants('domains', self.domainA['id']) - - def test_domain_grants_by_cloud_admin_for_domain_specific_role(self): - # Test domain grants with a cloud admin. This user should be - # able to manage domain roles on any domain. - self.auth = self.build_authentication_request( - user_id=self.cloud_admin_user['id'], - password=self.cloud_admin_user['password'], - project_id=self.admin_project['id']) - - self._test_grants('domains', self.domainA['id'], - role_domain_id=self.domainB['id']) - - def test_domain_grants_by_non_admin_for_domain_specific_role(self): - # A non-admin shouldn't be able to do anything - self.auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password'], - domain_id=self.domainA['id']) - - self._test_grants('domains', self.domainA['id'], - role_domain_id=self.domainA['id'], - expected=exception.ForbiddenAction.code) - self._test_grants('domains', self.domainA['id'], - role_domain_id=self.domainB['id'], - expected=exception.ForbiddenAction.code) - - def test_domain_grants_by_domain_admin_for_domain_specific_role(self): - # Authenticate with a user that does have the domain admin role, - # should not be able to assign a domain_specific role from another - # domain - self.auth = self.build_authentication_request( - user_id=self.domain_admin_user['id'], - 
password=self.domain_admin_user['password'], - domain_id=self.domainA['id']) - - self._test_grants('domains', self.domainA['id'], - role_domain_id=self.domainB['id'], - # List status will always be OK, since we are not - # granting/checking/deleting assignments - list_status_OK=True, - expected=exception.ForbiddenAction.code) - - # They should be able to assign a domain specific role from the same - # domain - self._test_grants('domains', self.domainA['id'], - role_domain_id=self.domainA['id']) - - def test_project_grants(self): - self.auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password'], - project_id=self.project['id']) - - self._test_grants('projects', self.project['id'], - expected=exception.ForbiddenAction.code) - - # Now, authenticate with a user that does have the project - # admin role - self.auth = self.build_authentication_request( - user_id=self.project_admin_user['id'], - password=self.project_admin_user['password'], - project_id=self.project['id']) - - self._test_grants('projects', self.project['id']) - - def test_project_grants_by_domain_admin(self): - # Test project grants with a domain admin. This user should be - # able to manage roles on any project in its own domain. 
- self.auth = self.build_authentication_request( - user_id=self.domain_admin_user['id'], - password=self.domain_admin_user['password'], - domain_id=self.domainA['id']) - - self._test_grants('projects', self.project['id']) - - def test_project_grants_by_non_admin_for_domain_specific_role(self): - # A non-admin shouldn't be able to do anything - self.auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password'], - project_id=self.project['id']) - - self._test_grants('projects', self.project['id'], - role_domain_id=self.domainA['id'], - expected=exception.ForbiddenAction.code) - self._test_grants('projects', self.project['id'], - role_domain_id=self.domainB['id'], - expected=exception.ForbiddenAction.code) - - def test_project_grants_by_project_admin_for_domain_specific_role(self): - # Authenticate with a user that does have the project admin role, - # should not be able to assign a domain_specific role from another - # domain - self.auth = self.build_authentication_request( - user_id=self.project_admin_user['id'], - password=self.project_admin_user['password'], - project_id=self.project['id']) - - self._test_grants('projects', self.project['id'], - role_domain_id=self.domainB['id'], - # List status will always be OK, since we are not - # granting/checking/deleting assignments - list_status_OK=True, - expected=exception.ForbiddenAction.code) - - # They should be able to assign a domain specific role from the same - # domain - self._test_grants('projects', self.project['id'], - role_domain_id=self.domainA['id']) - - def test_project_grants_by_domain_admin_for_domain_specific_role(self): - # Authenticate with a user that does have the domain admin role, - # should not be able to assign a domain_specific role from another - # domain - self.auth = self.build_authentication_request( - user_id=self.domain_admin_user['id'], - password=self.domain_admin_user['password'], - domain_id=self.domainA['id']) - - 
self._test_grants('projects', self.project['id'], - role_domain_id=self.domainB['id'], - # List status will always be OK, since we are not - # granting/checking/deleting assignments - list_status_OK=True, - expected=exception.ForbiddenAction.code) - - # They should be able to assign a domain specific role from the same - # domain - self._test_grants('projects', self.project['id'], - role_domain_id=self.domainA['id']) - - def test_cloud_admin_list_assignments_of_domain(self): - self.auth = self.build_authentication_request( - user_id=self.cloud_admin_user['id'], - password=self.cloud_admin_user['password'], - project_id=self.admin_project['id']) - - collection_url = self.build_role_assignment_query_url( - domain_id=self.domainA['id']) - r = self.get(collection_url, auth=self.auth) - self.assertValidRoleAssignmentListResponse( - r, expected_length=2, resource_url=collection_url) - - domainA_admin_entity = self.build_role_assignment_entity( - domain_id=self.domainA['id'], - user_id=self.domain_admin_user['id'], - role_id=self.admin_role['id'], - inherited_to_projects=False) - domainA_user_entity = self.build_role_assignment_entity( - domain_id=self.domainA['id'], - user_id=self.just_a_user['id'], - role_id=self.role['id'], - inherited_to_projects=False) - - self.assertRoleAssignmentInListResponse(r, domainA_admin_entity) - self.assertRoleAssignmentInListResponse(r, domainA_user_entity) - - def test_domain_admin_list_assignments_of_domain(self): - self.auth = self.build_authentication_request( - user_id=self.domain_admin_user['id'], - password=self.domain_admin_user['password'], - domain_id=self.domainA['id']) - - collection_url = self.build_role_assignment_query_url( - domain_id=self.domainA['id']) - r = self.get(collection_url, auth=self.auth) - self.assertValidRoleAssignmentListResponse( - r, expected_length=2, resource_url=collection_url) - - domainA_admin_entity = self.build_role_assignment_entity( - domain_id=self.domainA['id'], - 
user_id=self.domain_admin_user['id'], - role_id=self.admin_role['id'], - inherited_to_projects=False) - domainA_user_entity = self.build_role_assignment_entity( - domain_id=self.domainA['id'], - user_id=self.just_a_user['id'], - role_id=self.role['id'], - inherited_to_projects=False) - - self.assertRoleAssignmentInListResponse(r, domainA_admin_entity) - self.assertRoleAssignmentInListResponse(r, domainA_user_entity) - - def test_domain_admin_list_assignments_of_another_domain_failed(self): - self.auth = self.build_authentication_request( - user_id=self.domain_admin_user['id'], - password=self.domain_admin_user['password'], - domain_id=self.domainA['id']) - - collection_url = self.build_role_assignment_query_url( - domain_id=self.domainB['id']) - self.get(collection_url, auth=self.auth, - expected_status=http_client.FORBIDDEN) - - def test_domain_user_list_assignments_of_domain_failed(self): - self.auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password'], - domain_id=self.domainA['id']) - - collection_url = self.build_role_assignment_query_url( - domain_id=self.domainA['id']) - self.get(collection_url, auth=self.auth, - expected_status=http_client.FORBIDDEN) - - def test_cloud_admin_list_assignments_of_project(self): - self.auth = self.build_authentication_request( - user_id=self.cloud_admin_user['id'], - password=self.cloud_admin_user['password'], - project_id=self.admin_project['id']) - - collection_url = self.build_role_assignment_query_url( - project_id=self.project['id']) - r = self.get(collection_url, auth=self.auth) - self.assertValidRoleAssignmentListResponse( - r, expected_length=2, resource_url=collection_url) - - project_admin_entity = self.build_role_assignment_entity( - project_id=self.project['id'], - user_id=self.project_admin_user['id'], - role_id=self.admin_role['id'], - inherited_to_projects=False) - project_user_entity = self.build_role_assignment_entity( - project_id=self.project['id'], - 
user_id=self.just_a_user['id'], - role_id=self.role['id'], - inherited_to_projects=False) - - self.assertRoleAssignmentInListResponse(r, project_admin_entity) - self.assertRoleAssignmentInListResponse(r, project_user_entity) - - def test_admin_project_list_assignments_of_project(self): - self.auth = self.build_authentication_request( - user_id=self.project_admin_user['id'], - password=self.project_admin_user['password'], - project_id=self.project['id']) - - collection_url = self.build_role_assignment_query_url( - project_id=self.project['id']) - r = self.get(collection_url, auth=self.auth) - self.assertValidRoleAssignmentListResponse( - r, expected_length=2, resource_url=collection_url) - - project_admin_entity = self.build_role_assignment_entity( - project_id=self.project['id'], - user_id=self.project_admin_user['id'], - role_id=self.admin_role['id'], - inherited_to_projects=False) - project_user_entity = self.build_role_assignment_entity( - project_id=self.project['id'], - user_id=self.just_a_user['id'], - role_id=self.role['id'], - inherited_to_projects=False) - - self.assertRoleAssignmentInListResponse(r, project_admin_entity) - self.assertRoleAssignmentInListResponse(r, project_user_entity) - - @utils.wip('waiting on bug #1437407') - def test_domain_admin_list_assignments_of_project(self): - self.auth = self.build_authentication_request( - user_id=self.domain_admin_user['id'], - password=self.domain_admin_user['password'], - domain_id=self.domainA['id']) - - collection_url = self.build_role_assignment_query_url( - project_id=self.project['id']) - r = self.get(collection_url, auth=self.auth) - self.assertValidRoleAssignmentListResponse( - r, expected_length=2, resource_url=collection_url) - - project_admin_entity = self.build_role_assignment_entity( - project_id=self.project['id'], - user_id=self.project_admin_user['id'], - role_id=self.admin_role['id'], - inherited_to_projects=False) - project_user_entity = self.build_role_assignment_entity( - 
project_id=self.project['id'], - user_id=self.just_a_user['id'], - role_id=self.role['id'], - inherited_to_projects=False) - - self.assertRoleAssignmentInListResponse(r, project_admin_entity) - self.assertRoleAssignmentInListResponse(r, project_user_entity) - - def test_domain_admin_list_assignment_tree(self): - # Add a child project to the standard test data - sub_project = unit.new_project_ref(domain_id=self.domainA['id'], - parent_id=self.project['id']) - self.resource_api.create_project(sub_project['id'], sub_project) - self.assignment_api.create_grant(self.role['id'], - user_id=self.just_a_user['id'], - project_id=sub_project['id']) - - collection_url = self.build_role_assignment_query_url( - project_id=self.project['id']) - collection_url += '&include_subtree=True' - - # The domain admin should be able to list the assignment tree - auth = self.build_authentication_request( - user_id=self.domain_admin_user['id'], - password=self.domain_admin_user['password'], - domain_id=self.domainA['id']) - - r = self.get(collection_url, auth=auth) - self.assertValidRoleAssignmentListResponse( - r, expected_length=3, resource_url=collection_url) - - # A project admin should not be able to - auth = self.build_authentication_request( - user_id=self.project_admin_user['id'], - password=self.project_admin_user['password'], - project_id=self.project['id']) - - r = self.get(collection_url, auth=auth, - expected_status=http_client.FORBIDDEN) - - # A neither should a domain admin from a different domain - domainB_admin_user = unit.create_user( - self.identity_api, - domain_id=self.domainB['id']) - self.assignment_api.create_grant(self.admin_role['id'], - user_id=domainB_admin_user['id'], - domain_id=self.domainB['id']) - auth = self.build_authentication_request( - user_id=domainB_admin_user['id'], - password=domainB_admin_user['password'], - domain_id=self.domainB['id']) - - r = self.get(collection_url, auth=auth, - expected_status=http_client.FORBIDDEN) - - def 
test_domain_user_list_assignments_of_project_failed(self): - self.auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password'], - domain_id=self.domainA['id']) - - collection_url = self.build_role_assignment_query_url( - project_id=self.project['id']) - self.get(collection_url, auth=self.auth, - expected_status=http_client.FORBIDDEN) - - def test_cloud_admin(self): - self.auth = self.build_authentication_request( - user_id=self.domain_admin_user['id'], - password=self.domain_admin_user['password'], - domain_id=self.domainA['id']) - - self._test_domain_management( - expected=exception.ForbiddenAction.code) - - self.auth = self.build_authentication_request( - user_id=self.cloud_admin_user['id'], - password=self.cloud_admin_user['password'], - project_id=self.admin_project['id']) - - self._test_domain_management() - - def test_admin_project(self): - self.auth = self.build_authentication_request( - user_id=self.project_admin_user['id'], - password=self.project_admin_user['password'], - project_id=self.project['id']) - - self._test_domain_management( - expected=exception.ForbiddenAction.code) - - self.auth = self.build_authentication_request( - user_id=self.cloud_admin_user['id'], - password=self.cloud_admin_user['password'], - project_id=self.admin_project['id']) - - self._test_domain_management() - - def test_domain_admin_get_domain(self): - self.auth = self.build_authentication_request( - user_id=self.domain_admin_user['id'], - password=self.domain_admin_user['password'], - domain_id=self.domainA['id']) - entity_url = '/domains/%s' % self.domainA['id'] - self.get(entity_url, auth=self.auth) - - def test_list_user_credentials(self): - credential_user = unit.new_credential_ref(self.just_a_user['id']) - self.credential_api.create_credential(credential_user['id'], - credential_user) - credential_admin = unit.new_credential_ref(self.cloud_admin_user['id']) - 
self.credential_api.create_credential(credential_admin['id'], - credential_admin) - - self.auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password']) - url = '/credentials?user_id=%s' % self.just_a_user['id'] - self.get(url, auth=self.auth) - url = '/credentials?user_id=%s' % self.cloud_admin_user['id'] - self.get(url, auth=self.auth, - expected_status=exception.ForbiddenAction.code) - url = '/credentials' - self.get(url, auth=self.auth, - expected_status=exception.ForbiddenAction.code) - - def test_get_and_delete_ec2_credentials(self): - """Tests getting and deleting ec2 credentials through the ec2 API.""" - another_user = unit.create_user(self.identity_api, - domain_id=self.domainA['id']) - - # create a credential for just_a_user - just_user_auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password'], - project_id=self.project['id']) - url = '/users/%s/credentials/OS-EC2' % self.just_a_user['id'] - r = self.post(url, body={'tenant_id': self.project['id']}, - auth=just_user_auth) - - # another normal user can't get the credential - another_user_auth = self.build_authentication_request( - user_id=another_user['id'], - password=another_user['password']) - another_user_url = '/users/%s/credentials/OS-EC2/%s' % ( - another_user['id'], r.result['credential']['access']) - self.get(another_user_url, auth=another_user_auth, - expected_status=exception.ForbiddenAction.code) - - # the owner can get the credential - just_user_url = '/users/%s/credentials/OS-EC2/%s' % ( - self.just_a_user['id'], r.result['credential']['access']) - self.get(just_user_url, auth=just_user_auth) - - # another normal user can't delete the credential - self.delete(another_user_url, auth=another_user_auth, - expected_status=exception.ForbiddenAction.code) - - # the owner can get the credential - self.delete(just_user_url, auth=just_user_auth) - - def test_user_validate_same_token(self): 
- # Given a non-admin user token, the token can be used to validate - # itself. - # This is GET /v3/auth/tokens, with X-Auth-Token == X-Subject-Token - - auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password']) - token = self.get_requested_token(auth) - - self.get('/auth/tokens', token=token, - headers={'X-Subject-Token': token}) - - def test_user_validate_user_token(self): - # A user can validate one of their own tokens. - # This is GET /v3/auth/tokens - - auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password']) - token1 = self.get_requested_token(auth) - token2 = self.get_requested_token(auth) - - self.get('/auth/tokens', token=token1, - headers={'X-Subject-Token': token2}) - - def test_user_validate_other_user_token_rejected(self): - # A user cannot validate another user's token. - # This is GET /v3/auth/tokens - - user1_auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password']) - user1_token = self.get_requested_token(user1_auth) - - user2_auth = self.build_authentication_request( - user_id=self.cloud_admin_user['id'], - password=self.cloud_admin_user['password']) - user2_token = self.get_requested_token(user2_auth) - - self.get('/auth/tokens', token=user1_token, - headers={'X-Subject-Token': user2_token}, - expected_status=http_client.FORBIDDEN) - - def test_admin_validate_user_token(self): - # An admin can validate a user's token. 
- # This is GET /v3/auth/tokens - - admin_auth = self.build_authentication_request( - user_id=self.cloud_admin_user['id'], - password=self.cloud_admin_user['password'], - project_id=self.admin_project['id']) - admin_token = self.get_requested_token(admin_auth) - - user_auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password']) - user_token = self.get_requested_token(user_auth) - - self.get('/auth/tokens', token=admin_token, - headers={'X-Subject-Token': user_token}) - - def test_admin_project_validate_user_token(self): - # An admin can validate a user's token. - # This is GET /v3/auth/tokens - - admin_auth = self.build_authentication_request( - user_id=self.project_admin_user['id'], - password=self.project_admin_user['password'], - project_id=self.project['id']) - - admin_token = self.get_requested_token(admin_auth) - - user_auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password']) - user_token = self.get_requested_token(user_auth) - - self.get('/auth/tokens', token=admin_token, - headers={'X-Subject-Token': user_token}) - - def test_user_check_same_token(self): - # Given a non-admin user token, the token can be used to check - # itself. - # This is HEAD /v3/auth/tokens, with X-Auth-Token == X-Subject-Token - - auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password']) - token = self.get_requested_token(auth) - - self.head('/auth/tokens', token=token, - headers={'X-Subject-Token': token}, - expected_status=http_client.OK) - - def test_user_check_user_token(self): - # A user can check one of their own tokens. 
- # This is HEAD /v3/auth/tokens - - auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password']) - token1 = self.get_requested_token(auth) - token2 = self.get_requested_token(auth) - - self.head('/auth/tokens', token=token1, - headers={'X-Subject-Token': token2}, - expected_status=http_client.OK) - - def test_user_check_other_user_token_rejected(self): - # A user cannot check another user's token. - # This is HEAD /v3/auth/tokens - - user1_auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password']) - user1_token = self.get_requested_token(user1_auth) - - user2_auth = self.build_authentication_request( - user_id=self.cloud_admin_user['id'], - password=self.cloud_admin_user['password']) - user2_token = self.get_requested_token(user2_auth) - - self.head('/auth/tokens', token=user1_token, - headers={'X-Subject-Token': user2_token}, - expected_status=http_client.FORBIDDEN) - - def test_admin_check_user_token(self): - # An admin can check a user's token. - # This is HEAD /v3/auth/tokens - - admin_auth = self.build_authentication_request( - user_id=self.domain_admin_user['id'], - password=self.domain_admin_user['password'], - domain_id=self.domainA['id']) - admin_token = self.get_requested_token(admin_auth) - - user_auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password']) - user_token = self.get_requested_token(user_auth) - - self.head('/auth/tokens', token=admin_token, - headers={'X-Subject-Token': user_token}, - expected_status=http_client.OK) - - def test_user_revoke_same_token(self): - # Given a non-admin user token, the token can be used to revoke - # itself. 
- # This is DELETE /v3/auth/tokens, with X-Auth-Token == X-Subject-Token - - auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password']) - token = self.get_requested_token(auth) - - self.delete('/auth/tokens', token=token, - headers={'X-Subject-Token': token}) - - def test_user_revoke_user_token(self): - # A user can revoke one of their own tokens. - # This is DELETE /v3/auth/tokens - - auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password']) - token1 = self.get_requested_token(auth) - token2 = self.get_requested_token(auth) - - self.delete('/auth/tokens', token=token1, - headers={'X-Subject-Token': token2}) - - def test_user_revoke_other_user_token_rejected(self): - # A user cannot revoke another user's token. - # This is DELETE /v3/auth/tokens - - user1_auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password']) - user1_token = self.get_requested_token(user1_auth) - - user2_auth = self.build_authentication_request( - user_id=self.cloud_admin_user['id'], - password=self.cloud_admin_user['password']) - user2_token = self.get_requested_token(user2_auth) - - self.delete('/auth/tokens', token=user1_token, - headers={'X-Subject-Token': user2_token}, - expected_status=http_client.FORBIDDEN) - - def test_admin_revoke_user_token(self): - # An admin can revoke a user's token. 
- # This is DELETE /v3/auth/tokens - - admin_auth = self.build_authentication_request( - user_id=self.domain_admin_user['id'], - password=self.domain_admin_user['password'], - domain_id=self.domainA['id']) - admin_token = self.get_requested_token(admin_auth) - - user_auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password']) - user_token = self.get_requested_token(user_auth) - - self.delete('/auth/tokens', token=admin_token, - headers={'X-Subject-Token': user_token}) - - def test_user_with_a_role_get_project(self): - user_auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password'], - project_id=self.project['id']) - - # Test user can get project for one they have a role in - self.get('/projects/%s' % self.project['id'], auth=user_auth) - - # Test user can not get project for one they don't have a role in, - # even if they have a role on another project - project2 = unit.new_project_ref(domain_id=self.domainA['id']) - self.resource_api.create_project(project2['id'], project2) - self.get('/projects/%s' % project2['id'], auth=user_auth, - expected_status=exception.ForbiddenAction.code) - - def test_project_admin_get_project(self): - admin_auth = self.build_authentication_request( - user_id=self.project_admin_user['id'], - password=self.project_admin_user['password'], - project_id=self.project['id']) - - resp = self.get('/projects/%s' % self.project['id'], auth=admin_auth) - self.assertEqual(self.project['id'], - jsonutils.loads(resp.body)['project']['id']) - - def test_role_management_no_admin_no_rights(self): - # A non-admin domain user shouldn't be able to manipulate roles - self.auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password'], - domain_id=self.domainA['id']) - - self._role_management_cases(expected=exception.ForbiddenAction.code) - - # ...and nor should non-admin project user - 
self.auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password'], - project_id=self.project['id']) - - self._role_management_cases(expected=exception.ForbiddenAction.code) - - def test_role_management_with_project_admin(self): - # A project admin user should be able to get and list, but not be able - # to create/update/delete global roles - self.auth = self.build_authentication_request( - user_id=self.project_admin_user['id'], - password=self.project_admin_user['password'], - project_id=self.project['id']) - - self._role_management_cases(read_status_OK=True, - expected=exception.ForbiddenAction.code) - - def test_role_management_with_domain_admin(self): - # A domain admin user should be able to get and list, but not be able - # to create/update/delete global roles - self.auth = self.build_authentication_request( - user_id=self.domain_admin_user['id'], - password=self.domain_admin_user['password'], - domain_id=self.domainA['id']) - - self._role_management_cases(read_status_OK=True, - expected=exception.ForbiddenAction.code) - - def test_role_management_with_cloud_admin(self): - # A cloud admin user should have rights to manipulate global roles - self.auth = self.build_authentication_request( - user_id=self.cloud_admin_user['id'], - password=self.cloud_admin_user['password'], - project_id=self.admin_project['id']) - - self._role_management_cases() - - def test_domain_role_management_no_admin_no_rights(self): - # A non-admin domain user shouldn't be able to manipulate domain roles - self.auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password'], - domain_id=self.domainA['id']) - - self._domain_role_management_cases( - self.domainA['id'], expected=exception.ForbiddenAction.code) - - # ...and nor should non-admin project user - self.auth = self.build_authentication_request( - user_id=self.just_a_user['id'], - password=self.just_a_user['password'], - 
project_id=self.project['id']) - - self._domain_role_management_cases( - self.domainA['id'], expected=exception.ForbiddenAction.code) - - def test_domain_role_management_with_cloud_admin(self): - # A cloud admin user should have rights to manipulate domain roles - self.auth = self.build_authentication_request( - user_id=self.cloud_admin_user['id'], - password=self.cloud_admin_user['password'], - project_id=self.admin_project['id']) - - self._domain_role_management_cases(self.domainA['id']) - - def test_domain_role_management_with_domain_admin(self): - # A domain admin user should only be able to manipulate the domain - # specific roles in their own domain - self.auth = self.build_authentication_request( - user_id=self.domainB_admin_user['id'], - password=self.domainB_admin_user['password'], - domain_id=self.domainB['id']) - - # Try to access the domain specific roles in another domain - self._domain_role_management_cases( - self.domainA['id'], expected=exception.ForbiddenAction.code) - - # ...but they should be able to work with those in their own domain - self.auth = self.build_authentication_request( - user_id=self.domain_admin_user['id'], - password=self.domain_admin_user['password'], - domain_id=self.domainA['id']) - - self._domain_role_management_cases(self.domainA['id']) - - def test_domain_role_management_with_project_admin(self): - # A project admin user should have not access to domain specific roles - # in another domain. 
They should be able to get and list domain - # specific roles from their own domain, but not be able to create, - # update or delete them, - self.auth = self.build_authentication_request( - user_id=self.project_adminB_user['id'], - password=self.project_adminB_user['password'], - project_id=self.projectB['id']) - - # Try access the domain specific roless in another domain - self._domain_role_management_cases( - self.domainA['id'], expected=exception.ForbiddenAction.code) - - # ...but they should be ablet to work with those in their own domain - self.auth = self.build_authentication_request( - user_id=self.project_admin_user['id'], - password=self.project_admin_user['password'], - project_id=self.project['id']) - - self._domain_role_management_cases( - self.domainA['id'], read_status_OK=True, - expected=exception.ForbiddenAction.code) diff --git a/keystone-moon/keystone/tests/unit/test_v3_resource.py b/keystone-moon/keystone/tests/unit/test_v3_resource.py deleted file mode 100644 index f54fcb57..00000000 --- a/keystone-moon/keystone/tests/unit/test_v3_resource.py +++ /dev/null @@ -1,1434 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import uuid - -from oslo_config import cfg -from six.moves import http_client -from six.moves import range -from testtools import matchers - -from keystone.common import controller -from keystone import exception -from keystone.tests import unit -from keystone.tests.unit import test_v3 -from keystone.tests.unit import utils as test_utils - - -CONF = cfg.CONF - - -class ResourceTestCase(test_v3.RestfulTestCase, - test_v3.AssignmentTestMixin): - """Test domains and projects.""" - - # Domain CRUD tests - - def test_create_domain(self): - """Call ``POST /domains``.""" - ref = unit.new_domain_ref() - r = self.post( - '/domains', - body={'domain': ref}) - return self.assertValidDomainResponse(r, ref) - - def test_create_domain_case_sensitivity(self): - """Call `POST /domains`` twice with upper() and lower() cased name.""" - ref = unit.new_domain_ref() - - # ensure the name is lowercase - ref['name'] = ref['name'].lower() - r = self.post( - '/domains', - body={'domain': ref}) - self.assertValidDomainResponse(r, ref) - - # ensure the name is uppercase - ref['name'] = ref['name'].upper() - r = self.post( - '/domains', - body={'domain': ref}) - self.assertValidDomainResponse(r, ref) - - def test_create_domain_bad_request(self): - """Call ``POST /domains``.""" - self.post('/domains', body={'domain': {}}, - expected_status=http_client.BAD_REQUEST) - - def test_create_domain_unsafe(self): - """Call ``POST /domains with unsafe names``.""" - unsafe_name = 'i am not / safe' - - self.config_fixture.config(group='resource', - domain_name_url_safe='off') - ref = unit.new_domain_ref(name=unsafe_name) - self.post( - '/domains', - body={'domain': ref}) - - for config_setting in ['new', 'strict']: - self.config_fixture.config(group='resource', - domain_name_url_safe=config_setting) - ref = unit.new_domain_ref(name=unsafe_name) - self.post( - '/domains', - body={'domain': ref}, - expected_status=http_client.BAD_REQUEST) - - def test_create_domain_unsafe_default(self): - """Check 
default for unsafe names for ``POST /domains``.""" - unsafe_name = 'i am not / safe' - - # By default, we should be able to create unsafe names - ref = unit.new_domain_ref(name=unsafe_name) - self.post( - '/domains', - body={'domain': ref}) - - def test_create_domain_creates_is_domain_project(self): - """Check a project that acts as a domain is created. - - Call ``POST /domains``. - """ - # Create a new domain - domain_ref = unit.new_domain_ref() - r = self.post('/domains', body={'domain': domain_ref}) - self.assertValidDomainResponse(r, domain_ref) - - # Retrieve its correspondent project - r = self.get('/projects/%(project_id)s' % { - 'project_id': r.result['domain']['id']}) - self.assertValidProjectResponse(r) - - # The created project has is_domain flag as True - self.assertTrue(r.result['project']['is_domain']) - - # And its parent_id and domain_id attributes are equal - self.assertIsNone(r.result['project']['parent_id']) - self.assertIsNone(r.result['project']['domain_id']) - - def test_create_is_domain_project_creates_domain(self): - """Call ``POST /projects`` is_domain and check a domain is created.""" - # Create a new project that acts as a domain - project_ref = unit.new_project_ref(domain_id=None, is_domain=True) - r = self.post('/projects', body={'project': project_ref}) - self.assertValidProjectResponse(r) - - # Retrieve its correspondent domain - r = self.get('/domains/%(domain_id)s' % { - 'domain_id': r.result['project']['id']}) - self.assertValidDomainResponse(r) - self.assertIsNotNone(r.result['domain']) - - def test_list_domains(self): - """Call ``GET /domains``.""" - resource_url = '/domains' - r = self.get(resource_url) - self.assertValidDomainListResponse(r, ref=self.domain, - resource_url=resource_url) - - def test_get_domain(self): - """Call ``GET /domains/{domain_id}``.""" - r = self.get('/domains/%(domain_id)s' % { - 'domain_id': self.domain_id}) - self.assertValidDomainResponse(r, self.domain) - - def test_update_domain(self): - """Call 
``PATCH /domains/{domain_id}``.""" - ref = unit.new_domain_ref() - del ref['id'] - r = self.patch('/domains/%(domain_id)s' % { - 'domain_id': self.domain_id}, - body={'domain': ref}) - self.assertValidDomainResponse(r, ref) - - def test_update_domain_unsafe(self): - """Call ``POST /domains/{domain_id} with unsafe names``.""" - unsafe_name = 'i am not / safe' - - self.config_fixture.config(group='resource', - domain_name_url_safe='off') - ref = unit.new_domain_ref(name=unsafe_name) - del ref['id'] - self.patch('/domains/%(domain_id)s' % { - 'domain_id': self.domain_id}, - body={'domain': ref}) - - unsafe_name = 'i am still not / safe' - for config_setting in ['new', 'strict']: - self.config_fixture.config(group='resource', - domain_name_url_safe=config_setting) - ref = unit.new_domain_ref(name=unsafe_name) - del ref['id'] - self.patch('/domains/%(domain_id)s' % { - 'domain_id': self.domain_id}, - body={'domain': ref}, - expected_status=http_client.BAD_REQUEST) - - def test_update_domain_unsafe_default(self): - """Check default for unsafe names for ``POST /domains``.""" - unsafe_name = 'i am not / safe' - - # By default, we should be able to create unsafe names - ref = unit.new_domain_ref(name=unsafe_name) - del ref['id'] - self.patch('/domains/%(domain_id)s' % { - 'domain_id': self.domain_id}, - body={'domain': ref}) - - def test_update_domain_updates_is_domain_project(self): - """Check the project that acts as a domain is updated. - - Call ``PATCH /domains``. 
- """ - # Create a new domain - domain_ref = unit.new_domain_ref() - r = self.post('/domains', body={'domain': domain_ref}) - self.assertValidDomainResponse(r, domain_ref) - - # Disable it - self.patch('/domains/%s' % r.result['domain']['id'], - body={'domain': {'enabled': False}}) - - # Retrieve its correspondent project - r = self.get('/projects/%(project_id)s' % { - 'project_id': r.result['domain']['id']}) - self.assertValidProjectResponse(r) - - # The created project is disabled as well - self.assertFalse(r.result['project']['enabled']) - - def test_disable_domain(self): - """Call ``PATCH /domains/{domain_id}`` (set enabled=False).""" - # Create a 2nd set of entities in a 2nd domain - domain2 = unit.new_domain_ref() - self.resource_api.create_domain(domain2['id'], domain2) - - project2 = unit.new_project_ref(domain_id=domain2['id']) - self.resource_api.create_project(project2['id'], project2) - - user2 = unit.create_user(self.identity_api, - domain_id=domain2['id'], - project_id=project2['id']) - - self.assignment_api.add_user_to_project(project2['id'], - user2['id']) - - # First check a user in that domain can authenticate.. 
- body = { - 'auth': { - 'passwordCredentials': { - 'userId': user2['id'], - 'password': user2['password'] - }, - 'tenantId': project2['id'] - } - } - self.admin_request( - path='/v2.0/tokens', method='POST', body=body) - - auth_data = self.build_authentication_request( - user_id=user2['id'], - password=user2['password'], - project_id=project2['id']) - self.v3_create_token(auth_data) - - # Now disable the domain - domain2['enabled'] = False - r = self.patch('/domains/%(domain_id)s' % { - 'domain_id': domain2['id']}, - body={'domain': {'enabled': False}}) - self.assertValidDomainResponse(r, domain2) - - # Make sure the user can no longer authenticate, via - # either API - body = { - 'auth': { - 'passwordCredentials': { - 'userId': user2['id'], - 'password': user2['password'] - }, - 'tenantId': project2['id'] - } - } - self.admin_request( - path='/v2.0/tokens', method='POST', body=body, - expected_status=http_client.UNAUTHORIZED) - - # Try looking up in v3 by name and id - auth_data = self.build_authentication_request( - user_id=user2['id'], - password=user2['password'], - project_id=project2['id']) - self.v3_create_token(auth_data, - expected_status=http_client.UNAUTHORIZED) - - auth_data = self.build_authentication_request( - username=user2['name'], - user_domain_id=domain2['id'], - password=user2['password'], - project_id=project2['id']) - self.v3_create_token(auth_data, - expected_status=http_client.UNAUTHORIZED) - - def test_delete_enabled_domain_fails(self): - """Call ``DELETE /domains/{domain_id}`` (when domain enabled).""" - # Try deleting an enabled domain, which should fail - self.delete('/domains/%(domain_id)s' % { - 'domain_id': self.domain['id']}, - expected_status=exception.ForbiddenAction.code) - - def test_delete_domain(self): - """Call ``DELETE /domains/{domain_id}``. - - The sample data set up already has a user and project that is part of - self.domain. Additionally we will create a group and a credential - within it. 
Since the user we will authenticate with is in this domain, - we create a another set of entities in a second domain. Deleting this - second domain should delete all these new entities. In addition, - all the entities in the regular self.domain should be unaffected - by the delete. - - Test Plan: - - - Create domain2 and a 2nd set of entities - - Disable domain2 - - Delete domain2 - - Check entities in domain2 have been deleted - - Check entities in self.domain are unaffected - - """ - # Create a group and a credential in the main domain - group = unit.new_group_ref(domain_id=self.domain_id) - group = self.identity_api.create_group(group) - - credential = unit.new_credential_ref(user_id=self.user['id'], - project_id=self.project_id) - self.credential_api.create_credential(credential['id'], credential) - - # Create a 2nd set of entities in a 2nd domain - domain2 = unit.new_domain_ref() - self.resource_api.create_domain(domain2['id'], domain2) - - project2 = unit.new_project_ref(domain_id=domain2['id']) - project2 = self.resource_api.create_project(project2['id'], project2) - - user2 = unit.new_user_ref(domain_id=domain2['id'], - project_id=project2['id']) - user2 = self.identity_api.create_user(user2) - - group2 = unit.new_group_ref(domain_id=domain2['id']) - group2 = self.identity_api.create_group(group2) - - credential2 = unit.new_credential_ref(user_id=user2['id'], - project_id=project2['id']) - self.credential_api.create_credential(credential2['id'], - credential2) - - # Now disable the new domain and delete it - domain2['enabled'] = False - r = self.patch('/domains/%(domain_id)s' % { - 'domain_id': domain2['id']}, - body={'domain': {'enabled': False}}) - self.assertValidDomainResponse(r, domain2) - self.delete('/domains/%(domain_id)s' % {'domain_id': domain2['id']}) - - # Check all the domain2 relevant entities are gone - self.assertRaises(exception.DomainNotFound, - self.resource_api.get_domain, - domain2['id']) - self.assertRaises(exception.ProjectNotFound, - 
self.resource_api.get_project, - project2['id']) - self.assertRaises(exception.GroupNotFound, - self.identity_api.get_group, - group2['id']) - self.assertRaises(exception.UserNotFound, - self.identity_api.get_user, - user2['id']) - self.assertRaises(exception.CredentialNotFound, - self.credential_api.get_credential, - credential2['id']) - - # ...and that all self.domain entities are still here - r = self.resource_api.get_domain(self.domain['id']) - self.assertDictEqual(self.domain, r) - r = self.resource_api.get_project(self.project['id']) - self.assertDictEqual(self.project, r) - r = self.identity_api.get_group(group['id']) - self.assertDictEqual(group, r) - r = self.identity_api.get_user(self.user['id']) - self.user.pop('password') - self.assertDictEqual(self.user, r) - r = self.credential_api.get_credential(credential['id']) - self.assertDictEqual(credential, r) - - def test_delete_domain_deletes_is_domain_project(self): - """Check the project that acts as a domain is deleted. - - Call ``DELETE /domains``. - """ - # Create a new domain - domain_ref = unit.new_domain_ref() - r = self.post('/domains', body={'domain': domain_ref}) - self.assertValidDomainResponse(r, domain_ref) - - # Retrieve its correspondent project - self.get('/projects/%(project_id)s' % { - 'project_id': r.result['domain']['id']}) - - # Delete the domain - self.patch('/domains/%s' % r.result['domain']['id'], - body={'domain': {'enabled': False}}) - self.delete('/domains/%s' % r.result['domain']['id']) - - # The created project is deleted as well - self.get('/projects/%(project_id)s' % { - 'project_id': r.result['domain']['id']}, expected_status=404) - - def test_delete_default_domain(self): - # Need to disable it first. 
- self.patch('/domains/%(domain_id)s' % { - 'domain_id': CONF.identity.default_domain_id}, - body={'domain': {'enabled': False}}) - - self.delete( - '/domains/%(domain_id)s' % { - 'domain_id': CONF.identity.default_domain_id}) - - def test_token_revoked_once_domain_disabled(self): - """Test token from a disabled domain has been invalidated. - - Test that a token that was valid for an enabled domain - becomes invalid once that domain is disabled. - - """ - domain = unit.new_domain_ref() - self.resource_api.create_domain(domain['id'], domain) - - user2 = unit.create_user(self.identity_api, - domain_id=domain['id']) - - # build a request body - auth_body = self.build_authentication_request( - user_id=user2['id'], - password=user2['password']) - - # sends a request for the user's token - token_resp = self.post('/auth/tokens', body=auth_body) - - subject_token = token_resp.headers.get('x-subject-token') - - # validates the returned token and it should be valid. - self.head('/auth/tokens', - headers={'x-subject-token': subject_token}, - expected_status=http_client.OK) - - # now disable the domain - domain['enabled'] = False - url = "/domains/%(domain_id)s" % {'domain_id': domain['id']} - self.patch(url, - body={'domain': {'enabled': False}}) - - # validates the same token again and it should be 'not found' - # as the domain has already been disabled. 
- self.head('/auth/tokens', - headers={'x-subject-token': subject_token}, - expected_status=http_client.NOT_FOUND) - - def test_delete_domain_hierarchy(self): - """Call ``DELETE /domains/{domain_id}``.""" - domain = unit.new_domain_ref() - self.resource_api.create_domain(domain['id'], domain) - - root_project = unit.new_project_ref(domain_id=domain['id']) - root_project = self.resource_api.create_project(root_project['id'], - root_project) - - leaf_project = unit.new_project_ref( - domain_id=domain['id'], - parent_id=root_project['id']) - self.resource_api.create_project(leaf_project['id'], leaf_project) - - # Need to disable it first. - self.patch('/domains/%(domain_id)s' % { - 'domain_id': domain['id']}, - body={'domain': {'enabled': False}}) - - self.delete( - '/domains/%(domain_id)s' % { - 'domain_id': domain['id']}) - - self.assertRaises(exception.DomainNotFound, - self.resource_api.get_domain, - domain['id']) - - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - root_project['id']) - - self.assertRaises(exception.ProjectNotFound, - self.resource_api.get_project, - leaf_project['id']) - - def test_forbid_operations_on_federated_domain(self): - """Make sure one cannot operate on federated domain. - - This includes operations like create, update, delete - on domain identified by id and name where difference variations of - id 'Federated' are used. 
- - """ - def create_domains(): - for variation in ('Federated', 'FEDERATED', - 'federated', 'fEderated'): - domain = unit.new_domain_ref() - domain['id'] = variation - yield domain - - for domain in create_domains(): - self.assertRaises( - AssertionError, self.resource_api.create_domain, - domain['id'], domain) - self.assertRaises( - AssertionError, self.resource_api.update_domain, - domain['id'], domain) - self.assertRaises( - exception.DomainNotFound, self.resource_api.delete_domain, - domain['id']) - - # swap 'name' with 'id' and try again, expecting the request to - # gracefully fail - domain['id'], domain['name'] = domain['name'], domain['id'] - self.assertRaises( - AssertionError, self.resource_api.create_domain, - domain['id'], domain) - self.assertRaises( - AssertionError, self.resource_api.update_domain, - domain['id'], domain) - self.assertRaises( - exception.DomainNotFound, self.resource_api.delete_domain, - domain['id']) - - def test_forbid_operations_on_defined_federated_domain(self): - """Make sure one cannot operate on a user-defined federated domain. - - This includes operations like create, update, delete. 
- - """ - non_default_name = 'beta_federated_domain' - self.config_fixture.config(group='federation', - federated_domain_name=non_default_name) - domain = unit.new_domain_ref(name=non_default_name) - self.assertRaises(AssertionError, - self.resource_api.create_domain, - domain['id'], domain) - self.assertRaises(exception.DomainNotFound, - self.resource_api.delete_domain, - domain['id']) - self.assertRaises(AssertionError, - self.resource_api.update_domain, - domain['id'], domain) - - # Project CRUD tests - - def test_list_projects(self): - """Call ``GET /projects``.""" - resource_url = '/projects' - r = self.get(resource_url) - self.assertValidProjectListResponse(r, ref=self.project, - resource_url=resource_url) - - def test_create_project(self): - """Call ``POST /projects``.""" - ref = unit.new_project_ref(domain_id=self.domain_id) - r = self.post( - '/projects', - body={'project': ref}) - self.assertValidProjectResponse(r, ref) - - def test_create_project_bad_request(self): - """Call ``POST /projects``.""" - self.post('/projects', body={'project': {}}, - expected_status=http_client.BAD_REQUEST) - - def test_create_project_invalid_domain_id(self): - """Call ``POST /projects``.""" - ref = unit.new_project_ref(domain_id=uuid.uuid4().hex) - self.post('/projects', body={'project': ref}, - expected_status=http_client.BAD_REQUEST) - - def test_create_project_unsafe(self): - """Call ``POST /projects with unsafe names``.""" - unsafe_name = 'i am not / safe' - - self.config_fixture.config(group='resource', - project_name_url_safe='off') - ref = unit.new_project_ref(name=unsafe_name) - self.post( - '/projects', - body={'project': ref}) - - for config_setting in ['new', 'strict']: - self.config_fixture.config(group='resource', - project_name_url_safe=config_setting) - ref = unit.new_project_ref(name=unsafe_name) - self.post( - '/projects', - body={'project': ref}, - expected_status=http_client.BAD_REQUEST) - - def test_create_project_unsafe_default(self): - """Check default 
for unsafe names for ``POST /projects``.""" - unsafe_name = 'i am not / safe' - - # By default, we should be able to create unsafe names - ref = unit.new_project_ref(name=unsafe_name) - self.post( - '/projects', - body={'project': ref}) - - def test_create_project_with_parent_id_none_and_domain_id_none(self): - """Call ``POST /projects``.""" - # Grant a domain role for the user - collection_url = ( - '/domains/%(domain_id)s/users/%(user_id)s/roles' % { - 'domain_id': self.domain_id, - 'user_id': self.user['id']}) - member_url = '%(collection_url)s/%(role_id)s' % { - 'collection_url': collection_url, - 'role_id': self.role_id} - self.put(member_url) - - # Create an authentication request for a domain scoped token - auth = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - domain_id=self.domain_id) - - # Without parent_id and domain_id passed as None, the domain_id should - # be normalized to the domain on the token, when using a domain - # scoped token. - ref = unit.new_project_ref() - r = self.post( - '/projects', - auth=auth, - body={'project': ref}) - ref['domain_id'] = self.domain['id'] - self.assertValidProjectResponse(r, ref) - - def test_create_project_without_parent_id_and_without_domain_id(self): - """Call ``POST /projects``.""" - # Grant a domain role for the user - collection_url = ( - '/domains/%(domain_id)s/users/%(user_id)s/roles' % { - 'domain_id': self.domain_id, - 'user_id': self.user['id']}) - member_url = '%(collection_url)s/%(role_id)s' % { - 'collection_url': collection_url, - 'role_id': self.role_id} - self.put(member_url) - - # Create an authentication request for a domain scoped token - auth = self.build_authentication_request( - user_id=self.user['id'], - password=self.user['password'], - domain_id=self.domain_id) - - # Without domain_id and parent_id, the domain_id should be - # normalized to the domain on the token, when using a domain - # scoped token. 
- ref = unit.new_project_ref() - r = self.post( - '/projects', - auth=auth, - body={'project': ref}) - ref['domain_id'] = self.domain['id'] - self.assertValidProjectResponse(r, ref) - - @test_utils.wip('waiting for support for parent_id to imply domain_id') - def test_create_project_with_parent_id_and_no_domain_id(self): - """Call ``POST /projects``.""" - # With only the parent_id, the domain_id should be - # normalized to the parent's domain_id - ref_child = unit.new_project_ref(parent_id=self.project['id']) - - r = self.post( - '/projects', - body={'project': ref_child}) - self.assertEqual(r.result['project']['domain_id'], - self.project['domain_id']) - ref_child['domain_id'] = self.domain['id'] - self.assertValidProjectResponse(r, ref_child) - - def _create_projects_hierarchy(self, hierarchy_size=1): - """Creates a single-branched project hierarchy with the specified size. - - :param hierarchy_size: the desired hierarchy size, default is 1 - - a project with one child. - - :returns projects: a list of the projects in the created hierarchy. 
- - """ - new_ref = unit.new_project_ref(domain_id=self.domain_id) - resp = self.post('/projects', body={'project': new_ref}) - - projects = [resp.result] - - for i in range(hierarchy_size): - new_ref = unit.new_project_ref( - domain_id=self.domain_id, - parent_id=projects[i]['project']['id']) - resp = self.post('/projects', - body={'project': new_ref}) - self.assertValidProjectResponse(resp, new_ref) - - projects.append(resp.result) - - return projects - - def test_list_projects_filtering_by_parent_id(self): - """Call ``GET /projects?parent_id={project_id}``.""" - projects = self._create_projects_hierarchy(hierarchy_size=2) - - # Add another child to projects[1] - it will be projects[3] - new_ref = unit.new_project_ref( - domain_id=self.domain_id, - parent_id=projects[1]['project']['id']) - resp = self.post('/projects', - body={'project': new_ref}) - self.assertValidProjectResponse(resp, new_ref) - - projects.append(resp.result) - - # Query for projects[0] immediate children - it will - # be only projects[1] - r = self.get( - '/projects?parent_id=%(project_id)s' % { - 'project_id': projects[0]['project']['id']}) - self.assertValidProjectListResponse(r) - - projects_result = r.result['projects'] - expected_list = [projects[1]['project']] - - # projects[0] has projects[1] as child - self.assertEqual(expected_list, projects_result) - - # Query for projects[1] immediate children - it will - # be projects[2] and projects[3] - r = self.get( - '/projects?parent_id=%(project_id)s' % { - 'project_id': projects[1]['project']['id']}) - self.assertValidProjectListResponse(r) - - projects_result = r.result['projects'] - expected_list = [projects[2]['project'], projects[3]['project']] - - # projects[1] has projects[2] and projects[3] as children - self.assertEqual(expected_list, projects_result) - - # Query for projects[2] immediate children - it will be an empty list - r = self.get( - '/projects?parent_id=%(project_id)s' % { - 'project_id': projects[2]['project']['id']}) - 
self.assertValidProjectListResponse(r) - - projects_result = r.result['projects'] - expected_list = [] - - # projects[2] has no child, projects_result must be an empty list - self.assertEqual(expected_list, projects_result) - - def test_create_hierarchical_project(self): - """Call ``POST /projects``.""" - self._create_projects_hierarchy() - - def test_get_project(self): - """Call ``GET /projects/{project_id}``.""" - r = self.get( - '/projects/%(project_id)s' % { - 'project_id': self.project_id}) - self.assertValidProjectResponse(r, self.project) - - def test_get_project_with_parents_as_list_with_invalid_id(self): - """Call ``GET /projects/{project_id}?parents_as_list``.""" - self.get('/projects/%(project_id)s?parents_as_list' % { - 'project_id': None}, expected_status=http_client.NOT_FOUND) - - self.get('/projects/%(project_id)s?parents_as_list' % { - 'project_id': uuid.uuid4().hex}, - expected_status=http_client.NOT_FOUND) - - def test_get_project_with_subtree_as_list_with_invalid_id(self): - """Call ``GET /projects/{project_id}?subtree_as_list``.""" - self.get('/projects/%(project_id)s?subtree_as_list' % { - 'project_id': None}, expected_status=http_client.NOT_FOUND) - - self.get('/projects/%(project_id)s?subtree_as_list' % { - 'project_id': uuid.uuid4().hex}, - expected_status=http_client.NOT_FOUND) - - def test_get_project_with_parents_as_ids(self): - """Call ``GET /projects/{project_id}?parents_as_ids``.""" - projects = self._create_projects_hierarchy(hierarchy_size=2) - - # Query for projects[2] parents_as_ids - r = self.get( - '/projects/%(project_id)s?parents_as_ids' % { - 'project_id': projects[2]['project']['id']}) - - self.assertValidProjectResponse(r, projects[2]['project']) - parents_as_ids = r.result['project']['parents'] - - # Assert parents_as_ids is a structured dictionary correctly - # representing the hierarchy. 
The request was made using projects[2] - # id, hence its parents should be projects[1], projects[0] and the - # is_domain_project, which is the root of the hierarchy. It should - # have the following structure: - # { - # projects[1]: { - # projects[0]: { - # is_domain_project: None - # } - # } - # } - is_domain_project_id = projects[0]['project']['domain_id'] - expected_dict = { - projects[1]['project']['id']: { - projects[0]['project']['id']: {is_domain_project_id: None} - } - } - self.assertDictEqual(expected_dict, parents_as_ids) - - # Query for projects[0] parents_as_ids - r = self.get( - '/projects/%(project_id)s?parents_as_ids' % { - 'project_id': projects[0]['project']['id']}) - - self.assertValidProjectResponse(r, projects[0]['project']) - parents_as_ids = r.result['project']['parents'] - - # projects[0] has only the project that acts as a domain as parent - expected_dict = { - is_domain_project_id: None - } - self.assertDictEqual(expected_dict, parents_as_ids) - - # Query for is_domain_project parents_as_ids - r = self.get( - '/projects/%(project_id)s?parents_as_ids' % { - 'project_id': is_domain_project_id}) - - parents_as_ids = r.result['project']['parents'] - - # the project that acts as a domain has no parents, parents_as_ids - # must be None - self.assertIsNone(parents_as_ids) - - def test_get_project_with_parents_as_list_with_full_access(self): - """``GET /projects/{project_id}?parents_as_list`` with full access. - - Test plan: - - - Create 'parent', 'project' and 'subproject' projects; - - Assign a user a role on each one of those projects; - - Check that calling parents_as_list on 'subproject' returns both - 'project' and 'parent'. 
- - """ - # Create the project hierarchy - parent, project, subproject = self._create_projects_hierarchy(2) - - # Assign a role for the user on all the created projects - for proj in (parent, project, subproject): - self.put(self.build_role_assignment_link( - role_id=self.role_id, user_id=self.user_id, - project_id=proj['project']['id'])) - - # Make the API call - r = self.get('/projects/%(project_id)s?parents_as_list' % - {'project_id': subproject['project']['id']}) - self.assertValidProjectResponse(r, subproject['project']) - - # Assert only 'project' and 'parent' are in the parents list - self.assertIn(project, r.result['project']['parents']) - self.assertIn(parent, r.result['project']['parents']) - self.assertEqual(2, len(r.result['project']['parents'])) - - def test_get_project_with_parents_as_list_with_partial_access(self): - """``GET /projects/{project_id}?parents_as_list`` with partial access. - - Test plan: - - - Create 'parent', 'project' and 'subproject' projects; - - Assign a user a role on 'parent' and 'subproject'; - - Check that calling parents_as_list on 'subproject' only returns - 'parent'. - - """ - # Create the project hierarchy - parent, project, subproject = self._create_projects_hierarchy(2) - - # Assign a role for the user on parent and subproject - for proj in (parent, subproject): - self.put(self.build_role_assignment_link( - role_id=self.role_id, user_id=self.user_id, - project_id=proj['project']['id'])) - - # Make the API call - r = self.get('/projects/%(project_id)s?parents_as_list' % - {'project_id': subproject['project']['id']}) - self.assertValidProjectResponse(r, subproject['project']) - - # Assert only 'parent' is in the parents list - self.assertIn(parent, r.result['project']['parents']) - self.assertEqual(1, len(r.result['project']['parents'])) - - def test_get_project_with_parents_as_list_and_parents_as_ids(self): - """Attempt to list a project's parents as both a list and as IDs. 
- - This uses ``GET /projects/{project_id}?parents_as_list&parents_as_ids`` - which should fail with a Bad Request due to the conflicting query - strings. - - """ - projects = self._create_projects_hierarchy(hierarchy_size=2) - - self.get( - '/projects/%(project_id)s?parents_as_list&parents_as_ids' % { - 'project_id': projects[1]['project']['id']}, - expected_status=http_client.BAD_REQUEST) - - def test_list_project_is_domain_filter(self): - """Call ``GET /projects?is_domain=True/False``.""" - # Get the initial number of projects, both acting as a domain as well - # as regular. - r = self.get('/projects?is_domain=True', expected_status=200) - initial_number_is_domain_true = len(r.result['projects']) - r = self.get('/projects?is_domain=False', expected_status=200) - initial_number_is_domain_false = len(r.result['projects']) - - # Add some more projects acting as domains - new_is_domain_project = unit.new_project_ref(is_domain=True) - new_is_domain_project = self.resource_api.create_project( - new_is_domain_project['id'], new_is_domain_project) - new_is_domain_project2 = unit.new_project_ref(is_domain=True) - new_is_domain_project2 = self.resource_api.create_project( - new_is_domain_project2['id'], new_is_domain_project2) - number_is_domain_true = initial_number_is_domain_true + 2 - - r = self.get('/projects?is_domain=True', expected_status=200) - self.assertThat(r.result['projects'], - matchers.HasLength(number_is_domain_true)) - self.assertIn(new_is_domain_project['id'], - [p['id'] for p in r.result['projects']]) - self.assertIn(new_is_domain_project2['id'], - [p['id'] for p in r.result['projects']]) - - # Now add a regular project - new_regular_project = unit.new_project_ref(domain_id=self.domain_id) - new_regular_project = self.resource_api.create_project( - new_regular_project['id'], new_regular_project) - number_is_domain_false = initial_number_is_domain_false + 1 - - # Check we still have the same number of projects acting as domains - r = 
self.get('/projects?is_domain=True', expected_status=200) - self.assertThat(r.result['projects'], - matchers.HasLength(number_is_domain_true)) - - # Check the number of regular projects is correct - r = self.get('/projects?is_domain=False', expected_status=200) - self.assertThat(r.result['projects'], - matchers.HasLength(number_is_domain_false)) - self.assertIn(new_regular_project['id'], - [p['id'] for p in r.result['projects']]) - - def test_list_project_is_domain_filter_default(self): - """Default project list should not see projects acting as domains""" - # Get the initial count of regular projects - r = self.get('/projects?is_domain=False', expected_status=200) - number_is_domain_false = len(r.result['projects']) - - # Make sure we have at least one project acting as a domain - new_is_domain_project = unit.new_project_ref(is_domain=True) - new_is_domain_project = self.resource_api.create_project( - new_is_domain_project['id'], new_is_domain_project) - - r = self.get('/projects', expected_status=200) - self.assertThat(r.result['projects'], - matchers.HasLength(number_is_domain_false)) - self.assertNotIn(new_is_domain_project, r.result['projects']) - - def test_get_project_with_subtree_as_ids(self): - """Call ``GET /projects/{project_id}?subtree_as_ids``. - - This test creates a more complex hierarchy to test if the structured - dictionary returned by using the ``subtree_as_ids`` query param - correctly represents the hierarchy. 
- - The hierarchy contains 5 projects with the following structure:: - - +--A--+ - | | - +--B--+ C - | | - D E - - - """ - projects = self._create_projects_hierarchy(hierarchy_size=2) - - # Add another child to projects[0] - it will be projects[3] - new_ref = unit.new_project_ref( - domain_id=self.domain_id, - parent_id=projects[0]['project']['id']) - resp = self.post('/projects', - body={'project': new_ref}) - self.assertValidProjectResponse(resp, new_ref) - projects.append(resp.result) - - # Add another child to projects[1] - it will be projects[4] - new_ref = unit.new_project_ref( - domain_id=self.domain_id, - parent_id=projects[1]['project']['id']) - resp = self.post('/projects', - body={'project': new_ref}) - self.assertValidProjectResponse(resp, new_ref) - projects.append(resp.result) - - # Query for projects[0] subtree_as_ids - r = self.get( - '/projects/%(project_id)s?subtree_as_ids' % { - 'project_id': projects[0]['project']['id']}) - self.assertValidProjectResponse(r, projects[0]['project']) - subtree_as_ids = r.result['project']['subtree'] - - # The subtree hierarchy from projects[0] should have the following - # structure: - # { - # projects[1]: { - # projects[2]: None, - # projects[4]: None - # }, - # projects[3]: None - # } - expected_dict = { - projects[1]['project']['id']: { - projects[2]['project']['id']: None, - projects[4]['project']['id']: None - }, - projects[3]['project']['id']: None - } - self.assertDictEqual(expected_dict, subtree_as_ids) - - # Now query for projects[1] subtree_as_ids - r = self.get( - '/projects/%(project_id)s?subtree_as_ids' % { - 'project_id': projects[1]['project']['id']}) - self.assertValidProjectResponse(r, projects[1]['project']) - subtree_as_ids = r.result['project']['subtree'] - - # The subtree hierarchy from projects[1] should have the following - # structure: - # { - # projects[2]: None, - # projects[4]: None - # } - expected_dict = { - projects[2]['project']['id']: None, - projects[4]['project']['id']: None - } - 
self.assertDictEqual(expected_dict, subtree_as_ids) - - # Now query for projects[3] subtree_as_ids - r = self.get( - '/projects/%(project_id)s?subtree_as_ids' % { - 'project_id': projects[3]['project']['id']}) - self.assertValidProjectResponse(r, projects[3]['project']) - subtree_as_ids = r.result['project']['subtree'] - - # projects[3] has no subtree, subtree_as_ids must be None - self.assertIsNone(subtree_as_ids) - - def test_get_project_with_subtree_as_list_with_full_access(self): - """``GET /projects/{project_id}?subtree_as_list`` with full access. - - Test plan: - - - Create 'parent', 'project' and 'subproject' projects; - - Assign a user a role on each one of those projects; - - Check that calling subtree_as_list on 'parent' returns both 'parent' - and 'subproject'. - - """ - # Create the project hierarchy - parent, project, subproject = self._create_projects_hierarchy(2) - - # Assign a role for the user on all the created projects - for proj in (parent, project, subproject): - self.put(self.build_role_assignment_link( - role_id=self.role_id, user_id=self.user_id, - project_id=proj['project']['id'])) - - # Make the API call - r = self.get('/projects/%(project_id)s?subtree_as_list' % - {'project_id': parent['project']['id']}) - self.assertValidProjectResponse(r, parent['project']) - - # Assert only 'project' and 'subproject' are in the subtree - self.assertIn(project, r.result['project']['subtree']) - self.assertIn(subproject, r.result['project']['subtree']) - self.assertEqual(2, len(r.result['project']['subtree'])) - - def test_get_project_with_subtree_as_list_with_partial_access(self): - """``GET /projects/{project_id}?subtree_as_list`` with partial access. - - Test plan: - - - Create 'parent', 'project' and 'subproject' projects; - - Assign a user a role on 'parent' and 'subproject'; - - Check that calling subtree_as_list on 'parent' returns 'subproject'. 
- - """ - # Create the project hierarchy - parent, project, subproject = self._create_projects_hierarchy(2) - - # Assign a role for the user on parent and subproject - for proj in (parent, subproject): - self.put(self.build_role_assignment_link( - role_id=self.role_id, user_id=self.user_id, - project_id=proj['project']['id'])) - - # Make the API call - r = self.get('/projects/%(project_id)s?subtree_as_list' % - {'project_id': parent['project']['id']}) - self.assertValidProjectResponse(r, parent['project']) - - # Assert only 'subproject' is in the subtree - self.assertIn(subproject, r.result['project']['subtree']) - self.assertEqual(1, len(r.result['project']['subtree'])) - - def test_get_project_with_subtree_as_list_and_subtree_as_ids(self): - """Attempt to get a project subtree as both a list and as IDs. - - This uses ``GET /projects/{project_id}?subtree_as_list&subtree_as_ids`` - which should fail with a bad request due to the conflicting query - strings. - - """ - projects = self._create_projects_hierarchy(hierarchy_size=2) - - self.get( - '/projects/%(project_id)s?subtree_as_list&subtree_as_ids' % { - 'project_id': projects[1]['project']['id']}, - expected_status=http_client.BAD_REQUEST) - - def test_update_project(self): - """Call ``PATCH /projects/{project_id}``.""" - ref = unit.new_project_ref(domain_id=self.domain_id, - parent_id=self.project['parent_id']) - del ref['id'] - r = self.patch( - '/projects/%(project_id)s' % { - 'project_id': self.project_id}, - body={'project': ref}) - self.assertValidProjectResponse(r, ref) - - def test_update_project_unsafe(self): - """Call ``POST /projects/{project_id} with unsafe names``.""" - unsafe_name = 'i am not / safe' - - self.config_fixture.config(group='resource', - project_name_url_safe='off') - ref = unit.new_project_ref(name=unsafe_name, - domain_id=self.domain_id, - parent_id=self.project['parent_id']) - del ref['id'] - self.patch( - '/projects/%(project_id)s' % { - 'project_id': self.project_id}, - 
body={'project': ref}) - - unsafe_name = 'i am still not / safe' - for config_setting in ['new', 'strict']: - self.config_fixture.config(group='resource', - project_name_url_safe=config_setting) - ref = unit.new_project_ref(name=unsafe_name, - domain_id=self.domain_id, - parent_id=self.project['parent_id']) - del ref['id'] - self.patch( - '/projects/%(project_id)s' % { - 'project_id': self.project_id}, - body={'project': ref}, - expected_status=http_client.BAD_REQUEST) - - def test_update_project_unsafe_default(self): - """Check default for unsafe names for ``POST /projects``.""" - unsafe_name = 'i am not / safe' - - # By default, we should be able to create unsafe names - ref = unit.new_project_ref(name=unsafe_name, - domain_id=self.domain_id, - parent_id=self.project['parent_id']) - del ref['id'] - self.patch( - '/projects/%(project_id)s' % { - 'project_id': self.project_id}, - body={'project': ref}) - - def test_update_project_domain_id(self): - """Call ``PATCH /projects/{project_id}`` with domain_id.""" - project = unit.new_project_ref(domain_id=self.domain['id']) - project = self.resource_api.create_project(project['id'], project) - project['domain_id'] = CONF.identity.default_domain_id - r = self.patch('/projects/%(project_id)s' % { - 'project_id': project['id']}, - body={'project': project}, - expected_status=exception.ValidationError.code) - self.config_fixture.config(domain_id_immutable=False) - project['domain_id'] = self.domain['id'] - r = self.patch('/projects/%(project_id)s' % { - 'project_id': project['id']}, - body={'project': project}) - self.assertValidProjectResponse(r, project) - - def test_update_project_parent_id(self): - """Call ``PATCH /projects/{project_id}``.""" - projects = self._create_projects_hierarchy() - leaf_project = projects[1]['project'] - leaf_project['parent_id'] = None - self.patch( - '/projects/%(project_id)s' % { - 'project_id': leaf_project['id']}, - body={'project': leaf_project}, - expected_status=http_client.FORBIDDEN) - 
- def test_update_project_is_domain_not_allowed(self): - """Call ``PATCH /projects/{project_id}`` with is_domain. - - The is_domain flag is immutable. - """ - project = unit.new_project_ref(domain_id=self.domain['id']) - resp = self.post('/projects', - body={'project': project}) - self.assertFalse(resp.result['project']['is_domain']) - - project['parent_id'] = resp.result['project']['parent_id'] - project['is_domain'] = True - self.patch('/projects/%(project_id)s' % { - 'project_id': resp.result['project']['id']}, - body={'project': project}, - expected_status=http_client.BAD_REQUEST) - - def test_disable_leaf_project(self): - """Call ``PATCH /projects/{project_id}``.""" - projects = self._create_projects_hierarchy() - leaf_project = projects[1]['project'] - leaf_project['enabled'] = False - r = self.patch( - '/projects/%(project_id)s' % { - 'project_id': leaf_project['id']}, - body={'project': leaf_project}) - self.assertEqual( - leaf_project['enabled'], r.result['project']['enabled']) - - def test_disable_not_leaf_project(self): - """Call ``PATCH /projects/{project_id}``.""" - projects = self._create_projects_hierarchy() - root_project = projects[0]['project'] - root_project['enabled'] = False - self.patch( - '/projects/%(project_id)s' % { - 'project_id': root_project['id']}, - body={'project': root_project}, - expected_status=http_client.FORBIDDEN) - - def test_delete_project(self): - """Call ``DELETE /projects/{project_id}`` - - As well as making sure the delete succeeds, we ensure - that any credentials that reference this projects are - also deleted, while other credentials are unaffected. 
- - """ - credential = unit.new_credential_ref(user_id=self.user['id'], - project_id=self.project_id) - self.credential_api.create_credential(credential['id'], credential) - - # First check the credential for this project is present - r = self.credential_api.get_credential(credential['id']) - self.assertDictEqual(credential, r) - # Create a second credential with a different project - project2 = unit.new_project_ref(domain_id=self.domain['id']) - self.resource_api.create_project(project2['id'], project2) - credential2 = unit.new_credential_ref(user_id=self.user['id'], - project_id=project2['id']) - self.credential_api.create_credential(credential2['id'], credential2) - - # Now delete the project - self.delete( - '/projects/%(project_id)s' % { - 'project_id': self.project_id}) - - # Deleting the project should have deleted any credentials - # that reference this project - self.assertRaises(exception.CredentialNotFound, - self.credential_api.get_credential, - credential_id=credential['id']) - # But the credential for project2 is unaffected - r = self.credential_api.get_credential(credential2['id']) - self.assertDictEqual(credential2, r) - - def test_delete_not_leaf_project(self): - """Call ``DELETE /projects/{project_id}``.""" - projects = self._create_projects_hierarchy() - self.delete( - '/projects/%(project_id)s' % { - 'project_id': projects[0]['project']['id']}, - expected_status=http_client.FORBIDDEN) - - -class ResourceV3toV2MethodsTestCase(unit.TestCase): - """Test domain V3 to V2 conversion methods.""" - - def _setup_initial_projects(self): - self.project_id = uuid.uuid4().hex - self.domain_id = CONF.identity.default_domain_id - self.parent_id = uuid.uuid4().hex - # Project with only domain_id in ref - self.project1 = unit.new_project_ref(id=self.project_id, - name=self.project_id, - domain_id=self.domain_id) - # Project with both domain_id and parent_id in ref - self.project2 = unit.new_project_ref(id=self.project_id, - name=self.project_id, - 
domain_id=self.domain_id, - parent_id=self.parent_id) - # Project with no domain_id and parent_id in ref - self.project3 = unit.new_project_ref(id=self.project_id, - name=self.project_id, - domain_id=self.domain_id, - parent_id=self.parent_id) - # Expected result with no domain_id and parent_id - self.expected_project = {'id': self.project_id, - 'name': self.project_id} - - def test_v2controller_filter_domain_id(self): - # V2.0 is not domain aware, ensure domain_id is popped off the ref. - other_data = uuid.uuid4().hex - domain_id = CONF.identity.default_domain_id - ref = {'domain_id': domain_id, - 'other_data': other_data} - - ref_no_domain = {'other_data': other_data} - expected_ref = ref_no_domain.copy() - - updated_ref = controller.V2Controller.filter_domain_id(ref) - self.assertIs(ref, updated_ref) - self.assertDictEqual(expected_ref, ref) - # Make sure we don't error/muck up data if domain_id isn't present - updated_ref = controller.V2Controller.filter_domain_id(ref_no_domain) - self.assertIs(ref_no_domain, updated_ref) - self.assertDictEqual(expected_ref, ref_no_domain) - - def test_v3controller_filter_domain_id(self): - # No data should be filtered out in this case. 
- other_data = uuid.uuid4().hex - domain_id = uuid.uuid4().hex - ref = {'domain_id': domain_id, - 'other_data': other_data} - - expected_ref = ref.copy() - updated_ref = controller.V3Controller.filter_domain_id(ref) - self.assertIs(ref, updated_ref) - self.assertDictEqual(expected_ref, ref) - - def test_v2controller_filter_domain(self): - other_data = uuid.uuid4().hex - domain_id = uuid.uuid4().hex - non_default_domain_ref = {'domain': {'id': domain_id}, - 'other_data': other_data} - default_domain_ref = {'domain': {'id': 'default'}, - 'other_data': other_data} - updated_ref = controller.V2Controller.filter_domain(default_domain_ref) - self.assertNotIn('domain', updated_ref) - self.assertNotIn( - 'domain', - controller.V2Controller.filter_domain(non_default_domain_ref)) - - def test_v2controller_filter_project_parent_id(self): - # V2.0 is not project hierarchy aware, ensure parent_id is popped off. - other_data = uuid.uuid4().hex - parent_id = uuid.uuid4().hex - ref = {'parent_id': parent_id, - 'other_data': other_data} - - ref_no_parent = {'other_data': other_data} - expected_ref = ref_no_parent.copy() - - updated_ref = controller.V2Controller.filter_project_parent_id(ref) - self.assertIs(ref, updated_ref) - self.assertDictEqual(expected_ref, ref) - # Make sure we don't error/muck up data if parent_id isn't present - updated_ref = controller.V2Controller.filter_project_parent_id( - ref_no_parent) - self.assertIs(ref_no_parent, updated_ref) - self.assertDictEqual(expected_ref, ref_no_parent) - - def test_v3_to_v2_project_method(self): - self._setup_initial_projects() - - # TODO(shaleh): these optional fields are not handled well by the - # v3_to_v2 code. Manually remove them for now. 
Eventually update - # new_project_ref to not return optional values - del self.project1['enabled'] - del self.project1['description'] - del self.project2['enabled'] - del self.project2['description'] - del self.project3['enabled'] - del self.project3['description'] - - updated_project1 = controller.V2Controller.v3_to_v2_project( - self.project1) - self.assertIs(self.project1, updated_project1) - self.assertDictEqual(self.expected_project, self.project1) - updated_project2 = controller.V2Controller.v3_to_v2_project( - self.project2) - self.assertIs(self.project2, updated_project2) - self.assertDictEqual(self.expected_project, self.project2) - updated_project3 = controller.V2Controller.v3_to_v2_project( - self.project3) - self.assertIs(self.project3, updated_project3) - self.assertDictEqual(self.expected_project, self.project2) - - def test_v3_to_v2_project_method_list(self): - self._setup_initial_projects() - project_list = [self.project1, self.project2, self.project3] - - # TODO(shaleh): these optional fields are not handled well by the - # v3_to_v2 code. Manually remove them for now. Eventually update - # new_project_ref to not return optional values - for p in project_list: - del p['enabled'] - del p['description'] - updated_list = controller.V2Controller.v3_to_v2_project(project_list) - - self.assertEqual(len(updated_list), len(project_list)) - - for i, ref in enumerate(updated_list): - # Order should not change. 
- self.assertIs(ref, project_list[i]) - - self.assertDictEqual(self.expected_project, self.project1) - self.assertDictEqual(self.expected_project, self.project2) - self.assertDictEqual(self.expected_project, self.project3) diff --git a/keystone-moon/keystone/tests/unit/test_v3_trust.py b/keystone-moon/keystone/tests/unit/test_v3_trust.py deleted file mode 100644 index d3127c89..00000000 --- a/keystone-moon/keystone/tests/unit/test_v3_trust.py +++ /dev/null @@ -1,403 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import datetime -import uuid - -from six.moves import http_client - -from keystone.tests import unit -from keystone.tests.unit import test_v3 - - -class TestTrustOperations(test_v3.RestfulTestCase): - """Test module for create, read, update and delete operations on trusts. - - This module is specific to tests for trust CRUD operations. All other tests - related to trusts that are authentication or authorization specific should - live in in the keystone/tests/unit/test_v3_auth.py module. 
- - """ - - def setUp(self): - super(TestTrustOperations, self).setUp() - # create a trustee to delegate stuff to - self.trustee_user = unit.create_user(self.identity_api, - domain_id=self.domain_id) - self.trustee_user_id = self.trustee_user['id'] - - def test_create_trust_bad_request(self): - # The server returns a 403 Forbidden rather than a 400 Bad Request, see - # bug 1133435 - self.post('/OS-TRUST/trusts', body={'trust': {}}, - expected_status=http_client.FORBIDDEN) - - def test_trust_crud(self): - # create a new trust - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user_id, - project_id=self.project_id, - role_ids=[self.role_id]) - r = self.post('/OS-TRUST/trusts', body={'trust': ref}) - trust = self.assertValidTrustResponse(r, ref) - - # get the trust - r = self.get( - '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']}) - self.assertValidTrustResponse(r, ref) - - # validate roles on the trust - r = self.get( - '/OS-TRUST/trusts/%(trust_id)s/roles' % { - 'trust_id': trust['id']}) - roles = self.assertValidRoleListResponse(r, self.role) - self.assertIn(self.role['id'], [x['id'] for x in roles]) - self.head( - '/OS-TRUST/trusts/%(trust_id)s/roles/%(role_id)s' % { - 'trust_id': trust['id'], - 'role_id': self.role['id']}, - expected_status=http_client.OK) - r = self.get( - '/OS-TRUST/trusts/%(trust_id)s/roles/%(role_id)s' % { - 'trust_id': trust['id'], - 'role_id': self.role['id']}) - self.assertValidRoleResponse(r, self.role) - - # list all trusts - r = self.get('/OS-TRUST/trusts') - self.assertValidTrustListResponse(r, trust) - - # trusts are immutable - self.patch( - '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']}, - body={'trust': ref}, - expected_status=http_client.NOT_FOUND) - - # delete the trust - self.delete( - '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']}) - - # ensure the trust is not found - self.get( - '/OS-TRUST/trusts/%(trust_id)s' % {'trust_id': trust['id']}, - 
expected_status=http_client.NOT_FOUND) - - def test_list_trusts(self): - # create three trusts with the same trustor and trustee - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user_id, - project_id=self.project_id, - impersonation=False, - expires=dict(minutes=1), - role_ids=[self.role_id]) - for i in range(3): - ref['expires_at'] = datetime.datetime.utcnow().replace( - year=2032).strftime(unit.TIME_FORMAT) - r = self.post('/OS-TRUST/trusts', body={'trust': ref}) - self.assertValidTrustResponse(r, ref) - - # list all trusts - r = self.get('/OS-TRUST/trusts') - trusts = r.result['trusts'] - self.assertEqual(3, len(trusts)) - self.assertValidTrustListResponse(r) - - # list all trusts for the trustor - r = self.get('/OS-TRUST/trusts?trustor_user_id=%s' % - self.user_id) - trusts = r.result['trusts'] - self.assertEqual(3, len(trusts)) - self.assertValidTrustListResponse(r) - - # list all trusts as the trustor as the trustee. - r = self.get('/OS-TRUST/trusts?trustee_user_id=%s' % - self.user_id) - trusts = r.result['trusts'] - self.assertEqual(0, len(trusts)) - - # list all trusts as the trustee is forbidden - r = self.get('/OS-TRUST/trusts?trustee_user_id=%s' % - self.trustee_user_id, - expected_status=http_client.FORBIDDEN) - - def test_delete_trust(self): - # create a trust - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user_id, - project_id=self.project_id, - impersonation=False, - expires=dict(minutes=1), - role_ids=[self.role_id]) - r = self.post('/OS-TRUST/trusts', body={'trust': ref}) - trust = self.assertValidTrustResponse(r, ref) - - # delete the trust - self.delete('/OS-TRUST/trusts/%(trust_id)s' % { - 'trust_id': trust['id']}) - - # ensure the trust isn't found - self.get('/OS-TRUST/trusts/%(trust_id)s' % { - 'trust_id': trust['id']}, - expected_status=http_client.NOT_FOUND) - - def test_create_trust_without_trustee_returns_bad_request(self): - ref = unit.new_trust_ref( - 
trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user_id, - project_id=self.project_id, - role_ids=[self.role_id]) - - # trustee_user_id is required to create a trust - del ref['trustee_user_id'] - - self.post('/OS-TRUST/trusts', - body={'trust': ref}, - expected_status=http_client.BAD_REQUEST) - - def test_create_trust_without_impersonation_returns_bad_request(self): - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user_id, - project_id=self.project_id, - role_ids=[self.role_id]) - - # impersonation is required to create a trust - del ref['impersonation'] - - self.post('/OS-TRUST/trusts', - body={'trust': ref}, - expected_status=http_client.BAD_REQUEST) - - def test_create_trust_with_bad_remaining_uses_returns_bad_request(self): - # negative numbers, strings, non-integers, and 0 are not value values - for value in [-1, 0, "a bad value", 7.2]: - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user_id, - project_id=self.project_id, - remaining_uses=value, - role_ids=[self.role_id]) - self.post('/OS-TRUST/trusts', - body={'trust': ref}, - expected_status=http_client.BAD_REQUEST) - - def test_create_trust_with_non_existant_trustee_returns_not_found(self): - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=uuid.uuid4().hex, - project_id=self.project_id, - role_ids=[self.role_id]) - self.post('/OS-TRUST/trusts', body={'trust': ref}, - expected_status=http_client.NOT_FOUND) - - def test_create_trust_with_trustee_as_trustor_returns_forbidden(self): - ref = unit.new_trust_ref( - trustor_user_id=self.trustee_user_id, - trustee_user_id=self.user_id, - project_id=self.project_id, - role_ids=[self.role_id]) - # NOTE(lbragstad): This fails because the user making the request isn't - # the trustor defined in the request. 
- self.post('/OS-TRUST/trusts', body={'trust': ref}, - expected_status=http_client.FORBIDDEN) - - def test_create_trust_with_non_existant_project_returns_not_found(self): - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user_id, - project_id=uuid.uuid4().hex, - role_ids=[self.role_id]) - self.post('/OS-TRUST/trusts', body={'trust': ref}, - expected_status=http_client.NOT_FOUND) - - def test_create_trust_with_non_existant_role_id_returns_not_found(self): - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user_id, - project_id=self.project_id, - role_ids=[uuid.uuid4().hex]) - self.post('/OS-TRUST/trusts', body={'trust': ref}, - expected_status=http_client.NOT_FOUND) - - def test_create_trust_with_non_existant_role_name_returns_not_found(self): - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user_id, - project_id=self.project_id, - role_names=[uuid.uuid4().hex]) - self.post('/OS-TRUST/trusts', body={'trust': ref}, - expected_status=http_client.NOT_FOUND) - - def test_validate_trust_scoped_token_against_v2_returns_unauthorized(self): - # create a new trust - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.default_domain_user_id, - project_id=self.project_id, - impersonation=False, - expires=dict(minutes=1), - role_ids=[self.role_id]) - r = self.post('/OS-TRUST/trusts', body={'trust': ref}) - trust = self.assertValidTrustResponse(r) - - # get a v3 trust-scoped token as the trustee - auth_data = self.build_authentication_request( - user_id=self.default_domain_user['id'], - password=self.default_domain_user['password'], - trust_id=trust['id']) - r = self.v3_create_token(auth_data) - self.assertValidProjectScopedTokenResponse( - r, self.default_domain_user) - token = r.headers.get('X-Subject-Token') - - # now validate the v3 token with v2 API - path = '/v2.0/tokens/%s' % (token) - self.admin_request( - path=path, 
token=self.get_admin_token(), - method='GET', expected_status=http_client.UNAUTHORIZED) - - def test_v3_v2_intermix_trustor_not_in_default_domain_failed(self): - # get a project-scoped token - auth_data = self.build_authentication_request( - user_id=self.default_domain_user['id'], - password=self.default_domain_user['password'], - project_id=self.default_domain_project_id) - token = self.get_requested_token(auth_data) - - # create a new trust - ref = unit.new_trust_ref( - trustor_user_id=self.default_domain_user_id, - trustee_user_id=self.trustee_user_id, - project_id=self.default_domain_project_id, - impersonation=False, - expires=dict(minutes=1), - role_ids=[self.role_id]) - r = self.post('/OS-TRUST/trusts', body={'trust': ref}, token=token) - trust = self.assertValidTrustResponse(r) - - # get a trust-scoped token as the trustee - auth_data = self.build_authentication_request( - user_id=self.trustee_user['id'], - password=self.trustee_user['password'], - trust_id=trust['id']) - r = self.v3_create_token(auth_data) - self.assertValidProjectScopedTokenResponse( - r, self.trustee_user) - token = r.headers.get('X-Subject-Token') - - # now validate the v3 token with v2 API - path = '/v2.0/tokens/%s' % (token) - self.admin_request( - path=path, token=self.get_admin_token(), - method='GET', expected_status=http_client.UNAUTHORIZED) - - def test_v3_v2_intermix_project_not_in_default_domain_failed(self): - # create a trustee in default domain to delegate stuff to - trustee_user = unit.create_user(self.identity_api, - domain_id=test_v3.DEFAULT_DOMAIN_ID) - trustee_user_id = trustee_user['id'] - - # create a new trust - ref = unit.new_trust_ref( - trustor_user_id=self.default_domain_user_id, - trustee_user_id=trustee_user_id, - project_id=self.project_id, - impersonation=False, - expires=dict(minutes=1), - role_ids=[self.role_id]) - - # get a project-scoped token as the default_domain_user - auth_data = self.build_authentication_request( - 
user_id=self.default_domain_user['id'], - password=self.default_domain_user['password'], - project_id=self.default_domain_project_id) - token = self.get_requested_token(auth_data) - - r = self.post('/OS-TRUST/trusts', body={'trust': ref}, token=token) - trust = self.assertValidTrustResponse(r) - - # get a trust-scoped token as the trustee - auth_data = self.build_authentication_request( - user_id=trustee_user['id'], - password=trustee_user['password'], - trust_id=trust['id']) - r = self.v3_create_token(auth_data) - self.assertValidProjectScopedTokenResponse(r, trustee_user) - token = r.headers.get('X-Subject-Token') - - # ensure the token is invalid against v2 - path = '/v2.0/tokens/%s' % (token) - self.admin_request( - path=path, token=self.get_admin_token(), - method='GET', expected_status=http_client.UNAUTHORIZED) - - def test_exercise_trust_scoped_token_without_impersonation(self): - # create a new trust - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user_id, - project_id=self.project_id, - impersonation=False, - expires=dict(minutes=1), - role_ids=[self.role_id]) - resp = self.post('/OS-TRUST/trusts', body={'trust': ref}) - trust = self.assertValidTrustResponse(resp) - - # get a trust-scoped token as the trustee - auth_data = self.build_authentication_request( - user_id=self.trustee_user['id'], - password=self.trustee_user['password'], - trust_id=trust['id']) - resp = self.v3_create_token(auth_data) - resp_body = resp.json_body['token'] - - self.assertValidProjectScopedTokenResponse(resp, - self.trustee_user) - self.assertEqual(self.trustee_user['id'], resp_body['user']['id']) - self.assertEqual(self.trustee_user['name'], resp_body['user']['name']) - self.assertEqual(self.domain['id'], resp_body['user']['domain']['id']) - self.assertEqual(self.domain['name'], - resp_body['user']['domain']['name']) - self.assertEqual(self.project['id'], resp_body['project']['id']) - self.assertEqual(self.project['name'], 
resp_body['project']['name']) - - def test_exercise_trust_scoped_token_with_impersonation(self): - # create a new trust - ref = unit.new_trust_ref( - trustor_user_id=self.user_id, - trustee_user_id=self.trustee_user_id, - project_id=self.project_id, - impersonation=True, - expires=dict(minutes=1), - role_ids=[self.role_id]) - resp = self.post('/OS-TRUST/trusts', body={'trust': ref}) - trust = self.assertValidTrustResponse(resp) - - # get a trust-scoped token as the trustee - auth_data = self.build_authentication_request( - user_id=self.trustee_user['id'], - password=self.trustee_user['password'], - trust_id=trust['id']) - resp = self.v3_create_token(auth_data) - resp_body = resp.json_body['token'] - - self.assertValidProjectScopedTokenResponse(resp, self.user) - self.assertEqual(self.user['id'], resp_body['user']['id']) - self.assertEqual(self.user['name'], resp_body['user']['name']) - self.assertEqual(self.domain['id'], resp_body['user']['domain']['id']) - self.assertEqual(self.domain['name'], - resp_body['user']['domain']['name']) - self.assertEqual(self.project['id'], resp_body['project']['id']) - self.assertEqual(self.project['name'], resp_body['project']['name']) diff --git a/keystone-moon/keystone/tests/unit/test_validation.py b/keystone-moon/keystone/tests/unit/test_validation.py deleted file mode 100644 index 73cb6ef6..00000000 --- a/keystone-moon/keystone/tests/unit/test_validation.py +++ /dev/null @@ -1,2115 +0,0 @@ -# -*- coding: utf-8 -*- -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -import uuid - -import six -import testtools - -from keystone.assignment import schema as assignment_schema -from keystone.catalog import schema as catalog_schema -from keystone.common import validation -from keystone.common.validation import parameter_types -from keystone.common.validation import validators -from keystone.credential import schema as credential_schema -from keystone import exception -from keystone.federation import schema as federation_schema -from keystone.identity import schema as identity_schema -from keystone.oauth1 import schema as oauth1_schema -from keystone.policy import schema as policy_schema -from keystone.resource import schema as resource_schema -from keystone.tests import unit -from keystone.trust import schema as trust_schema - -"""Example model to validate create requests against. Assume that this is -the only backend for the create and validate schemas. This is just an -example to show how a backend can be used to construct a schema. In -Keystone, schemas are built according to the Identity API and the backends -available in Keystone. This example does not mean that all schema in -Keystone were strictly based on the SQL backends. 
- -class Entity(sql.ModelBase): - __tablename__ = 'entity' - attributes = ['id', 'name', 'domain_id', 'description'] - id = sql.Column(sql.String(64), primary_key=True) - name = sql.Column(sql.String(255), nullable=False) - description = sql.Column(sql.Text(), nullable=True) - enabled = sql.Column(sql.Boolean, default=True, nullable=False) - url = sql.Column(sql.String(225), nullable=True) - email = sql.Column(sql.String(64), nullable=True) -""" - -# Test schema to validate create requests against - -_entity_properties = { - 'name': parameter_types.name, - 'description': validation.nullable(parameter_types.description), - 'enabled': parameter_types.boolean, - 'url': validation.nullable(parameter_types.url), - 'email': validation.nullable(parameter_types.email), - 'id_string': validation.nullable(parameter_types.id_string) -} - -entity_create = { - 'type': 'object', - 'properties': _entity_properties, - 'required': ['name'], - 'additionalProperties': True, -} - -entity_create_optional_body = { - 'type': 'object', - 'properties': _entity_properties, - 'additionalProperties': True, -} - -entity_update = { - 'type': 'object', - 'properties': _entity_properties, - 'minProperties': 1, - 'additionalProperties': True, -} - -_VALID_ENABLED_FORMATS = [True, False] - -_INVALID_ENABLED_FORMATS = ['some string', 1, 0, 'True', 'False'] - -_INVALID_DESC_FORMATS = [False, 1, 2.0] - -_VALID_URLS = ['https://example.com', 'http://EXAMPLE.com/v3', - 'http://localhost', 'http://127.0.0.1:5000', - 'http://1.1.1.1', 'http://255.255.255.255', - 'http://[::1]', 'http://[::1]:35357', - 'http://[1::8]', 'http://[fe80::8%25eth0]', - 'http://[::1.2.3.4]', 'http://[2001:DB8::1.2.3.4]', - 'http://[::a:1.2.3.4]', 'http://[a::b:1.2.3.4]', - 'http://[1:2:3:4:5:6:7:8]', 'http://[1:2:3:4:5:6:1.2.3.4]', - 'http://[abcd:efAB:CDEF:1111:9999::]'] - -_INVALID_URLS = [False, 'this is not a URL', 1234, 'www.example.com', - 'localhost', 'http//something.com', - 'https//something.com', ' http://example.com'] 
- -_VALID_FILTERS = [{'interface': 'admin'}, - {'region': 'US-WEST', - 'interface': 'internal'}] - -_INVALID_FILTERS = ['some string', 1, 0, True, False] - - -def expected_validation_failure(msg): - def wrapper(f): - def wrapped(self, *args, **kwargs): - args = (self,) + args - e = self.assertRaises(exception.ValidationError, f, - *args, **kwargs) - self.assertIn(msg, six.text_type(e)) - return wrapped - return wrapper - - -class ValidatedDecoratorTests(unit.BaseTestCase): - - entity_schema = { - 'type': 'object', - 'properties': { - 'name': parameter_types.name, - }, - 'required': ['name'], - } - - valid_entity = { - 'name': uuid.uuid4().hex, - } - - invalid_entity = { - 'name': 1.0, # NOTE(dstanek): this is the incorrect type for name - } - - @validation.validated(entity_create, 'entity') - def create_entity(self, entity): - """Used to test cases where validated param is the only param.""" - - @validation.validated(entity_create_optional_body, 'entity') - def create_entity_optional_body(self, entity): - """Used to test cases where there is an optional body.""" - - @validation.validated(entity_update, 'entity') - def update_entity(self, entity_id, entity): - """Used to test cases where validated param is not the only param.""" - - def test_calling_create_with_valid_entity_kwarg_succeeds(self): - self.create_entity(entity=self.valid_entity) - - def test_calling_create_with_empty_entity_kwarg_succeeds(self): - """Test the case when client passing in an empty kwarg reference.""" - self.create_entity_optional_body(entity={}) - - @expected_validation_failure('Expecting to find entity in request body') - def test_calling_create_with_kwarg_as_None_fails(self): - self.create_entity(entity=None) - - def test_calling_create_with_valid_entity_arg_succeeds(self): - self.create_entity(self.valid_entity) - - def test_calling_create_with_empty_entity_arg_succeeds(self): - """Test the case when client passing in an empty entity reference.""" - self.create_entity_optional_body({}) 
- - @expected_validation_failure("Invalid input for field 'name'") - def test_calling_create_with_invalid_entity_fails(self): - self.create_entity(self.invalid_entity) - - @expected_validation_failure('Expecting to find entity in request body') - def test_calling_create_with_entity_arg_as_None_fails(self): - self.create_entity(None) - - @expected_validation_failure('Expecting to find entity in request body') - def test_calling_create_without_an_entity_fails(self): - self.create_entity() - - def test_using_the_wrong_name_with_the_decorator_fails(self): - with testtools.ExpectedException(TypeError): - @validation.validated(self.entity_schema, 'entity_') - def function(entity): - pass - - # NOTE(dstanek): below are the test cases for making sure the validation - # works when the validated param is not the only param. Since all of the - # actual validation cases are tested above these test are for a sanity - # check. - - def test_calling_update_with_valid_entity_succeeds(self): - self.update_entity(uuid.uuid4().hex, self.valid_entity) - - @expected_validation_failure("Invalid input for field 'name'") - def test_calling_update_with_invalid_entity_fails(self): - self.update_entity(uuid.uuid4().hex, self.invalid_entity) - - def test_calling_update_with_empty_entity_kwarg_succeeds(self): - """Test the case when client passing in an empty entity reference.""" - global entity_update - original_entity_update = entity_update.copy() - # pop 'minProperties' from schema so that empty body is allowed. 
- entity_update.pop('minProperties') - self.update_entity(uuid.uuid4().hex, entity={}) - entity_update = original_entity_update - - -class EntityValidationTestCase(unit.BaseTestCase): - - def setUp(self): - super(EntityValidationTestCase, self).setUp() - self.resource_name = 'some resource name' - self.description = 'Some valid description' - self.valid_enabled = True - self.valid_url = 'http://example.com' - self.valid_email = 'joe@example.com' - self.create_schema_validator = validators.SchemaValidator( - entity_create) - self.update_schema_validator = validators.SchemaValidator( - entity_update) - - def test_create_entity_with_all_valid_parameters_validates(self): - """Validate all parameter values against test schema.""" - request_to_validate = {'name': self.resource_name, - 'description': self.description, - 'enabled': self.valid_enabled, - 'url': self.valid_url, - 'email': self.valid_email} - self.create_schema_validator.validate(request_to_validate) - - def test_create_entity_with_only_required_valid_parameters_validates(self): - """Validate correct for only parameters values against test schema.""" - request_to_validate = {'name': self.resource_name} - self.create_schema_validator.validate(request_to_validate) - - def test_create_entity_with_name_too_long_raises_exception(self): - """Validate long names. - - Validate that an exception is raised when validating a string of 255+ - characters passed in as a name. - """ - invalid_name = 'a' * 256 - request_to_validate = {'name': invalid_name} - self.assertRaises(exception.SchemaValidationError, - self.create_schema_validator.validate, - request_to_validate) - - def test_create_entity_with_name_too_short_raises_exception(self): - """Validate short names. - - Test that an exception is raised when passing a string of length - zero as a name parameter. 
- """ - request_to_validate = {'name': ''} - self.assertRaises(exception.SchemaValidationError, - self.create_schema_validator.validate, - request_to_validate) - - def test_create_entity_with_unicode_name_validates(self): - """Test that we successfully validate a unicode string.""" - request_to_validate = {'name': u'αβγδ'} - self.create_schema_validator.validate(request_to_validate) - - def test_create_entity_with_invalid_enabled_format_raises_exception(self): - """Validate invalid enabled formats. - - Test that an exception is raised when passing invalid boolean-like - values as `enabled`. - """ - for format in _INVALID_ENABLED_FORMATS: - request_to_validate = {'name': self.resource_name, - 'enabled': format} - self.assertRaises(exception.SchemaValidationError, - self.create_schema_validator.validate, - request_to_validate) - - def test_create_entity_with_valid_enabled_formats_validates(self): - """Validate valid enabled formats. - - Test that we have successful validation on boolean values for - `enabled`. 
- """ - for valid_enabled in _VALID_ENABLED_FORMATS: - request_to_validate = {'name': self.resource_name, - 'enabled': valid_enabled} - # Make sure validation doesn't raise a validation exception - self.create_schema_validator.validate(request_to_validate) - - def test_create_entity_with_valid_urls_validates(self): - """Test that proper urls are successfully validated.""" - for valid_url in _VALID_URLS: - request_to_validate = {'name': self.resource_name, - 'url': valid_url} - self.create_schema_validator.validate(request_to_validate) - - def test_create_entity_with_invalid_urls_fails(self): - """Test that an exception is raised when validating improper urls.""" - for invalid_url in _INVALID_URLS: - request_to_validate = {'name': self.resource_name, - 'url': invalid_url} - self.assertRaises(exception.SchemaValidationError, - self.create_schema_validator.validate, - request_to_validate) - - def test_create_entity_with_valid_email_validates(self): - """Validate email address - - Test that we successfully validate properly formatted email - addresses. - """ - request_to_validate = {'name': self.resource_name, - 'email': self.valid_email} - self.create_schema_validator.validate(request_to_validate) - - def test_create_entity_with_invalid_email_fails(self): - """Validate invalid email address. - - Test that an exception is raised when validating improperly - formatted email addresses. 
- """ - request_to_validate = {'name': self.resource_name, - 'email': 'some invalid email value'} - self.assertRaises(exception.SchemaValidationError, - self.create_schema_validator.validate, - request_to_validate) - - def test_create_entity_with_valid_id_strings(self): - """Validate acceptable id strings.""" - valid_id_strings = [str(uuid.uuid4()), uuid.uuid4().hex, 'default'] - for valid_id in valid_id_strings: - request_to_validate = {'name': self.resource_name, - 'id_string': valid_id} - self.create_schema_validator.validate(request_to_validate) - - def test_create_entity_with_invalid_id_strings(self): - """Exception raised when using invalid id strings.""" - long_string = 'A' * 65 - invalid_id_strings = ['', long_string] - for invalid_id in invalid_id_strings: - request_to_validate = {'name': self.resource_name, - 'id_string': invalid_id} - self.assertRaises(exception.SchemaValidationError, - self.create_schema_validator.validate, - request_to_validate) - - def test_create_entity_with_null_id_string(self): - """Validate that None is an acceptable optional string type.""" - request_to_validate = {'name': self.resource_name, - 'id_string': None} - self.create_schema_validator.validate(request_to_validate) - - def test_create_entity_with_null_string_succeeds(self): - """Exception raised when passing None on required id strings.""" - request_to_validate = {'name': self.resource_name, - 'id_string': None} - self.create_schema_validator.validate(request_to_validate) - - def test_update_entity_with_no_parameters_fails(self): - """At least one parameter needs to be present for an update.""" - request_to_validate = {} - self.assertRaises(exception.SchemaValidationError, - self.update_schema_validator.validate, - request_to_validate) - - def test_update_entity_with_all_parameters_valid_validates(self): - """Simulate updating an entity by ID.""" - request_to_validate = {'name': self.resource_name, - 'description': self.description, - 'enabled': self.valid_enabled, - 
'url': self.valid_url, - 'email': self.valid_email} - self.update_schema_validator.validate(request_to_validate) - - def test_update_entity_with_a_valid_required_parameter_validates(self): - """Succeed if a valid required parameter is provided.""" - request_to_validate = {'name': self.resource_name} - self.update_schema_validator.validate(request_to_validate) - - def test_update_entity_with_invalid_required_parameter_fails(self): - """Fail if a provided required parameter is invalid.""" - request_to_validate = {'name': 'a' * 256} - self.assertRaises(exception.SchemaValidationError, - self.update_schema_validator.validate, - request_to_validate) - - def test_update_entity_with_a_null_optional_parameter_validates(self): - """Optional parameters can be null to removed the value.""" - request_to_validate = {'email': None} - self.update_schema_validator.validate(request_to_validate) - - def test_update_entity_with_a_required_null_parameter_fails(self): - """The `name` parameter can't be null.""" - request_to_validate = {'name': None} - self.assertRaises(exception.SchemaValidationError, - self.update_schema_validator.validate, - request_to_validate) - - def test_update_entity_with_a_valid_optional_parameter_validates(self): - """Succeeds with only a single valid optional parameter.""" - request_to_validate = {'email': self.valid_email} - self.update_schema_validator.validate(request_to_validate) - - def test_update_entity_with_invalid_optional_parameter_fails(self): - """Fails when an optional parameter is invalid.""" - request_to_validate = {'email': 0} - self.assertRaises(exception.SchemaValidationError, - self.update_schema_validator.validate, - request_to_validate) - - -class ProjectValidationTestCase(unit.BaseTestCase): - """Test for V3 Project API validation.""" - - def setUp(self): - super(ProjectValidationTestCase, self).setUp() - - self.project_name = 'My Project' - - create = resource_schema.project_create - update = resource_schema.project_update - 
self.create_project_validator = validators.SchemaValidator(create) - self.update_project_validator = validators.SchemaValidator(update) - - def test_validate_project_request(self): - """Test that we validate a project with `name` in request.""" - request_to_validate = {'name': self.project_name} - self.create_project_validator.validate(request_to_validate) - - def test_validate_project_request_without_name_fails(self): - """Validate project request fails without name.""" - request_to_validate = {'enabled': True} - self.assertRaises(exception.SchemaValidationError, - self.create_project_validator.validate, - request_to_validate) - - def test_validate_project_request_with_enabled(self): - """Validate `enabled` as boolean-like values for projects.""" - for valid_enabled in _VALID_ENABLED_FORMATS: - request_to_validate = {'name': self.project_name, - 'enabled': valid_enabled} - self.create_project_validator.validate(request_to_validate) - - def test_validate_project_request_with_invalid_enabled_fails(self): - """Exception is raised when `enabled` isn't a boolean-like value.""" - for invalid_enabled in _INVALID_ENABLED_FORMATS: - request_to_validate = {'name': self.project_name, - 'enabled': invalid_enabled} - self.assertRaises(exception.SchemaValidationError, - self.create_project_validator.validate, - request_to_validate) - - def test_validate_project_request_with_valid_description(self): - """Test that we validate `description` in create project requests.""" - request_to_validate = {'name': self.project_name, - 'description': 'My Project'} - self.create_project_validator.validate(request_to_validate) - - def test_validate_project_request_with_invalid_description_fails(self): - """Exception is raised when `description` as a non-string value.""" - request_to_validate = {'name': self.project_name, - 'description': False} - self.assertRaises(exception.SchemaValidationError, - self.create_project_validator.validate, - request_to_validate) - - def 
test_validate_project_request_with_name_too_long(self): - """Exception is raised when `name` is too long.""" - long_project_name = 'a' * 65 - request_to_validate = {'name': long_project_name} - self.assertRaises(exception.SchemaValidationError, - self.create_project_validator.validate, - request_to_validate) - - def test_validate_project_request_with_name_too_short(self): - """Exception raised when `name` is too short.""" - request_to_validate = {'name': ''} - self.assertRaises(exception.SchemaValidationError, - self.create_project_validator.validate, - request_to_validate) - - def test_validate_project_request_with_valid_parent_id(self): - """Test that we validate `parent_id` in create project requests.""" - # parent_id is nullable - request_to_validate = {'name': self.project_name, - 'parent_id': None} - self.create_project_validator.validate(request_to_validate) - request_to_validate = {'name': self.project_name, - 'parent_id': uuid.uuid4().hex} - self.create_project_validator.validate(request_to_validate) - - def test_validate_project_request_with_invalid_parent_id_fails(self): - """Exception is raised when `parent_id` as a non-id value.""" - request_to_validate = {'name': self.project_name, - 'parent_id': False} - self.assertRaises(exception.SchemaValidationError, - self.create_project_validator.validate, - request_to_validate) - request_to_validate = {'name': self.project_name, - 'parent_id': 'fake project'} - self.assertRaises(exception.SchemaValidationError, - self.create_project_validator.validate, - request_to_validate) - - def test_validate_project_update_request(self): - """Test that we validate a project update request.""" - request_to_validate = {'domain_id': uuid.uuid4().hex} - self.update_project_validator.validate(request_to_validate) - - def test_validate_project_update_request_with_no_parameters_fails(self): - """Exception is raised when updating project without parameters.""" - request_to_validate = {} - 
self.assertRaises(exception.SchemaValidationError, - self.update_project_validator.validate, - request_to_validate) - - def test_validate_project_update_request_with_name_too_long_fails(self): - """Exception raised when updating a project with `name` too long.""" - long_project_name = 'a' * 65 - request_to_validate = {'name': long_project_name} - self.assertRaises(exception.SchemaValidationError, - self.update_project_validator.validate, - request_to_validate) - - def test_validate_project_update_request_with_name_too_short_fails(self): - """Exception raised when updating a project with `name` too short.""" - request_to_validate = {'name': ''} - self.assertRaises(exception.SchemaValidationError, - self.update_project_validator.validate, - request_to_validate) - - def test_validate_project_create_request_with_valid_domain_id(self): - """Test that we validate `domain_id` in create project requests.""" - # domain_id is nullable - for domain_id in [None, uuid.uuid4().hex]: - request_to_validate = {'name': self.project_name, - 'domain_id': domain_id} - self.create_project_validator.validate(request_to_validate) - - def test_validate_project_request_with_invalid_domain_id_fails(self): - """Exception is raised when `domain_id` is a non-id value.""" - for domain_id in [False, 'fake_project']: - request_to_validate = {'name': self.project_name, - 'domain_id': domain_id} - self.assertRaises(exception.SchemaValidationError, - self.create_project_validator.validate, - request_to_validate) - - -class DomainValidationTestCase(unit.BaseTestCase): - """Test for V3 Domain API validation.""" - - def setUp(self): - super(DomainValidationTestCase, self).setUp() - - self.domain_name = 'My Domain' - - create = resource_schema.domain_create - update = resource_schema.domain_update - self.create_domain_validator = validators.SchemaValidator(create) - self.update_domain_validator = validators.SchemaValidator(update) - - def test_validate_domain_request(self): - """Make sure we successfully 
validate a create domain request.""" - request_to_validate = {'name': self.domain_name} - self.create_domain_validator.validate(request_to_validate) - - def test_validate_domain_request_without_name_fails(self): - """Make sure we raise an exception when `name` isn't included.""" - request_to_validate = {'enabled': True} - self.assertRaises(exception.SchemaValidationError, - self.create_domain_validator.validate, - request_to_validate) - - def test_validate_domain_request_with_enabled(self): - """Validate `enabled` as boolean-like values for domains.""" - for valid_enabled in _VALID_ENABLED_FORMATS: - request_to_validate = {'name': self.domain_name, - 'enabled': valid_enabled} - self.create_domain_validator.validate(request_to_validate) - - def test_validate_domain_request_with_invalid_enabled_fails(self): - """Exception is raised when `enabled` isn't a boolean-like value.""" - for invalid_enabled in _INVALID_ENABLED_FORMATS: - request_to_validate = {'name': self.domain_name, - 'enabled': invalid_enabled} - self.assertRaises(exception.SchemaValidationError, - self.create_domain_validator.validate, - request_to_validate) - - def test_validate_domain_request_with_valid_description(self): - """Test that we validate `description` in create domain requests.""" - request_to_validate = {'name': self.domain_name, - 'description': 'My Domain'} - self.create_domain_validator.validate(request_to_validate) - - def test_validate_domain_request_with_invalid_description_fails(self): - """Exception is raised when `description` is a non-string value.""" - request_to_validate = {'name': self.domain_name, - 'description': False} - self.assertRaises(exception.SchemaValidationError, - self.create_domain_validator.validate, - request_to_validate) - - def test_validate_domain_request_with_name_too_long(self): - """Exception is raised when `name` is too long.""" - long_domain_name = 'a' * 65 - request_to_validate = {'name': long_domain_name} - 
self.assertRaises(exception.SchemaValidationError, - self.create_domain_validator.validate, - request_to_validate) - - def test_validate_domain_request_with_name_too_short(self): - """Exception raised when `name` is too short.""" - request_to_validate = {'name': ''} - self.assertRaises(exception.SchemaValidationError, - self.create_domain_validator.validate, - request_to_validate) - - def test_validate_domain_update_request(self): - """Test that we validate a domain update request.""" - request_to_validate = {'domain_id': uuid.uuid4().hex} - self.update_domain_validator.validate(request_to_validate) - - def test_validate_domain_update_request_with_no_parameters_fails(self): - """Exception is raised when updating a domain without parameters.""" - request_to_validate = {} - self.assertRaises(exception.SchemaValidationError, - self.update_domain_validator.validate, - request_to_validate) - - def test_validate_domain_update_request_with_name_too_long_fails(self): - """Exception raised when updating a domain with `name` too long.""" - long_domain_name = 'a' * 65 - request_to_validate = {'name': long_domain_name} - self.assertRaises(exception.SchemaValidationError, - self.update_domain_validator.validate, - request_to_validate) - - def test_validate_domain_update_request_with_name_too_short_fails(self): - """Exception raised when updating a domain with `name` too short.""" - request_to_validate = {'name': ''} - self.assertRaises(exception.SchemaValidationError, - self.update_domain_validator.validate, - request_to_validate) - - -class RoleValidationTestCase(unit.BaseTestCase): - """Test for V3 Role API validation.""" - - def setUp(self): - super(RoleValidationTestCase, self).setUp() - - self.role_name = 'My Role' - - create = assignment_schema.role_create - update = assignment_schema.role_update - self.create_role_validator = validators.SchemaValidator(create) - self.update_role_validator = validators.SchemaValidator(update) - - def test_validate_role_request(self): - 
"""Test we can successfully validate a create role request.""" - request_to_validate = {'name': self.role_name} - self.create_role_validator.validate(request_to_validate) - - def test_validate_role_create_without_name_raises_exception(self): - """Test that we raise an exception when `name` isn't included.""" - request_to_validate = {'enabled': True} - self.assertRaises(exception.SchemaValidationError, - self.create_role_validator.validate, - request_to_validate) - - def test_validate_role_create_when_name_is_not_string_fails(self): - """Exception is raised on role create with a non-string `name`.""" - request_to_validate = {'name': True} - self.assertRaises(exception.SchemaValidationError, - self.create_role_validator.validate, - request_to_validate) - request_to_validate = {'name': 24} - self.assertRaises(exception.SchemaValidationError, - self.create_role_validator.validate, - request_to_validate) - - def test_validate_role_update_request(self): - """Test that we validate a role update request.""" - request_to_validate = {'name': 'My New Role'} - self.update_role_validator.validate(request_to_validate) - - def test_validate_role_update_fails_with_invalid_name_fails(self): - """Exception when validating an update request with invalid `name`.""" - request_to_validate = {'name': True} - self.assertRaises(exception.SchemaValidationError, - self.update_role_validator.validate, - request_to_validate) - - request_to_validate = {'name': 24} - self.assertRaises(exception.SchemaValidationError, - self.update_role_validator.validate, - request_to_validate) - - -class PolicyValidationTestCase(unit.BaseTestCase): - """Test for V3 Policy API validation.""" - - def setUp(self): - super(PolicyValidationTestCase, self).setUp() - - create = policy_schema.policy_create - update = policy_schema.policy_update - self.create_policy_validator = validators.SchemaValidator(create) - self.update_policy_validator = validators.SchemaValidator(update) - - def 
test_validate_policy_succeeds(self): - """Test that we validate a create policy request.""" - request_to_validate = {'blob': 'some blob information', - 'type': 'application/json'} - self.create_policy_validator.validate(request_to_validate) - - def test_validate_policy_without_blob_fails(self): - """Exception raised without `blob` in request.""" - request_to_validate = {'type': 'application/json'} - self.assertRaises(exception.SchemaValidationError, - self.create_policy_validator.validate, - request_to_validate) - - def test_validate_policy_without_type_fails(self): - """Exception raised without `type` in request.""" - request_to_validate = {'blob': 'some blob information'} - self.assertRaises(exception.SchemaValidationError, - self.create_policy_validator.validate, - request_to_validate) - - def test_validate_policy_create_with_extra_parameters_succeeds(self): - """Validate policy create with extra parameters.""" - request_to_validate = {'blob': 'some blob information', - 'type': 'application/json', - 'extra': 'some extra stuff'} - self.create_policy_validator.validate(request_to_validate) - - def test_validate_policy_create_with_invalid_type_fails(self): - """Exception raised when `blob` and `type` are boolean.""" - for prop in ['blob', 'type']: - request_to_validate = {prop: False} - self.assertRaises(exception.SchemaValidationError, - self.create_policy_validator.validate, - request_to_validate) - - def test_validate_policy_update_without_parameters_fails(self): - """Exception raised when updating policy without parameters.""" - request_to_validate = {} - self.assertRaises(exception.SchemaValidationError, - self.update_policy_validator.validate, - request_to_validate) - - def test_validate_policy_update_with_extra_parameters_succeeds(self): - """Validate policy update request with extra parameters.""" - request_to_validate = {'blob': 'some blob information', - 'type': 'application/json', - 'extra': 'some extra stuff'} - 
self.update_policy_validator.validate(request_to_validate) - - def test_validate_policy_update_succeeds(self): - """Test that we validate a policy update request.""" - request_to_validate = {'blob': 'some blob information', - 'type': 'application/json'} - self.update_policy_validator.validate(request_to_validate) - - def test_validate_policy_update_with_invalid_type_fails(self): - """Exception raised when invalid `type` on policy update.""" - for prop in ['blob', 'type']: - request_to_validate = {prop: False} - self.assertRaises(exception.SchemaValidationError, - self.update_policy_validator.validate, - request_to_validate) - - -class CredentialValidationTestCase(unit.BaseTestCase): - """Test for V3 Credential API validation.""" - - def setUp(self): - super(CredentialValidationTestCase, self).setUp() - - create = credential_schema.credential_create - update = credential_schema.credential_update - self.create_credential_validator = validators.SchemaValidator(create) - self.update_credential_validator = validators.SchemaValidator(update) - - def test_validate_credential_succeeds(self): - """Test that we validate a credential request.""" - request_to_validate = {'blob': 'some string', - 'project_id': uuid.uuid4().hex, - 'type': 'ec2', - 'user_id': uuid.uuid4().hex} - self.create_credential_validator.validate(request_to_validate) - - def test_validate_credential_without_blob_fails(self): - """Exception raised without `blob` in create request.""" - request_to_validate = {'type': 'ec2', - 'user_id': uuid.uuid4().hex} - self.assertRaises(exception.SchemaValidationError, - self.create_credential_validator.validate, - request_to_validate) - - def test_validate_credential_without_user_id_fails(self): - """Exception raised without `user_id` in create request.""" - request_to_validate = {'blob': 'some credential blob', - 'type': 'ec2'} - self.assertRaises(exception.SchemaValidationError, - self.create_credential_validator.validate, - request_to_validate) - - def 
test_validate_credential_without_type_fails(self): - """Exception raised without `type` in create request.""" - request_to_validate = {'blob': 'some credential blob', - 'user_id': uuid.uuid4().hex} - self.assertRaises(exception.SchemaValidationError, - self.create_credential_validator.validate, - request_to_validate) - - def test_validate_credential_ec2_without_project_id_fails(self): - """Validate `project_id` is required for ec2. - - Test that a SchemaValidationError is raised when type is ec2 - and no `project_id` is provided in create request. - """ - request_to_validate = {'blob': 'some credential blob', - 'type': 'ec2', - 'user_id': uuid.uuid4().hex} - self.assertRaises(exception.SchemaValidationError, - self.create_credential_validator.validate, - request_to_validate) - - def test_validate_credential_with_project_id_succeeds(self): - """Test that credential request works for all types.""" - cred_types = ['ec2', 'cert', uuid.uuid4().hex] - - for c_type in cred_types: - request_to_validate = {'blob': 'some blob', - 'project_id': uuid.uuid4().hex, - 'type': c_type, - 'user_id': uuid.uuid4().hex} - # Make sure an exception isn't raised - self.create_credential_validator.validate(request_to_validate) - - def test_validate_credential_non_ec2_without_project_id_succeeds(self): - """Validate `project_id` is not required for non-ec2. - - Test that create request without `project_id` succeeds for any - non-ec2 credential. 
- """ - cred_types = ['cert', uuid.uuid4().hex] - - for c_type in cred_types: - request_to_validate = {'blob': 'some blob', - 'type': c_type, - 'user_id': uuid.uuid4().hex} - # Make sure an exception isn't raised - self.create_credential_validator.validate(request_to_validate) - - def test_validate_credential_with_extra_parameters_succeeds(self): - """Validate create request with extra parameters.""" - request_to_validate = {'blob': 'some string', - 'extra': False, - 'project_id': uuid.uuid4().hex, - 'type': 'ec2', - 'user_id': uuid.uuid4().hex} - self.create_credential_validator.validate(request_to_validate) - - def test_validate_credential_update_succeeds(self): - """Test that a credential request is properly validated.""" - request_to_validate = {'blob': 'some string', - 'project_id': uuid.uuid4().hex, - 'type': 'ec2', - 'user_id': uuid.uuid4().hex} - self.update_credential_validator.validate(request_to_validate) - - def test_validate_credential_update_without_parameters_fails(self): - """Exception is raised on update without parameters.""" - request_to_validate = {} - self.assertRaises(exception.SchemaValidationError, - self.update_credential_validator.validate, - request_to_validate) - - def test_validate_credential_update_with_extra_parameters_succeeds(self): - """Validate credential update with extra parameters.""" - request_to_validate = {'blob': 'some string', - 'extra': False, - 'project_id': uuid.uuid4().hex, - 'type': 'ec2', - 'user_id': uuid.uuid4().hex} - self.update_credential_validator.validate(request_to_validate) - - -class RegionValidationTestCase(unit.BaseTestCase): - """Test for V3 Region API validation.""" - - def setUp(self): - super(RegionValidationTestCase, self).setUp() - - self.region_name = 'My Region' - - create = catalog_schema.region_create - update = catalog_schema.region_update - self.create_region_validator = validators.SchemaValidator(create) - self.update_region_validator = validators.SchemaValidator(update) - - def 
test_validate_region_request(self): - """Test that we validate a basic region request.""" - # Create_region doesn't take any parameters in the request so let's - # make sure we cover that case. - request_to_validate = {} - self.create_region_validator.validate(request_to_validate) - - def test_validate_region_create_request_with_parameters(self): - """Test that we validate a region request with parameters.""" - request_to_validate = {'id': 'us-east', - 'description': 'US East Region', - 'parent_region_id': 'US Region'} - self.create_region_validator.validate(request_to_validate) - - def test_validate_region_create_with_uuid(self): - """Test that we validate a region request with a UUID as the id.""" - request_to_validate = {'id': uuid.uuid4().hex, - 'description': 'US East Region', - 'parent_region_id': uuid.uuid4().hex} - self.create_region_validator.validate(request_to_validate) - - def test_validate_region_create_fails_with_invalid_region_id(self): - """Exception raised when passing invalid `id` in request.""" - request_to_validate = {'id': 1234, - 'description': 'US East Region'} - self.assertRaises(exception.SchemaValidationError, - self.create_region_validator.validate, - request_to_validate) - - def test_validate_region_create_succeeds_with_extra_parameters(self): - """Validate create region request with extra values.""" - request_to_validate = {'other_attr': uuid.uuid4().hex} - self.create_region_validator.validate(request_to_validate) - - def test_validate_region_create_succeeds_with_no_parameters(self): - """Validate create region request with no parameters.""" - request_to_validate = {} - self.create_region_validator.validate(request_to_validate) - - def test_validate_region_update_succeeds(self): - """Test that we validate a region update request.""" - request_to_validate = {'id': 'us-west', - 'description': 'US West Region', - 'parent_region_id': 'us-region'} - self.update_region_validator.validate(request_to_validate) - - def 
test_validate_region_update_succeeds_with_extra_parameters(self): - """Validate extra attributes in the region update request.""" - request_to_validate = {'other_attr': uuid.uuid4().hex} - self.update_region_validator.validate(request_to_validate) - - def test_validate_region_update_fails_with_no_parameters(self): - """Exception raised when passing no parameters in a region update.""" - # An update request should consist of at least one value to update - request_to_validate = {} - self.assertRaises(exception.SchemaValidationError, - self.update_region_validator.validate, - request_to_validate) - - -class ServiceValidationTestCase(unit.BaseTestCase): - """Test for V3 Service API validation.""" - - def setUp(self): - super(ServiceValidationTestCase, self).setUp() - - create = catalog_schema.service_create - update = catalog_schema.service_update - self.create_service_validator = validators.SchemaValidator(create) - self.update_service_validator = validators.SchemaValidator(update) - - def test_validate_service_create_succeeds(self): - """Test that we validate a service create request.""" - request_to_validate = {'name': 'Nova', - 'description': 'OpenStack Compute Service', - 'enabled': True, - 'type': 'compute'} - self.create_service_validator.validate(request_to_validate) - - def test_validate_service_create_succeeds_with_required_parameters(self): - """Validate a service create request with the required parameters.""" - # The only parameter type required for service creation is 'type' - request_to_validate = {'type': 'compute'} - self.create_service_validator.validate(request_to_validate) - - def test_validate_service_create_fails_without_type(self): - """Exception raised when trying to create a service without `type`.""" - request_to_validate = {'name': 'Nova'} - self.assertRaises(exception.SchemaValidationError, - self.create_service_validator.validate, - request_to_validate) - - def test_validate_service_create_succeeds_with_extra_parameters(self): - """Test 
that extra parameters pass validation on create service.""" - request_to_validate = {'other_attr': uuid.uuid4().hex, - 'type': uuid.uuid4().hex} - self.create_service_validator.validate(request_to_validate) - - def test_validate_service_create_succeeds_with_valid_enabled(self): - """Validate boolean values as enabled values on service create.""" - for valid_enabled in _VALID_ENABLED_FORMATS: - request_to_validate = {'enabled': valid_enabled, - 'type': uuid.uuid4().hex} - self.create_service_validator.validate(request_to_validate) - - def test_validate_service_create_fails_with_invalid_enabled(self): - """Exception raised when boolean-like parameters as `enabled` - - On service create, make sure an exception is raised if `enabled` is - not a boolean value. - """ - for invalid_enabled in _INVALID_ENABLED_FORMATS: - request_to_validate = {'enabled': invalid_enabled, - 'type': uuid.uuid4().hex} - self.assertRaises(exception.SchemaValidationError, - self.create_service_validator.validate, - request_to_validate) - - def test_validate_service_create_fails_when_name_too_long(self): - """Exception raised when `name` is greater than 255 characters.""" - long_name = 'a' * 256 - request_to_validate = {'type': 'compute', - 'name': long_name} - self.assertRaises(exception.SchemaValidationError, - self.create_service_validator.validate, - request_to_validate) - - def test_validate_service_create_fails_when_name_too_short(self): - """Exception is raised when `name` is too short.""" - request_to_validate = {'type': 'compute', - 'name': ''} - self.assertRaises(exception.SchemaValidationError, - self.create_service_validator.validate, - request_to_validate) - - def test_validate_service_create_fails_when_type_too_long(self): - """Exception is raised when `type` is too long.""" - long_type_name = 'a' * 256 - request_to_validate = {'type': long_type_name} - self.assertRaises(exception.SchemaValidationError, - self.create_service_validator.validate, - request_to_validate) - - def 
test_validate_service_create_fails_when_type_too_short(self): - """Exception is raised when `type` is too short.""" - request_to_validate = {'type': ''} - self.assertRaises(exception.SchemaValidationError, - self.create_service_validator.validate, - request_to_validate) - - def test_validate_service_update_request_succeeds(self): - """Test that we validate a service update request.""" - request_to_validate = {'name': 'Cinder', - 'type': 'volume', - 'description': 'OpenStack Block Storage', - 'enabled': False} - self.update_service_validator.validate(request_to_validate) - - def test_validate_service_update_fails_with_no_parameters(self): - """Exception raised when updating a service without values.""" - request_to_validate = {} - self.assertRaises(exception.SchemaValidationError, - self.update_service_validator.validate, - request_to_validate) - - def test_validate_service_update_succeeds_with_extra_parameters(self): - """Validate updating a service with extra parameters.""" - request_to_validate = {'other_attr': uuid.uuid4().hex} - self.update_service_validator.validate(request_to_validate) - - def test_validate_service_update_succeeds_with_valid_enabled(self): - """Validate boolean formats as `enabled` on service update.""" - for valid_enabled in _VALID_ENABLED_FORMATS: - request_to_validate = {'enabled': valid_enabled} - self.update_service_validator.validate(request_to_validate) - - def test_validate_service_update_fails_with_invalid_enabled(self): - """Exception raised when boolean-like values as `enabled`.""" - for invalid_enabled in _INVALID_ENABLED_FORMATS: - request_to_validate = {'enabled': invalid_enabled} - self.assertRaises(exception.SchemaValidationError, - self.update_service_validator.validate, - request_to_validate) - - def test_validate_service_update_fails_with_name_too_long(self): - """Exception is raised when `name` is too long on update.""" - long_name = 'a' * 256 - request_to_validate = {'name': long_name} - 
self.assertRaises(exception.SchemaValidationError, - self.update_service_validator.validate, - request_to_validate) - - def test_validate_service_update_fails_with_name_too_short(self): - """Exception is raised when `name` is too short on update.""" - request_to_validate = {'name': ''} - self.assertRaises(exception.SchemaValidationError, - self.update_service_validator.validate, - request_to_validate) - - def test_validate_service_update_fails_with_type_too_long(self): - """Exception is raised when `type` is too long on update.""" - long_type_name = 'a' * 256 - request_to_validate = {'type': long_type_name} - self.assertRaises(exception.SchemaValidationError, - self.update_service_validator.validate, - request_to_validate) - - def test_validate_service_update_fails_with_type_too_short(self): - """Exception is raised when `type` is too short on update.""" - request_to_validate = {'type': ''} - self.assertRaises(exception.SchemaValidationError, - self.update_service_validator.validate, - request_to_validate) - - -class EndpointValidationTestCase(unit.BaseTestCase): - """Test for V3 Endpoint API validation.""" - - def setUp(self): - super(EndpointValidationTestCase, self).setUp() - - create = catalog_schema.endpoint_create - update = catalog_schema.endpoint_update - self.create_endpoint_validator = validators.SchemaValidator(create) - self.update_endpoint_validator = validators.SchemaValidator(update) - - def test_validate_endpoint_request_succeeds(self): - """Test that we validate an endpoint request.""" - request_to_validate = {'enabled': True, - 'interface': 'admin', - 'region_id': uuid.uuid4().hex, - 'service_id': uuid.uuid4().hex, - 'url': 'https://service.example.com:5000/'} - self.create_endpoint_validator.validate(request_to_validate) - - def test_validate_endpoint_create_succeeds_with_required_parameters(self): - """Validate an endpoint request with only the required parameters.""" - # According to the Identity V3 API endpoint creation requires - # 
'service_id', 'interface', and 'url' - request_to_validate = {'service_id': uuid.uuid4().hex, - 'interface': 'public', - 'url': 'https://service.example.com:5000/'} - self.create_endpoint_validator.validate(request_to_validate) - - def test_validate_endpoint_create_succeeds_with_valid_enabled(self): - """Validate an endpoint with boolean values. - - Validate boolean values as `enabled` in endpoint create requests. - """ - for valid_enabled in _VALID_ENABLED_FORMATS: - request_to_validate = {'enabled': valid_enabled, - 'service_id': uuid.uuid4().hex, - 'interface': 'public', - 'url': 'https://service.example.com:5000/'} - self.create_endpoint_validator.validate(request_to_validate) - - def test_validate_create_endpoint_fails_with_invalid_enabled(self): - """Exception raised when boolean-like values as `enabled`.""" - for invalid_enabled in _INVALID_ENABLED_FORMATS: - request_to_validate = {'enabled': invalid_enabled, - 'service_id': uuid.uuid4().hex, - 'interface': 'public', - 'url': 'https://service.example.com:5000/'} - self.assertRaises(exception.SchemaValidationError, - self.create_endpoint_validator.validate, - request_to_validate) - - def test_validate_endpoint_create_succeeds_with_extra_parameters(self): - """Test that extra parameters pass validation on create endpoint.""" - request_to_validate = {'other_attr': uuid.uuid4().hex, - 'service_id': uuid.uuid4().hex, - 'interface': 'public', - 'url': 'https://service.example.com:5000/'} - self.create_endpoint_validator.validate(request_to_validate) - - def test_validate_endpoint_create_fails_without_service_id(self): - """Exception raised when `service_id` isn't in endpoint request.""" - request_to_validate = {'interface': 'public', - 'url': 'https://service.example.com:5000/'} - self.assertRaises(exception.SchemaValidationError, - self.create_endpoint_validator.validate, - request_to_validate) - - def test_validate_endpoint_create_fails_without_interface(self): - """Exception raised when `interface` isn't in 
endpoint request.""" - request_to_validate = {'service_id': uuid.uuid4().hex, - 'url': 'https://service.example.com:5000/'} - self.assertRaises(exception.SchemaValidationError, - self.create_endpoint_validator.validate, - request_to_validate) - - def test_validate_endpoint_create_fails_without_url(self): - """Exception raised when `url` isn't in endpoint request.""" - request_to_validate = {'service_id': uuid.uuid4().hex, - 'interface': 'public'} - self.assertRaises(exception.SchemaValidationError, - self.create_endpoint_validator.validate, - request_to_validate) - - def test_validate_endpoint_create_succeeds_with_url(self): - """Validate `url` attribute in endpoint create request.""" - request_to_validate = {'service_id': uuid.uuid4().hex, - 'interface': 'public'} - for url in _VALID_URLS: - request_to_validate['url'] = url - self.create_endpoint_validator.validate(request_to_validate) - - def test_validate_endpoint_create_fails_with_invalid_url(self): - """Exception raised when passing invalid `url` in request.""" - request_to_validate = {'service_id': uuid.uuid4().hex, - 'interface': 'public'} - for url in _INVALID_URLS: - request_to_validate['url'] = url - self.assertRaises(exception.SchemaValidationError, - self.create_endpoint_validator.validate, - request_to_validate) - - def test_validate_endpoint_create_fails_with_invalid_interface(self): - """Exception raised with invalid `interface`.""" - request_to_validate = {'interface': uuid.uuid4().hex, - 'service_id': uuid.uuid4().hex, - 'url': 'https://service.example.com:5000/'} - self.assertRaises(exception.SchemaValidationError, - self.create_endpoint_validator.validate, - request_to_validate) - - def test_validate_endpoint_create_fails_with_invalid_region_id(self): - """Exception raised when passing invalid `region(_id)` in request.""" - request_to_validate = {'interface': 'admin', - 'region_id': 1234, - 'service_id': uuid.uuid4().hex, - 'url': 'https://service.example.com:5000/'} - - 
self.assertRaises(exception.SchemaValidationError, - self.create_endpoint_validator.validate, - request_to_validate) - - request_to_validate = {'interface': 'admin', - 'region': 1234, - 'service_id': uuid.uuid4().hex, - 'url': 'https://service.example.com:5000/'} - - self.assertRaises(exception.SchemaValidationError, - self.create_endpoint_validator.validate, - request_to_validate) - - def test_validate_endpoint_update_fails_with_invalid_enabled(self): - """Exception raised when `enabled` is boolean-like value.""" - for invalid_enabled in _INVALID_ENABLED_FORMATS: - request_to_validate = {'enabled': invalid_enabled} - self.assertRaises(exception.SchemaValidationError, - self.update_endpoint_validator.validate, - request_to_validate) - - def test_validate_endpoint_update_succeeds_with_valid_enabled(self): - """Validate `enabled` as boolean values.""" - for valid_enabled in _VALID_ENABLED_FORMATS: - request_to_validate = {'enabled': valid_enabled} - self.update_endpoint_validator.validate(request_to_validate) - - def test_validate_endpoint_update_fails_with_invalid_interface(self): - """Exception raised when invalid `interface` on endpoint update.""" - request_to_validate = {'interface': uuid.uuid4().hex, - 'service_id': uuid.uuid4().hex, - 'url': 'https://service.example.com:5000/'} - self.assertRaises(exception.SchemaValidationError, - self.update_endpoint_validator.validate, - request_to_validate) - - def test_validate_endpoint_update_request_succeeds(self): - """Test that we validate an endpoint update request.""" - request_to_validate = {'enabled': True, - 'interface': 'admin', - 'region_id': uuid.uuid4().hex, - 'service_id': uuid.uuid4().hex, - 'url': 'https://service.example.com:5000/'} - self.update_endpoint_validator.validate(request_to_validate) - - def test_validate_endpoint_update_fails_with_no_parameters(self): - """Exception raised when no parameters on endpoint update.""" - request_to_validate = {} - self.assertRaises(exception.SchemaValidationError, - 
self.update_endpoint_validator.validate, - request_to_validate) - - def test_validate_endpoint_update_succeeds_with_extra_parameters(self): - """Test that extra parameters pass validation on update endpoint.""" - request_to_validate = {'enabled': True, - 'interface': 'admin', - 'region_id': uuid.uuid4().hex, - 'service_id': uuid.uuid4().hex, - 'url': 'https://service.example.com:5000/', - 'other_attr': uuid.uuid4().hex} - self.update_endpoint_validator.validate(request_to_validate) - - def test_validate_endpoint_update_succeeds_with_url(self): - """Validate `url` attribute in endpoint update request.""" - request_to_validate = {'service_id': uuid.uuid4().hex, - 'interface': 'public'} - for url in _VALID_URLS: - request_to_validate['url'] = url - self.update_endpoint_validator.validate(request_to_validate) - - def test_validate_endpoint_update_fails_with_invalid_url(self): - """Exception raised when passing invalid `url` in request.""" - request_to_validate = {'service_id': uuid.uuid4().hex, - 'interface': 'public'} - for url in _INVALID_URLS: - request_to_validate['url'] = url - self.assertRaises(exception.SchemaValidationError, - self.update_endpoint_validator.validate, - request_to_validate) - - def test_validate_endpoint_update_fails_with_invalid_region_id(self): - """Exception raised when passing invalid `region(_id)` in request.""" - request_to_validate = {'interface': 'admin', - 'region_id': 1234, - 'service_id': uuid.uuid4().hex, - 'url': 'https://service.example.com:5000/'} - - self.assertRaises(exception.SchemaValidationError, - self.update_endpoint_validator.validate, - request_to_validate) - - request_to_validate = {'interface': 'admin', - 'region': 1234, - 'service_id': uuid.uuid4().hex, - 'url': 'https://service.example.com:5000/'} - - self.assertRaises(exception.SchemaValidationError, - self.update_endpoint_validator.validate, - request_to_validate) - - -class EndpointGroupValidationTestCase(unit.BaseTestCase): - """Test for V3 Endpoint Group API 
validation.""" - - def setUp(self): - super(EndpointGroupValidationTestCase, self).setUp() - - create = catalog_schema.endpoint_group_create - update = catalog_schema.endpoint_group_update - self.create_endpoint_grp_validator = validators.SchemaValidator(create) - self.update_endpoint_grp_validator = validators.SchemaValidator(update) - - def test_validate_endpoint_group_request_succeeds(self): - """Test that we validate an endpoint group request.""" - request_to_validate = {'description': 'endpoint group description', - 'filters': {'interface': 'admin'}, - 'name': 'endpoint_group_name'} - self.create_endpoint_grp_validator.validate(request_to_validate) - - def test_validate_endpoint_group_create_succeeds_with_req_parameters(self): - """Validate required endpoint group parameters. - - This test ensure that validation succeeds with only the required - parameters passed for creating an endpoint group. - """ - request_to_validate = {'filters': {'interface': 'admin'}, - 'name': 'endpoint_group_name'} - self.create_endpoint_grp_validator.validate(request_to_validate) - - def test_validate_endpoint_group_create_succeeds_with_valid_filters(self): - """Validate `filters` in endpoint group create requests.""" - request_to_validate = {'description': 'endpoint group description', - 'name': 'endpoint_group_name'} - for valid_filters in _VALID_FILTERS: - request_to_validate['filters'] = valid_filters - self.create_endpoint_grp_validator.validate(request_to_validate) - - def test_validate_create_endpoint_group_fails_with_invalid_filters(self): - """Validate invalid `filters` value in endpoint group parameters. - - This test ensures that exception is raised when non-dict values is - used as `filters` in endpoint group create request. 
- """ - request_to_validate = {'description': 'endpoint group description', - 'name': 'endpoint_group_name'} - for invalid_filters in _INVALID_FILTERS: - request_to_validate['filters'] = invalid_filters - self.assertRaises(exception.SchemaValidationError, - self.create_endpoint_grp_validator.validate, - request_to_validate) - - def test_validate_endpoint_group_create_fails_without_name(self): - """Exception raised when `name` isn't in endpoint group request.""" - request_to_validate = {'description': 'endpoint group description', - 'filters': {'interface': 'admin'}} - self.assertRaises(exception.SchemaValidationError, - self.create_endpoint_grp_validator.validate, - request_to_validate) - - def test_validate_endpoint_group_create_fails_without_filters(self): - """Exception raised when `filters` isn't in endpoint group request.""" - request_to_validate = {'description': 'endpoint group description', - 'name': 'endpoint_group_name'} - self.assertRaises(exception.SchemaValidationError, - self.create_endpoint_grp_validator.validate, - request_to_validate) - - def test_validate_endpoint_group_update_request_succeeds(self): - """Test that we validate an endpoint group update request.""" - request_to_validate = {'description': 'endpoint group description', - 'filters': {'interface': 'admin'}, - 'name': 'endpoint_group_name'} - self.update_endpoint_grp_validator.validate(request_to_validate) - - def test_validate_endpoint_group_update_fails_with_no_parameters(self): - """Exception raised when no parameters on endpoint group update.""" - request_to_validate = {} - self.assertRaises(exception.SchemaValidationError, - self.update_endpoint_grp_validator.validate, - request_to_validate) - - def test_validate_endpoint_group_update_succeeds_with_name(self): - """Validate request with only `name` in endpoint group update. - - This test ensures that passing only a `name` passes validation - on update endpoint group request. 
- """ - request_to_validate = {'name': 'endpoint_group_name'} - self.update_endpoint_grp_validator.validate(request_to_validate) - - def test_validate_endpoint_group_update_succeeds_with_valid_filters(self): - """Validate `filters` as dict values.""" - for valid_filters in _VALID_FILTERS: - request_to_validate = {'filters': valid_filters} - self.update_endpoint_grp_validator.validate(request_to_validate) - - def test_validate_endpoint_group_update_fails_with_invalid_filters(self): - """Exception raised when passing invalid `filters` in request.""" - for invalid_filters in _INVALID_FILTERS: - request_to_validate = {'filters': invalid_filters} - self.assertRaises(exception.SchemaValidationError, - self.update_endpoint_grp_validator.validate, - request_to_validate) - - -class TrustValidationTestCase(unit.BaseTestCase): - """Test for V3 Trust API validation.""" - - _valid_roles = ['member', uuid.uuid4().hex, str(uuid.uuid4())] - _invalid_roles = [False, True, 123, None] - - def setUp(self): - super(TrustValidationTestCase, self).setUp() - - create = trust_schema.trust_create - self.create_trust_validator = validators.SchemaValidator(create) - - def test_validate_trust_succeeds(self): - """Test that we can validate a trust request.""" - request_to_validate = {'trustor_user_id': uuid.uuid4().hex, - 'trustee_user_id': uuid.uuid4().hex, - 'impersonation': False} - self.create_trust_validator.validate(request_to_validate) - - def test_validate_trust_with_all_parameters_succeeds(self): - """Test that we can validate a trust request with all parameters.""" - request_to_validate = {'trustor_user_id': uuid.uuid4().hex, - 'trustee_user_id': uuid.uuid4().hex, - 'impersonation': False, - 'project_id': uuid.uuid4().hex, - 'roles': [uuid.uuid4().hex, uuid.uuid4().hex], - 'expires_at': 'some timestamp', - 'remaining_uses': 2} - self.create_trust_validator.validate(request_to_validate) - - def test_validate_trust_without_trustor_id_fails(self): - """Validate trust request fails 
without `trustor_id`.""" - request_to_validate = {'trustee_user_id': uuid.uuid4().hex, - 'impersonation': False} - self.assertRaises(exception.SchemaValidationError, - self.create_trust_validator.validate, - request_to_validate) - - def test_validate_trust_without_trustee_id_fails(self): - """Validate trust request fails without `trustee_id`.""" - request_to_validate = {'trusor_user_id': uuid.uuid4().hex, - 'impersonation': False} - self.assertRaises(exception.SchemaValidationError, - self.create_trust_validator.validate, - request_to_validate) - - def test_validate_trust_without_impersonation_fails(self): - """Validate trust request fails without `impersonation`.""" - request_to_validate = {'trustee_user_id': uuid.uuid4().hex, - 'trustor_user_id': uuid.uuid4().hex} - self.assertRaises(exception.SchemaValidationError, - self.create_trust_validator.validate, - request_to_validate) - - def test_validate_trust_with_extra_parameters_succeeds(self): - """Test that we can validate a trust request with extra parameters.""" - request_to_validate = {'trustor_user_id': uuid.uuid4().hex, - 'trustee_user_id': uuid.uuid4().hex, - 'impersonation': False, - 'project_id': uuid.uuid4().hex, - 'roles': [uuid.uuid4().hex, uuid.uuid4().hex], - 'expires_at': 'some timestamp', - 'remaining_uses': 2, - 'extra': 'something extra!'} - self.create_trust_validator.validate(request_to_validate) - - def test_validate_trust_with_invalid_impersonation_fails(self): - """Validate trust request with invalid `impersonation` fails.""" - request_to_validate = {'trustor_user_id': uuid.uuid4().hex, - 'trustee_user_id': uuid.uuid4().hex, - 'impersonation': 2} - self.assertRaises(exception.SchemaValidationError, - self.create_trust_validator.validate, - request_to_validate) - - def test_validate_trust_with_null_remaining_uses_succeeds(self): - """Validate trust request with null `remaining_uses`.""" - request_to_validate = {'trustor_user_id': uuid.uuid4().hex, - 'trustee_user_id': uuid.uuid4().hex, - 
'impersonation': False, - 'remaining_uses': None} - self.create_trust_validator.validate(request_to_validate) - - def test_validate_trust_with_remaining_uses_succeeds(self): - """Validate trust request with `remaining_uses` succeeds.""" - request_to_validate = {'trustor_user_id': uuid.uuid4().hex, - 'trustee_user_id': uuid.uuid4().hex, - 'impersonation': False, - 'remaining_uses': 2} - self.create_trust_validator.validate(request_to_validate) - - def test_validate_trust_with_period_in_user_id_string(self): - """Validate trust request with a period in the user id string.""" - request_to_validate = {'trustor_user_id': 'john.smith', - 'trustee_user_id': 'joe.developer', - 'impersonation': False} - self.create_trust_validator.validate(request_to_validate) - - def test_validate_trust_with_invalid_expires_at_fails(self): - """Validate trust request with invalid `expires_at` fails.""" - request_to_validate = {'trustor_user_id': uuid.uuid4().hex, - 'trustee_user_id': uuid.uuid4().hex, - 'impersonation': False, - 'expires_at': 3} - self.assertRaises(exception.SchemaValidationError, - self.create_trust_validator.validate, - request_to_validate) - - def test_validate_trust_with_role_types_succeeds(self): - """Validate trust request with `roles` succeeds.""" - for role in self._valid_roles: - request_to_validate = {'trustor_user_id': uuid.uuid4().hex, - 'trustee_user_id': uuid.uuid4().hex, - 'impersonation': False, - 'roles': [role]} - self.create_trust_validator.validate(request_to_validate) - - def test_validate_trust_with_invalid_role_type_fails(self): - """Validate trust request with invalid `roles` fails.""" - for role in self._invalid_roles: - request_to_validate = {'trustor_user_id': uuid.uuid4().hex, - 'trustee_user_id': uuid.uuid4().hex, - 'impersonation': False, - 'roles': role} - self.assertRaises(exception.SchemaValidationError, - self.create_trust_validator.validate, - request_to_validate) - - def test_validate_trust_with_list_of_valid_roles_succeeds(self): - 
"""Validate trust request with a list of valid `roles`.""" - request_to_validate = {'trustor_user_id': uuid.uuid4().hex, - 'trustee_user_id': uuid.uuid4().hex, - 'impersonation': False, - 'roles': self._valid_roles} - self.create_trust_validator.validate(request_to_validate) - - -class ServiceProviderValidationTestCase(unit.BaseTestCase): - """Test for V3 Service Provider API validation.""" - - def setUp(self): - super(ServiceProviderValidationTestCase, self).setUp() - - self.valid_auth_url = 'https://' + uuid.uuid4().hex + '.com' - self.valid_sp_url = 'https://' + uuid.uuid4().hex + '.com' - - create = federation_schema.service_provider_create - update = federation_schema.service_provider_update - self.create_sp_validator = validators.SchemaValidator(create) - self.update_sp_validator = validators.SchemaValidator(update) - - def test_validate_sp_request(self): - """Test that we validate `auth_url` and `sp_url` in request.""" - request_to_validate = { - 'auth_url': self.valid_auth_url, - 'sp_url': self.valid_sp_url - } - self.create_sp_validator.validate(request_to_validate) - - def test_validate_sp_request_with_invalid_auth_url_fails(self): - """Validate request fails with invalid `auth_url`.""" - request_to_validate = { - 'auth_url': uuid.uuid4().hex, - 'sp_url': self.valid_sp_url - } - self.assertRaises(exception.SchemaValidationError, - self.create_sp_validator.validate, - request_to_validate) - - def test_validate_sp_request_with_invalid_sp_url_fails(self): - """Validate request fails with invalid `sp_url`.""" - request_to_validate = { - 'auth_url': self.valid_auth_url, - 'sp_url': uuid.uuid4().hex, - } - self.assertRaises(exception.SchemaValidationError, - self.create_sp_validator.validate, - request_to_validate) - - def test_validate_sp_request_without_auth_url_fails(self): - """Validate request fails without `auth_url`.""" - request_to_validate = { - 'sp_url': self.valid_sp_url - } - self.assertRaises(exception.SchemaValidationError, - 
self.create_sp_validator.validate, - request_to_validate) - request_to_validate = { - 'auth_url': None, - 'sp_url': self.valid_sp_url - } - self.assertRaises(exception.SchemaValidationError, - self.create_sp_validator.validate, - request_to_validate) - - def test_validate_sp_request_without_sp_url_fails(self): - """Validate request fails without `sp_url`.""" - request_to_validate = { - 'auth_url': self.valid_auth_url, - } - self.assertRaises(exception.SchemaValidationError, - self.create_sp_validator.validate, - request_to_validate) - request_to_validate = { - 'auth_url': self.valid_auth_url, - 'sp_url': None, - } - self.assertRaises(exception.SchemaValidationError, - self.create_sp_validator.validate, - request_to_validate) - - def test_validate_sp_request_with_enabled(self): - """Validate `enabled` as boolean-like values.""" - for valid_enabled in _VALID_ENABLED_FORMATS: - request_to_validate = { - 'auth_url': self.valid_auth_url, - 'sp_url': self.valid_sp_url, - 'enabled': valid_enabled - } - self.create_sp_validator.validate(request_to_validate) - - def test_validate_sp_request_with_invalid_enabled_fails(self): - """Exception is raised when `enabled` isn't a boolean-like value.""" - for invalid_enabled in _INVALID_ENABLED_FORMATS: - request_to_validate = { - 'auth_url': self.valid_auth_url, - 'sp_url': self.valid_sp_url, - 'enabled': invalid_enabled - } - self.assertRaises(exception.SchemaValidationError, - self.create_sp_validator.validate, - request_to_validate) - - def test_validate_sp_request_with_valid_description(self): - """Test that we validate `description` in create requests.""" - request_to_validate = { - 'auth_url': self.valid_auth_url, - 'sp_url': self.valid_sp_url, - 'description': 'My Service Provider' - } - self.create_sp_validator.validate(request_to_validate) - - def test_validate_sp_request_with_invalid_description_fails(self): - """Exception is raised when `description` as a non-string value.""" - request_to_validate = { - 'auth_url': 
self.valid_auth_url, - 'sp_url': self.valid_sp_url, - 'description': False - } - self.assertRaises(exception.SchemaValidationError, - self.create_sp_validator.validate, - request_to_validate) - - def test_validate_sp_request_with_extra_field_fails(self): - """Exception raised when passing extra fields in the body.""" - # 'id' can't be passed in the body since it is passed in the URL - request_to_validate = { - 'id': 'ACME', - 'auth_url': self.valid_auth_url, - 'sp_url': self.valid_sp_url, - 'description': 'My Service Provider' - } - self.assertRaises(exception.SchemaValidationError, - self.create_sp_validator.validate, - request_to_validate) - - def test_validate_sp_update_request(self): - """Test that we validate a update request.""" - request_to_validate = {'description': uuid.uuid4().hex} - self.update_sp_validator.validate(request_to_validate) - - def test_validate_sp_update_request_with_no_parameters_fails(self): - """Exception is raised when updating without parameters.""" - request_to_validate = {} - self.assertRaises(exception.SchemaValidationError, - self.update_sp_validator.validate, - request_to_validate) - - def test_validate_sp_update_request_with_invalid_auth_url_fails(self): - """Exception raised when updating with invalid `auth_url`.""" - request_to_validate = {'auth_url': uuid.uuid4().hex} - self.assertRaises(exception.SchemaValidationError, - self.update_sp_validator.validate, - request_to_validate) - request_to_validate = {'auth_url': None} - self.assertRaises(exception.SchemaValidationError, - self.update_sp_validator.validate, - request_to_validate) - - def test_validate_sp_update_request_with_invalid_sp_url_fails(self): - """Exception raised when updating with invalid `sp_url`.""" - request_to_validate = {'sp_url': uuid.uuid4().hex} - self.assertRaises(exception.SchemaValidationError, - self.update_sp_validator.validate, - request_to_validate) - request_to_validate = {'sp_url': None} - self.assertRaises(exception.SchemaValidationError, - 
self.update_sp_validator.validate, - request_to_validate) - - -class UserValidationTestCase(unit.BaseTestCase): - """Test for V3 User API validation.""" - - def setUp(self): - super(UserValidationTestCase, self).setUp() - - self.user_name = uuid.uuid4().hex - - create = identity_schema.user_create - update = identity_schema.user_update - self.create_user_validator = validators.SchemaValidator(create) - self.update_user_validator = validators.SchemaValidator(update) - - def test_validate_user_create_request_succeeds(self): - """Test that validating a user create request succeeds.""" - request_to_validate = {'name': self.user_name} - self.create_user_validator.validate(request_to_validate) - - def test_validate_user_create_with_all_valid_parameters_succeeds(self): - """Test that validating a user create request succeeds.""" - request_to_validate = unit.new_user_ref(domain_id=uuid.uuid4().hex, - name=self.user_name) - self.create_user_validator.validate(request_to_validate) - - def test_validate_user_create_fails_without_name(self): - """Exception raised when validating a user without name.""" - request_to_validate = {'email': uuid.uuid4().hex} - self.assertRaises(exception.SchemaValidationError, - self.create_user_validator.validate, - request_to_validate) - - def test_validate_user_create_fails_with_name_of_zero_length(self): - """Exception raised when validating a username with length of zero.""" - request_to_validate = {'name': ''} - self.assertRaises(exception.SchemaValidationError, - self.create_user_validator.validate, - request_to_validate) - - def test_validate_user_create_fails_with_name_of_wrong_type(self): - """Exception raised when validating a username of wrong type.""" - request_to_validate = {'name': True} - self.assertRaises(exception.SchemaValidationError, - self.create_user_validator.validate, - request_to_validate) - - def test_validate_user_create_succeeds_with_valid_enabled_formats(self): - """Validate acceptable enabled formats in create user 
requests.""" - for enabled in _VALID_ENABLED_FORMATS: - request_to_validate = {'name': self.user_name, - 'enabled': enabled} - self.create_user_validator.validate(request_to_validate) - - def test_validate_user_create_fails_with_invalid_enabled_formats(self): - """Exception raised when enabled is not an acceptable format.""" - for invalid_enabled in _INVALID_ENABLED_FORMATS: - request_to_validate = {'name': self.user_name, - 'enabled': invalid_enabled} - self.assertRaises(exception.SchemaValidationError, - self.create_user_validator.validate, - request_to_validate) - - def test_validate_user_create_succeeds_with_extra_attributes(self): - """Validate extra parameters on user create requests.""" - request_to_validate = {'name': self.user_name, - 'other_attr': uuid.uuid4().hex} - self.create_user_validator.validate(request_to_validate) - - def test_validate_user_create_succeeds_with_password_of_zero_length(self): - """Validate empty password on user create requests.""" - request_to_validate = {'name': self.user_name, - 'password': ''} - self.create_user_validator.validate(request_to_validate) - - def test_validate_user_create_succeeds_with_null_password(self): - """Validate that password is nullable on create user.""" - request_to_validate = {'name': self.user_name, - 'password': None} - self.create_user_validator.validate(request_to_validate) - - def test_validate_user_create_fails_with_invalid_password_type(self): - """Exception raised when user password is of the wrong type.""" - request_to_validate = {'name': self.user_name, - 'password': True} - self.assertRaises(exception.SchemaValidationError, - self.create_user_validator.validate, - request_to_validate) - - def test_validate_user_create_succeeds_with_null_description(self): - """Validate that description can be nullable on create user.""" - request_to_validate = {'name': self.user_name, - 'description': None} - self.create_user_validator.validate(request_to_validate) - - def 
test_validate_user_update_succeeds(self): - """Validate an update user request.""" - request_to_validate = {'email': uuid.uuid4().hex} - self.update_user_validator.validate(request_to_validate) - - def test_validate_user_update_fails_with_no_parameters(self): - """Exception raised when updating nothing.""" - request_to_validate = {} - self.assertRaises(exception.SchemaValidationError, - self.update_user_validator.validate, - request_to_validate) - - def test_validate_user_update_succeeds_with_extra_parameters(self): - """Validate user update requests with extra parameters.""" - request_to_validate = {'other_attr': uuid.uuid4().hex} - self.update_user_validator.validate(request_to_validate) - - -class GroupValidationTestCase(unit.BaseTestCase): - """Test for V3 Group API validation.""" - - def setUp(self): - super(GroupValidationTestCase, self).setUp() - - self.group_name = uuid.uuid4().hex - - create = identity_schema.group_create - update = identity_schema.group_update - self.create_group_validator = validators.SchemaValidator(create) - self.update_group_validator = validators.SchemaValidator(update) - - def test_validate_group_create_succeeds(self): - """Validate create group requests.""" - request_to_validate = {'name': self.group_name} - self.create_group_validator.validate(request_to_validate) - - def test_validate_group_create_succeeds_with_all_parameters(self): - """Validate create group requests with all parameters.""" - request_to_validate = {'name': self.group_name, - 'description': uuid.uuid4().hex, - 'domain_id': uuid.uuid4().hex} - self.create_group_validator.validate(request_to_validate) - - def test_validate_group_create_fails_without_group_name(self): - """Exception raised when group name is not provided in request.""" - request_to_validate = {'description': uuid.uuid4().hex} - self.assertRaises(exception.SchemaValidationError, - self.create_group_validator.validate, - request_to_validate) - - def 
test_validate_group_create_fails_when_group_name_is_too_short(self): - """Exception raised when group name is equal to zero.""" - request_to_validate = {'name': ''} - self.assertRaises(exception.SchemaValidationError, - self.create_group_validator.validate, - request_to_validate) - - def test_validate_group_create_succeeds_with_extra_parameters(self): - """Validate extra attributes on group create requests.""" - request_to_validate = {'name': self.group_name, - 'other_attr': uuid.uuid4().hex} - self.create_group_validator.validate(request_to_validate) - - def test_validate_group_update_succeeds(self): - """Validate group update requests.""" - request_to_validate = {'description': uuid.uuid4().hex} - self.update_group_validator.validate(request_to_validate) - - def test_validate_group_update_fails_with_no_parameters(self): - """Exception raised when no parameters passed in on update.""" - request_to_validate = {} - self.assertRaises(exception.SchemaValidationError, - self.update_group_validator.validate, - request_to_validate) - - def test_validate_group_update_succeeds_with_extra_parameters(self): - """Validate group update requests with extra parameters.""" - request_to_validate = {'other_attr': uuid.uuid4().hex} - self.update_group_validator.validate(request_to_validate) - - -class IdentityProviderValidationTestCase(unit.BaseTestCase): - """Test for V3 Identity Provider API validation.""" - - def setUp(self): - super(IdentityProviderValidationTestCase, self).setUp() - - create = federation_schema.identity_provider_create - update = federation_schema.identity_provider_update - self.create_idp_validator = validators.SchemaValidator(create) - self.update_idp_validator = validators.SchemaValidator(update) - - def test_validate_idp_request_succeeds(self): - """Test that we validate an identity provider request.""" - request_to_validate = {'description': 'identity provider description', - 'enabled': True, - 'remote_ids': [uuid.uuid4().hex, - uuid.uuid4().hex]} - 
self.create_idp_validator.validate(request_to_validate) - self.update_idp_validator.validate(request_to_validate) - - def test_validate_idp_request_fails_with_invalid_params(self): - """Exception raised when unknown parameter is found.""" - request_to_validate = {'bogus': uuid.uuid4().hex} - self.assertRaises(exception.SchemaValidationError, - self.create_idp_validator.validate, - request_to_validate) - - self.assertRaises(exception.SchemaValidationError, - self.update_idp_validator.validate, - request_to_validate) - - def test_validate_idp_request_with_enabled(self): - """Validate `enabled` as boolean-like values.""" - for valid_enabled in _VALID_ENABLED_FORMATS: - request_to_validate = {'enabled': valid_enabled} - self.create_idp_validator.validate(request_to_validate) - self.update_idp_validator.validate(request_to_validate) - - def test_validate_idp_request_with_invalid_enabled_fails(self): - """Exception is raised when `enabled` isn't a boolean-like value.""" - for invalid_enabled in _INVALID_ENABLED_FORMATS: - request_to_validate = {'enabled': invalid_enabled} - self.assertRaises(exception.SchemaValidationError, - self.create_idp_validator.validate, - request_to_validate) - - self.assertRaises(exception.SchemaValidationError, - self.update_idp_validator.validate, - request_to_validate) - - def test_validate_idp_request_no_parameters(self): - """Test that schema validation with empty request body.""" - request_to_validate = {} - self.create_idp_validator.validate(request_to_validate) - - # Exception raised when no property on IdP update. 
- self.assertRaises(exception.SchemaValidationError, - self.update_idp_validator.validate, - request_to_validate) - - def test_validate_idp_request_with_invalid_description_fails(self): - """Exception is raised when `description` as a non-string value.""" - request_to_validate = {'description': False} - self.assertRaises(exception.SchemaValidationError, - self.create_idp_validator.validate, - request_to_validate) - - self.assertRaises(exception.SchemaValidationError, - self.update_idp_validator.validate, - request_to_validate) - - def test_validate_idp_request_with_invalid_remote_id_fails(self): - """Exception is raised when `remote_ids` is not a array.""" - request_to_validate = {"remote_ids": uuid.uuid4().hex} - self.assertRaises(exception.SchemaValidationError, - self.create_idp_validator.validate, - request_to_validate) - - self.assertRaises(exception.SchemaValidationError, - self.update_idp_validator.validate, - request_to_validate) - - def test_validate_idp_request_with_duplicated_remote_id(self): - """Exception is raised when the duplicated `remote_ids` is found.""" - idp_id = uuid.uuid4().hex - request_to_validate = {"remote_ids": [idp_id, idp_id]} - self.assertRaises(exception.SchemaValidationError, - self.create_idp_validator.validate, - request_to_validate) - - self.assertRaises(exception.SchemaValidationError, - self.update_idp_validator.validate, - request_to_validate) - - def test_validate_idp_request_remote_id_nullable(self): - """Test that `remote_ids` could be explicitly set to None""" - request_to_validate = {'remote_ids': None} - self.create_idp_validator.validate(request_to_validate) - self.update_idp_validator.validate(request_to_validate) - - -class FederationProtocolValidationTestCase(unit.BaseTestCase): - """Test for V3 Federation Protocol API validation.""" - - def setUp(self): - super(FederationProtocolValidationTestCase, self).setUp() - - schema = federation_schema.federation_protocol_schema - # create protocol and update protocol have 
the same shema definition, - # combine them together, no need to validate separately. - self.protocol_validator = validators.SchemaValidator(schema) - - def test_validate_protocol_request_succeeds(self): - """Test that we validate a protocol request successfully.""" - request_to_validate = {'mapping_id': uuid.uuid4().hex} - self.protocol_validator.validate(request_to_validate) - - def test_validate_protocol_request_succeeds_with_nonuuid_mapping_id(self): - """Test that we allow underscore in mapping_id value.""" - request_to_validate = {'mapping_id': 'my_mapping_id'} - self.protocol_validator.validate(request_to_validate) - - def test_validate_protocol_request_fails_with_invalid_params(self): - """Exception raised when unknown parameter is found.""" - request_to_validate = {'bogus': uuid.uuid4().hex} - self.assertRaises(exception.SchemaValidationError, - self.protocol_validator.validate, - request_to_validate) - - def test_validate_protocol_request_no_parameters(self): - """Test that schema validation with empty request body.""" - request_to_validate = {} - # 'mapping_id' is required. 
- self.assertRaises(exception.SchemaValidationError, - self.protocol_validator.validate, - request_to_validate) - - def test_validate_protocol_request_fails_with_invalid_mapping_id(self): - """Exception raised when mapping_id is not string.""" - request_to_validate = {'mapping_id': 12334} - self.assertRaises(exception.SchemaValidationError, - self.protocol_validator.validate, - request_to_validate) - - -class OAuth1ValidationTestCase(unit.BaseTestCase): - """Test for V3 Identity OAuth1 API validation.""" - - def setUp(self): - super(OAuth1ValidationTestCase, self).setUp() - - create = oauth1_schema.consumer_create - update = oauth1_schema.consumer_update - self.create_consumer_validator = validators.SchemaValidator(create) - self.update_consumer_validator = validators.SchemaValidator(update) - - def test_validate_consumer_request_succeeds(self): - """Test that we validate a consumer request successfully.""" - request_to_validate = {'description': uuid.uuid4().hex, - 'name': uuid.uuid4().hex} - self.create_consumer_validator.validate(request_to_validate) - self.update_consumer_validator.validate(request_to_validate) - - def test_validate_consumer_request_with_no_parameters(self): - """Test that schema validation with empty request body.""" - request_to_validate = {} - self.create_consumer_validator.validate(request_to_validate) - # At least one property should be given. 
- self.assertRaises(exception.SchemaValidationError, - self.update_consumer_validator.validate, - request_to_validate) - - def test_validate_consumer_request_with_invalid_description_fails(self): - """Exception is raised when `description` as a non-string value.""" - for invalid_desc in _INVALID_DESC_FORMATS: - request_to_validate = {'description': invalid_desc} - self.assertRaises(exception.SchemaValidationError, - self.create_consumer_validator.validate, - request_to_validate) - - self.assertRaises(exception.SchemaValidationError, - self.update_consumer_validator.validate, - request_to_validate) - - def test_validate_update_consumer_request_fails_with_secret(self): - """Exception raised when secret is given.""" - request_to_validate = {'secret': uuid.uuid4().hex} - self.assertRaises(exception.SchemaValidationError, - self.update_consumer_validator.validate, - request_to_validate) - - def test_validate_consumer_request_with_none_desc(self): - """Test that schema validation with None desc.""" - request_to_validate = {'description': None} - self.create_consumer_validator.validate(request_to_validate) - self.update_consumer_validator.validate(request_to_validate) diff --git a/keystone-moon/keystone/tests/unit/test_versions.py b/keystone-moon/keystone/tests/unit/test_versions.py deleted file mode 100644 index 2f5c2b17..00000000 --- a/keystone-moon/keystone/tests/unit/test_versions.py +++ /dev/null @@ -1,1065 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -import copy -import functools -import random - -import mock -from oslo_config import cfg -from oslo_serialization import jsonutils -from six.moves import http_client -from testtools import matchers as tt_matchers -import webob - -from keystone.common import json_home -from keystone.tests import unit -from keystone.tests.unit import utils -from keystone.version import controllers - - -CONF = cfg.CONF - -v2_MEDIA_TYPES = [ - { - "base": "application/json", - "type": "application/" - "vnd.openstack.identity-v2.0+json" - } -] - -v2_HTML_DESCRIPTION = { - "rel": "describedby", - "type": "text/html", - "href": "http://docs.openstack.org/" -} - - -v2_EXPECTED_RESPONSE = { - "id": "v2.0", - "status": "stable", - "updated": "2014-04-17T00:00:00Z", - "links": [ - { - "rel": "self", - "href": "", # Will get filled in after initialization - }, - v2_HTML_DESCRIPTION - ], - "media-types": v2_MEDIA_TYPES -} - -v2_VERSION_RESPONSE = { - "version": v2_EXPECTED_RESPONSE -} - -v3_MEDIA_TYPES = [ - { - "base": "application/json", - "type": "application/" - "vnd.openstack.identity-v3+json" - } -] - -v3_EXPECTED_RESPONSE = { - "id": "v3.6", - "status": "stable", - "updated": "2016-04-04T00:00:00Z", - "links": [ - { - "rel": "self", - "href": "", # Will get filled in after initialization - } - ], - "media-types": v3_MEDIA_TYPES -} - -v3_VERSION_RESPONSE = { - "version": v3_EXPECTED_RESPONSE -} - -VERSIONS_RESPONSE = { - "versions": { - "values": [ - v3_EXPECTED_RESPONSE, - v2_EXPECTED_RESPONSE - ] - } -} - -_build_ec2tokens_relation = functools.partial( - json_home.build_v3_extension_resource_relation, extension_name='OS-EC2', - extension_version='1.0') - -REVOCATIONS_RELATION = json_home.build_v3_extension_resource_relation( - 'OS-PKI', '1.0', 'revocations') - -_build_simple_cert_relation = functools.partial( - json_home.build_v3_extension_resource_relation, - 
extension_name='OS-SIMPLE-CERT', extension_version='1.0') - -_build_trust_relation = functools.partial( - json_home.build_v3_extension_resource_relation, extension_name='OS-TRUST', - extension_version='1.0') - -_build_federation_rel = functools.partial( - json_home.build_v3_extension_resource_relation, - extension_name='OS-FEDERATION', - extension_version='1.0') - -_build_oauth1_rel = functools.partial( - json_home.build_v3_extension_resource_relation, - extension_name='OS-OAUTH1', extension_version='1.0') - -_build_ep_policy_rel = functools.partial( - json_home.build_v3_extension_resource_relation, - extension_name='OS-ENDPOINT-POLICY', extension_version='1.0') - -_build_ep_filter_rel = functools.partial( - json_home.build_v3_extension_resource_relation, - extension_name='OS-EP-FILTER', extension_version='1.0') - -_build_os_inherit_rel = functools.partial( - json_home.build_v3_extension_resource_relation, - extension_name='OS-INHERIT', extension_version='1.0') - -TRUST_ID_PARAMETER_RELATION = json_home.build_v3_extension_parameter_relation( - 'OS-TRUST', '1.0', 'trust_id') - -IDP_ID_PARAMETER_RELATION = json_home.build_v3_extension_parameter_relation( - 'OS-FEDERATION', '1.0', 'idp_id') - -PROTOCOL_ID_PARAM_RELATION = json_home.build_v3_extension_parameter_relation( - 'OS-FEDERATION', '1.0', 'protocol_id') - -MAPPING_ID_PARAM_RELATION = json_home.build_v3_extension_parameter_relation( - 'OS-FEDERATION', '1.0', 'mapping_id') - -SP_ID_PARAMETER_RELATION = json_home.build_v3_extension_parameter_relation( - 'OS-FEDERATION', '1.0', 'sp_id') - -CONSUMER_ID_PARAMETER_RELATION = ( - json_home.build_v3_extension_parameter_relation( - 'OS-OAUTH1', '1.0', 'consumer_id')) - -REQUEST_TOKEN_ID_PARAMETER_RELATION = ( - json_home.build_v3_extension_parameter_relation( - 'OS-OAUTH1', '1.0', 'request_token_id')) - -ACCESS_TOKEN_ID_PARAMETER_RELATION = ( - json_home.build_v3_extension_parameter_relation( - 'OS-OAUTH1', '1.0', 'access_token_id')) - 
-ENDPOINT_GROUP_ID_PARAMETER_RELATION = ( - json_home.build_v3_extension_parameter_relation( - 'OS-EP-FILTER', '1.0', 'endpoint_group_id')) - -BASE_IDP_PROTOCOL = '/OS-FEDERATION/identity_providers/{idp_id}/protocols' -BASE_EP_POLICY = '/policies/{policy_id}/OS-ENDPOINT-POLICY' -BASE_EP_FILTER_PREFIX = '/OS-EP-FILTER' -BASE_EP_FILTER = BASE_EP_FILTER_PREFIX + '/endpoint_groups/{endpoint_group_id}' -BASE_ACCESS_TOKEN = ( - '/users/{user_id}/OS-OAUTH1/access_tokens/{access_token_id}') - -FEDERATED_AUTH_URL = ('/OS-FEDERATION/identity_providers/{idp_id}' - '/protocols/{protocol_id}/auth') -FEDERATED_IDP_SPECIFIC_WEBSSO = ('/auth/OS-FEDERATION/identity_providers/' - '{idp_id}/protocols/{protocol_id}/websso') - -V3_JSON_HOME_RESOURCES = { - json_home.build_v3_resource_relation('auth_tokens'): { - 'href': '/auth/tokens'}, - json_home.build_v3_resource_relation('auth_catalog'): { - 'href': '/auth/catalog'}, - json_home.build_v3_resource_relation('auth_projects'): { - 'href': '/auth/projects'}, - json_home.build_v3_resource_relation('auth_domains'): { - 'href': '/auth/domains'}, - json_home.build_v3_resource_relation('credential'): { - 'href-template': '/credentials/{credential_id}', - 'href-vars': { - 'credential_id': - json_home.build_v3_parameter_relation('credential_id')}}, - json_home.build_v3_resource_relation('credentials'): { - 'href': '/credentials'}, - json_home.build_v3_resource_relation('domain'): { - 'href-template': '/domains/{domain_id}', - 'href-vars': {'domain_id': json_home.Parameters.DOMAIN_ID, }}, - json_home.build_v3_resource_relation('domain_group_role'): { - 'href-template': - '/domains/{domain_id}/groups/{group_id}/roles/{role_id}', - 'href-vars': { - 'domain_id': json_home.Parameters.DOMAIN_ID, - 'group_id': json_home.Parameters.GROUP_ID, - 'role_id': json_home.Parameters.ROLE_ID, }}, - json_home.build_v3_resource_relation('domain_group_roles'): { - 'href-template': '/domains/{domain_id}/groups/{group_id}/roles', - 'href-vars': { - 'domain_id': 
json_home.Parameters.DOMAIN_ID, - 'group_id': json_home.Parameters.GROUP_ID}}, - json_home.build_v3_resource_relation('domain_user_role'): { - 'href-template': - '/domains/{domain_id}/users/{user_id}/roles/{role_id}', - 'href-vars': { - 'domain_id': json_home.Parameters.DOMAIN_ID, - 'role_id': json_home.Parameters.ROLE_ID, - 'user_id': json_home.Parameters.USER_ID, }}, - json_home.build_v3_resource_relation('domain_user_roles'): { - 'href-template': '/domains/{domain_id}/users/{user_id}/roles', - 'href-vars': { - 'domain_id': json_home.Parameters.DOMAIN_ID, - 'user_id': json_home.Parameters.USER_ID, }}, - json_home.build_v3_resource_relation('domains'): {'href': '/domains'}, - json_home.build_v3_resource_relation('endpoint'): { - 'href-template': '/endpoints/{endpoint_id}', - 'href-vars': { - 'endpoint_id': - json_home.build_v3_parameter_relation('endpoint_id'), }}, - json_home.build_v3_resource_relation('endpoints'): { - 'href': '/endpoints'}, - _build_ec2tokens_relation(resource_name='ec2tokens'): { - 'href': '/ec2tokens'}, - _build_ec2tokens_relation(resource_name='user_credential'): { - 'href-template': '/users/{user_id}/credentials/OS-EC2/{credential_id}', - 'href-vars': { - 'credential_id': - json_home.build_v3_parameter_relation('credential_id'), - 'user_id': json_home.Parameters.USER_ID, }}, - _build_ec2tokens_relation(resource_name='user_credentials'): { - 'href-template': '/users/{user_id}/credentials/OS-EC2', - 'href-vars': { - 'user_id': json_home.Parameters.USER_ID, }}, - REVOCATIONS_RELATION: { - 'href': '/auth/tokens/OS-PKI/revoked'}, - 'http://docs.openstack.org/api/openstack-identity/3/ext/OS-REVOKE/1.0/rel/' - 'events': { - 'href': '/OS-REVOKE/events'}, - _build_simple_cert_relation(resource_name='ca_certificate'): { - 'href': '/OS-SIMPLE-CERT/ca'}, - _build_simple_cert_relation(resource_name='certificates'): { - 'href': '/OS-SIMPLE-CERT/certificates'}, - _build_trust_relation(resource_name='trust'): - { - 'href-template': 
'/OS-TRUST/trusts/{trust_id}', - 'href-vars': {'trust_id': TRUST_ID_PARAMETER_RELATION, }}, - _build_trust_relation(resource_name='trust_role'): { - 'href-template': '/OS-TRUST/trusts/{trust_id}/roles/{role_id}', - 'href-vars': { - 'role_id': json_home.Parameters.ROLE_ID, - 'trust_id': TRUST_ID_PARAMETER_RELATION, }}, - _build_trust_relation(resource_name='trust_roles'): { - 'href-template': '/OS-TRUST/trusts/{trust_id}/roles', - 'href-vars': {'trust_id': TRUST_ID_PARAMETER_RELATION, }}, - _build_trust_relation(resource_name='trusts'): { - 'href': '/OS-TRUST/trusts'}, - 'http://docs.openstack.org/api/openstack-identity/3/ext/s3tokens/1.0/rel/' - 's3tokens': { - 'href': '/s3tokens'}, - json_home.build_v3_resource_relation('group'): { - 'href-template': '/groups/{group_id}', - 'href-vars': { - 'group_id': json_home.Parameters.GROUP_ID, }}, - json_home.build_v3_resource_relation('group_user'): { - 'href-template': '/groups/{group_id}/users/{user_id}', - 'href-vars': { - 'group_id': json_home.Parameters.GROUP_ID, - 'user_id': json_home.Parameters.USER_ID, }}, - json_home.build_v3_resource_relation('group_users'): { - 'href-template': '/groups/{group_id}/users', - 'href-vars': {'group_id': json_home.Parameters.GROUP_ID, }}, - json_home.build_v3_resource_relation('groups'): {'href': '/groups'}, - json_home.build_v3_resource_relation('policies'): { - 'href': '/policies'}, - json_home.build_v3_resource_relation('policy'): { - 'href-template': '/policies/{policy_id}', - 'href-vars': { - 'policy_id': - json_home.build_v3_parameter_relation('policy_id'), }}, - json_home.build_v3_resource_relation('project'): { - 'href-template': '/projects/{project_id}', - 'href-vars': { - 'project_id': json_home.Parameters.PROJECT_ID, }}, - json_home.build_v3_resource_relation('project_group_role'): { - 'href-template': - '/projects/{project_id}/groups/{group_id}/roles/{role_id}', - 'href-vars': { - 'group_id': json_home.Parameters.GROUP_ID, - 'project_id': json_home.Parameters.PROJECT_ID, - 
'role_id': json_home.Parameters.ROLE_ID, }}, - json_home.build_v3_resource_relation('project_group_roles'): { - 'href-template': '/projects/{project_id}/groups/{group_id}/roles', - 'href-vars': { - 'group_id': json_home.Parameters.GROUP_ID, - 'project_id': json_home.Parameters.PROJECT_ID, }}, - json_home.build_v3_resource_relation('project_user_role'): { - 'href-template': - '/projects/{project_id}/users/{user_id}/roles/{role_id}', - 'href-vars': { - 'project_id': json_home.Parameters.PROJECT_ID, - 'role_id': json_home.Parameters.ROLE_ID, - 'user_id': json_home.Parameters.USER_ID, }}, - json_home.build_v3_resource_relation('project_user_roles'): { - 'href-template': '/projects/{project_id}/users/{user_id}/roles', - 'href-vars': { - 'project_id': json_home.Parameters.PROJECT_ID, - 'user_id': json_home.Parameters.USER_ID, }}, - json_home.build_v3_resource_relation('projects'): { - 'href': '/projects'}, - json_home.build_v3_resource_relation('region'): { - 'href-template': '/regions/{region_id}', - 'href-vars': { - 'region_id': - json_home.build_v3_parameter_relation('region_id'), }}, - json_home.build_v3_resource_relation('regions'): {'href': '/regions'}, - json_home.build_v3_resource_relation('role'): { - 'href-template': '/roles/{role_id}', - 'href-vars': { - 'role_id': json_home.Parameters.ROLE_ID, }}, - json_home.build_v3_resource_relation('implied_roles'): { - 'href-template': '/roles/{prior_role_id}/implies', - 'href-vars': { - 'prior_role_id': json_home.Parameters.ROLE_ID}, - 'hints': {'status': 'experimental'}}, - json_home.build_v3_resource_relation('implied_role'): { - 'href-template': - '/roles/{prior_role_id}/implies/{implied_role_id}', - 'href-vars': { - 'prior_role_id': json_home.Parameters.ROLE_ID, - 'implied_role_id': json_home.Parameters.ROLE_ID, - }, - 'hints': {'status': 'experimental'}}, - json_home.build_v3_resource_relation('role_inferences'): { - 'href': '/role_inferences', - 'hints': {'status': 'experimental'}}, - 
json_home.build_v3_resource_relation('role_assignments'): { - 'href': '/role_assignments'}, - json_home.build_v3_resource_relation('roles'): {'href': '/roles'}, - json_home.build_v3_resource_relation('service'): { - 'href-template': '/services/{service_id}', - 'href-vars': { - 'service_id': - json_home.build_v3_parameter_relation('service_id')}}, - json_home.build_v3_resource_relation('services'): { - 'href': '/services'}, - json_home.build_v3_resource_relation('user'): { - 'href-template': '/users/{user_id}', - 'href-vars': { - 'user_id': json_home.Parameters.USER_ID, }}, - json_home.build_v3_resource_relation('user_change_password'): { - 'href-template': '/users/{user_id}/password', - 'href-vars': {'user_id': json_home.Parameters.USER_ID, }}, - json_home.build_v3_resource_relation('user_groups'): { - 'href-template': '/users/{user_id}/groups', - 'href-vars': {'user_id': json_home.Parameters.USER_ID, }}, - json_home.build_v3_resource_relation('user_projects'): { - 'href-template': '/users/{user_id}/projects', - 'href-vars': {'user_id': json_home.Parameters.USER_ID, }}, - json_home.build_v3_resource_relation('users'): {'href': '/users'}, - _build_federation_rel(resource_name='domains'): { - 'href': '/auth/domains'}, - _build_federation_rel(resource_name='websso'): { - 'href-template': '/auth/OS-FEDERATION/websso/{protocol_id}', - 'href-vars': { - 'protocol_id': PROTOCOL_ID_PARAM_RELATION, }}, - _build_federation_rel(resource_name='projects'): { - 'href': '/auth/projects'}, - _build_federation_rel(resource_name='saml2'): { - 'href': '/auth/OS-FEDERATION/saml2'}, - _build_federation_rel(resource_name='ecp'): { - 'href': '/auth/OS-FEDERATION/saml2/ecp'}, - _build_federation_rel(resource_name='metadata'): { - 'href': '/OS-FEDERATION/saml2/metadata'}, - _build_federation_rel(resource_name='identity_providers'): { - 'href': '/OS-FEDERATION/identity_providers'}, - _build_federation_rel(resource_name='service_providers'): { - 'href': '/OS-FEDERATION/service_providers'}, - 
_build_federation_rel(resource_name='mappings'): { - 'href': '/OS-FEDERATION/mappings'}, - _build_federation_rel(resource_name='identity_provider'): - { - 'href-template': '/OS-FEDERATION/identity_providers/{idp_id}', - 'href-vars': {'idp_id': IDP_ID_PARAMETER_RELATION, }}, - _build_federation_rel(resource_name='identity_providers'): { - 'href-template': FEDERATED_IDP_SPECIFIC_WEBSSO, - 'href-vars': { - 'idp_id': IDP_ID_PARAMETER_RELATION, - 'protocol_id': PROTOCOL_ID_PARAM_RELATION, }}, - _build_federation_rel(resource_name='service_provider'): - { - 'href-template': '/OS-FEDERATION/service_providers/{sp_id}', - 'href-vars': {'sp_id': SP_ID_PARAMETER_RELATION, }}, - _build_federation_rel(resource_name='mapping'): - { - 'href-template': '/OS-FEDERATION/mappings/{mapping_id}', - 'href-vars': {'mapping_id': MAPPING_ID_PARAM_RELATION, }}, - _build_federation_rel(resource_name='identity_provider_protocol'): { - 'href-template': BASE_IDP_PROTOCOL + '/{protocol_id}', - 'href-vars': { - 'idp_id': IDP_ID_PARAMETER_RELATION, - 'protocol_id': PROTOCOL_ID_PARAM_RELATION, }}, - _build_federation_rel(resource_name='identity_provider_protocols'): { - 'href-template': BASE_IDP_PROTOCOL, - 'href-vars': { - 'idp_id': IDP_ID_PARAMETER_RELATION}}, - _build_federation_rel(resource_name='identity_provider_protocol_auth'): { - 'href-template': FEDERATED_AUTH_URL, - 'href-vars': { - 'idp_id': IDP_ID_PARAMETER_RELATION, - 'protocol_id': PROTOCOL_ID_PARAM_RELATION, }}, - _build_oauth1_rel(resource_name='access_tokens'): { - 'href': '/OS-OAUTH1/access_token'}, - _build_oauth1_rel(resource_name='request_tokens'): { - 'href': '/OS-OAUTH1/request_token'}, - _build_oauth1_rel(resource_name='consumers'): { - 'href': '/OS-OAUTH1/consumers'}, - _build_oauth1_rel(resource_name='authorize_request_token'): - { - 'href-template': '/OS-OAUTH1/authorize/{request_token_id}', - 'href-vars': {'request_token_id': - REQUEST_TOKEN_ID_PARAMETER_RELATION, }}, - _build_oauth1_rel(resource_name='consumer'): - { - 
'href-template': '/OS-OAUTH1/consumers/{consumer_id}', - 'href-vars': {'consumer_id': CONSUMER_ID_PARAMETER_RELATION, }}, - _build_oauth1_rel(resource_name='user_access_token'): - { - 'href-template': BASE_ACCESS_TOKEN, - 'href-vars': {'user_id': json_home.Parameters.USER_ID, - 'access_token_id': - ACCESS_TOKEN_ID_PARAMETER_RELATION, }}, - _build_oauth1_rel(resource_name='user_access_tokens'): - { - 'href-template': '/users/{user_id}/OS-OAUTH1/access_tokens', - 'href-vars': {'user_id': json_home.Parameters.USER_ID, }}, - _build_oauth1_rel(resource_name='user_access_token_role'): - { - 'href-template': BASE_ACCESS_TOKEN + '/roles/{role_id}', - 'href-vars': {'user_id': json_home.Parameters.USER_ID, - 'role_id': json_home.Parameters.ROLE_ID, - 'access_token_id': - ACCESS_TOKEN_ID_PARAMETER_RELATION, }}, - _build_oauth1_rel(resource_name='user_access_token_roles'): - { - 'href-template': BASE_ACCESS_TOKEN + '/roles', - 'href-vars': {'user_id': json_home.Parameters.USER_ID, - 'access_token_id': - ACCESS_TOKEN_ID_PARAMETER_RELATION, }}, - _build_ep_policy_rel(resource_name='endpoint_policy'): - { - 'href-template': '/endpoints/{endpoint_id}/OS-ENDPOINT-POLICY/policy', - 'href-vars': {'endpoint_id': json_home.Parameters.ENDPOINT_ID, }}, - _build_ep_policy_rel(resource_name='endpoint_policy_association'): - { - 'href-template': BASE_EP_POLICY + '/endpoints/{endpoint_id}', - 'href-vars': {'endpoint_id': json_home.Parameters.ENDPOINT_ID, - 'policy_id': json_home.Parameters.POLICY_ID, }}, - _build_ep_policy_rel(resource_name='policy_endpoints'): - { - 'href-template': BASE_EP_POLICY + '/endpoints', - 'href-vars': {'policy_id': json_home.Parameters.POLICY_ID, }}, - _build_ep_policy_rel( - resource_name='region_and_service_policy_association'): - { - 'href-template': (BASE_EP_POLICY + - '/services/{service_id}/regions/{region_id}'), - 'href-vars': {'policy_id': json_home.Parameters.POLICY_ID, - 'service_id': json_home.Parameters.SERVICE_ID, - 'region_id': 
json_home.Parameters.REGION_ID, }}, - _build_ep_policy_rel(resource_name='service_policy_association'): - { - 'href-template': BASE_EP_POLICY + '/services/{service_id}', - 'href-vars': {'policy_id': json_home.Parameters.POLICY_ID, - 'service_id': json_home.Parameters.SERVICE_ID, }}, - _build_ep_filter_rel(resource_name='endpoint_group'): - { - 'href-template': '/OS-EP-FILTER/endpoint_groups/{endpoint_group_id}', - 'href-vars': {'endpoint_group_id': - ENDPOINT_GROUP_ID_PARAMETER_RELATION, }}, - _build_ep_filter_rel( - resource_name='endpoint_group_to_project_association'): - { - 'href-template': BASE_EP_FILTER + '/projects/{project_id}', - 'href-vars': {'endpoint_group_id': - ENDPOINT_GROUP_ID_PARAMETER_RELATION, - 'project_id': json_home.Parameters.PROJECT_ID, }}, - _build_ep_filter_rel(resource_name='endpoint_groups'): - {'href': '/OS-EP-FILTER/endpoint_groups'}, - _build_ep_filter_rel(resource_name='endpoint_projects'): - { - 'href-template': '/OS-EP-FILTER/endpoints/{endpoint_id}/projects', - 'href-vars': {'endpoint_id': json_home.Parameters.ENDPOINT_ID, }}, - _build_ep_filter_rel(resource_name='endpoints_in_endpoint_group'): - { - 'href-template': BASE_EP_FILTER + '/endpoints', - 'href-vars': {'endpoint_group_id': - ENDPOINT_GROUP_ID_PARAMETER_RELATION, }}, - _build_ep_filter_rel(resource_name='project_endpoint_groups'): - { - 'href-template': (BASE_EP_FILTER_PREFIX + '/projects/{project_id}' + - '/endpoint_groups'), - 'href-vars': {'project_id': - json_home.Parameters.PROJECT_ID, }}, - _build_ep_filter_rel(resource_name='project_endpoint'): - { - 'href-template': ('/OS-EP-FILTER/projects/{project_id}' - '/endpoints/{endpoint_id}'), - 'href-vars': {'endpoint_id': json_home.Parameters.ENDPOINT_ID, - 'project_id': json_home.Parameters.PROJECT_ID, }}, - _build_ep_filter_rel(resource_name='project_endpoints'): - { - 'href-template': '/OS-EP-FILTER/projects/{project_id}/endpoints', - 'href-vars': {'project_id': json_home.Parameters.PROJECT_ID, }}, - 
_build_ep_filter_rel( - resource_name='projects_associated_with_endpoint_group'): - { - 'href-template': BASE_EP_FILTER + '/projects', - 'href-vars': {'endpoint_group_id': - ENDPOINT_GROUP_ID_PARAMETER_RELATION, }}, - _build_os_inherit_rel( - resource_name='domain_user_role_inherited_to_projects'): - { - 'href-template': '/OS-INHERIT/domains/{domain_id}/users/' - '{user_id}/roles/{role_id}/inherited_to_projects', - 'href-vars': { - 'domain_id': json_home.Parameters.DOMAIN_ID, - 'role_id': json_home.Parameters.ROLE_ID, - 'user_id': json_home.Parameters.USER_ID, }}, - _build_os_inherit_rel( - resource_name='domain_group_role_inherited_to_projects'): - { - 'href-template': '/OS-INHERIT/domains/{domain_id}/groups/' - '{group_id}/roles/{role_id}/inherited_to_projects', - 'href-vars': { - 'domain_id': json_home.Parameters.DOMAIN_ID, - 'group_id': json_home.Parameters.GROUP_ID, - 'role_id': json_home.Parameters.ROLE_ID, }}, - _build_os_inherit_rel( - resource_name='domain_user_roles_inherited_to_projects'): - { - 'href-template': '/OS-INHERIT/domains/{domain_id}/users/' - '{user_id}/roles/inherited_to_projects', - 'href-vars': { - 'domain_id': json_home.Parameters.DOMAIN_ID, - 'user_id': json_home.Parameters.USER_ID, }}, - _build_os_inherit_rel( - resource_name='domain_group_roles_inherited_to_projects'): - { - 'href-template': '/OS-INHERIT/domains/{domain_id}/groups/' - '{group_id}/roles/inherited_to_projects', - 'href-vars': { - 'domain_id': json_home.Parameters.DOMAIN_ID, - 'group_id': json_home.Parameters.GROUP_ID, }}, - _build_os_inherit_rel( - resource_name='project_user_role_inherited_to_projects'): - { - 'href-template': '/OS-INHERIT/projects/{project_id}/users/' - '{user_id}/roles/{role_id}/inherited_to_projects', - 'href-vars': { - 'project_id': json_home.Parameters.PROJECT_ID, - 'role_id': json_home.Parameters.ROLE_ID, - 'user_id': json_home.Parameters.USER_ID, }}, - _build_os_inherit_rel( - resource_name='project_group_role_inherited_to_projects'): - { - 
'href-template': '/OS-INHERIT/projects/{project_id}/groups/' - '{group_id}/roles/{role_id}/inherited_to_projects', - 'href-vars': { - 'project_id': json_home.Parameters.PROJECT_ID, - 'group_id': json_home.Parameters.GROUP_ID, - 'role_id': json_home.Parameters.ROLE_ID, }}, - json_home.build_v3_resource_relation('domain_config'): { - 'href-template': - '/domains/{domain_id}/config', - 'href-vars': { - 'domain_id': json_home.Parameters.DOMAIN_ID}, - 'hints': {'status': 'experimental'}}, - json_home.build_v3_resource_relation('domain_config_group'): { - 'href-template': - '/domains/{domain_id}/config/{group}', - 'href-vars': { - 'domain_id': json_home.Parameters.DOMAIN_ID, - 'group': json_home.build_v3_parameter_relation('config_group')}, - 'hints': {'status': 'experimental'}}, - json_home.build_v3_resource_relation('domain_config_option'): { - 'href-template': - '/domains/{domain_id}/config/{group}/{option}', - 'href-vars': { - 'domain_id': json_home.Parameters.DOMAIN_ID, - 'group': json_home.build_v3_parameter_relation('config_group'), - 'option': json_home.build_v3_parameter_relation('config_option')}, - 'hints': {'status': 'experimental'}}, - json_home.build_v3_resource_relation('domain_config_default'): { - 'href': '/domains/config/default', - 'hints': {'status': 'experimental'}}, - json_home.build_v3_resource_relation('domain_config_default_group'): { - 'href-template': '/domains/config/{group}/default', - 'href-vars': { - 'group': json_home.build_v3_parameter_relation('config_group')}, - 'hints': {'status': 'experimental'}}, - json_home.build_v3_resource_relation('domain_config_default_option'): { - 'href-template': '/domains/config/{group}/{option}/default', - 'href-vars': { - 'group': json_home.build_v3_parameter_relation('config_group'), - 'option': json_home.build_v3_parameter_relation('config_option')}, - 'hints': {'status': 'experimental'}}, -} - - -class TestClient(object): - def __init__(self, app=None, token=None): - self.app = app - self.token = token 
- - def request(self, method, path, headers=None, body=None): - if headers is None: - headers = {} - - if self.token: - headers.setdefault('X-Auth-Token', self.token) - - req = webob.Request.blank(path) - req.method = method - for k, v in headers.items(): - req.headers[k] = v - if body: - req.body = body - return req.get_response(self.app) - - def get(self, path, headers=None): - return self.request('GET', path=path, headers=headers) - - def post(self, path, headers=None, body=None): - return self.request('POST', path=path, headers=headers, body=body) - - def put(self, path, headers=None, body=None): - return self.request('PUT', path=path, headers=headers, body=body) - - -class _VersionsEqual(tt_matchers.MatchesListwise): - def __init__(self, expected): - super(_VersionsEqual, self).__init__([ - tt_matchers.KeysEqual(expected), - tt_matchers.KeysEqual(expected['versions']), - tt_matchers.HasLength(len(expected['versions']['values'])), - tt_matchers.ContainsAll(expected['versions']['values']), - ]) - - def match(self, other): - return super(_VersionsEqual, self).match([ - other, - other['versions'], - other['versions']['values'], - other['versions']['values'], - ]) - - -class VersionTestCase(unit.TestCase): - def setUp(self): - super(VersionTestCase, self).setUp() - self.load_backends() - self.public_app = self.loadapp('keystone', 'main') - self.admin_app = self.loadapp('keystone', 'admin') - - self.config_fixture.config( - public_endpoint='http://localhost:%(public_port)d', - admin_endpoint='http://localhost:%(admin_port)d') - - def config_overrides(self): - super(VersionTestCase, self).config_overrides() - admin_port = random.randint(10000, 30000) - public_port = random.randint(40000, 60000) - self.config_fixture.config(group='eventlet_server', - public_port=public_port, - admin_port=admin_port) - - def _paste_in_port(self, response, port): - for link in response['links']: - if link['rel'] == 'self': - link['href'] = port - - def test_public_versions(self): - 
client = TestClient(self.public_app) - resp = client.get('/') - self.assertEqual(300, resp.status_int) - data = jsonutils.loads(resp.body) - expected = VERSIONS_RESPONSE - for version in expected['versions']['values']: - if version['id'].startswith('v3'): - self._paste_in_port( - version, 'http://localhost:%s/v3/' % - CONF.eventlet_server.public_port) - elif version['id'] == 'v2.0': - self._paste_in_port( - version, 'http://localhost:%s/v2.0/' % - CONF.eventlet_server.public_port) - self.assertThat(data, _VersionsEqual(expected)) - - def test_admin_versions(self): - client = TestClient(self.admin_app) - resp = client.get('/') - self.assertEqual(300, resp.status_int) - data = jsonutils.loads(resp.body) - expected = VERSIONS_RESPONSE - for version in expected['versions']['values']: - if version['id'].startswith('v3'): - self._paste_in_port( - version, 'http://localhost:%s/v3/' % - CONF.eventlet_server.admin_port) - elif version['id'] == 'v2.0': - self._paste_in_port( - version, 'http://localhost:%s/v2.0/' % - CONF.eventlet_server.admin_port) - self.assertThat(data, _VersionsEqual(expected)) - - def test_use_site_url_if_endpoint_unset(self): - self.config_fixture.config(public_endpoint=None, admin_endpoint=None) - - for app in (self.public_app, self.admin_app): - client = TestClient(app) - resp = client.get('/') - self.assertEqual(300, resp.status_int) - data = jsonutils.loads(resp.body) - expected = VERSIONS_RESPONSE - for version in expected['versions']['values']: - # localhost happens to be the site url for tests - if version['id'].startswith('v3'): - self._paste_in_port( - version, 'http://localhost/v3/') - elif version['id'] == 'v2.0': - self._paste_in_port( - version, 'http://localhost/v2.0/') - self.assertThat(data, _VersionsEqual(expected)) - - def test_public_version_v2(self): - client = TestClient(self.public_app) - resp = client.get('/v2.0/') - self.assertEqual(http_client.OK, resp.status_int) - data = jsonutils.loads(resp.body) - expected = 
v2_VERSION_RESPONSE - self._paste_in_port(expected['version'], - 'http://localhost:%s/v2.0/' % - CONF.eventlet_server.public_port) - self.assertEqual(expected, data) - - def test_admin_version_v2(self): - client = TestClient(self.admin_app) - resp = client.get('/v2.0/') - self.assertEqual(http_client.OK, resp.status_int) - data = jsonutils.loads(resp.body) - expected = v2_VERSION_RESPONSE - self._paste_in_port(expected['version'], - 'http://localhost:%s/v2.0/' % - CONF.eventlet_server.admin_port) - self.assertEqual(expected, data) - - def test_use_site_url_if_endpoint_unset_v2(self): - self.config_fixture.config(public_endpoint=None, admin_endpoint=None) - for app in (self.public_app, self.admin_app): - client = TestClient(app) - resp = client.get('/v2.0/') - self.assertEqual(http_client.OK, resp.status_int) - data = jsonutils.loads(resp.body) - expected = v2_VERSION_RESPONSE - self._paste_in_port(expected['version'], 'http://localhost/v2.0/') - self.assertEqual(data, expected) - - def test_public_version_v3(self): - client = TestClient(self.public_app) - resp = client.get('/v3/') - self.assertEqual(http_client.OK, resp.status_int) - data = jsonutils.loads(resp.body) - expected = v3_VERSION_RESPONSE - self._paste_in_port(expected['version'], - 'http://localhost:%s/v3/' % - CONF.eventlet_server.public_port) - self.assertEqual(expected, data) - - @utils.wip('waiting on bug #1381961') - def test_admin_version_v3(self): - client = TestClient(self.admin_app) - resp = client.get('/v3/') - self.assertEqual(http_client.OK, resp.status_int) - data = jsonutils.loads(resp.body) - expected = v3_VERSION_RESPONSE - self._paste_in_port(expected['version'], - 'http://localhost:%s/v3/' % - CONF.eventlet_server.admin_port) - self.assertEqual(expected, data) - - def test_use_site_url_if_endpoint_unset_v3(self): - self.config_fixture.config(public_endpoint=None, admin_endpoint=None) - for app in (self.public_app, self.admin_app): - client = TestClient(app) - resp = client.get('/v3/') 
- self.assertEqual(http_client.OK, resp.status_int) - data = jsonutils.loads(resp.body) - expected = v3_VERSION_RESPONSE - self._paste_in_port(expected['version'], 'http://localhost/v3/') - self.assertEqual(expected, data) - - @mock.patch.object(controllers, '_VERSIONS', ['v3']) - def test_v2_disabled(self): - client = TestClient(self.public_app) - # request to /v2.0 should fail - resp = client.get('/v2.0/') - self.assertEqual(http_client.NOT_FOUND, resp.status_int) - - # request to /v3 should pass - resp = client.get('/v3/') - self.assertEqual(http_client.OK, resp.status_int) - data = jsonutils.loads(resp.body) - expected = v3_VERSION_RESPONSE - self._paste_in_port(expected['version'], - 'http://localhost:%s/v3/' % - CONF.eventlet_server.public_port) - self.assertEqual(expected, data) - - # only v3 information should be displayed by requests to / - v3_only_response = { - "versions": { - "values": [ - v3_EXPECTED_RESPONSE - ] - } - } - self._paste_in_port(v3_only_response['versions']['values'][0], - 'http://localhost:%s/v3/' % - CONF.eventlet_server.public_port) - resp = client.get('/') - self.assertEqual(300, resp.status_int) - data = jsonutils.loads(resp.body) - self.assertEqual(v3_only_response, data) - - @mock.patch.object(controllers, '_VERSIONS', ['v2.0']) - def test_v3_disabled(self): - client = TestClient(self.public_app) - # request to /v3 should fail - resp = client.get('/v3/') - self.assertEqual(http_client.NOT_FOUND, resp.status_int) - - # request to /v2.0 should pass - resp = client.get('/v2.0/') - self.assertEqual(http_client.OK, resp.status_int) - data = jsonutils.loads(resp.body) - expected = v2_VERSION_RESPONSE - self._paste_in_port(expected['version'], - 'http://localhost:%s/v2.0/' % - CONF.eventlet_server.public_port) - self.assertEqual(expected, data) - - # only v2 information should be displayed by requests to / - v2_only_response = { - "versions": { - "values": [ - v2_EXPECTED_RESPONSE - ] - } - } - 
self._paste_in_port(v2_only_response['versions']['values'][0], - 'http://localhost:%s/v2.0/' % - CONF.eventlet_server.public_port) - resp = client.get('/') - self.assertEqual(300, resp.status_int) - data = jsonutils.loads(resp.body) - self.assertEqual(v2_only_response, data) - - def _test_json_home(self, path, exp_json_home_data): - client = TestClient(self.public_app) - resp = client.get(path, headers={'Accept': 'application/json-home'}) - - self.assertThat(resp.status, tt_matchers.Equals('200 OK')) - self.assertThat(resp.headers['Content-Type'], - tt_matchers.Equals('application/json-home')) - - self.assertThat(jsonutils.loads(resp.body), - tt_matchers.Equals(exp_json_home_data)) - - def test_json_home_v3(self): - # If the request is /v3 and the Accept header is application/json-home - # then the server responds with a JSON Home document. - - exp_json_home_data = { - 'resources': V3_JSON_HOME_RESOURCES} - - self._test_json_home('/v3', exp_json_home_data) - - def test_json_home_root(self): - # If the request is / and the Accept header is application/json-home - # then the server responds with a JSON Home document. - - exp_json_home_data = copy.deepcopy({ - 'resources': V3_JSON_HOME_RESOURCES}) - json_home.translate_urls(exp_json_home_data, '/v3') - - self._test_json_home('/', exp_json_home_data) - - def test_accept_type_handling(self): - # Accept headers with multiple types and qvalues are handled. - - def make_request(accept_types=None): - client = TestClient(self.public_app) - headers = None - if accept_types: - headers = {'Accept': accept_types} - resp = client.get('/v3', headers=headers) - self.assertThat(resp.status, tt_matchers.Equals('200 OK')) - return resp.headers['Content-Type'] - - JSON = controllers.MimeTypes.JSON - JSON_HOME = controllers.MimeTypes.JSON_HOME - - JSON_MATCHER = tt_matchers.Equals(JSON) - JSON_HOME_MATCHER = tt_matchers.Equals(JSON_HOME) - - # Default is JSON. 
- self.assertThat(make_request(), JSON_MATCHER) - - # Can request JSON and get JSON. - self.assertThat(make_request(JSON), JSON_MATCHER) - - # Can request JSONHome and get JSONHome. - self.assertThat(make_request(JSON_HOME), JSON_HOME_MATCHER) - - # If request JSON, JSON Home get JSON. - accept_types = '%s, %s' % (JSON, JSON_HOME) - self.assertThat(make_request(accept_types), JSON_MATCHER) - - # If request JSON Home, JSON get JSON. - accept_types = '%s, %s' % (JSON_HOME, JSON) - self.assertThat(make_request(accept_types), JSON_MATCHER) - - # If request JSON Home, JSON;q=0.5 get JSON Home. - accept_types = '%s, %s;q=0.5' % (JSON_HOME, JSON) - self.assertThat(make_request(accept_types), JSON_HOME_MATCHER) - - # If request some unknown mime-type, get JSON. - self.assertThat(make_request(self.getUniqueString()), JSON_MATCHER) - - @mock.patch.object(controllers, '_VERSIONS', []) - def test_no_json_home_document_returned_when_v3_disabled(self): - json_home_document = controllers.request_v3_json_home('some_prefix') - expected_document = {'resources': {}} - self.assertEqual(expected_document, json_home_document) - - def test_extension_property_method_returns_none(self): - extension_obj = controllers.Extensions() - extensions_property = extension_obj.extensions - self.assertIsNone(extensions_property) - - -class VersionSingleAppTestCase(unit.TestCase): - """Tests running with a single application loaded. - - These are important because when Keystone is running in Apache httpd - there's only one application loaded for each instance. 
- - """ - - def setUp(self): - super(VersionSingleAppTestCase, self).setUp() - self.load_backends() - - self.config_fixture.config( - public_endpoint='http://localhost:%(public_port)d', - admin_endpoint='http://localhost:%(admin_port)d') - - def config_overrides(self): - super(VersionSingleAppTestCase, self).config_overrides() - admin_port = random.randint(10000, 30000) - public_port = random.randint(40000, 60000) - self.config_fixture.config(group='eventlet_server', - public_port=public_port, - admin_port=admin_port) - - def _paste_in_port(self, response, port): - for link in response['links']: - if link['rel'] == 'self': - link['href'] = port - - def _test_version(self, app_name): - def app_port(): - if app_name == 'admin': - return CONF.eventlet_server.admin_port - else: - return CONF.eventlet_server.public_port - app = self.loadapp('keystone', app_name) - client = TestClient(app) - resp = client.get('/') - self.assertEqual(300, resp.status_int) - data = jsonutils.loads(resp.body) - expected = VERSIONS_RESPONSE - for version in expected['versions']['values']: - if version['id'].startswith('v3'): - self._paste_in_port( - version, 'http://localhost:%s/v3/' % app_port()) - elif version['id'] == 'v2.0': - self._paste_in_port( - version, 'http://localhost:%s/v2.0/' % app_port()) - self.assertThat(data, _VersionsEqual(expected)) - - def test_public(self): - self._test_version('main') - - def test_admin(self): - self._test_version('admin') - - -class VersionBehindSslTestCase(unit.TestCase): - def setUp(self): - super(VersionBehindSslTestCase, self).setUp() - self.load_backends() - self.public_app = self.loadapp('keystone', 'main') - - def config_overrides(self): - super(VersionBehindSslTestCase, self).config_overrides() - self.config_fixture.config( - secure_proxy_ssl_header='HTTP_X_FORWARDED_PROTO') - - def _paste_in_port(self, response, port): - for link in response['links']: - if link['rel'] == 'self': - link['href'] = port - - def _get_expected(self, host): - 
expected = VERSIONS_RESPONSE - for version in expected['versions']['values']: - if version['id'].startswith('v3'): - self._paste_in_port(version, host + 'v3/') - elif version['id'] == 'v2.0': - self._paste_in_port(version, host + 'v2.0/') - return expected - - def test_versions_without_headers(self): - client = TestClient(self.public_app) - host_name = 'host-%d' % random.randint(10, 30) - host_port = random.randint(10000, 30000) - host = 'http://%s:%s/' % (host_name, host_port) - resp = client.get(host) - self.assertEqual(300, resp.status_int) - data = jsonutils.loads(resp.body) - expected = self._get_expected(host) - self.assertThat(data, _VersionsEqual(expected)) - - def test_versions_with_header(self): - client = TestClient(self.public_app) - host_name = 'host-%d' % random.randint(10, 30) - host_port = random.randint(10000, 30000) - resp = client.get('http://%s:%s/' % (host_name, host_port), - headers={'X-Forwarded-Proto': 'https'}) - self.assertEqual(300, resp.status_int) - data = jsonutils.loads(resp.body) - expected = self._get_expected('https://%s:%s/' % (host_name, - host_port)) - self.assertThat(data, _VersionsEqual(expected)) diff --git a/keystone-moon/keystone/tests/unit/test_wsgi.py b/keystone-moon/keystone/tests/unit/test_wsgi.py deleted file mode 100644 index 564d7406..00000000 --- a/keystone-moon/keystone/tests/unit/test_wsgi.py +++ /dev/null @@ -1,586 +0,0 @@ -# encoding: utf-8 -# -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -import gettext -import socket -import uuid - -import eventlet -import mock -import oslo_i18n -from oslo_serialization import jsonutils -import six -from six.moves import http_client -from testtools import matchers -import webob - -from keystone.common import environment -from keystone.common import wsgi -from keystone import exception -from keystone.tests import unit - - -class FakeApp(wsgi.Application): - def index(self, context): - return {'a': 'b'} - - -class FakeAttributeCheckerApp(wsgi.Application): - def index(self, context): - return context['query_string'] - - def assert_attribute(self, body, attr): - """Asserts that the given request has a certain attribute.""" - ref = jsonutils.loads(body) - self._require_attribute(ref, attr) - - def assert_attributes(self, body, attr): - """Asserts that the given request has a certain set attributes.""" - ref = jsonutils.loads(body) - self._require_attributes(ref, attr) - - -class RouterTest(unit.TestCase): - def setUp(self): - self.router = wsgi.RoutersBase() - super(RouterTest, self).setUp() - - def test_invalid_status(self): - fake_mapper = uuid.uuid4().hex - fake_controller = uuid.uuid4().hex - fake_path = uuid.uuid4().hex - fake_rel = uuid.uuid4().hex - self.assertRaises(exception.Error, - self.router._add_resource, - fake_mapper, fake_controller, fake_path, fake_rel, - status=uuid.uuid4().hex) - - -class BaseWSGITest(unit.TestCase): - def setUp(self): - self.app = FakeApp() - super(BaseWSGITest, self).setUp() - - def _make_request(self, url='/'): - req = webob.Request.blank(url) - args = {'action': 'index', 'controller': None} - req.environ['wsgiorg.routing_args'] = [None, args] - return req - - -class ApplicationTest(BaseWSGITest): - def test_response_content_type(self): - req = self._make_request() - resp = req.get_response(self.app) - self.assertEqual('application/json', resp.content_type) - - def 
test_query_string_available(self): - class FakeApp(wsgi.Application): - def index(self, context): - return context['query_string'] - req = self._make_request(url='/?1=2') - resp = req.get_response(FakeApp()) - self.assertEqual({'1': '2'}, jsonutils.loads(resp.body)) - - def test_headers_available(self): - class FakeApp(wsgi.Application): - def index(self, context): - return context['headers'] - - app = FakeApp() - req = self._make_request(url='/?1=2') - req.headers['X-Foo'] = "bar" - resp = req.get_response(app) - self.assertIn('X-Foo', eval(resp.body)) - - def test_render_response(self): - data = {'attribute': 'value'} - body = b'{"attribute": "value"}' - - resp = wsgi.render_response(body=data) - self.assertEqual('200 OK', resp.status) - self.assertEqual(http_client.OK, resp.status_int) - self.assertEqual(body, resp.body) - self.assertEqual('X-Auth-Token', resp.headers.get('Vary')) - self.assertEqual(str(len(body)), resp.headers.get('Content-Length')) - - def test_render_response_custom_status(self): - resp = wsgi.render_response( - status=(http_client.NOT_IMPLEMENTED, 'Not Implemented')) - self.assertEqual('501 Not Implemented', resp.status) - self.assertEqual(http_client.NOT_IMPLEMENTED, resp.status_int) - - def test_successful_require_attribute(self): - app = FakeAttributeCheckerApp() - req = self._make_request(url='/?1=2') - resp = req.get_response(app) - app.assert_attribute(resp.body, '1') - - def test_require_attribute_fail_if_attribute_not_present(self): - app = FakeAttributeCheckerApp() - req = self._make_request(url='/?1=2') - resp = req.get_response(app) - self.assertRaises(exception.ValidationError, - app.assert_attribute, resp.body, 'a') - - def test_successful_require_multiple_attributes(self): - app = FakeAttributeCheckerApp() - req = self._make_request(url='/?a=1&b=2') - resp = req.get_response(app) - app.assert_attributes(resp.body, ['a', 'b']) - - def test_attribute_missing_from_request(self): - app = FakeAttributeCheckerApp() - req = 
self._make_request(url='/?a=1&b=2') - resp = req.get_response(app) - ex = self.assertRaises(exception.ValidationError, - app.assert_attributes, - resp.body, ['a', 'missing_attribute']) - self.assertThat(six.text_type(ex), - matchers.Contains('missing_attribute')) - - def test_no_required_attributes_present(self): - app = FakeAttributeCheckerApp() - req = self._make_request(url='/') - resp = req.get_response(app) - - ex = self.assertRaises(exception.ValidationError, - app.assert_attributes, resp.body, - ['missing_attribute1', 'missing_attribute2']) - self.assertThat(six.text_type(ex), - matchers.Contains('missing_attribute1')) - self.assertThat(six.text_type(ex), - matchers.Contains('missing_attribute2')) - - def test_render_response_custom_headers(self): - resp = wsgi.render_response(headers=[('Custom-Header', 'Some-Value')]) - self.assertEqual('Some-Value', resp.headers.get('Custom-Header')) - self.assertEqual('X-Auth-Token', resp.headers.get('Vary')) - - def test_render_response_non_str_headers_converted(self): - resp = wsgi.render_response( - headers=[('Byte-Header', 'Byte-Value'), - (u'Unicode-Header', u'Unicode-Value')]) - # assert that all headers are identified. - self.assertThat(resp.headers, matchers.HasLength(4)) - self.assertEqual('Unicode-Value', resp.headers.get('Unicode-Header')) - # assert that unicode value is converted, the expected type is str - # on both python2 and python3. 
- self.assertEqual(str, - type(resp.headers.get('Unicode-Header'))) - - def test_render_response_no_body(self): - resp = wsgi.render_response() - self.assertEqual('204 No Content', resp.status) - self.assertEqual(http_client.NO_CONTENT, resp.status_int) - self.assertEqual(b'', resp.body) - self.assertEqual('0', resp.headers.get('Content-Length')) - self.assertIsNone(resp.headers.get('Content-Type')) - - def test_render_response_head_with_body(self): - resp = wsgi.render_response({'id': uuid.uuid4().hex}, method='HEAD') - self.assertEqual(http_client.OK, resp.status_int) - self.assertEqual(b'', resp.body) - self.assertNotEqual('0', resp.headers.get('Content-Length')) - self.assertEqual('application/json', resp.headers.get('Content-Type')) - - def test_application_local_config(self): - class FakeApp(wsgi.Application): - def __init__(self, *args, **kwargs): - self.kwargs = kwargs - - app = FakeApp.factory({}, testkey="test") - self.assertIn("testkey", app.kwargs) - self.assertEqual("test", app.kwargs["testkey"]) - - def test_render_exception(self): - e = exception.Unauthorized(message=u'\u7f51\u7edc') - resp = wsgi.render_exception(e) - self.assertEqual(http_client.UNAUTHORIZED, resp.status_int) - - def test_render_exception_host(self): - e = exception.Unauthorized(message=u'\u7f51\u7edc') - req = self._make_request(url='/') - context = {'host_url': 'http://%s:5000' % uuid.uuid4().hex, - 'environment': req.environ} - resp = wsgi.render_exception(e, context=context) - - self.assertEqual(http_client.UNAUTHORIZED, resp.status_int) - - def test_improperly_encoded_params(self): - class FakeApp(wsgi.Application): - def index(self, context): - return context['query_string'] - # this is high bit set ASCII, copy & pasted from Windows. - # aka code page 1252. It is not valid UTF8. 
- req = self._make_request(url='/?name=nonexit%E8nt') - self.assertRaises(exception.ValidationError, req.get_response, - FakeApp()) - - def test_properly_encoded_params(self): - class FakeApp(wsgi.Application): - def index(self, context): - return context['query_string'] - # nonexitènt encoded as UTF-8 - req = self._make_request(url='/?name=nonexit%C3%A8nt') - resp = req.get_response(FakeApp()) - self.assertEqual({'name': u'nonexit\xe8nt'}, - jsonutils.loads(resp.body)) - - def test_base_url(self): - class FakeApp(wsgi.Application): - def index(self, context): - return self.base_url(context, 'public') - req = self._make_request(url='/') - # NOTE(gyee): according to wsgiref, if HTTP_HOST is present in the - # request environment, it will be used to construct the base url. - # SERVER_NAME and SERVER_PORT will be ignored. These are standard - # WSGI environment variables populated by the webserver. - req.environ.update({ - 'SCRIPT_NAME': '/identity', - 'SERVER_NAME': '1.2.3.4', - 'wsgi.url_scheme': 'http', - 'SERVER_PORT': '80', - 'HTTP_HOST': '1.2.3.4', - }) - resp = req.get_response(FakeApp()) - self.assertEqual(b"http://1.2.3.4/identity", resp.body) - - # if HTTP_HOST is absent, SERVER_NAME and SERVER_PORT will be used - req = self._make_request(url='/') - del req.environ['HTTP_HOST'] - req.environ.update({ - 'SCRIPT_NAME': '/identity', - 'SERVER_NAME': '1.1.1.1', - 'wsgi.url_scheme': 'http', - 'SERVER_PORT': '1234', - }) - resp = req.get_response(FakeApp()) - self.assertEqual(b"http://1.1.1.1:1234/identity", resp.body) - - # make sure keystone normalize the standard HTTP port 80 by stripping - # it - req = self._make_request(url='/') - req.environ.update({'HTTP_HOST': 'foo:80', - 'SCRIPT_NAME': '/identity'}) - resp = req.get_response(FakeApp()) - self.assertEqual(b"http://foo/identity", resp.body) - - # make sure keystone normalize the standard HTTPS port 443 by stripping - # it - req = self._make_request(url='/') - req.environ.update({'HTTP_HOST': 'foo:443', - 
'SCRIPT_NAME': '/identity', - 'wsgi.url_scheme': 'https'}) - resp = req.get_response(FakeApp()) - self.assertEqual(b"https://foo/identity", resp.body) - - # make sure non-standard port is preserved - req = self._make_request(url='/') - req.environ.update({'HTTP_HOST': 'foo:1234', - 'SCRIPT_NAME': '/identity'}) - resp = req.get_response(FakeApp()) - self.assertEqual(b"http://foo:1234/identity", resp.body) - - # make sure version portion of the SCRIPT_NAME, '/v2.0', is stripped - # from base url - req = self._make_request(url='/') - req.environ.update({'HTTP_HOST': 'foo:80', - 'SCRIPT_NAME': '/bar/identity/v2.0'}) - resp = req.get_response(FakeApp()) - self.assertEqual(b"http://foo/bar/identity", resp.body) - - # make sure version portion of the SCRIPT_NAME, '/v3' is stripped from - # base url - req = self._make_request(url='/') - req.environ.update({'HTTP_HOST': 'foo:80', - 'SCRIPT_NAME': '/identity/v3'}) - resp = req.get_response(FakeApp()) - self.assertEqual(b"http://foo/identity", resp.body) - - -class ExtensionRouterTest(BaseWSGITest): - def test_extensionrouter_local_config(self): - class FakeRouter(wsgi.ExtensionRouter): - def __init__(self, *args, **kwargs): - self.kwargs = kwargs - - factory = FakeRouter.factory({}, testkey="test") - app = factory(self.app) - self.assertIn("testkey", app.kwargs) - self.assertEqual("test", app.kwargs["testkey"]) - - -class MiddlewareTest(BaseWSGITest): - def test_middleware_request(self): - class FakeMiddleware(wsgi.Middleware): - def process_request(self, req): - req.environ['fake_request'] = True - return req - req = self._make_request() - resp = FakeMiddleware(None)(req) - self.assertIn('fake_request', resp.environ) - - def test_middleware_response(self): - class FakeMiddleware(wsgi.Middleware): - def process_response(self, request, response): - response.environ = {} - response.environ['fake_response'] = True - return response - req = self._make_request() - resp = FakeMiddleware(self.app)(req) - 
self.assertIn('fake_response', resp.environ) - - def test_middleware_bad_request(self): - class FakeMiddleware(wsgi.Middleware): - def process_response(self, request, response): - raise exception.Unauthorized() - - req = self._make_request() - req.environ['REMOTE_ADDR'] = '127.0.0.1' - resp = FakeMiddleware(self.app)(req) - self.assertEqual(exception.Unauthorized.code, resp.status_int) - - def test_middleware_type_error(self): - class FakeMiddleware(wsgi.Middleware): - def process_response(self, request, response): - raise TypeError() - - req = self._make_request() - req.environ['REMOTE_ADDR'] = '127.0.0.1' - resp = FakeMiddleware(self.app)(req) - # This is a validationerror type - self.assertEqual(exception.ValidationError.code, resp.status_int) - - def test_middleware_exception_error(self): - - exception_str = b'EXCEPTIONERROR' - - class FakeMiddleware(wsgi.Middleware): - def process_response(self, request, response): - raise exception.UnexpectedError(exception_str) - - def do_request(): - req = self._make_request() - resp = FakeMiddleware(self.app)(req) - self.assertEqual(exception.UnexpectedError.code, resp.status_int) - return resp - - # Exception data should not be in the message when insecure_debug is - # False - self.config_fixture.config(debug=False, insecure_debug=False) - self.assertNotIn(exception_str, do_request().body) - - # Exception data should be in the message when insecure_debug is True - self.config_fixture.config(debug=True, insecure_debug=True) - self.assertIn(exception_str, do_request().body) - - -class LocalizedResponseTest(unit.TestCase): - def test_request_match_default(self): - # The default language if no Accept-Language is provided is None - req = webob.Request.blank('/') - self.assertIsNone(wsgi.best_match_language(req)) - - @mock.patch.object(oslo_i18n, 'get_available_languages') - def test_request_match_language_expected(self, mock_gal): - # If Accept-Language is a supported language, best_match_language() - # returns it. 
- - language = uuid.uuid4().hex - mock_gal.return_value = [language] - - req = webob.Request.blank('/', headers={'Accept-Language': language}) - self.assertEqual(language, wsgi.best_match_language(req)) - - @mock.patch.object(oslo_i18n, 'get_available_languages') - def test_request_match_language_unexpected(self, mock_gal): - # If Accept-Language is a language we do not support, - # best_match_language() returns None. - - supported_language = uuid.uuid4().hex - mock_gal.return_value = [supported_language] - - request_language = uuid.uuid4().hex - req = webob.Request.blank( - '/', headers={'Accept-Language': request_language}) - self.assertIsNone(wsgi.best_match_language(req)) - - def test_static_translated_string_is_lazy_translatable(self): - # Statically created message strings are an object that can get - # lazy-translated rather than a regular string. - self.assertNotEqual(six.text_type, - type(exception.Unauthorized.message_format)) - - @mock.patch.object(oslo_i18n, 'get_available_languages') - def test_get_localized_response(self, mock_gal): - # If the request has the Accept-Language set to a supported language - # and an exception is raised by the application that is translatable - # then the response will have the translated message. - - language = uuid.uuid4().hex - mock_gal.return_value = [language] - - # The arguments for the xlated message format have to match the args - # for the chosen exception (exception.NotFound) - xlated_msg_fmt = "Xlated NotFound, %(target)s." - - # Fake out gettext.translation() to return a translator for our - # expected language and a passthrough translator for other langs. 
- - def fake_translation(*args, **kwargs): - class IdentityTranslator(object): - def ugettext(self, msgid): - return msgid - - gettext = ugettext - - class LangTranslator(object): - def ugettext(self, msgid): - if msgid == exception.NotFound.message_format: - return xlated_msg_fmt - return msgid - - gettext = ugettext - - if language in kwargs.get('languages', []): - return LangTranslator() - return IdentityTranslator() - - with mock.patch.object(gettext, 'translation', - side_effect=fake_translation) as xlation_mock: - target = uuid.uuid4().hex - - # Fake app raises NotFound exception to simulate Keystone raising. - - class FakeApp(wsgi.Application): - def index(self, context): - raise exception.NotFound(target=target) - - # Make the request with Accept-Language on the app, expect an error - # response with the translated message. - - req = webob.Request.blank('/') - args = {'action': 'index', 'controller': None} - req.environ['wsgiorg.routing_args'] = [None, args] - req.headers['Accept-Language'] = language - resp = req.get_response(FakeApp()) - - # Assert that the translated message appears in the response. 
- - exp_msg = xlated_msg_fmt % dict(target=target) - self.assertThat(resp.json['error']['message'], - matchers.Equals(exp_msg)) - self.assertThat(xlation_mock.called, matchers.Equals(True)) - - -class ServerTest(unit.TestCase): - - def setUp(self): - super(ServerTest, self).setUp() - self.host = '127.0.0.1' - self.port = '1234' - - @mock.patch('eventlet.listen') - @mock.patch('socket.getaddrinfo') - def test_keepalive_unset(self, mock_getaddrinfo, mock_listen): - mock_getaddrinfo.return_value = [(1, 2, 3, 4, 5)] - mock_sock_dup = mock_listen.return_value.dup.return_value - - server = environment.Server(mock.MagicMock(), host=self.host, - port=self.port) - server.start() - self.addCleanup(server.stop) - self.assertTrue(mock_listen.called) - self.assertFalse(mock_sock_dup.setsockopt.called) - - @mock.patch('eventlet.listen') - @mock.patch('socket.getaddrinfo') - def test_keepalive_set(self, mock_getaddrinfo, mock_listen): - mock_getaddrinfo.return_value = [(1, 2, 3, 4, 5)] - mock_sock_dup = mock_listen.return_value.dup.return_value - - server = environment.Server(mock.MagicMock(), host=self.host, - port=self.port, keepalive=True) - server.start() - self.addCleanup(server.stop) - mock_sock_dup.setsockopt.assert_called_once_with(socket.SOL_SOCKET, - socket.SO_KEEPALIVE, - 1) - self.assertTrue(mock_listen.called) - - @mock.patch('eventlet.listen') - @mock.patch('socket.getaddrinfo') - def test_keepalive_and_keepidle_set(self, mock_getaddrinfo, mock_listen): - mock_getaddrinfo.return_value = [(1, 2, 3, 4, 5)] - mock_sock_dup = mock_listen.return_value.dup.return_value - - server = environment.Server(mock.MagicMock(), host=self.host, - port=self.port, keepalive=True, - keepidle=1) - server.start() - self.addCleanup(server.stop) - - if hasattr(socket, 'TCP_KEEPIDLE'): - self.assertEqual(2, mock_sock_dup.setsockopt.call_count) - # Test the last set of call args i.e. 
for the keepidle - mock_sock_dup.setsockopt.assert_called_with(socket.IPPROTO_TCP, - socket.TCP_KEEPIDLE, - 1) - else: - self.assertEqual(1, mock_sock_dup.setsockopt.call_count) - - self.assertTrue(mock_listen.called) - - def test_client_socket_timeout(self): - # mocking server method of eventlet.wsgi to check it is called with - # configured 'client_socket_timeout' value. - for socket_timeout in range(1, 10): - self.config_fixture.config(group='eventlet_server', - client_socket_timeout=socket_timeout) - server = environment.Server(mock.MagicMock(), host=self.host, - port=self.port) - with mock.patch.object(eventlet.wsgi, 'server') as mock_server: - fake_application = uuid.uuid4().hex - fake_socket = uuid.uuid4().hex - server._run(fake_application, fake_socket) - mock_server.assert_called_once_with( - fake_socket, - fake_application, - debug=mock.ANY, - socket_timeout=socket_timeout, - log=mock.ANY, - keepalive=mock.ANY) - - def test_wsgi_keep_alive(self): - # mocking server method of eventlet.wsgi to check it is called with - # configured 'wsgi_keep_alive' value. 
- wsgi_keepalive = False - self.config_fixture.config(group='eventlet_server', - wsgi_keep_alive=wsgi_keepalive) - - server = environment.Server(mock.MagicMock(), host=self.host, - port=self.port) - with mock.patch.object(eventlet.wsgi, 'server') as mock_server: - fake_application = uuid.uuid4().hex - fake_socket = uuid.uuid4().hex - server._run(fake_application, fake_socket) - mock_server.assert_called_once_with(fake_socket, - fake_application, - debug=mock.ANY, - socket_timeout=mock.ANY, - log=mock.ANY, - keepalive=wsgi_keepalive) diff --git a/keystone-moon/keystone/tests/unit/tests/__init__.py b/keystone-moon/keystone/tests/unit/tests/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/tests/test_core.py b/keystone-moon/keystone/tests/unit/tests/test_core.py deleted file mode 100644 index 56e42bcc..00000000 --- a/keystone-moon/keystone/tests/unit/tests/test_core.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright 2014 IBM Corp. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import sys -import warnings - -from oslo_log import log -from sqlalchemy import exc -from testtools import matchers - -from keystone.tests import unit - - -LOG = log.getLogger(__name__) - - -class BaseTestTestCase(unit.BaseTestCase): - - def test_unexpected_exit(self): - # if a test calls sys.exit it raises rather than exiting. 
- self.assertThat(lambda: sys.exit(), - matchers.raises(unit.UnexpectedExit)) - - -class TestTestCase(unit.TestCase): - - def test_bad_log(self): - # If the arguments are invalid for the string in a log it raises an - # exception during testing. - self.assertThat( - lambda: LOG.warning('String %(p1)s %(p2)s', {'p1': 'something'}), - matchers.raises(KeyError)) - - def test_sa_warning(self): - self.assertThat( - lambda: warnings.warn('test sa warning error', exc.SAWarning), - matchers.raises(exc.SAWarning)) - - def test_deprecation_warnings_are_raised_as_exceptions_in_tests(self): - self.assertThat( - lambda: warnings.warn('this is deprecated', DeprecationWarning), - matchers.raises(DeprecationWarning)) diff --git a/keystone-moon/keystone/tests/unit/tests/test_utils.py b/keystone-moon/keystone/tests/unit/tests/test_utils.py deleted file mode 100644 index 22c485c0..00000000 --- a/keystone-moon/keystone/tests/unit/tests/test_utils.py +++ /dev/null @@ -1,37 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from testtools import matchers -from testtools import testcase - -from keystone.tests.unit import utils - - -class TestWipDecorator(testcase.TestCase): - - def test_raises_SkipError_when_broken_test_fails(self): - - @utils.wip('waiting on bug #000000') - def test(): - raise Exception('i expected a failure - this is a WIP') - - e = self.assertRaises(testcase.TestSkipped, test) - self.assertThat(str(e), matchers.Contains('#000000')) - - def test_raises_AssertionError_when_test_passes(self): - - @utils.wip('waiting on bug #000000') - def test(): - pass # literally - - e = self.assertRaises(AssertionError, test) - self.assertThat(str(e), matchers.Contains('#000000')) diff --git a/keystone-moon/keystone/tests/unit/token/__init__.py b/keystone-moon/keystone/tests/unit/token/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/token/test_backends.py b/keystone-moon/keystone/tests/unit/token/test_backends.py deleted file mode 100644 index feb7e017..00000000 --- a/keystone-moon/keystone/tests/unit/token/test_backends.py +++ /dev/null @@ -1,551 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import copy -import datetime -import hashlib -import uuid - -from keystoneclient.common import cms -from oslo_config import cfg -from oslo_utils import timeutils -import six -from six.moves import range - -from keystone import exception -from keystone.tests import unit -from keystone.tests.unit import utils as test_utils -from keystone.token import provider - - -CONF = cfg.CONF -NULL_OBJECT = object() - - -class TokenTests(object): - def _create_token_id(self): - # Use a token signed by the cms module - token_id = "" - for i in range(1, 20): - token_id += uuid.uuid4().hex - return cms.cms_sign_token(token_id, - CONF.signing.certfile, - CONF.signing.keyfile) - - def _assert_revoked_token_list_matches_token_persistence( - self, revoked_token_id_list): - # Assert that the list passed in matches the list returned by the - # token persistence service - persistence_list = [ - x['id'] - for x in self.token_provider_api.list_revoked_tokens() - ] - self.assertEqual(persistence_list, revoked_token_id_list) - - def test_token_crud(self): - token_id = self._create_token_id() - data = {'id': token_id, 'a': 'b', - 'trust_id': None, - 'user': {'id': 'testuserid'}, - 'token_data': {'access': {'token': { - 'audit_ids': [uuid.uuid4().hex]}}}} - data_ref = self.token_provider_api._persistence.create_token(token_id, - data) - expires = data_ref.pop('expires') - data_ref.pop('user_id') - self.assertIsInstance(expires, datetime.datetime) - data_ref.pop('id') - data.pop('id') - self.assertDictEqual(data, data_ref) - - new_data_ref = self.token_provider_api._persistence.get_token(token_id) - expires = new_data_ref.pop('expires') - self.assertIsInstance(expires, datetime.datetime) - new_data_ref.pop('user_id') - new_data_ref.pop('id') - - self.assertEqual(data, new_data_ref) - - self.token_provider_api._persistence.delete_token(token_id) - self.assertRaises( - exception.TokenNotFound, - self.token_provider_api._persistence.get_token, token_id) - self.assertRaises( - 
exception.TokenNotFound, - self.token_provider_api._persistence.delete_token, token_id) - - def create_token_sample_data(self, token_id=None, tenant_id=None, - trust_id=None, user_id=None, expires=None): - if token_id is None: - token_id = self._create_token_id() - if user_id is None: - user_id = 'testuserid' - # FIXME(morganfainberg): These tokens look nothing like "Real" tokens. - # This should be fixed when token issuance is cleaned up. - data = {'id': token_id, 'a': 'b', - 'user': {'id': user_id}, - 'access': {'token': {'audit_ids': [uuid.uuid4().hex]}}} - if tenant_id is not None: - data['tenant'] = {'id': tenant_id, 'name': tenant_id} - if tenant_id is NULL_OBJECT: - data['tenant'] = None - if expires is not None: - data['expires'] = expires - if trust_id is not None: - data['trust_id'] = trust_id - data['access'].setdefault('trust', {}) - # Testuserid2 is used here since a trustee will be different in - # the cases of impersonation and therefore should not match the - # token's user_id. - data['access']['trust']['trustee_user_id'] = 'testuserid2' - data['token_version'] = provider.V2 - # Issue token stores a copy of all token data at token['token_data']. - # This emulates that assumption as part of the test. 
- data['token_data'] = copy.deepcopy(data) - new_token = self.token_provider_api._persistence.create_token(token_id, - data) - return new_token['id'], data - - def test_delete_tokens(self): - tokens = self.token_provider_api._persistence._list_tokens( - 'testuserid') - self.assertEqual(0, len(tokens)) - token_id1, data = self.create_token_sample_data( - tenant_id='testtenantid') - token_id2, data = self.create_token_sample_data( - tenant_id='testtenantid') - token_id3, data = self.create_token_sample_data( - tenant_id='testtenantid', - user_id='testuserid1') - tokens = self.token_provider_api._persistence._list_tokens( - 'testuserid') - self.assertEqual(2, len(tokens)) - self.assertIn(token_id2, tokens) - self.assertIn(token_id1, tokens) - self.token_provider_api._persistence.delete_tokens( - user_id='testuserid', - tenant_id='testtenantid') - tokens = self.token_provider_api._persistence._list_tokens( - 'testuserid') - self.assertEqual(0, len(tokens)) - self.assertRaises(exception.TokenNotFound, - self.token_provider_api._persistence.get_token, - token_id1) - self.assertRaises(exception.TokenNotFound, - self.token_provider_api._persistence.get_token, - token_id2) - - self.token_provider_api._persistence.get_token(token_id3) - - def test_delete_tokens_trust(self): - tokens = self.token_provider_api._persistence._list_tokens( - user_id='testuserid') - self.assertEqual(0, len(tokens)) - token_id1, data = self.create_token_sample_data( - tenant_id='testtenantid', - trust_id='testtrustid') - token_id2, data = self.create_token_sample_data( - tenant_id='testtenantid', - user_id='testuserid1', - trust_id='testtrustid1') - tokens = self.token_provider_api._persistence._list_tokens( - 'testuserid') - self.assertEqual(1, len(tokens)) - self.assertIn(token_id1, tokens) - self.token_provider_api._persistence.delete_tokens( - user_id='testuserid', - tenant_id='testtenantid', - trust_id='testtrustid') - self.assertRaises(exception.TokenNotFound, - 
self.token_provider_api._persistence.get_token, - token_id1) - self.token_provider_api._persistence.get_token(token_id2) - - def _test_token_list(self, token_list_fn): - tokens = token_list_fn('testuserid') - self.assertEqual(0, len(tokens)) - token_id1, data = self.create_token_sample_data() - tokens = token_list_fn('testuserid') - self.assertEqual(1, len(tokens)) - self.assertIn(token_id1, tokens) - token_id2, data = self.create_token_sample_data() - tokens = token_list_fn('testuserid') - self.assertEqual(2, len(tokens)) - self.assertIn(token_id2, tokens) - self.assertIn(token_id1, tokens) - self.token_provider_api._persistence.delete_token(token_id1) - tokens = token_list_fn('testuserid') - self.assertIn(token_id2, tokens) - self.assertNotIn(token_id1, tokens) - self.token_provider_api._persistence.delete_token(token_id2) - tokens = token_list_fn('testuserid') - self.assertNotIn(token_id2, tokens) - self.assertNotIn(token_id1, tokens) - - # tenant-specific tokens - tenant1 = uuid.uuid4().hex - tenant2 = uuid.uuid4().hex - token_id3, data = self.create_token_sample_data(tenant_id=tenant1) - token_id4, data = self.create_token_sample_data(tenant_id=tenant2) - # test for existing but empty tenant (LP:1078497) - token_id5, data = self.create_token_sample_data(tenant_id=NULL_OBJECT) - tokens = token_list_fn('testuserid') - self.assertEqual(3, len(tokens)) - self.assertNotIn(token_id1, tokens) - self.assertNotIn(token_id2, tokens) - self.assertIn(token_id3, tokens) - self.assertIn(token_id4, tokens) - self.assertIn(token_id5, tokens) - tokens = token_list_fn('testuserid', tenant2) - self.assertEqual(1, len(tokens)) - self.assertNotIn(token_id1, tokens) - self.assertNotIn(token_id2, tokens) - self.assertNotIn(token_id3, tokens) - self.assertIn(token_id4, tokens) - - def test_token_list(self): - self._test_token_list( - self.token_provider_api._persistence._list_tokens) - - def test_token_list_trust(self): - trust_id = uuid.uuid4().hex - token_id5, data = 
self.create_token_sample_data(trust_id=trust_id) - tokens = self.token_provider_api._persistence._list_tokens( - 'testuserid', trust_id=trust_id) - self.assertEqual(1, len(tokens)) - self.assertIn(token_id5, tokens) - - def test_get_token_returns_not_found(self): - self.assertRaises(exception.TokenNotFound, - self.token_provider_api._persistence.get_token, - uuid.uuid4().hex) - - def test_delete_token_returns_not_found(self): - self.assertRaises(exception.TokenNotFound, - self.token_provider_api._persistence.delete_token, - uuid.uuid4().hex) - - def test_expired_token(self): - token_id = uuid.uuid4().hex - expire_time = timeutils.utcnow() - datetime.timedelta(minutes=1) - data = {'id_hash': token_id, 'id': token_id, 'a': 'b', - 'expires': expire_time, - 'trust_id': None, - 'user': {'id': 'testuserid'}} - data_ref = self.token_provider_api._persistence.create_token(token_id, - data) - data_ref.pop('user_id') - self.assertDictEqual(data, data_ref) - self.assertRaises(exception.TokenNotFound, - self.token_provider_api._persistence.get_token, - token_id) - - def test_null_expires_token(self): - token_id = uuid.uuid4().hex - data = {'id': token_id, 'id_hash': token_id, 'a': 'b', 'expires': None, - 'user': {'id': 'testuserid'}} - data_ref = self.token_provider_api._persistence.create_token(token_id, - data) - self.assertIsNotNone(data_ref['expires']) - new_data_ref = self.token_provider_api._persistence.get_token(token_id) - - # MySQL doesn't store microseconds, so discard them before testing - data_ref['expires'] = data_ref['expires'].replace(microsecond=0) - new_data_ref['expires'] = new_data_ref['expires'].replace( - microsecond=0) - - self.assertEqual(data_ref, new_data_ref) - - def check_list_revoked_tokens(self, token_infos): - revocation_list = self.token_provider_api.list_revoked_tokens() - revoked_ids = [x['id'] for x in revocation_list] - revoked_audit_ids = [x['audit_id'] for x in revocation_list] - 
self._assert_revoked_token_list_matches_token_persistence(revoked_ids) - for token_id, audit_id in token_infos: - self.assertIn(token_id, revoked_ids) - self.assertIn(audit_id, revoked_audit_ids) - - def delete_token(self): - token_id = uuid.uuid4().hex - audit_id = uuid.uuid4().hex - data = {'id_hash': token_id, 'id': token_id, 'a': 'b', - 'user': {'id': 'testuserid'}, - 'token_data': {'token': {'audit_ids': [audit_id]}}} - data_ref = self.token_provider_api._persistence.create_token(token_id, - data) - self.token_provider_api._persistence.delete_token(token_id) - self.assertRaises( - exception.TokenNotFound, - self.token_provider_api._persistence.get_token, - data_ref['id']) - self.assertRaises( - exception.TokenNotFound, - self.token_provider_api._persistence.delete_token, - data_ref['id']) - return (token_id, audit_id) - - def test_list_revoked_tokens_returns_empty_list(self): - revoked_ids = [x['id'] - for x in self.token_provider_api.list_revoked_tokens()] - self._assert_revoked_token_list_matches_token_persistence(revoked_ids) - self.assertEqual([], revoked_ids) - - def test_list_revoked_tokens_for_single_token(self): - self.check_list_revoked_tokens([self.delete_token()]) - - def test_list_revoked_tokens_for_multiple_tokens(self): - self.check_list_revoked_tokens([self.delete_token() - for x in range(2)]) - - def test_flush_expired_token(self): - token_id = uuid.uuid4().hex - expire_time = timeutils.utcnow() - datetime.timedelta(minutes=1) - data = {'id_hash': token_id, 'id': token_id, 'a': 'b', - 'expires': expire_time, - 'trust_id': None, - 'user': {'id': 'testuserid'}} - data_ref = self.token_provider_api._persistence.create_token(token_id, - data) - data_ref.pop('user_id') - self.assertDictEqual(data, data_ref) - - token_id = uuid.uuid4().hex - expire_time = timeutils.utcnow() + datetime.timedelta(minutes=1) - data = {'id_hash': token_id, 'id': token_id, 'a': 'b', - 'expires': expire_time, - 'trust_id': None, - 'user': {'id': 'testuserid'}} - data_ref = 
self.token_provider_api._persistence.create_token(token_id, - data) - data_ref.pop('user_id') - self.assertDictEqual(data, data_ref) - - self.token_provider_api._persistence.flush_expired_tokens() - tokens = self.token_provider_api._persistence._list_tokens( - 'testuserid') - self.assertEqual(1, len(tokens)) - self.assertIn(token_id, tokens) - - @unit.skip_if_cache_disabled('token') - def test_revocation_list_cache(self): - expire_time = timeutils.utcnow() + datetime.timedelta(minutes=10) - token_id = uuid.uuid4().hex - token_data = {'id_hash': token_id, 'id': token_id, 'a': 'b', - 'expires': expire_time, - 'trust_id': None, - 'user': {'id': 'testuserid'}, - 'token_data': {'token': { - 'audit_ids': [uuid.uuid4().hex]}}} - token2_id = uuid.uuid4().hex - token2_data = {'id_hash': token2_id, 'id': token2_id, 'a': 'b', - 'expires': expire_time, - 'trust_id': None, - 'user': {'id': 'testuserid'}, - 'token_data': {'token': { - 'audit_ids': [uuid.uuid4().hex]}}} - # Create 2 Tokens. - self.token_provider_api._persistence.create_token(token_id, - token_data) - self.token_provider_api._persistence.create_token(token2_id, - token2_data) - # Verify the revocation list is empty. - self.assertEqual( - [], self.token_provider_api._persistence.list_revoked_tokens()) - self.assertEqual([], self.token_provider_api.list_revoked_tokens()) - # Delete a token directly, bypassing the manager. - self.token_provider_api._persistence.driver.delete_token(token_id) - # Verify the revocation list is still empty. - self.assertEqual( - [], self.token_provider_api._persistence.list_revoked_tokens()) - self.assertEqual([], self.token_provider_api.list_revoked_tokens()) - # Invalidate the revocation list. - self.token_provider_api._persistence.invalidate_revocation_list() - # Verify the deleted token is in the revocation list. 
- revoked_ids = [x['id'] - for x in self.token_provider_api.list_revoked_tokens()] - self._assert_revoked_token_list_matches_token_persistence(revoked_ids) - self.assertIn(token_id, revoked_ids) - # Delete the second token, through the manager - self.token_provider_api._persistence.delete_token(token2_id) - revoked_ids = [x['id'] - for x in self.token_provider_api.list_revoked_tokens()] - self._assert_revoked_token_list_matches_token_persistence(revoked_ids) - # Verify both tokens are in the revocation list. - self.assertIn(token_id, revoked_ids) - self.assertIn(token2_id, revoked_ids) - - def _test_predictable_revoked_pki_token_id(self, hash_fn): - token_id = self._create_token_id() - token_id_hash = hash_fn(token_id.encode('utf-8')).hexdigest() - token = {'user': {'id': uuid.uuid4().hex}, - 'token_data': {'token': {'audit_ids': [uuid.uuid4().hex]}}} - - self.token_provider_api._persistence.create_token(token_id, token) - self.token_provider_api._persistence.delete_token(token_id) - - revoked_ids = [x['id'] - for x in self.token_provider_api.list_revoked_tokens()] - self._assert_revoked_token_list_matches_token_persistence(revoked_ids) - self.assertIn(token_id_hash, revoked_ids) - self.assertNotIn(token_id, revoked_ids) - for t in self.token_provider_api._persistence.list_revoked_tokens(): - self.assertIn('expires', t) - - def test_predictable_revoked_pki_token_id_default(self): - self._test_predictable_revoked_pki_token_id(hashlib.md5) - - def test_predictable_revoked_pki_token_id_sha256(self): - self.config_fixture.config(group='token', hash_algorithm='sha256') - self._test_predictable_revoked_pki_token_id(hashlib.sha256) - - def test_predictable_revoked_uuid_token_id(self): - token_id = uuid.uuid4().hex - token = {'user': {'id': uuid.uuid4().hex}, - 'token_data': {'token': {'audit_ids': [uuid.uuid4().hex]}}} - - self.token_provider_api._persistence.create_token(token_id, token) - self.token_provider_api._persistence.delete_token(token_id) - - revoked_tokens = 
self.token_provider_api.list_revoked_tokens() - revoked_ids = [x['id'] for x in revoked_tokens] - self._assert_revoked_token_list_matches_token_persistence(revoked_ids) - self.assertIn(token_id, revoked_ids) - for t in revoked_tokens: - self.assertIn('expires', t) - - def test_create_unicode_token_id(self): - token_id = six.text_type(self._create_token_id()) - self.create_token_sample_data(token_id=token_id) - self.token_provider_api._persistence.get_token(token_id) - - def test_create_unicode_user_id(self): - user_id = six.text_type(uuid.uuid4().hex) - token_id, data = self.create_token_sample_data(user_id=user_id) - self.token_provider_api._persistence.get_token(token_id) - - def test_token_expire_timezone(self): - - @test_utils.timezone - def _create_token(expire_time): - token_id = uuid.uuid4().hex - user_id = six.text_type(uuid.uuid4().hex) - return self.create_token_sample_data(token_id=token_id, - user_id=user_id, - expires=expire_time) - - for d in ['+0', '-11', '-8', '-5', '+5', '+8', '+14']: - test_utils.TZ = 'UTC' + d - expire_time = timeutils.utcnow() + datetime.timedelta(minutes=1) - token_id, data_in = _create_token(expire_time) - data_get = self.token_provider_api._persistence.get_token(token_id) - - self.assertEqual(data_in['id'], data_get['id'], - 'TZ=%s' % test_utils.TZ) - - expire_time_expired = ( - timeutils.utcnow() + datetime.timedelta(minutes=-1)) - token_id, data_in = _create_token(expire_time_expired) - self.assertRaises(exception.TokenNotFound, - self.token_provider_api._persistence.get_token, - data_in['id']) - - -class TokenCacheInvalidation(object): - def _create_test_data(self): - self.user = unit.new_user_ref( - domain_id=CONF.identity.default_domain_id) - self.tenant = unit.new_project_ref( - domain_id=CONF.identity.default_domain_id) - - # Create an equivalent of a scoped token - token_dict = {'user': self.user, 'tenant': self.tenant, - 'metadata': {}, 'id': 'placeholder'} - token_id, data = 
self.token_provider_api.issue_v2_token(token_dict) - self.scoped_token_id = token_id - - # ..and an un-scoped one - token_dict = {'user': self.user, 'tenant': None, - 'metadata': {}, 'id': 'placeholder'} - token_id, data = self.token_provider_api.issue_v2_token(token_dict) - self.unscoped_token_id = token_id - - # Validate them, in the various ways possible - this will load the - # responses into the token cache. - self._check_scoped_tokens_are_valid() - self._check_unscoped_tokens_are_valid() - - def _check_unscoped_tokens_are_invalid(self): - self.assertRaises( - exception.TokenNotFound, - self.token_provider_api.validate_token, - self.unscoped_token_id) - self.assertRaises( - exception.TokenNotFound, - self.token_provider_api.validate_v2_token, - self.unscoped_token_id) - - def _check_scoped_tokens_are_invalid(self): - self.assertRaises( - exception.TokenNotFound, - self.token_provider_api.validate_token, - self.scoped_token_id) - self.assertRaises( - exception.TokenNotFound, - self.token_provider_api.validate_token, - self.scoped_token_id, - self.tenant['id']) - self.assertRaises( - exception.TokenNotFound, - self.token_provider_api.validate_v2_token, - self.scoped_token_id) - self.assertRaises( - exception.TokenNotFound, - self.token_provider_api.validate_v2_token, - self.scoped_token_id, - self.tenant['id']) - - def _check_scoped_tokens_are_valid(self): - self.token_provider_api.validate_token(self.scoped_token_id) - self.token_provider_api.validate_token( - self.scoped_token_id, belongs_to=self.tenant['id']) - self.token_provider_api.validate_v2_token(self.scoped_token_id) - self.token_provider_api.validate_v2_token( - self.scoped_token_id, belongs_to=self.tenant['id']) - - def _check_unscoped_tokens_are_valid(self): - self.token_provider_api.validate_token(self.unscoped_token_id) - self.token_provider_api.validate_v2_token(self.unscoped_token_id) - - def test_delete_unscoped_token(self): - self.token_provider_api._persistence.delete_token( - 
self.unscoped_token_id) - self._check_unscoped_tokens_are_invalid() - self._check_scoped_tokens_are_valid() - - def test_delete_scoped_token_by_id(self): - self.token_provider_api._persistence.delete_token(self.scoped_token_id) - self._check_scoped_tokens_are_invalid() - self._check_unscoped_tokens_are_valid() - - def test_delete_scoped_token_by_user(self): - self.token_provider_api._persistence.delete_tokens(self.user['id']) - # Since we are deleting all tokens for this user, they should all - # now be invalid. - self._check_scoped_tokens_are_invalid() - self._check_unscoped_tokens_are_invalid() - - def test_delete_scoped_token_by_user_and_tenant(self): - self.token_provider_api._persistence.delete_tokens( - self.user['id'], - tenant_id=self.tenant['id']) - self._check_scoped_tokens_are_invalid() - self._check_unscoped_tokens_are_valid() diff --git a/keystone-moon/keystone/tests/unit/token/test_fernet_provider.py b/keystone-moon/keystone/tests/unit/token/test_fernet_provider.py deleted file mode 100644 index 5f51d7b3..00000000 --- a/keystone-moon/keystone/tests/unit/token/test_fernet_provider.py +++ /dev/null @@ -1,611 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import base64 -import datetime -import hashlib -import os -import uuid - -import msgpack -from oslo_utils import timeutils -from six.moves import urllib - -from keystone.common import config -from keystone.common import utils -from keystone import exception -from keystone.federation import constants as federation_constants -from keystone.tests import unit -from keystone.tests.unit import ksfixtures -from keystone.tests.unit.ksfixtures import database -from keystone.token import provider -from keystone.token.providers import fernet -from keystone.token.providers.fernet import token_formatters -from keystone.token.providers.fernet import utils as fernet_utils - - -CONF = config.CONF - - -class TestFernetTokenProvider(unit.TestCase): - def setUp(self): - super(TestFernetTokenProvider, self).setUp() - self.useFixture(ksfixtures.KeyRepository(self.config_fixture)) - self.provider = fernet.Provider() - - def test_supports_bind_authentication_returns_false(self): - self.assertFalse(self.provider._supports_bind_authentication) - - def test_needs_persistence_returns_false(self): - self.assertFalse(self.provider.needs_persistence()) - - def test_invalid_v3_token_raises_token_not_found(self): - # NOTE(lbragstad): Here we use the validate_non_persistent_token() - # methods because the validate_v3_token() method is strictly for - # validating UUID formatted tokens. It is written to assume cached - # tokens from a backend, where validate_non_persistent_token() is not. 
- token_id = uuid.uuid4().hex - e = self.assertRaises( - exception.TokenNotFound, - self.provider.validate_non_persistent_token, - token_id) - self.assertIn(token_id, u'%s' % e) - - def test_invalid_v2_token_raises_token_not_found(self): - token_id = uuid.uuid4().hex - e = self.assertRaises( - exception.TokenNotFound, - self.provider.validate_non_persistent_token, - token_id) - self.assertIn(token_id, u'%s' % e) - - -class TestValidate(unit.TestCase): - def setUp(self): - super(TestValidate, self).setUp() - self.useFixture(ksfixtures.KeyRepository(self.config_fixture)) - self.useFixture(database.Database()) - self.load_backends() - - def config_overrides(self): - super(TestValidate, self).config_overrides() - self.config_fixture.config(group='token', provider='fernet') - - def test_validate_v3_token_simple(self): - # Check the fields in the token result when use validate_v3_token - # with a simple token. - - domain_ref = unit.new_domain_ref() - domain_ref = self.resource_api.create_domain(domain_ref['id'], - domain_ref) - - user_ref = unit.new_user_ref(domain_ref['id']) - user_ref = self.identity_api.create_user(user_ref) - - method_names = ['password'] - token_id, token_data_ = self.token_provider_api.issue_v3_token( - user_ref['id'], method_names) - - token_data = self.token_provider_api.validate_v3_token(token_id) - token = token_data['token'] - self.assertIsInstance(token['audit_ids'], list) - self.assertIsInstance(token['expires_at'], str) - self.assertIsInstance(token['issued_at'], str) - self.assertEqual(method_names, token['methods']) - exp_user_info = { - 'id': user_ref['id'], - 'name': user_ref['name'], - 'domain': { - 'id': domain_ref['id'], - 'name': domain_ref['name'], - }, - } - self.assertEqual(exp_user_info, token['user']) - - def test_validate_v3_token_federated_info(self): - # Check the user fields in the token result when use validate_v3_token - # when the token has federated info. 
- - domain_ref = unit.new_domain_ref() - domain_ref = self.resource_api.create_domain(domain_ref['id'], - domain_ref) - - user_ref = unit.new_user_ref(domain_ref['id']) - user_ref = self.identity_api.create_user(user_ref) - - method_names = ['mapped'] - - group_ids = [uuid.uuid4().hex, ] - identity_provider = uuid.uuid4().hex - protocol = uuid.uuid4().hex - auth_context = { - 'user_id': user_ref['id'], - 'group_ids': group_ids, - federation_constants.IDENTITY_PROVIDER: identity_provider, - federation_constants.PROTOCOL: protocol, - } - token_id, token_data_ = self.token_provider_api.issue_v3_token( - user_ref['id'], method_names, auth_context=auth_context) - - token_data = self.token_provider_api.validate_v3_token(token_id) - token = token_data['token'] - exp_user_info = { - 'id': user_ref['id'], - 'name': user_ref['id'], - 'domain': {'id': CONF.federation.federated_domain_name, - 'name': CONF.federation.federated_domain_name, }, - federation_constants.FEDERATION: { - 'groups': [{'id': group_id} for group_id in group_ids], - 'identity_provider': {'id': identity_provider, }, - 'protocol': {'id': protocol, }, - }, - } - self.assertEqual(exp_user_info, token['user']) - - def test_validate_v3_token_trust(self): - # Check the trust fields in the token result when use validate_v3_token - # when the token has trust info. 
- - domain_ref = unit.new_domain_ref() - domain_ref = self.resource_api.create_domain(domain_ref['id'], - domain_ref) - - user_ref = unit.new_user_ref(domain_ref['id']) - user_ref = self.identity_api.create_user(user_ref) - - trustor_user_ref = unit.new_user_ref(domain_ref['id']) - trustor_user_ref = self.identity_api.create_user(trustor_user_ref) - - project_ref = unit.new_project_ref(domain_id=domain_ref['id']) - project_ref = self.resource_api.create_project(project_ref['id'], - project_ref) - - role_ref = unit.new_role_ref() - role_ref = self.role_api.create_role(role_ref['id'], role_ref) - - self.assignment_api.create_grant( - role_ref['id'], user_id=user_ref['id'], - project_id=project_ref['id']) - - self.assignment_api.create_grant( - role_ref['id'], user_id=trustor_user_ref['id'], - project_id=project_ref['id']) - - trustor_user_id = trustor_user_ref['id'] - trustee_user_id = user_ref['id'] - trust_ref = unit.new_trust_ref( - trustor_user_id, trustee_user_id, project_id=project_ref['id'], - role_ids=[role_ref['id'], ]) - trust_ref = self.trust_api.create_trust(trust_ref['id'], trust_ref, - trust_ref['roles']) - - method_names = ['password'] - - token_id, token_data_ = self.token_provider_api.issue_v3_token( - user_ref['id'], method_names, project_id=project_ref['id'], - trust=trust_ref) - - token_data = self.token_provider_api.validate_v3_token(token_id) - token = token_data['token'] - exp_trust_info = { - 'id': trust_ref['id'], - 'impersonation': False, - 'trustee_user': {'id': user_ref['id'], }, - 'trustor_user': {'id': trustor_user_ref['id'], }, - } - self.assertEqual(exp_trust_info, token['OS-TRUST:trust']) - - def test_validate_v3_token_validation_error_exc(self): - # When the token format isn't recognized, TokenNotFound is raised. - - # A uuid string isn't a valid Fernet token. 
- token_id = uuid.uuid4().hex - self.assertRaises(exception.TokenNotFound, - self.token_provider_api.validate_v3_token, token_id) - - -class TestTokenFormatter(unit.TestCase): - def setUp(self): - super(TestTokenFormatter, self).setUp() - self.useFixture(ksfixtures.KeyRepository(self.config_fixture)) - - def test_restore_padding(self): - # 'a' will result in '==' padding, 'aa' will result in '=' padding, and - # 'aaa' will result in no padding. - binary_to_test = [b'a', b'aa', b'aaa'] - - for binary in binary_to_test: - # base64.urlsafe_b64encode takes six.binary_type and returns - # six.binary_type. - encoded_string = base64.urlsafe_b64encode(binary) - encoded_string = encoded_string.decode('utf-8') - # encoded_string is now six.text_type. - encoded_str_without_padding = encoded_string.rstrip('=') - self.assertFalse(encoded_str_without_padding.endswith('=')) - encoded_str_with_padding_restored = ( - token_formatters.TokenFormatter.restore_padding( - encoded_str_without_padding) - ) - self.assertEqual(encoded_string, encoded_str_with_padding_restored) - - def test_legacy_padding_validation(self): - first_value = uuid.uuid4().hex - second_value = uuid.uuid4().hex - payload = (first_value, second_value) - msgpack_payload = msgpack.packb(payload) - # msgpack_payload is six.binary_type. - - tf = token_formatters.TokenFormatter() - - # NOTE(lbragstad): This method preserves the way that keystone used to - # percent encode the tokens, prior to bug #1491926. - def legacy_pack(payload): - # payload is six.binary_type. - encrypted_payload = tf.crypto.encrypt(payload) - # encrypted_payload is six.binary_type. - - # the encrypted_payload is returned with padding appended - self.assertTrue(encrypted_payload.endswith(b'=')) - - # using urllib.parse.quote will percent encode the padding, like - # keystone did in Kilo. - percent_encoded_payload = urllib.parse.quote(encrypted_payload) - # percent_encoded_payload is six.text_type. 
- - # ensure that the padding was actually percent encoded - self.assertTrue(percent_encoded_payload.endswith('%3D')) - return percent_encoded_payload - - token_with_legacy_padding = legacy_pack(msgpack_payload) - # token_with_legacy_padding is six.text_type. - - # demonstrate the we can validate a payload that has been percent - # encoded with the Fernet logic that existed in Kilo - serialized_payload = tf.unpack(token_with_legacy_padding) - # serialized_payload is six.binary_type. - returned_payload = msgpack.unpackb(serialized_payload) - # returned_payload contains six.binary_type. - self.assertEqual(first_value, returned_payload[0].decode('utf-8')) - self.assertEqual(second_value, returned_payload[1].decode('utf-8')) - - -class TestPayloads(unit.TestCase): - def assertTimestampsEqual(self, expected, actual): - # The timestamp that we get back when parsing the payload may not - # exactly match the timestamp that was put in the payload due to - # conversion to and from a float. - - exp_time = timeutils.parse_isotime(expected) - actual_time = timeutils.parse_isotime(actual) - - # the granularity of timestamp string is microseconds and it's only the - # last digit in the representation that's different, so use a delta - # just above nanoseconds. 
- return self.assertCloseEnoughForGovernmentWork(exp_time, actual_time, - delta=1e-05) - - def test_uuid_hex_to_byte_conversions(self): - payload_cls = token_formatters.BasePayload - - expected_hex_uuid = uuid.uuid4().hex - uuid_obj = uuid.UUID(expected_hex_uuid) - expected_uuid_in_bytes = uuid_obj.bytes - actual_uuid_in_bytes = payload_cls.convert_uuid_hex_to_bytes( - expected_hex_uuid) - self.assertEqual(expected_uuid_in_bytes, actual_uuid_in_bytes) - actual_hex_uuid = payload_cls.convert_uuid_bytes_to_hex( - expected_uuid_in_bytes) - self.assertEqual(expected_hex_uuid, actual_hex_uuid) - - def test_time_string_to_float_conversions(self): - payload_cls = token_formatters.BasePayload - - original_time_str = utils.isotime(subsecond=True) - time_obj = timeutils.parse_isotime(original_time_str) - expected_time_float = ( - (timeutils.normalize_time(time_obj) - - datetime.datetime.utcfromtimestamp(0)).total_seconds()) - - # NOTE(lbragstad): The token expiration time for Fernet tokens is - # passed in the payload of the token. This is different from the token - # creation time, which is handled by Fernet and doesn't support - # subsecond precision because it is a timestamp integer. - self.assertIsInstance(expected_time_float, float) - - actual_time_float = payload_cls._convert_time_string_to_float( - original_time_str) - self.assertIsInstance(actual_time_float, float) - self.assertEqual(expected_time_float, actual_time_float) - - # Generate expected_time_str using the same time float. Using - # original_time_str from utils.isotime will occasionally fail due to - # floating point rounding differences. 
- time_object = datetime.datetime.utcfromtimestamp(actual_time_float) - expected_time_str = utils.isotime(time_object, subsecond=True) - - actual_time_str = payload_cls._convert_float_to_time_string( - actual_time_float) - self.assertEqual(expected_time_str, actual_time_str) - - def _test_payload(self, payload_class, exp_user_id=None, exp_methods=None, - exp_project_id=None, exp_domain_id=None, - exp_trust_id=None, exp_federated_info=None, - exp_access_token_id=None): - exp_user_id = exp_user_id or uuid.uuid4().hex - exp_methods = exp_methods or ['password'] - exp_expires_at = utils.isotime(timeutils.utcnow(), subsecond=True) - exp_audit_ids = [provider.random_urlsafe_str()] - - payload = payload_class.assemble( - exp_user_id, exp_methods, exp_project_id, exp_domain_id, - exp_expires_at, exp_audit_ids, exp_trust_id, exp_federated_info, - exp_access_token_id) - - (user_id, methods, project_id, - domain_id, expires_at, audit_ids, - trust_id, federated_info, - access_token_id) = payload_class.disassemble(payload) - - self.assertEqual(exp_user_id, user_id) - self.assertEqual(exp_methods, methods) - self.assertTimestampsEqual(exp_expires_at, expires_at) - self.assertEqual(exp_audit_ids, audit_ids) - self.assertEqual(exp_project_id, project_id) - self.assertEqual(exp_domain_id, domain_id) - self.assertEqual(exp_trust_id, trust_id) - self.assertEqual(exp_access_token_id, access_token_id) - - if exp_federated_info: - self.assertDictEqual(exp_federated_info, federated_info) - else: - self.assertIsNone(federated_info) - - def test_unscoped_payload(self): - self._test_payload(token_formatters.UnscopedPayload) - - def test_project_scoped_payload(self): - self._test_payload(token_formatters.ProjectScopedPayload, - exp_project_id=uuid.uuid4().hex) - - def test_domain_scoped_payload(self): - self._test_payload(token_formatters.DomainScopedPayload, - exp_domain_id=uuid.uuid4().hex) - - def test_domain_scoped_payload_with_default_domain(self): - 
self._test_payload(token_formatters.DomainScopedPayload, - exp_domain_id=CONF.identity.default_domain_id) - - def test_trust_scoped_payload(self): - self._test_payload(token_formatters.TrustScopedPayload, - exp_project_id=uuid.uuid4().hex, - exp_trust_id=uuid.uuid4().hex) - - def test_unscoped_payload_with_non_uuid_user_id(self): - self._test_payload(token_formatters.UnscopedPayload, - exp_user_id='someNonUuidUserId') - - def test_unscoped_payload_with_16_char_non_uuid_user_id(self): - self._test_payload(token_formatters.UnscopedPayload, - exp_user_id='0123456789abcdef') - - def test_project_scoped_payload_with_non_uuid_ids(self): - self._test_payload(token_formatters.ProjectScopedPayload, - exp_user_id='someNonUuidUserId', - exp_project_id='someNonUuidProjectId') - - def test_project_scoped_payload_with_16_char_non_uuid_ids(self): - self._test_payload(token_formatters.ProjectScopedPayload, - exp_user_id='0123456789abcdef', - exp_project_id='0123456789abcdef') - - def test_domain_scoped_payload_with_non_uuid_user_id(self): - self._test_payload(token_formatters.DomainScopedPayload, - exp_user_id='nonUuidUserId', - exp_domain_id=uuid.uuid4().hex) - - def test_domain_scoped_payload_with_16_char_non_uuid_user_id(self): - self._test_payload(token_formatters.DomainScopedPayload, - exp_user_id='0123456789abcdef', - exp_domain_id=uuid.uuid4().hex) - - def test_trust_scoped_payload_with_non_uuid_ids(self): - self._test_payload(token_formatters.TrustScopedPayload, - exp_user_id='someNonUuidUserId', - exp_project_id='someNonUuidProjectId', - exp_trust_id=uuid.uuid4().hex) - - def test_trust_scoped_payload_with_16_char_non_uuid_ids(self): - self._test_payload(token_formatters.TrustScopedPayload, - exp_user_id='0123456789abcdef', - exp_project_id='0123456789abcdef', - exp_trust_id=uuid.uuid4().hex) - - def _test_federated_payload_with_ids(self, exp_user_id, exp_group_id): - exp_federated_info = {'group_ids': [{'id': exp_group_id}], - 'idp_id': uuid.uuid4().hex, - 'protocol_id': 
uuid.uuid4().hex} - - self._test_payload(token_formatters.FederatedUnscopedPayload, - exp_user_id=exp_user_id, - exp_federated_info=exp_federated_info) - - def test_federated_payload_with_non_uuid_ids(self): - self._test_federated_payload_with_ids('someNonUuidUserId', - 'someNonUuidGroupId') - - def test_federated_payload_with_16_char_non_uuid_ids(self): - self._test_federated_payload_with_ids('0123456789abcdef', - '0123456789abcdef') - - def test_federated_project_scoped_payload(self): - exp_federated_info = {'group_ids': [{'id': 'someNonUuidGroupId'}], - 'idp_id': uuid.uuid4().hex, - 'protocol_id': uuid.uuid4().hex} - - self._test_payload(token_formatters.FederatedProjectScopedPayload, - exp_user_id='someNonUuidUserId', - exp_methods=['token'], - exp_project_id=uuid.uuid4().hex, - exp_federated_info=exp_federated_info) - - def test_federated_domain_scoped_payload(self): - exp_federated_info = {'group_ids': [{'id': 'someNonUuidGroupId'}], - 'idp_id': uuid.uuid4().hex, - 'protocol_id': uuid.uuid4().hex} - - self._test_payload(token_formatters.FederatedDomainScopedPayload, - exp_user_id='someNonUuidUserId', - exp_methods=['token'], - exp_domain_id=uuid.uuid4().hex, - exp_federated_info=exp_federated_info) - - def test_oauth_scoped_payload(self): - self._test_payload(token_formatters.OauthScopedPayload, - exp_project_id=uuid.uuid4().hex, - exp_access_token_id=uuid.uuid4().hex) - - -class TestFernetKeyRotation(unit.TestCase): - def setUp(self): - super(TestFernetKeyRotation, self).setUp() - - # A collection of all previously-seen signatures of the key - # repository's contents. 
- self.key_repo_signatures = set() - - @property - def keys(self): - """Key files converted to numbers.""" - return sorted( - int(x) for x in os.listdir(CONF.fernet_tokens.key_repository)) - - @property - def key_repository_size(self): - """The number of keys in the key repository.""" - return len(self.keys) - - @property - def key_repository_signature(self): - """Create a "thumbprint" of the current key repository. - - Because key files are renamed, this produces a hash of the contents of - the key files, ignoring their filenames. - - The resulting signature can be used, for example, to ensure that you - have a unique set of keys after you perform a key rotation (taking a - static set of keys, and simply shuffling them, would fail such a test). - - """ - # Load the keys into a list, keys is list of six.text_type. - keys = fernet_utils.load_keys() - - # Sort the list of keys by the keys themselves (they were previously - # sorted by filename). - keys.sort() - - # Create the thumbprint using all keys in the repository. - signature = hashlib.sha1() - for key in keys: - # Need to convert key to six.binary_type for update. - signature.update(key.encode('utf-8')) - return signature.hexdigest() - - def assertRepositoryState(self, expected_size): - """Validate the state of the key repository.""" - self.assertEqual(expected_size, self.key_repository_size) - self.assertUniqueRepositoryState() - - def assertUniqueRepositoryState(self): - """Ensures that the current key repo state has not been seen before.""" - # This is assigned to a variable because it takes some work to - # calculate. - signature = self.key_repository_signature - - # Ensure the signature is not in the set of previously seen signatures. - self.assertNotIn(signature, self.key_repo_signatures) - - # Add the signature to the set of repository signatures to validate - # that we don't see it again later. 
- self.key_repo_signatures.add(signature) - - def test_rotation(self): - # Initializing a key repository results in this many keys. We don't - # support max_active_keys being set any lower. - min_active_keys = 2 - - # Simulate every rotation strategy up to "rotating once a week while - # maintaining a year's worth of keys." - for max_active_keys in range(min_active_keys, 52 + 1): - self.config_fixture.config(group='fernet_tokens', - max_active_keys=max_active_keys) - - # Ensure that resetting the key repository always results in 2 - # active keys. - self.useFixture(ksfixtures.KeyRepository(self.config_fixture)) - - # Validate the initial repository state. - self.assertRepositoryState(expected_size=min_active_keys) - - # The repository should be initialized with a staged key (0) and a - # primary key (1). The next key is just auto-incremented. - exp_keys = [0, 1] - next_key_number = exp_keys[-1] + 1 # keep track of next key - self.assertEqual(exp_keys, self.keys) - - # Rotate the keys just enough times to fully populate the key - # repository. - for rotation in range(max_active_keys - min_active_keys): - fernet_utils.rotate_keys() - self.assertRepositoryState(expected_size=rotation + 3) - - exp_keys.append(next_key_number) - next_key_number += 1 - self.assertEqual(exp_keys, self.keys) - - # We should have a fully populated key repository now. - self.assertEqual(max_active_keys, self.key_repository_size) - - # Rotate an additional number of times to ensure that we maintain - # the desired number of active keys. 
- for rotation in range(10): - fernet_utils.rotate_keys() - self.assertRepositoryState(expected_size=max_active_keys) - - exp_keys.pop(1) - exp_keys.append(next_key_number) - next_key_number += 1 - self.assertEqual(exp_keys, self.keys) - - def test_non_numeric_files(self): - self.useFixture(ksfixtures.KeyRepository(self.config_fixture)) - evil_file = os.path.join(CONF.fernet_tokens.key_repository, '99.bak') - with open(evil_file, 'w'): - pass - fernet_utils.rotate_keys() - self.assertTrue(os.path.isfile(evil_file)) - keys = 0 - for x in os.listdir(CONF.fernet_tokens.key_repository): - if x == '99.bak': - continue - keys += 1 - self.assertEqual(3, keys) - - -class TestLoadKeys(unit.TestCase): - def test_non_numeric_files(self): - self.useFixture(ksfixtures.KeyRepository(self.config_fixture)) - evil_file = os.path.join(CONF.fernet_tokens.key_repository, '~1') - with open(evil_file, 'w'): - pass - keys = fernet_utils.load_keys() - self.assertEqual(2, len(keys)) - self.assertTrue(len(keys[0])) diff --git a/keystone-moon/keystone/tests/unit/token/test_pki_provider.py b/keystone-moon/keystone/tests/unit/token/test_pki_provider.py deleted file mode 100644 index b3ad4c2b..00000000 --- a/keystone-moon/keystone/tests/unit/token/test_pki_provider.py +++ /dev/null @@ -1,26 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone.tests import unit -from keystone.token.providers import pki - - -class TestPkiTokenProvider(unit.TestCase): - def setUp(self): - super(TestPkiTokenProvider, self).setUp() - self.provider = pki.Provider() - - def test_supports_bind_authentication_returns_true(self): - self.assertTrue(self.provider._supports_bind_authentication) - - def test_need_persistence_return_true(self): - self.assertIs(True, self.provider.needs_persistence()) diff --git a/keystone-moon/keystone/tests/unit/token/test_pkiz_provider.py b/keystone-moon/keystone/tests/unit/token/test_pkiz_provider.py deleted file mode 100644 index 1ffe7cfc..00000000 --- a/keystone-moon/keystone/tests/unit/token/test_pkiz_provider.py +++ /dev/null @@ -1,26 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone.tests import unit -from keystone.token.providers import pkiz - - -class TestPkizTokenProvider(unit.TestCase): - def setUp(self): - super(TestPkizTokenProvider, self).setUp() - self.provider = pkiz.Provider() - - def test_supports_bind_authentication_returns_true(self): - self.assertTrue(self.provider._supports_bind_authentication) - - def test_need_persistence_return_true(self): - self.assertIs(True, self.provider.needs_persistence()) diff --git a/keystone-moon/keystone/tests/unit/token/test_provider.py b/keystone-moon/keystone/tests/unit/token/test_provider.py deleted file mode 100644 index 7093f3ba..00000000 --- a/keystone-moon/keystone/tests/unit/token/test_provider.py +++ /dev/null @@ -1,30 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import six -from six.moves import urllib - -from keystone.tests import unit -from keystone.token import provider - - -class TestRandomStrings(unit.BaseTestCase): - def test_strings_are_url_safe(self): - s = provider.random_urlsafe_str() - self.assertEqual(s, urllib.parse.quote_plus(s)) - - def test_strings_can_be_converted_to_bytes(self): - s = provider.random_urlsafe_str() - self.assertIsInstance(s, six.text_type) - - b = provider.random_urlsafe_str_to_bytes(s) - self.assertIsInstance(b, six.binary_type) diff --git a/keystone-moon/keystone/tests/unit/token/test_token_data_helper.py b/keystone-moon/keystone/tests/unit/token/test_token_data_helper.py deleted file mode 100644 index 9e8c3889..00000000 --- a/keystone-moon/keystone/tests/unit/token/test_token_data_helper.py +++ /dev/null @@ -1,56 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import base64 -import uuid - -from testtools import matchers - -from keystone import exception -from keystone.tests import unit -from keystone.token.providers import common - - -class TestTokenDataHelper(unit.TestCase): - def setUp(self): - super(TestTokenDataHelper, self).setUp() - self.load_backends() - self.v3_data_helper = common.V3TokenDataHelper() - - def test_v3_token_data_helper_populate_audit_info_string(self): - token_data = {} - audit_info_bytes = base64.urlsafe_b64encode(uuid.uuid4().bytes)[:-2] - audit_info = audit_info_bytes.decode('utf-8') - self.v3_data_helper._populate_audit_info(token_data, audit_info) - self.assertIn(audit_info, token_data['audit_ids']) - self.assertThat(token_data['audit_ids'], matchers.HasLength(2)) - - def test_v3_token_data_helper_populate_audit_info_none(self): - token_data = {} - self.v3_data_helper._populate_audit_info(token_data, audit_info=None) - self.assertThat(token_data['audit_ids'], matchers.HasLength(1)) - self.assertNotIn(None, token_data['audit_ids']) - - def test_v3_token_data_helper_populate_audit_info_list(self): - token_data = {} - audit_info = [base64.urlsafe_b64encode(uuid.uuid4().bytes)[:-2], - base64.urlsafe_b64encode(uuid.uuid4().bytes)[:-2]] - self.v3_data_helper._populate_audit_info(token_data, audit_info) - self.assertEqual(audit_info, token_data['audit_ids']) - - def test_v3_token_data_helper_populate_audit_info_invalid(self): - token_data = {} - audit_info = dict() - self.assertRaises(exception.UnexpectedError, - self.v3_data_helper._populate_audit_info, - token_data=token_data, - audit_info=audit_info) diff --git a/keystone-moon/keystone/tests/unit/token/test_token_model.py b/keystone-moon/keystone/tests/unit/token/test_token_model.py deleted file mode 100644 index 1cb0ef55..00000000 --- a/keystone-moon/keystone/tests/unit/token/test_token_model.py +++ /dev/null @@ -1,263 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance 
with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import copy -import uuid - -from oslo_config import cfg -from oslo_utils import timeutils -from six.moves import range - -from keystone import exception -from keystone.federation import constants as federation_constants -from keystone.models import token_model -from keystone.tests.unit import core -from keystone.tests.unit import test_token_provider - - -CONF = cfg.CONF - - -class TestKeystoneTokenModel(core.TestCase): - def setUp(self): - super(TestKeystoneTokenModel, self).setUp() - self.v2_sample_token = copy.deepcopy( - test_token_provider.SAMPLE_V2_TOKEN) - self.v3_sample_token = copy.deepcopy( - test_token_provider.SAMPLE_V3_TOKEN) - - def test_token_model_v3(self): - token_data = token_model.KeystoneToken(uuid.uuid4().hex, - self.v3_sample_token) - self.assertIs(token_model.V3, token_data.version) - expires = timeutils.normalize_time(timeutils.parse_isotime( - self.v3_sample_token['token']['expires_at'])) - issued = timeutils.normalize_time(timeutils.parse_isotime( - self.v3_sample_token['token']['issued_at'])) - self.assertEqual(expires, token_data.expires) - self.assertEqual(issued, token_data.issued) - self.assertEqual(self.v3_sample_token['token']['user']['id'], - token_data.user_id) - self.assertEqual(self.v3_sample_token['token']['user']['name'], - token_data.user_name) - self.assertEqual(self.v3_sample_token['token']['user']['domain']['id'], - token_data.user_domain_id) - self.assertEqual( - self.v3_sample_token['token']['user']['domain']['name'], - token_data.user_domain_name) - self.assertEqual( - 
self.v3_sample_token['token']['project']['domain']['id'], - token_data.project_domain_id) - self.assertEqual( - self.v3_sample_token['token']['project']['domain']['name'], - token_data.project_domain_name) - self.assertEqual(self.v3_sample_token['token']['OS-TRUST:trust']['id'], - token_data.trust_id) - self.assertEqual( - self.v3_sample_token['token']['OS-TRUST:trust']['trustor_user_id'], - token_data.trustor_user_id) - self.assertEqual( - self.v3_sample_token['token']['OS-TRUST:trust']['trustee_user_id'], - token_data.trustee_user_id) - # Project Scoped Token - self.assertRaises(exception.UnexpectedError, getattr, token_data, - 'domain_id') - self.assertRaises(exception.UnexpectedError, getattr, token_data, - 'domain_name') - self.assertFalse(token_data.domain_scoped) - self.assertEqual(self.v3_sample_token['token']['project']['id'], - token_data.project_id) - self.assertEqual(self.v3_sample_token['token']['project']['name'], - token_data.project_name) - self.assertTrue(token_data.project_scoped) - self.assertTrue(token_data.scoped) - self.assertTrue(token_data.trust_scoped) - self.assertEqual( - [r['id'] for r in self.v3_sample_token['token']['roles']], - token_data.role_ids) - self.assertEqual( - [r['name'] for r in self.v3_sample_token['token']['roles']], - token_data.role_names) - token_data.pop('project') - self.assertFalse(token_data.project_scoped) - self.assertFalse(token_data.scoped) - self.assertRaises(exception.UnexpectedError, getattr, token_data, - 'project_id') - self.assertRaises(exception.UnexpectedError, getattr, token_data, - 'project_name') - self.assertFalse(token_data.project_scoped) - domain_id = uuid.uuid4().hex - domain_name = uuid.uuid4().hex - token_data['domain'] = {'id': domain_id, - 'name': domain_name} - self.assertEqual(domain_id, token_data.domain_id) - self.assertEqual(domain_name, token_data.domain_name) - self.assertTrue(token_data.domain_scoped) - - token_data['audit_ids'] = [uuid.uuid4().hex] - 
self.assertEqual(token_data.audit_id, - token_data['audit_ids'][0]) - self.assertEqual(token_data.audit_chain_id, - token_data['audit_ids'][0]) - token_data['audit_ids'].append(uuid.uuid4().hex) - self.assertEqual(token_data.audit_chain_id, - token_data['audit_ids'][1]) - del token_data['audit_ids'] - self.assertIsNone(token_data.audit_id) - self.assertIsNone(token_data.audit_chain_id) - - def test_token_model_v3_federated_user(self): - token_data = token_model.KeystoneToken(token_id=uuid.uuid4().hex, - token_data=self.v3_sample_token) - federation_data = {'identity_provider': {'id': uuid.uuid4().hex}, - 'protocol': {'id': 'saml2'}, - 'groups': [{'id': uuid.uuid4().hex} - for x in range(1, 5)]} - - self.assertFalse(token_data.is_federated_user) - self.assertEqual([], token_data.federation_group_ids) - self.assertIsNone(token_data.federation_protocol_id) - self.assertIsNone(token_data.federation_idp_id) - - token_data['user'][federation_constants.FEDERATION] = federation_data - - self.assertTrue(token_data.is_federated_user) - self.assertEqual([x['id'] for x in federation_data['groups']], - token_data.federation_group_ids) - self.assertEqual(federation_data['protocol']['id'], - token_data.federation_protocol_id) - self.assertEqual(federation_data['identity_provider']['id'], - token_data.federation_idp_id) - - def test_token_model_v2_federated_user(self): - token_data = token_model.KeystoneToken(token_id=uuid.uuid4().hex, - token_data=self.v2_sample_token) - federation_data = {'identity_provider': {'id': uuid.uuid4().hex}, - 'protocol': {'id': 'saml2'}, - 'groups': [{'id': uuid.uuid4().hex} - for x in range(1, 5)]} - self.assertFalse(token_data.is_federated_user) - self.assertEqual([], token_data.federation_group_ids) - self.assertIsNone(token_data.federation_protocol_id) - self.assertIsNone(token_data.federation_idp_id) - - token_data['user'][federation_constants.FEDERATION] = federation_data - - # Federated users should not exist in V2, the data should remain empty 
- self.assertFalse(token_data.is_federated_user) - self.assertEqual([], token_data.federation_group_ids) - self.assertIsNone(token_data.federation_protocol_id) - self.assertIsNone(token_data.federation_idp_id) - - def test_token_model_v2(self): - token_data = token_model.KeystoneToken(uuid.uuid4().hex, - self.v2_sample_token) - self.assertIs(token_model.V2, token_data.version) - expires = timeutils.normalize_time(timeutils.parse_isotime( - self.v2_sample_token['access']['token']['expires'])) - issued = timeutils.normalize_time(timeutils.parse_isotime( - self.v2_sample_token['access']['token']['issued_at'])) - self.assertEqual(expires, token_data.expires) - self.assertEqual(issued, token_data.issued) - self.assertEqual(self.v2_sample_token['access']['user']['id'], - token_data.user_id) - self.assertEqual(self.v2_sample_token['access']['user']['name'], - token_data.user_name) - self.assertEqual(CONF.identity.default_domain_id, - token_data.user_domain_id) - self.assertEqual('Default', token_data.user_domain_name) - self.assertEqual(CONF.identity.default_domain_id, - token_data.project_domain_id) - self.assertEqual('Default', - token_data.project_domain_name) - self.assertEqual(self.v2_sample_token['access']['trust']['id'], - token_data.trust_id) - self.assertEqual( - self.v2_sample_token['access']['trust']['trustor_user_id'], - token_data.trustor_user_id) - self.assertEqual( - self.v2_sample_token['access']['trust']['impersonation'], - token_data.trust_impersonation) - self.assertEqual( - self.v2_sample_token['access']['trust']['trustee_user_id'], - token_data.trustee_user_id) - # Project Scoped Token - self.assertEqual( - self.v2_sample_token['access']['token']['tenant']['id'], - token_data.project_id) - self.assertEqual( - self.v2_sample_token['access']['token']['tenant']['name'], - token_data.project_name) - self.assertTrue(token_data.project_scoped) - self.assertTrue(token_data.scoped) - self.assertTrue(token_data.trust_scoped) - self.assertEqual( - [r['name'] - 
for r in self.v2_sample_token['access']['user']['roles']], - token_data.role_names) - token_data['token'].pop('tenant') - self.assertFalse(token_data.scoped) - self.assertFalse(token_data.project_scoped) - self.assertFalse(token_data.domain_scoped) - self.assertRaises(exception.UnexpectedError, getattr, token_data, - 'project_id') - self.assertRaises(exception.UnexpectedError, getattr, token_data, - 'project_name') - self.assertRaises(exception.UnexpectedError, getattr, token_data, - 'project_domain_id') - self.assertRaises(exception.UnexpectedError, getattr, token_data, - 'project_domain_id') - # No Domain Scoped tokens in V2 - self.assertRaises(NotImplementedError, getattr, token_data, - 'domain_id') - self.assertRaises(NotImplementedError, getattr, token_data, - 'domain_name') - token_data['domain'] = {'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex} - self.assertRaises(NotImplementedError, getattr, token_data, - 'domain_id') - self.assertRaises(NotImplementedError, getattr, token_data, - 'domain_name') - self.assertFalse(token_data.domain_scoped) - - token_data['token']['audit_ids'] = [uuid.uuid4().hex] - self.assertEqual(token_data.audit_chain_id, - token_data['token']['audit_ids'][0]) - token_data['token']['audit_ids'].append(uuid.uuid4().hex) - self.assertEqual(token_data.audit_chain_id, - token_data['token']['audit_ids'][1]) - self.assertEqual(token_data.audit_id, - token_data['token']['audit_ids'][0]) - del token_data['token']['audit_ids'] - self.assertIsNone(token_data.audit_id) - self.assertIsNone(token_data.audit_chain_id) - - def test_token_model_unknown(self): - self.assertRaises(exception.UnsupportedTokenVersionException, - token_model.KeystoneToken, - token_id=uuid.uuid4().hex, - token_data={'bogus_data': uuid.uuid4().hex}) - - def test_token_model_dual_scoped_token(self): - domain = {'id': uuid.uuid4().hex, - 'name': uuid.uuid4().hex} - self.v2_sample_token['access']['domain'] = domain - self.v3_sample_token['token']['domain'] = domain - - # V2 
Tokens Cannot be domain scoped, this should work - token_model.KeystoneToken(token_id=uuid.uuid4().hex, - token_data=self.v2_sample_token) - - self.assertRaises(exception.UnexpectedError, - token_model.KeystoneToken, - token_id=uuid.uuid4().hex, - token_data=self.v3_sample_token) diff --git a/keystone-moon/keystone/tests/unit/token/test_uuid_provider.py b/keystone-moon/keystone/tests/unit/token/test_uuid_provider.py deleted file mode 100644 index 5c364490..00000000 --- a/keystone-moon/keystone/tests/unit/token/test_uuid_provider.py +++ /dev/null @@ -1,26 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone.tests import unit -from keystone.token.providers import uuid - - -class TestUuidTokenProvider(unit.TestCase): - def setUp(self): - super(TestUuidTokenProvider, self).setUp() - self.provider = uuid.Provider() - - def test_supports_bind_authentication_returns_true(self): - self.assertTrue(self.provider._supports_bind_authentication) - - def test_need_persistence_return_true(self): - self.assertIs(True, self.provider.needs_persistence()) diff --git a/keystone-moon/keystone/tests/unit/trust/__init__.py b/keystone-moon/keystone/tests/unit/trust/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/tests/unit/trust/test_backends.py b/keystone-moon/keystone/tests/unit/trust/test_backends.py deleted file mode 100644 index 05df866f..00000000 --- a/keystone-moon/keystone/tests/unit/trust/test_backends.py +++ /dev/null @@ -1,172 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import datetime -import uuid - -from oslo_utils import timeutils -from six.moves import range - -from keystone import exception - - -class TrustTests(object): - def create_sample_trust(self, new_id, remaining_uses=None): - self.trustor = self.user_foo - self.trustee = self.user_two - expires_at = datetime.datetime.utcnow().replace(year=2032) - trust_data = (self.trust_api.create_trust - (new_id, - {'trustor_user_id': self.trustor['id'], - 'trustee_user_id': self.user_two['id'], - 'project_id': self.tenant_bar['id'], - 'expires_at': expires_at, - 'impersonation': True, - 'remaining_uses': remaining_uses}, - roles=[{"id": "member"}, - {"id": "other"}, - {"id": "browser"}])) - return trust_data - - def test_delete_trust(self): - new_id = uuid.uuid4().hex - trust_data = self.create_sample_trust(new_id) - trust_id = trust_data['id'] - self.assertIsNotNone(trust_data) - trust_data = self.trust_api.get_trust(trust_id) - self.assertEqual(new_id, trust_data['id']) - self.trust_api.delete_trust(trust_id) - self.assertRaises(exception.TrustNotFound, - self.trust_api.get_trust, - trust_id) - - def test_delete_trust_not_found(self): - trust_id = uuid.uuid4().hex - self.assertRaises(exception.TrustNotFound, - self.trust_api.delete_trust, - trust_id) - - def test_get_trust(self): - new_id = uuid.uuid4().hex - trust_data = self.create_sample_trust(new_id) - trust_id = trust_data['id'] - self.assertIsNotNone(trust_data) - trust_data = self.trust_api.get_trust(trust_id) - self.assertEqual(new_id, trust_data['id']) - self.trust_api.delete_trust(trust_data['id']) - - def test_get_deleted_trust(self): - new_id = uuid.uuid4().hex - trust_data = self.create_sample_trust(new_id) - self.assertIsNotNone(trust_data) - self.assertIsNone(trust_data['deleted_at']) - self.trust_api.delete_trust(new_id) - self.assertRaises(exception.TrustNotFound, - self.trust_api.get_trust, - new_id) - deleted_trust = self.trust_api.get_trust(trust_data['id'], - deleted=True) - 
self.assertEqual(trust_data['id'], deleted_trust['id']) - self.assertIsNotNone(deleted_trust.get('deleted_at')) - - def test_create_trust(self): - new_id = uuid.uuid4().hex - trust_data = self.create_sample_trust(new_id) - - self.assertEqual(new_id, trust_data['id']) - self.assertEqual(self.trustee['id'], trust_data['trustee_user_id']) - self.assertEqual(self.trustor['id'], trust_data['trustor_user_id']) - self.assertTrue(timeutils.normalize_time(trust_data['expires_at']) > - timeutils.utcnow()) - - self.assertEqual([{'id': 'member'}, - {'id': 'other'}, - {'id': 'browser'}], trust_data['roles']) - - def test_list_trust_by_trustee(self): - for i in range(3): - self.create_sample_trust(uuid.uuid4().hex) - trusts = self.trust_api.list_trusts_for_trustee(self.trustee['id']) - self.assertEqual(3, len(trusts)) - self.assertEqual(trusts[0]["trustee_user_id"], self.trustee['id']) - trusts = self.trust_api.list_trusts_for_trustee(self.trustor['id']) - self.assertEqual(0, len(trusts)) - - def test_list_trust_by_trustor(self): - for i in range(3): - self.create_sample_trust(uuid.uuid4().hex) - trusts = self.trust_api.list_trusts_for_trustor(self.trustor['id']) - self.assertEqual(3, len(trusts)) - self.assertEqual(trusts[0]["trustor_user_id"], self.trustor['id']) - trusts = self.trust_api.list_trusts_for_trustor(self.trustee['id']) - self.assertEqual(0, len(trusts)) - - def test_list_trusts(self): - for i in range(3): - self.create_sample_trust(uuid.uuid4().hex) - trusts = self.trust_api.list_trusts() - self.assertEqual(3, len(trusts)) - - def test_trust_has_remaining_uses_positive(self): - # create a trust with limited uses, check that we have uses left - trust_data = self.create_sample_trust(uuid.uuid4().hex, - remaining_uses=5) - self.assertEqual(5, trust_data['remaining_uses']) - # create a trust with unlimited uses, check that we have uses left - trust_data = self.create_sample_trust(uuid.uuid4().hex) - self.assertIsNone(trust_data['remaining_uses']) - - def 
test_trust_has_remaining_uses_negative(self): - # try to create a trust with no remaining uses, check that it fails - self.assertRaises(exception.ValidationError, - self.create_sample_trust, - uuid.uuid4().hex, - remaining_uses=0) - # try to create a trust with negative remaining uses, - # check that it fails - self.assertRaises(exception.ValidationError, - self.create_sample_trust, - uuid.uuid4().hex, - remaining_uses=-12) - - def test_consume_use(self): - # consume a trust repeatedly until it has no uses anymore - trust_data = self.create_sample_trust(uuid.uuid4().hex, - remaining_uses=2) - self.trust_api.consume_use(trust_data['id']) - t = self.trust_api.get_trust(trust_data['id']) - self.assertEqual(1, t['remaining_uses']) - self.trust_api.consume_use(trust_data['id']) - # This was the last use, the trust isn't available anymore - self.assertRaises(exception.TrustNotFound, - self.trust_api.get_trust, - trust_data['id']) - - def test_duplicate_trusts_not_allowed(self): - self.trustor = self.user_foo - self.trustee = self.user_two - trust_data = {'trustor_user_id': self.trustor['id'], - 'trustee_user_id': self.user_two['id'], - 'project_id': self.tenant_bar['id'], - 'expires_at': timeutils.parse_isotime( - '2032-02-18T18:10:00Z'), - 'impersonation': True, - 'remaining_uses': None} - roles = [{"id": "member"}, - {"id": "other"}, - {"id": "browser"}] - self.trust_api.create_trust(uuid.uuid4().hex, trust_data, roles) - self.assertRaises(exception.Conflict, - self.trust_api.create_trust, - uuid.uuid4().hex, - trust_data, - roles) diff --git a/keystone-moon/keystone/tests/unit/utils.py b/keystone-moon/keystone/tests/unit/utils.py deleted file mode 100644 index e3e49e70..00000000 --- a/keystone-moon/keystone/tests/unit/utils.py +++ /dev/null @@ -1,85 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Useful utilities for tests.""" - -import functools -import os -import time -import uuid - -import six -from testtools import testcase - - -TZ = None - - -def timezone(func): - @functools.wraps(func) - def wrapper(*args, **kwargs): - tz_original = os.environ.get('TZ') - try: - if TZ: - os.environ['TZ'] = TZ - time.tzset() - return func(*args, **kwargs) - finally: - if TZ: - if tz_original: - os.environ['TZ'] = tz_original - else: - if 'TZ' in os.environ: - del os.environ['TZ'] - time.tzset() - return wrapper - - -def new_uuid(): - """Return a string UUID.""" - return uuid.uuid4().hex - - -def wip(message): - """Mark a test as work in progress. - - Based on code by Nat Pryce: - https://gist.github.com/npryce/997195#file-wip-py - - The test will always be run. If the test fails then a TestSkipped - exception is raised. If the test passes an AssertionError exception - is raised so that the developer knows they made the test pass. This - is a reminder to remove the decorator. 
- - :param message: a string message to help clarify why the test is - marked as a work in progress - - usage: - >>> @wip('waiting on bug #000000') - >>> def test(): - >>> pass - - """ - def _wip(f): - @six.wraps(f) - def run_test(*args, **kwargs): - try: - f(*args, **kwargs) - except Exception: - raise testcase.TestSkipped('work in progress test failed: ' + - message) - - raise AssertionError('work in progress test passed: ' + message) - - return run_test - - return _wip diff --git a/keystone-moon/keystone/token/__init__.py b/keystone-moon/keystone/token/__init__.py deleted file mode 100644 index f85ffc79..00000000 --- a/keystone-moon/keystone/token/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.token import controllers # noqa -from keystone.token import persistence # noqa -from keystone.token import provider # noqa diff --git a/keystone-moon/keystone/token/_simple_cert.py b/keystone-moon/keystone/token/_simple_cert.py deleted file mode 100644 index 9c369255..00000000 --- a/keystone-moon/keystone/token/_simple_cert.py +++ /dev/null @@ -1,91 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# TODO(morganfainberg): Remove this file and extension in the "O" release as -# it is only used in support of the PKI/PKIz token providers. -import functools - -from oslo_config import cfg -import webob - -from keystone.common import controller -from keystone.common import dependency -from keystone.common import extension -from keystone.common import json_home -from keystone.common import wsgi -from keystone import exception - - -CONF = cfg.CONF -EXTENSION_DATA = { - 'name': 'OpenStack Simple Certificate API', - 'namespace': 'http://docs.openstack.org/identity/api/ext/' - 'OS-SIMPLE-CERT/v1.0', - 'alias': 'OS-SIMPLE-CERT', - 'updated': '2014-01-20T12:00:0-00:00', - 'description': 'OpenStack simple certificate retrieval extension', - 'links': [ - { - 'rel': 'describedby', - 'type': 'text/html', - 'href': 'http://developer.openstack.org/' - 'api-ref-identity-v2-ext.html', - } - ]} -extension.register_admin_extension(EXTENSION_DATA['alias'], EXTENSION_DATA) -extension.register_public_extension(EXTENSION_DATA['alias'], EXTENSION_DATA) - -build_resource_relation = functools.partial( - json_home.build_v3_extension_resource_relation, - extension_name='OS-SIMPLE-CERT', extension_version='1.0') - - -class Routers(wsgi.RoutersBase): - - def _construct_url(self, suffix): - return "/OS-SIMPLE-CERT/%s" % suffix - - def append_v3_routers(self, mapper, routers): - controller = SimpleCert() - - self._add_resource( - mapper, controller, - path=self._construct_url('ca'), - get_action='get_ca_certificate', - rel=build_resource_relation(resource_name='ca_certificate')) - 
self._add_resource( - mapper, controller, - path=self._construct_url('certificates'), - get_action='list_certificates', - rel=build_resource_relation(resource_name='certificates')) - - -@dependency.requires('token_provider_api') -class SimpleCert(controller.V3Controller): - - def _get_certificate(self, name): - try: - with open(name, 'r') as f: - body = f.read() - except IOError: - raise exception.CertificateFilesUnavailable() - - # NOTE(jamielennox): We construct the webob Response ourselves here so - # that we don't pass through the JSON encoding process. - headers = [('Content-Type', 'application/x-pem-file')] - return webob.Response(body=body, headerlist=headers, status="200 OK") - - def get_ca_certificate(self, context): - return self._get_certificate(CONF.signing.ca_certs) - - def list_certificates(self, context): - return self._get_certificate(CONF.signing.certfile) diff --git a/keystone-moon/keystone/token/controllers.py b/keystone-moon/keystone/token/controllers.py deleted file mode 100644 index 6eeb23ec..00000000 --- a/keystone-moon/keystone/token/controllers.py +++ /dev/null @@ -1,529 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import datetime -import sys - -from keystone.common import utils -from keystoneclient.common import cms -from oslo_config import cfg -from oslo_log import log -from oslo_serialization import jsonutils -from oslo_utils import timeutils -import six - -from keystone.common import controller -from keystone.common import dependency -from keystone.common import wsgi -from keystone import exception -from keystone.i18n import _ -from keystone.models import token_model -from keystone.token import provider - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -class ExternalAuthNotApplicable(Exception): - """External authentication is not applicable.""" - - pass - - -@dependency.requires('assignment_api', 'catalog_api', 'identity_api', - 'resource_api', 'role_api', 'token_provider_api', - 'trust_api') -class Auth(controller.V2Controller): - - @controller.v2_deprecated - def ca_cert(self, context, auth=None): - with open(CONF.signing.ca_certs, 'r') as ca_file: - data = ca_file.read() - return data - - @controller.v2_deprecated - def signing_cert(self, context, auth=None): - with open(CONF.signing.certfile, 'r') as cert_file: - data = cert_file.read() - return data - - @controller.v2_auth_deprecated - def authenticate(self, context, auth=None): - """Authenticate credentials and return a token. - - Accept auth as a dict that looks like:: - - { - "auth":{ - "passwordCredentials":{ - "username":"test_user", - "password":"mypass" - }, - "tenantName":"customer-x" - } - } - - In this case, tenant is optional, if not provided the token will be - considered "unscoped" and can later be used to get a scoped token. - - Alternatively, this call accepts auth with only a token and tenant - that will return a token that is scoped to that tenant. 
- """ - if auth is None: - raise exception.ValidationError(attribute='auth', - target='request body') - - if "token" in auth: - # Try to authenticate using a token - auth_info = self._authenticate_token( - context, auth) - else: - # Try external authentication - try: - auth_info = self._authenticate_external( - context, auth) - except ExternalAuthNotApplicable: - # Try local authentication - auth_info = self._authenticate_local( - context, auth) - - user_ref, tenant_ref, metadata_ref, expiry, bind, audit_id = auth_info - # Validate that the auth info is valid and nothing is disabled - try: - self.identity_api.assert_user_enabled( - user_id=user_ref['id'], user=user_ref) - if tenant_ref: - self.resource_api.assert_project_enabled( - project_id=tenant_ref['id'], project=tenant_ref) - except AssertionError as e: - six.reraise(exception.Unauthorized, exception.Unauthorized(e), - sys.exc_info()[2]) - # NOTE(morganfainberg): Make sure the data is in correct form since it - # might be consumed external to Keystone and this is a v2.0 controller. - # The user_ref is encoded into the auth_token_data which is returned as - # part of the token data. The token provider doesn't care about the - # format. 
- user_ref = self.v3_to_v2_user(user_ref) - if tenant_ref: - tenant_ref = self.v3_to_v2_project(tenant_ref) - - auth_token_data = self._get_auth_token_data(user_ref, - tenant_ref, - metadata_ref, - expiry, - audit_id) - - if tenant_ref: - catalog_ref = self.catalog_api.get_catalog( - user_ref['id'], tenant_ref['id']) - else: - catalog_ref = {} - - auth_token_data['id'] = 'placeholder' - if bind: - auth_token_data['bind'] = bind - - roles_ref = [] - for role_id in metadata_ref.get('roles', []): - role_ref = self.role_api.get_role(role_id) - roles_ref.append(dict(name=role_ref['name'])) - - (token_id, token_data) = self.token_provider_api.issue_v2_token( - auth_token_data, roles_ref=roles_ref, catalog_ref=catalog_ref) - - # NOTE(wanghong): We consume a trust use only when we are using trusts - # and have successfully issued a token. - if CONF.trust.enabled and 'trust_id' in auth: - self.trust_api.consume_use(auth['trust_id']) - - return token_data - - def _restrict_scope(self, token_model_ref): - # A trust token cannot be used to get another token - if token_model_ref.trust_scoped: - raise exception.Forbidden() - if not CONF.token.allow_rescope_scoped_token: - # Do not allow conversion from scoped tokens. - if token_model_ref.project_scoped or token_model_ref.domain_scoped: - raise exception.Forbidden(action=_("rescope a scoped token")) - - def _authenticate_token(self, context, auth): - """Try to authenticate using an already existing token. 
- - Returns auth_token_data, (user_ref, tenant_ref, metadata_ref) - """ - if 'token' not in auth: - raise exception.ValidationError( - attribute='token', target='auth') - - if "id" not in auth['token']: - raise exception.ValidationError( - attribute="id", target="token") - - old_token = auth['token']['id'] - if len(old_token) > CONF.max_token_size: - raise exception.ValidationSizeError(attribute='token', - size=CONF.max_token_size) - - try: - token_model_ref = token_model.KeystoneToken( - token_id=old_token, - token_data=self.token_provider_api.validate_v2_token(old_token) - ) - except exception.NotFound as e: - raise exception.Unauthorized(e) - - wsgi.validate_token_bind(context, token_model_ref) - - self._restrict_scope(token_model_ref) - user_id = token_model_ref.user_id - tenant_id = self._get_project_id_from_auth(auth) - - if not CONF.trust.enabled and 'trust_id' in auth: - raise exception.Forbidden('Trusts are disabled.') - elif CONF.trust.enabled and 'trust_id' in auth: - try: - trust_ref = self.trust_api.get_trust(auth['trust_id']) - except exception.TrustNotFound: - raise exception.Forbidden() - if user_id != trust_ref['trustee_user_id']: - raise exception.Forbidden() - if (trust_ref['project_id'] and - tenant_id != trust_ref['project_id']): - raise exception.Forbidden() - if ('expires' in trust_ref) and (trust_ref['expires']): - expiry = trust_ref['expires'] - if expiry < timeutils.parse_isotime(utils.isotime()): - raise exception.Forbidden() - user_id = trust_ref['trustor_user_id'] - trustor_user_ref = self.identity_api.get_user( - trust_ref['trustor_user_id']) - if not trustor_user_ref['enabled']: - raise exception.Forbidden() - trustee_user_ref = self.identity_api.get_user( - trust_ref['trustee_user_id']) - if not trustee_user_ref['enabled']: - raise exception.Forbidden() - - if trust_ref['impersonation'] is True: - current_user_ref = trustor_user_ref - else: - current_user_ref = trustee_user_ref - - else: - current_user_ref = 
self.identity_api.get_user(user_id) - - metadata_ref = {} - tenant_ref, metadata_ref['roles'] = self._get_project_roles_and_ref( - user_id, tenant_id) - - expiry = token_model_ref.expires - if CONF.trust.enabled and 'trust_id' in auth: - trust_id = auth['trust_id'] - trust_roles = [] - for role in trust_ref['roles']: - if 'roles' not in metadata_ref: - raise exception.Forbidden() - if role['id'] in metadata_ref['roles']: - trust_roles.append(role['id']) - else: - raise exception.Forbidden() - if 'expiry' in trust_ref and trust_ref['expiry']: - trust_expiry = timeutils.parse_isotime(trust_ref['expiry']) - if trust_expiry < expiry: - expiry = trust_expiry - metadata_ref['roles'] = trust_roles - metadata_ref['trustee_user_id'] = trust_ref['trustee_user_id'] - metadata_ref['trust_id'] = trust_id - - bind = token_model_ref.bind - audit_id = token_model_ref.audit_chain_id - - return (current_user_ref, tenant_ref, metadata_ref, expiry, bind, - audit_id) - - def _authenticate_local(self, context, auth): - """Try to authenticate against the identity backend. 
- - Returns auth_token_data, (user_ref, tenant_ref, metadata_ref) - """ - if 'passwordCredentials' not in auth: - raise exception.ValidationError( - attribute='passwordCredentials', target='auth') - - if "password" not in auth['passwordCredentials']: - raise exception.ValidationError( - attribute='password', target='passwordCredentials') - - password = auth['passwordCredentials']['password'] - if password and len(password) > CONF.identity.max_password_length: - raise exception.ValidationSizeError( - attribute='password', size=CONF.identity.max_password_length) - - if (not auth['passwordCredentials'].get("userId") and - not auth['passwordCredentials'].get("username")): - raise exception.ValidationError( - attribute='username or userId', - target='passwordCredentials') - - user_id = auth['passwordCredentials'].get('userId') - if user_id and len(user_id) > CONF.max_param_size: - raise exception.ValidationSizeError(attribute='userId', - size=CONF.max_param_size) - - username = auth['passwordCredentials'].get('username', '') - - if username: - if len(username) > CONF.max_param_size: - raise exception.ValidationSizeError(attribute='username', - size=CONF.max_param_size) - try: - user_ref = self.identity_api.get_user_by_name( - username, CONF.identity.default_domain_id) - user_id = user_ref['id'] - except exception.UserNotFound as e: - raise exception.Unauthorized(e) - - try: - user_ref = self.identity_api.authenticate( - context, - user_id=user_id, - password=password) - except AssertionError as e: - raise exception.Unauthorized(e.args[0]) - - metadata_ref = {} - tenant_id = self._get_project_id_from_auth(auth) - tenant_ref, metadata_ref['roles'] = self._get_project_roles_and_ref( - user_id, tenant_id) - - expiry = provider.default_expire_time() - bind = None - audit_id = None - return (user_ref, tenant_ref, metadata_ref, expiry, bind, audit_id) - - def _authenticate_external(self, context, auth): - """Try to authenticate an external user via REMOTE_USER variable. 
- - Returns auth_token_data, (user_ref, tenant_ref, metadata_ref) - """ - environment = context.get('environment', {}) - if not environment.get('REMOTE_USER'): - raise ExternalAuthNotApplicable() - - username = environment['REMOTE_USER'] - try: - user_ref = self.identity_api.get_user_by_name( - username, CONF.identity.default_domain_id) - user_id = user_ref['id'] - except exception.UserNotFound as e: - raise exception.Unauthorized(e) - - metadata_ref = {} - tenant_id = self._get_project_id_from_auth(auth) - tenant_ref, metadata_ref['roles'] = self._get_project_roles_and_ref( - user_id, tenant_id) - - expiry = provider.default_expire_time() - bind = None - if ('kerberos' in CONF.token.bind and - environment.get('AUTH_TYPE', '').lower() == 'negotiate'): - bind = {'kerberos': username} - audit_id = None - - return (user_ref, tenant_ref, metadata_ref, expiry, bind, audit_id) - - def _get_auth_token_data(self, user, tenant, metadata, expiry, audit_id): - return dict(user=user, - tenant=tenant, - metadata=metadata, - expires=expiry, - parent_audit_id=audit_id) - - def _get_project_id_from_auth(self, auth): - """Extract tenant information from auth dict. - - Returns a valid tenant_id if it exists, or None if not specified. 
- """ - tenant_id = auth.get('tenantId') - if tenant_id and len(tenant_id) > CONF.max_param_size: - raise exception.ValidationSizeError(attribute='tenantId', - size=CONF.max_param_size) - - tenant_name = auth.get('tenantName') - if tenant_name and len(tenant_name) > CONF.max_param_size: - raise exception.ValidationSizeError(attribute='tenantName', - size=CONF.max_param_size) - - if tenant_name: - if (CONF.resource.project_name_url_safe == 'strict' and - utils.is_not_url_safe(tenant_name)): - msg = _('Tenant name cannot contain reserved characters.') - raise exception.Unauthorized(message=msg) - try: - tenant_ref = self.resource_api.get_project_by_name( - tenant_name, CONF.identity.default_domain_id) - tenant_id = tenant_ref['id'] - except exception.ProjectNotFound as e: - raise exception.Unauthorized(e) - return tenant_id - - def _get_project_roles_and_ref(self, user_id, tenant_id): - """Returns the project roles for this user, and the project ref.""" - tenant_ref = None - role_list = [] - if tenant_id: - try: - tenant_ref = self.resource_api.get_project(tenant_id) - role_list = self.assignment_api.get_roles_for_user_and_project( - user_id, tenant_id) - except exception.ProjectNotFound: - msg = _('Project ID not found: %(t_id)s') % {'t_id': tenant_id} - raise exception.Unauthorized(msg) - - if not role_list: - msg = _('User %(u_id)s is unauthorized for tenant %(t_id)s') - msg = msg % {'u_id': user_id, 't_id': tenant_id} - LOG.warning(msg) - raise exception.Unauthorized(msg) - - return (tenant_ref, role_list) - - def _get_token_ref(self, token_id, belongs_to=None): - """Returns a token if a valid one exists. - - Optionally, limited to a token owned by a specific tenant. 
- - """ - token_ref = token_model.KeystoneToken( - token_id=token_id, - token_data=self.token_provider_api.validate_token(token_id)) - if belongs_to: - if not token_ref.project_scoped: - raise exception.Unauthorized( - _('Token does not belong to specified tenant.')) - if token_ref.project_id != belongs_to: - raise exception.Unauthorized( - _('Token does not belong to specified tenant.')) - return token_ref - - @controller.v2_deprecated - @controller.protected() - def validate_token_head(self, context, token_id): - """Check that a token is valid. - - Optionally, also ensure that it is owned by a specific tenant. - - Identical to ``validate_token``, except does not return a response. - - The code in ``keystone.common.wsgi.render_response`` will remove - the content body. - - """ - belongs_to = context['query_string'].get('belongsTo') - return self.token_provider_api.validate_v2_token(token_id, belongs_to) - - @controller.v2_deprecated - @controller.protected() - def validate_token(self, context, token_id): - """Check that a token is valid. - - Optionally, also ensure that it is owned by a specific tenant. - - Returns metadata about the token along any associated roles. 
- - """ - belongs_to = context['query_string'].get('belongsTo') - # TODO(ayoung) validate against revocation API - return self.token_provider_api.validate_v2_token(token_id, belongs_to) - - @controller.v2_deprecated - def delete_token(self, context, token_id): - """Delete a token, effectively invalidating it for authz.""" - # TODO(termie): this stuff should probably be moved to middleware - self.assert_admin(context) - self.token_provider_api.revoke_token(token_id) - - @controller.v2_deprecated - @controller.protected() - def revocation_list(self, context, auth=None): - if not CONF.token.revoke_by_id: - raise exception.Gone() - tokens = self.token_provider_api.list_revoked_tokens() - - for t in tokens: - expires = t['expires'] - if expires and isinstance(expires, datetime.datetime): - t['expires'] = utils.isotime(expires) - data = {'revoked': tokens} - json_data = jsonutils.dumps(data) - signed_text = cms.cms_sign_text(json_data, - CONF.signing.certfile, - CONF.signing.keyfile) - - return {'signed': signed_text} - - @controller.v2_deprecated - def endpoints(self, context, token_id): - """Return a list of endpoints available to the token.""" - self.assert_admin(context) - - token_ref = self._get_token_ref(token_id) - - catalog_ref = None - if token_ref.project_id: - catalog_ref = self.catalog_api.get_catalog( - token_ref.user_id, - token_ref.project_id) - - return Auth.format_endpoint_list(catalog_ref) - - @classmethod - def format_endpoint_list(cls, catalog_ref): - """Formats a list of endpoints according to Identity API v2. 
- - The v2.0 API wants an endpoint list to look like:: - - { - 'endpoints': [ - { - 'id': $endpoint_id, - 'name': $SERVICE[name], - 'type': $SERVICE, - 'tenantId': $tenant_id, - 'region': $REGION, - } - ], - 'endpoints_links': [], - } - - """ - if not catalog_ref: - return {} - - endpoints = [] - for region_name, region_ref in catalog_ref.items(): - for service_type, service_ref in region_ref.items(): - endpoints.append({ - 'id': service_ref.get('id'), - 'name': service_ref.get('name'), - 'type': service_type, - 'region': region_name, - 'publicURL': service_ref.get('publicURL'), - 'internalURL': service_ref.get('internalURL'), - 'adminURL': service_ref.get('adminURL'), - }) - - return {'endpoints': endpoints, 'endpoints_links': []} diff --git a/keystone-moon/keystone/token/persistence/__init__.py b/keystone-moon/keystone/token/persistence/__init__.py deleted file mode 100644 index 9d8e17f2..00000000 --- a/keystone-moon/keystone/token/persistence/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone.token.persistence.core import * # noqa - - -__all__ = ('Manager', 'Driver') diff --git a/keystone-moon/keystone/token/persistence/backends/__init__.py b/keystone-moon/keystone/token/persistence/backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/token/persistence/backends/kvs.py b/keystone-moon/keystone/token/persistence/backends/kvs.py deleted file mode 100644 index 3620db58..00000000 --- a/keystone-moon/keystone/token/persistence/backends/kvs.py +++ /dev/null @@ -1,367 +0,0 @@ -# Copyright 2013 Metacloud, Inc. -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from __future__ import absolute_import -import copy - -from oslo_config import cfg -from oslo_log import log -from oslo_utils import timeutils -import six - -from keystone.common import kvs -from keystone.common import utils -from keystone import exception -from keystone.i18n import _, _LE, _LW -from keystone import token -from keystone.token import provider - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -class Token(token.persistence.TokenDriverV8): - """KeyValueStore backend for tokens. - - This is the base implementation for any/all key-value-stores (e.g. - memcached) for the Token backend. It is recommended to only use the base - in-memory implementation for testing purposes. 
- """ - - revocation_key = 'revocation-list' - kvs_backend = 'openstack.kvs.Memory' - - def __init__(self, backing_store=None, **kwargs): - super(Token, self).__init__() - self._store = kvs.get_key_value_store('token-driver') - if backing_store is not None: - self.kvs_backend = backing_store - if not self._store.is_configured: - # Do not re-configure the backend if the store has been initialized - self._store.configure(backing_store=self.kvs_backend, **kwargs) - if self.__class__ == Token: - # NOTE(morganfainberg): Only warn if the base KVS implementation - # is instantiated. - LOG.warning(_LW('It is recommended to only use the base ' - 'key-value-store implementation for the token ' - 'driver for testing purposes. Please use ' - "'memcache' or 'sql' instead.")) - - def _prefix_token_id(self, token_id): - return 'token-%s' % token_id.encode('utf-8') - - def _prefix_user_id(self, user_id): - return 'usertokens-%s' % user_id.encode('utf-8') - - def _get_key_or_default(self, key, default=None): - try: - return self._store.get(key) - except exception.NotFound: - return default - - def _get_key(self, key): - return self._store.get(key) - - def _set_key(self, key, value, lock=None): - self._store.set(key, value, lock) - - def _delete_key(self, key): - return self._store.delete(key) - - def get_token(self, token_id): - ptk = self._prefix_token_id(token_id) - try: - token_ref = self._get_key(ptk) - except exception.NotFound: - raise exception.TokenNotFound(token_id=token_id) - - return token_ref - - def create_token(self, token_id, data): - """Create a token by id and data. - - It is assumed the caller has performed data validation on the "data" - parameter. 
- """ - data_copy = copy.deepcopy(data) - ptk = self._prefix_token_id(token_id) - if not data_copy.get('expires'): - data_copy['expires'] = provider.default_expire_time() - if not data_copy.get('user_id'): - data_copy['user_id'] = data_copy['user']['id'] - - # NOTE(morganfainberg): for ease of manipulating the data without - # concern about the backend, always store the value(s) in the - # index as the isotime (string) version so this is where the string is - # built. - expires_str = utils.isotime(data_copy['expires'], subsecond=True) - - self._set_key(ptk, data_copy) - user_id = data['user']['id'] - user_key = self._prefix_user_id(user_id) - self._update_user_token_list(user_key, token_id, expires_str) - if CONF.trust.enabled and data.get('trust_id'): - # NOTE(morganfainberg): If trusts are enabled and this is a trust - # scoped token, we add the token to the trustee list as well. This - # allows password changes of the trustee to also expire the token. - # There is no harm in placing the token in multiple lists, as - # _list_tokens is smart enough to handle almost any case of - # valid/invalid/expired for a given token. - token_data = data_copy['token_data'] - if data_copy['token_version'] == token.provider.V2: - trustee_user_id = token_data['access']['trust'][ - 'trustee_user_id'] - elif data_copy['token_version'] == token.provider.V3: - trustee_user_id = token_data['OS-TRUST:trust'][ - 'trustee_user_id'] - else: - raise exception.UnsupportedTokenVersionException( - _('Unknown token version %s') % - data_copy.get('token_version')) - - trustee_key = self._prefix_user_id(trustee_user_id) - self._update_user_token_list(trustee_key, token_id, expires_str) - - return data_copy - - def _get_user_token_list_with_expiry(self, user_key): - """Return user token list with token expiry. 
- - :return: the tuples in the format (token_id, token_expiry) - :rtype: list - """ - return self._get_key_or_default(user_key, default=[]) - - def _get_user_token_list(self, user_key): - """Return a list of token_ids for the user_key.""" - token_list = self._get_user_token_list_with_expiry(user_key) - # Each element is a tuple of (token_id, token_expiry). Most code does - # not care about the expiry, it is stripped out and only a - # list of token_ids are returned. - return [t[0] for t in token_list] - - def _update_user_token_list(self, user_key, token_id, expires_isotime_str): - current_time = self._get_current_time() - revoked_token_list = set([t['id'] for t in - self.list_revoked_tokens()]) - - with self._store.get_lock(user_key) as lock: - filtered_list = [] - token_list = self._get_user_token_list_with_expiry(user_key) - for item in token_list: - try: - item_id, expires = self._format_token_index_item(item) - except (ValueError, TypeError): - # NOTE(morganfainberg): Skip on expected errors - # possibilities from the `_format_token_index_item` method. - continue - - if expires < current_time: - LOG.debug(('Token `%(token_id)s` is expired, removing ' - 'from `%(user_key)s`.'), - {'token_id': item_id, 'user_key': user_key}) - continue - - if item_id in revoked_token_list: - # NOTE(morganfainberg): If the token has been revoked, it - # can safely be removed from this list. This helps to keep - # the user_token_list as reasonably small as possible. 
- LOG.debug(('Token `%(token_id)s` is revoked, removing ' - 'from `%(user_key)s`.'), - {'token_id': item_id, 'user_key': user_key}) - continue - filtered_list.append(item) - filtered_list.append((token_id, expires_isotime_str)) - self._set_key(user_key, filtered_list, lock) - return filtered_list - - def _get_current_time(self): - return timeutils.normalize_time(timeutils.utcnow()) - - def _add_to_revocation_list(self, data, lock): - filtered_list = [] - revoked_token_data = {} - - current_time = self._get_current_time() - expires = data['expires'] - - if isinstance(expires, six.string_types): - expires = timeutils.parse_isotime(expires) - - expires = timeutils.normalize_time(expires) - - if expires < current_time: - LOG.warning(_LW('Token `%s` is expired, not adding to the ' - 'revocation list.'), data['id']) - return - - revoked_token_data['expires'] = utils.isotime(expires, - subsecond=True) - revoked_token_data['id'] = data['id'] - - token_data = data['token_data'] - if 'access' in token_data: - # It's a v2 token. - audit_ids = token_data['access']['token']['audit_ids'] - else: - # It's a v3 token. - audit_ids = token_data['token']['audit_ids'] - revoked_token_data['audit_id'] = audit_ids[0] - - token_list = self._get_key_or_default(self.revocation_key, default=[]) - if not isinstance(token_list, list): - # NOTE(morganfainberg): In the case that the revocation list is not - # in a format we understand, reinitialize it. This is an attempt to - # not allow the revocation list to be completely broken if - # somehow the key is changed outside of keystone (e.g. memcache - # that is shared by multiple applications). Logging occurs at error - # level so that the cloud administrators have some awareness that - # the revocation_list needed to be cleared out. In all, this should - # be recoverable. Keystone cannot control external applications - # from changing a key in some backends, however, it is possible to - # gracefully handle and notify of this event. 
- LOG.error(_LE('Reinitializing revocation list due to error ' - 'in loading revocation list from backend. ' - 'Expected `list` type got `%(type)s`. Old ' - 'revocation list data: %(list)r'), - {'type': type(token_list), 'list': token_list}) - token_list = [] - - # NOTE(morganfainberg): on revocation, cleanup the expired entries, try - # to keep the list of tokens revoked at the minimum. - for token_data in token_list: - try: - expires_at = timeutils.normalize_time( - timeutils.parse_isotime(token_data['expires'])) - except ValueError: - LOG.warning(_LW('Removing `%s` from revocation list due to ' - 'invalid expires data in revocation list.'), - token_data.get('id', 'INVALID_TOKEN_DATA')) - continue - if expires_at > current_time: - filtered_list.append(token_data) - filtered_list.append(revoked_token_data) - self._set_key(self.revocation_key, filtered_list, lock) - - def delete_token(self, token_id): - # Test for existence - with self._store.get_lock(self.revocation_key) as lock: - data = self.get_token(token_id) - ptk = self._prefix_token_id(token_id) - result = self._delete_key(ptk) - self._add_to_revocation_list(data, lock) - return result - - def delete_tokens(self, user_id, tenant_id=None, trust_id=None, - consumer_id=None): - return super(Token, self).delete_tokens( - user_id=user_id, - tenant_id=tenant_id, - trust_id=trust_id, - consumer_id=consumer_id, - ) - - def _format_token_index_item(self, item): - try: - token_id, expires = item - except (TypeError, ValueError): - LOG.debug(('Invalid token entry expected tuple of ' - '`(, )` got: `%(item)r`'), - dict(item=item)) - raise - - try: - expires = timeutils.normalize_time( - timeutils.parse_isotime(expires)) - except ValueError: - LOG.debug(('Invalid expires time on token `%(token_id)s`:' - ' %(expires)r'), - dict(token_id=token_id, expires=expires)) - raise - return token_id, expires - - def _token_match_tenant(self, token_ref, tenant_id): - if token_ref.get('tenant'): - return 
token_ref['tenant'].get('id') == tenant_id - return False - - def _token_match_trust(self, token_ref, trust_id): - if not token_ref.get('trust_id'): - return False - return token_ref['trust_id'] == trust_id - - def _token_match_consumer(self, token_ref, consumer_id): - try: - oauth = token_ref['token_data']['token']['OS-OAUTH1'] - return oauth.get('consumer_id') == consumer_id - except KeyError: - return False - - def _list_tokens(self, user_id, tenant_id=None, trust_id=None, - consumer_id=None): - # This function is used to generate the list of tokens that should be - # revoked when revoking by token identifiers. This approach will be - # deprecated soon, probably in the Juno release. Setting revoke_by_id - # to False indicates that this kind of recording should not be - # performed. In order to test the revocation events, tokens shouldn't - # be deleted from the backends. This check ensures that tokens are - # still recorded. - if not CONF.token.revoke_by_id: - return [] - tokens = [] - user_key = self._prefix_user_id(user_id) - token_list = self._get_user_token_list_with_expiry(user_key) - current_time = self._get_current_time() - for item in token_list: - try: - token_id, expires = self._format_token_index_item(item) - except (TypeError, ValueError): - # NOTE(morganfainberg): Skip on expected error possibilities - # from the `_format_token_index_item` method. - continue - - if expires < current_time: - continue - - try: - token_ref = self.get_token(token_id) - except exception.TokenNotFound: - # NOTE(morganfainberg): Token doesn't exist, skip it. 
- continue - if token_ref: - if tenant_id is not None: - if not self._token_match_tenant(token_ref, tenant_id): - continue - if trust_id is not None: - if not self._token_match_trust(token_ref, trust_id): - continue - if consumer_id is not None: - if not self._token_match_consumer(token_ref, consumer_id): - continue - - tokens.append(token_id) - return tokens - - def list_revoked_tokens(self): - revoked_token_list = self._get_key_or_default(self.revocation_key, - default=[]) - if isinstance(revoked_token_list, list): - return revoked_token_list - return [] - - def flush_expired_tokens(self): - """Archive or delete tokens that have expired.""" - raise exception.NotImplemented() diff --git a/keystone-moon/keystone/token/persistence/backends/memcache.py b/keystone-moon/keystone/token/persistence/backends/memcache.py deleted file mode 100644 index e6b0fcab..00000000 --- a/keystone-moon/keystone/token/persistence/backends/memcache.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2013 Metacloud, Inc. -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_config import cfg -from oslo_log import versionutils - -from keystone.token.persistence.backends import kvs - - -CONF = cfg.CONF - - -class Token(kvs.Token): - kvs_backend = 'openstack.kvs.Memcached' - memcached_backend = 'memcached' - - @versionutils.deprecated( - what='Memcache Token Persistence Driver', - as_of=versionutils.deprecated.MITAKA, - in_favor_of='fernet token driver (no-persistence)', - remove_in=0) - def __init__(self, *args, **kwargs): - kwargs['memcached_backend'] = self.memcached_backend - kwargs['no_expiry_keys'] = [self.revocation_key] - kwargs['memcached_expire_time'] = CONF.token.expiration - kwargs['url'] = CONF.memcache.servers - super(Token, self).__init__(*args, **kwargs) diff --git a/keystone-moon/keystone/token/persistence/backends/memcache_pool.py b/keystone-moon/keystone/token/persistence/backends/memcache_pool.py deleted file mode 100644 index 39a5ca65..00000000 --- a/keystone-moon/keystone/token/persistence/backends/memcache_pool.py +++ /dev/null @@ -1,34 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from oslo_config import cfg -from oslo_log import versionutils - -from keystone.token.persistence.backends import memcache - - -CONF = cfg.CONF - - -class Token(memcache.Token): - memcached_backend = 'pooled_memcached' - - @versionutils.deprecated( - what='Memcache Pool Token Persistence Driver', - as_of=versionutils.deprecated.MITAKA, - in_favor_of='fernet token driver (no-persistence)', - remove_in=0) - def __init__(self, *args, **kwargs): - for arg in ('dead_retry', 'socket_timeout', 'pool_maxsize', - 'pool_unused_timeout', 'pool_connection_get_timeout'): - kwargs[arg] = getattr(CONF.memcache, arg) - super(Token, self).__init__(*args, **kwargs) diff --git a/keystone-moon/keystone/token/persistence/backends/sql.py b/keystone-moon/keystone/token/persistence/backends/sql.py deleted file mode 100644 index 4b3439a1..00000000 --- a/keystone-moon/keystone/token/persistence/backends/sql.py +++ /dev/null @@ -1,286 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import copy -import functools - -from oslo_config import cfg -from oslo_log import log -from oslo_utils import timeutils - -from keystone.common import sql -from keystone import exception -from keystone.i18n import _LI -from keystone import token -from keystone.token import provider - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -class TokenModel(sql.ModelBase, sql.DictBase): - __tablename__ = 'token' - attributes = ['id', 'expires', 'user_id', 'trust_id'] - id = sql.Column(sql.String(64), primary_key=True) - expires = sql.Column(sql.DateTime(), default=None) - extra = sql.Column(sql.JsonBlob()) - valid = sql.Column(sql.Boolean(), default=True, nullable=False) - user_id = sql.Column(sql.String(64)) - trust_id = sql.Column(sql.String(64)) - __table_args__ = ( - sql.Index('ix_token_expires', 'expires'), - sql.Index('ix_token_expires_valid', 'expires', 'valid'), - sql.Index('ix_token_user_id', 'user_id'), - sql.Index('ix_token_trust_id', 'trust_id') - ) - - -def _expiry_range_batched(session, upper_bound_func, batch_size): - """Returns the stop point of the next batch for expiration. - - Return the timestamp of the next token that is `batch_size` rows from - being the oldest expired token. - """ - # This expiry strategy splits the tokens into roughly equal sized batches - # to be deleted. It does this by finding the timestamp of a token - # `batch_size` rows from the oldest token and yielding that to the caller. - # It's expected that the caller will then delete all rows with a timestamp - # equal to or older than the one yielded. This may delete slightly more - # tokens than the batch_size, but that should be ok in almost all cases. 
- LOG.debug('Token expiration batch size: %d', batch_size) - query = session.query(TokenModel.expires) - query = query.filter(TokenModel.expires < upper_bound_func()) - query = query.order_by(TokenModel.expires) - query = query.offset(batch_size - 1) - query = query.limit(1) - while True: - try: - next_expiration = query.one()[0] - except sql.NotFound: - # There are less than `batch_size` rows remaining, so fall - # through to the normal delete - break - yield next_expiration - yield upper_bound_func() - - -def _expiry_range_all(session, upper_bound_func): - """Expires all tokens in one pass.""" - yield upper_bound_func() - - -class Token(token.persistence.TokenDriverV8): - # Public interface - def get_token(self, token_id): - if token_id is None: - raise exception.TokenNotFound(token_id=token_id) - with sql.session_for_read() as session: - token_ref = session.query(TokenModel).get(token_id) - if not token_ref or not token_ref.valid: - raise exception.TokenNotFound(token_id=token_id) - return token_ref.to_dict() - - def create_token(self, token_id, data): - data_copy = copy.deepcopy(data) - if not data_copy.get('expires'): - data_copy['expires'] = provider.default_expire_time() - if not data_copy.get('user_id'): - data_copy['user_id'] = data_copy['user']['id'] - - token_ref = TokenModel.from_dict(data_copy) - token_ref.valid = True - with sql.session_for_write() as session: - session.add(token_ref) - return token_ref.to_dict() - - def delete_token(self, token_id): - with sql.session_for_write() as session: - token_ref = session.query(TokenModel).get(token_id) - if not token_ref or not token_ref.valid: - raise exception.TokenNotFound(token_id=token_id) - token_ref.valid = False - - def delete_tokens(self, user_id, tenant_id=None, trust_id=None, - consumer_id=None): - """Deletes all tokens in one session - - The user_id will be ignored if the trust_id is specified. user_id - will always be specified. 
- If using a trust, the token's user_id is set to the trustee's user ID - or the trustor's user ID, so will use trust_id to query the tokens. - - """ - token_list = [] - with sql.session_for_write() as session: - now = timeutils.utcnow() - query = session.query(TokenModel) - query = query.filter_by(valid=True) - query = query.filter(TokenModel.expires > now) - if trust_id: - query = query.filter(TokenModel.trust_id == trust_id) - else: - query = query.filter(TokenModel.user_id == user_id) - - for token_ref in query.all(): - if tenant_id: - token_ref_dict = token_ref.to_dict() - if not self._tenant_matches(tenant_id, token_ref_dict): - continue - if consumer_id: - token_ref_dict = token_ref.to_dict() - if not self._consumer_matches(consumer_id, token_ref_dict): - continue - - token_ref.valid = False - token_list.append(token_ref.id) - - return token_list - - def _tenant_matches(self, tenant_id, token_ref_dict): - return ((tenant_id is None) or - (token_ref_dict.get('tenant') and - token_ref_dict['tenant'].get('id') == tenant_id)) - - def _consumer_matches(self, consumer_id, ref): - if consumer_id is None: - return True - else: - try: - oauth = ref['token_data']['token'].get('OS-OAUTH1', {}) - return oauth and oauth['consumer_id'] == consumer_id - except KeyError: - return False - - def _list_tokens_for_trust(self, trust_id): - with sql.session_for_read() as session: - tokens = [] - now = timeutils.utcnow() - query = session.query(TokenModel) - query = query.filter(TokenModel.expires > now) - query = query.filter(TokenModel.trust_id == trust_id) - - token_references = query.filter_by(valid=True) - for token_ref in token_references: - token_ref_dict = token_ref.to_dict() - tokens.append(token_ref_dict['id']) - return tokens - - def _list_tokens_for_user(self, user_id, tenant_id=None): - with sql.session_for_read() as session: - tokens = [] - now = timeutils.utcnow() - query = session.query(TokenModel) - query = query.filter(TokenModel.expires > now) - query = 
query.filter(TokenModel.user_id == user_id) - - token_references = query.filter_by(valid=True) - for token_ref in token_references: - token_ref_dict = token_ref.to_dict() - if self._tenant_matches(tenant_id, token_ref_dict): - tokens.append(token_ref['id']) - return tokens - - def _list_tokens_for_consumer(self, user_id, consumer_id): - tokens = [] - with sql.session_for_write() as session: - now = timeutils.utcnow() - query = session.query(TokenModel) - query = query.filter(TokenModel.expires > now) - query = query.filter(TokenModel.user_id == user_id) - token_references = query.filter_by(valid=True) - - for token_ref in token_references: - token_ref_dict = token_ref.to_dict() - if self._consumer_matches(consumer_id, token_ref_dict): - tokens.append(token_ref_dict['id']) - return tokens - - def _list_tokens(self, user_id, tenant_id=None, trust_id=None, - consumer_id=None): - if not CONF.token.revoke_by_id: - return [] - if trust_id: - return self._list_tokens_for_trust(trust_id) - if consumer_id: - return self._list_tokens_for_consumer(user_id, consumer_id) - else: - return self._list_tokens_for_user(user_id, tenant_id) - - def list_revoked_tokens(self): - with sql.session_for_read() as session: - tokens = [] - now = timeutils.utcnow() - query = session.query(TokenModel.id, TokenModel.expires, - TokenModel.extra) - query = query.filter(TokenModel.expires > now) - token_references = query.filter_by(valid=False) - for token_ref in token_references: - token_data = token_ref[2]['token_data'] - if 'access' in token_data: - # It's a v2 token. - audit_ids = token_data['access']['token']['audit_ids'] - else: - # It's a v3 token. 
- audit_ids = token_data['token']['audit_ids'] - - record = { - 'id': token_ref[0], - 'expires': token_ref[1], - 'audit_id': audit_ids[0], - } - tokens.append(record) - return tokens - - def _expiry_range_strategy(self, dialect): - """Choose a token range expiration strategy - - Based on the DB dialect, select an expiry range callable that is - appropriate. - """ - # DB2 and MySQL can both benefit from a batched strategy. On DB2 the - # transaction log can fill up and on MySQL w/Galera, large - # transactions can exceed the maximum write set size. - if dialect == 'ibm_db_sa': - # Limit of 100 is known to not fill a transaction log - # of default maximum size while not significantly - # impacting the performance of large token purges on - # systems where the maximum transaction log size has - # been increased beyond the default. - return functools.partial(_expiry_range_batched, - batch_size=100) - elif dialect == 'mysql': - # We want somewhat more than 100, since Galera replication delay is - # at least RTT*2. This can be a significant amount of time if - # doing replication across a WAN. 
- return functools.partial(_expiry_range_batched, - batch_size=1000) - return _expiry_range_all - - def flush_expired_tokens(self): - with sql.session_for_write() as session: - dialect = session.bind.dialect.name - expiry_range_func = self._expiry_range_strategy(dialect) - query = session.query(TokenModel.expires) - total_removed = 0 - upper_bound_func = timeutils.utcnow - for expiry_time in expiry_range_func(session, upper_bound_func): - delete_query = query.filter(TokenModel.expires <= - expiry_time) - row_count = delete_query.delete(synchronize_session=False) - total_removed += row_count - LOG.debug('Removed %d total expired tokens', total_removed) - - session.flush() - LOG.info(_LI('Total expired tokens removed: %d'), total_removed) diff --git a/keystone-moon/keystone/token/persistence/core.py b/keystone-moon/keystone/token/persistence/core.py deleted file mode 100644 index 76c3ff70..00000000 --- a/keystone-moon/keystone/token/persistence/core.py +++ /dev/null @@ -1,357 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Main entry point into the Token Persistence service.""" - -import abc -import copy - -from oslo_config import cfg -from oslo_log import log -from oslo_utils import timeutils -import six - -from keystone.common import cache -from keystone.common import dependency -from keystone.common import manager -from keystone import exception -from keystone.i18n import _LW -from keystone.token import utils - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) -MEMOIZE = cache.get_memoization_decorator(group='token') -REVOCATION_MEMOIZE = cache.get_memoization_decorator(group='token', - expiration_group='revoke') - - -@dependency.requires('assignment_api', 'identity_api', 'resource_api', - 'token_provider_api', 'trust_api') -class PersistenceManager(manager.Manager): - """Default pivot point for the Token Persistence backend. - - See :mod:`keystone.common.manager.Manager` for more details on how this - dynamically calls the backend. - - """ - - driver_namespace = 'keystone.token.persistence' - - def __init__(self): - super(PersistenceManager, self).__init__(CONF.token.driver) - - def _assert_valid(self, token_id, token_ref): - """Raise TokenNotFound if the token is expired.""" - current_time = timeutils.normalize_time(timeutils.utcnow()) - expires = token_ref.get('expires') - if not expires or current_time > timeutils.normalize_time(expires): - raise exception.TokenNotFound(token_id=token_id) - - def get_token(self, token_id): - unique_id = utils.generate_unique_id(token_id) - token_ref = self._get_token(unique_id) - # NOTE(morganfainberg): Lift expired checking to the manager, there is - # no reason to make the drivers implement this check. With caching, - # self._get_token could return an expired token. Make sure we behave - # as expected and raise TokenNotFound on those instances. - self._assert_valid(token_id, token_ref) - return token_ref - - @MEMOIZE - def _get_token(self, token_id): - # Only ever use the "unique" id in the cache key. 
- return self.driver.get_token(token_id) - - def create_token(self, token_id, data): - unique_id = utils.generate_unique_id(token_id) - data_copy = copy.deepcopy(data) - data_copy['id'] = unique_id - ret = self.driver.create_token(unique_id, data_copy) - if MEMOIZE.should_cache(ret): - # NOTE(morganfainberg): when doing a cache set, you must pass the - # same arguments through, the same as invalidate (this includes - # "self"). First argument is always the value to be cached - self._get_token.set(ret, self, unique_id) - return ret - - def delete_token(self, token_id): - if not CONF.token.revoke_by_id: - return - unique_id = utils.generate_unique_id(token_id) - self.driver.delete_token(unique_id) - self._invalidate_individual_token_cache(unique_id) - self.invalidate_revocation_list() - - def delete_tokens(self, user_id, tenant_id=None, trust_id=None, - consumer_id=None): - if not CONF.token.revoke_by_id: - return - token_list = self.driver.delete_tokens(user_id, tenant_id, trust_id, - consumer_id) - for token_id in token_list: - unique_id = utils.generate_unique_id(token_id) - self._invalidate_individual_token_cache(unique_id) - self.invalidate_revocation_list() - - @REVOCATION_MEMOIZE - def list_revoked_tokens(self): - return self.driver.list_revoked_tokens() - - def invalidate_revocation_list(self): - # NOTE(morganfainberg): Note that ``self`` needs to be passed to - # invalidate() because of the way the invalidation method works on - # determining cache-keys. - self.list_revoked_tokens.invalidate(self) - - def delete_tokens_for_domain(self, domain_id): - """Delete all tokens for a given domain. - - It will delete all the project-scoped tokens for the projects - that are owned by the given domain, as well as any tokens issued - to users that are owned by this domain. - - However, deletion of domain_scoped tokens will still need to be - implemented as stated in TODO below. 
- """ - if not CONF.token.revoke_by_id: - return - projects = self.resource_api.list_projects() - for project in projects: - if project['domain_id'] == domain_id: - for user_id in self.assignment_api.list_user_ids_for_project( - project['id']): - self.delete_tokens_for_user(user_id, project['id']) - # TODO(morganfainberg): implement deletion of domain_scoped tokens. - - users = self.identity_api.list_users(domain_id) - user_ids = (user['id'] for user in users) - self.delete_tokens_for_users(user_ids) - - def delete_tokens_for_user(self, user_id, project_id=None): - """Delete all tokens for a given user or user-project combination. - - This method adds in the extra logic for handling trust-scoped token - revocations in a single call instead of needing to explicitly handle - trusts in the caller's logic. - """ - if not CONF.token.revoke_by_id: - return - self.delete_tokens(user_id, tenant_id=project_id) - for trust in self.trust_api.list_trusts_for_trustee(user_id): - # Ensure we revoke tokens associated to the trust / project - # user_id combination. - self.delete_tokens(user_id, trust_id=trust['id'], - tenant_id=project_id) - for trust in self.trust_api.list_trusts_for_trustor(user_id): - # Ensure we revoke tokens associated to the trust / project / - # user_id combination where the user_id is the trustor. - - # NOTE(morganfainberg): This revocation is a bit coarse, but it - # covers a number of cases such as disabling of the trustor user, - # deletion of the trustor user (for any number of reasons). It - # might make sense to refine this and be more surgical on the - # deletions (e.g. don't revoke tokens for the trusts when the - # trustor changes password). For now, to maintain previous - # functionality, this will continue to be a bit overzealous on - # revocations. 
- self.delete_tokens(trust['trustee_user_id'], trust_id=trust['id'], - tenant_id=project_id) - - def delete_tokens_for_users(self, user_ids, project_id=None): - """Delete all tokens for a list of user_ids. - - :param user_ids: list of user identifiers - :param project_id: optional project identifier - """ - if not CONF.token.revoke_by_id: - return - for user_id in user_ids: - self.delete_tokens_for_user(user_id, project_id=project_id) - - def _invalidate_individual_token_cache(self, token_id): - # NOTE(morganfainberg): invalidate takes the exact same arguments as - # the normal method, this means we need to pass "self" in (which gets - # stripped off). - - # FIXME(morganfainberg): Does this cache actually need to be - # invalidated? We maintain a cached revocation list, which should be - # consulted before accepting a token as valid. For now we will - # do the explicit individual token invalidation. - self._get_token.invalidate(self, token_id) - self.token_provider_api.invalidate_individual_token_cache(token_id) - - -@dependency.requires('token_provider_api') -@dependency.provider('token_api') -class Manager(object): - """The token_api provider. - - This class is a proxy class to the token_provider_api's persistence - manager. - """ - - def __init__(self): - # NOTE(morganfainberg): __init__ is required for dependency processing. - super(Manager, self).__init__() - - def __getattr__(self, item): - """Forward calls to the `token_provider_api` persistence manager.""" - # NOTE(morganfainberg): Prevent infinite recursion, raise an - # AttributeError for 'token_provider_api' ensuring that the dep - # injection doesn't infinitely try and lookup self.token_provider_api - # on _process_dependencies. This doesn't need an exception string as - # it should only ever be hit on instantiation. 
- if item == 'token_provider_api': - raise AttributeError() - - f = getattr(self.token_provider_api._persistence, item) - LOG.warning(_LW('`token_api.%s` is deprecated as of Juno in favor of ' - 'utilizing methods on `token_provider_api` and may be ' - 'removed in Kilo.'), item) - setattr(self, item, f) - return f - - -@six.add_metaclass(abc.ABCMeta) -class TokenDriverV8(object): - """Interface description for a Token driver.""" - - @abc.abstractmethod - def get_token(self, token_id): - """Get a token by id. - - :param token_id: identity of the token - :type token_id: string - :returns: token_ref - :raises keystone.exception.TokenNotFound: If the token doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def create_token(self, token_id, data): - """Create a token by id and data. - - :param token_id: identity of the token - :type token_id: string - :param data: dictionary with additional reference information - - :: - - { - expires='' - id=token_id, - user=user_ref, - tenant=tenant_ref, - metadata=metadata_ref - } - - :type data: dict - :returns: token_ref or None. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_token(self, token_id): - """Deletes a token by id. - - :param token_id: identity of the token - :type token_id: string - :returns: None. - :raises keystone.exception.TokenNotFound: If the token doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_tokens(self, user_id, tenant_id=None, trust_id=None, - consumer_id=None): - """Deletes tokens by user. - - If the tenant_id is not None, only delete the tokens by user id under - the specified tenant. - - If the trust_id is not None, it will be used to query tokens and the - user_id will be ignored. - - If the consumer_id is not None, only delete the tokens by consumer id - that match the specified consumer id. 
- - :param user_id: identity of user - :type user_id: string - :param tenant_id: identity of the tenant - :type tenant_id: string - :param trust_id: identity of the trust - :type trust_id: string - :param consumer_id: identity of the consumer - :type consumer_id: string - :returns: The tokens that have been deleted. - :raises keystone.exception.TokenNotFound: If the token doesn't exist. - - """ - if not CONF.token.revoke_by_id: - return - token_list = self._list_tokens(user_id, - tenant_id=tenant_id, - trust_id=trust_id, - consumer_id=consumer_id) - - for token in token_list: - try: - self.delete_token(token) - except exception.NotFound: # nosec - # The token is already gone, good. - pass - return token_list - - @abc.abstractmethod - def _list_tokens(self, user_id, tenant_id=None, trust_id=None, - consumer_id=None): - """Returns a list of current token_id's for a user - - This is effectively a private method only used by the ``delete_tokens`` - method and should not be called by anything outside of the - ``token_api`` manager or the token driver itself. 
- - :param user_id: identity of the user - :type user_id: string - :param tenant_id: identity of the tenant - :type tenant_id: string - :param trust_id: identity of the trust - :type trust_id: string - :param consumer_id: identity of the consumer - :type consumer_id: string - :returns: list of token_id's - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_revoked_tokens(self): - """Returns a list of all revoked tokens - - :returns: list of token_id's - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def flush_expired_tokens(self): - """Archive or delete tokens that have expired.""" - raise exception.NotImplemented() # pragma: no cover - - -Driver = manager.create_legacy_driver(TokenDriverV8) diff --git a/keystone-moon/keystone/token/provider.py b/keystone-moon/keystone/token/provider.py deleted file mode 100644 index 7c4166f4..00000000 --- a/keystone-moon/keystone/token/provider.py +++ /dev/null @@ -1,637 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -"""Token provider interface.""" - -import abc -import base64 -import datetime -import sys -import uuid - -from oslo_config import cfg -from oslo_log import log -from oslo_utils import timeutils -import six - -from keystone.common import cache -from keystone.common import dependency -from keystone.common import manager -from keystone import exception -from keystone.i18n import _, _LE -from keystone.models import token_model -from keystone import notifications -from keystone.token import persistence -from keystone.token import providers -from keystone.token import utils - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) -MEMOIZE = cache.get_memoization_decorator(group='token') - -# NOTE(morganfainberg): This is for compatibility in case someone was relying -# on the old location of the UnsupportedTokenVersionException for their code. -UnsupportedTokenVersionException = exception.UnsupportedTokenVersionException - -# supported token versions -V2 = token_model.V2 -V3 = token_model.V3 -VERSIONS = token_model.VERSIONS - - -def base64_encode(s): - """Encode a URL-safe string. - - :type s: six.text_type - :rtype: six.text_type - - """ - # urlsafe_b64encode() returns six.binary_type so need to convert to - # six.text_type, might as well do it before stripping. - return base64.urlsafe_b64encode(s).decode('utf-8').rstrip('=') - - -def random_urlsafe_str(): - """Generate a random URL-safe string. - - :rtype: six.text_type - - """ - # chop the padding (==) off the end of the encoding to save space - return base64.urlsafe_b64encode(uuid.uuid4().bytes)[:-2].decode('utf-8') - - -def random_urlsafe_str_to_bytes(s): - """Convert a string from :func:`random_urlsafe_str()` to six.binary_type. - - :type s: six.text_type - :rtype: six.binary_type - - """ - # urlsafe_b64decode() requires str, unicode isn't accepted. 
- s = str(s) - - # restore the padding (==) at the end of the string - return base64.urlsafe_b64decode(s + '==') - - -def default_expire_time(): - """Determine when a fresh token should expire. - - Expiration time varies based on configuration (see ``[token] expiration``). - - :returns: a naive UTC datetime.datetime object - - """ - expire_delta = datetime.timedelta(seconds=CONF.token.expiration) - return timeutils.utcnow() + expire_delta - - -def audit_info(parent_audit_id): - """Build the audit data for a token. - - If ``parent_audit_id`` is None, the list will be one element in length - containing a newly generated audit_id. - - If ``parent_audit_id`` is supplied, the list will be two elements in length - containing a newly generated audit_id and the ``parent_audit_id``. The - ``parent_audit_id`` will always be element index 1 in the resulting - list. - - :param parent_audit_id: the audit of the original token in the chain - :type parent_audit_id: str - :returns: Keystone token audit data - """ - audit_id = random_urlsafe_str() - if parent_audit_id is not None: - return [audit_id, parent_audit_id] - return [audit_id] - - -@dependency.provider('token_provider_api') -@dependency.requires('assignment_api', 'revoke_api') -class Manager(manager.Manager): - """Default pivot point for the token provider backend. - - See :mod:`keystone.common.manager.Manager` for more details on how this - dynamically calls the backend. 
- - """ - - driver_namespace = 'keystone.token.provider' - - V2 = V2 - V3 = V3 - VERSIONS = VERSIONS - INVALIDATE_PROJECT_TOKEN_PERSISTENCE = 'invalidate_project_tokens' - INVALIDATE_USER_TOKEN_PERSISTENCE = 'invalidate_user_tokens' - _persistence_manager = None - - def __init__(self): - super(Manager, self).__init__(CONF.token.provider) - self._register_callback_listeners() - - def _register_callback_listeners(self): - # This is used by the @dependency.provider decorator to register the - # provider (token_provider_api) manager to listen for trust deletions. - callbacks = { - notifications.ACTIONS.deleted: [ - ['OS-TRUST:trust', self._trust_deleted_event_callback], - ['user', self._delete_user_tokens_callback], - ['domain', self._delete_domain_tokens_callback], - ], - notifications.ACTIONS.disabled: [ - ['user', self._delete_user_tokens_callback], - ['domain', self._delete_domain_tokens_callback], - ['project', self._delete_project_tokens_callback], - ], - notifications.ACTIONS.internal: [ - [notifications.INVALIDATE_USER_TOKEN_PERSISTENCE, - self._delete_user_tokens_callback], - [notifications.INVALIDATE_USER_PROJECT_TOKEN_PERSISTENCE, - self._delete_user_project_tokens_callback], - [notifications.INVALIDATE_USER_OAUTH_CONSUMER_TOKENS, - self._delete_user_oauth_consumer_tokens_callback], - ] - } - - for event, cb_info in callbacks.items(): - for resource_type, callback_fns in cb_info: - notifications.register_event_callback(event, resource_type, - callback_fns) - - @property - def _needs_persistence(self): - return self.driver.needs_persistence() - - @property - def _persistence(self): - # NOTE(morganfainberg): This should not be handled via __init__ to - # avoid dependency injection oddities circular dependencies (where - # the provider manager requires the token persistence manager, which - # requires the token provider manager). 
- if self._persistence_manager is None: - self._persistence_manager = persistence.PersistenceManager() - return self._persistence_manager - - def _create_token(self, token_id, token_data): - try: - if isinstance(token_data['expires'], six.string_types): - token_data['expires'] = timeutils.normalize_time( - timeutils.parse_isotime(token_data['expires'])) - self._persistence.create_token(token_id, token_data) - except Exception: - exc_info = sys.exc_info() - # an identical token may have been created already. - # if so, return the token_data as it is also identical - try: - self._persistence.get_token(token_id) - except exception.TokenNotFound: - six.reraise(*exc_info) - - def validate_token(self, token_id, belongs_to=None): - unique_id = utils.generate_unique_id(token_id) - # NOTE(morganfainberg): Ensure we never use the long-form token_id - # (PKI) as part of the cache_key. - token = self._validate_token(unique_id) - self._token_belongs_to(token, belongs_to) - self._is_valid_token(token) - return token - - def check_revocation_v2(self, token): - try: - token_data = token['access'] - except KeyError: - raise exception.TokenNotFound(_('Failed to validate token')) - - token_values = self.revoke_api.model.build_token_values_v2( - token_data, CONF.identity.default_domain_id) - self.revoke_api.check_token(token_values) - - def validate_v2_token(self, token_id, belongs_to=None): - # NOTE(lbragstad): Only go to the persistence backend if the token - # provider requires it. - if self._needs_persistence: - # NOTE(morganfainberg): Ensure we never use the long-form token_id - # (PKI) as part of the cache_key. - unique_id = utils.generate_unique_id(token_id) - token_ref = self._persistence.get_token(unique_id) - token = self._validate_v2_token(token_ref) - else: - # NOTE(lbragstad): If the token doesn't require persistence, then - # it is a fernet token. 
The fernet token provider doesn't care if - # it's creating version 2.0 tokens or v3 tokens, so we use the same - # validate_non_persistent_token() method to validate both. Then we - # can leverage a separate method to make version 3 token data look - # like version 2.0 token data. The pattern we want to move towards - # is one where the token providers just handle data and the - # controller layers handle interpreting the token data in a format - # that makes sense for the request. - v3_token_ref = self.validate_non_persistent_token(token_id) - v2_token_data_helper = providers.common.V2TokenDataHelper() - token = v2_token_data_helper.v3_to_v2_token(v3_token_ref) - - # these are common things that happen regardless of token provider - token['access']['token']['id'] = token_id - self._token_belongs_to(token, belongs_to) - self._is_valid_token(token) - return token - - def check_revocation_v3(self, token): - try: - token_data = token['token'] - except KeyError: - raise exception.TokenNotFound(_('Failed to validate token')) - token_values = self.revoke_api.model.build_token_values(token_data) - self.revoke_api.check_token(token_values) - - def check_revocation(self, token): - version = self.get_token_version(token) - if version == V2: - return self.check_revocation_v2(token) - else: - return self.check_revocation_v3(token) - - def validate_v3_token(self, token_id): - if not token_id: - raise exception.TokenNotFound(_('No token in the request')) - - try: - # NOTE(lbragstad): Only go to persistent storage if we have a token - # to fetch from the backend (the driver persists the token). - # Otherwise the information about the token must be in the token - # id. - if not self._needs_persistence: - token_ref = self.validate_non_persistent_token(token_id) - else: - unique_id = utils.generate_unique_id(token_id) - # NOTE(morganfainberg): Ensure we never use the long-form - # token_id (PKI) as part of the cache_key. 
- token_ref = self._persistence.get_token(unique_id) - token_ref = self._validate_v3_token(token_ref) - self._is_valid_token(token_ref) - return token_ref - except exception.Unauthorized as e: - LOG.debug('Unable to validate token: %s', e) - raise exception.TokenNotFound(token_id=token_id) - - @MEMOIZE - def _validate_token(self, token_id): - if not token_id: - raise exception.TokenNotFound(_('No token in the request')) - - if not self._needs_persistence: - # NOTE(lbragstad): This will validate v2 and v3 non-persistent - # tokens. - return self.driver.validate_non_persistent_token(token_id) - token_ref = self._persistence.get_token(token_id) - version = self.get_token_version(token_ref) - if version == self.V3: - try: - return self.driver.validate_v3_token(token_ref) - except exception.Unauthorized as e: - LOG.debug('Unable to validate token: %s', e) - raise exception.TokenNotFound(token_id=token_id) - elif version == self.V2: - return self.driver.validate_v2_token(token_ref) - raise exception.UnsupportedTokenVersionException() - - @MEMOIZE - def _validate_v2_token(self, token_id): - return self.driver.validate_v2_token(token_id) - - @MEMOIZE - def _validate_v3_token(self, token_id): - return self.driver.validate_v3_token(token_id) - - def _is_valid_token(self, token): - """Verify the token is valid format and has not expired.""" - current_time = timeutils.normalize_time(timeutils.utcnow()) - - try: - # Get the data we need from the correct location (V2 and V3 tokens - # differ in structure, Try V3 first, fall back to V2 second) - token_data = token.get('token', token.get('access')) - expires_at = token_data.get('expires_at', - token_data.get('expires')) - if not expires_at: - expires_at = token_data['token']['expires'] - expiry = timeutils.normalize_time( - timeutils.parse_isotime(expires_at)) - except Exception: - LOG.exception(_LE('Unexpected error or malformed token ' - 'determining token expiry: %s'), token) - raise exception.TokenNotFound(_('Failed to 
validate token')) - - if current_time < expiry: - self.check_revocation(token) - # Token has not expired and has not been revoked. - return None - else: - raise exception.TokenNotFound(_('Failed to validate token')) - - def _token_belongs_to(self, token, belongs_to): - """Check if the token belongs to the right tenant. - - This is only used on v2 tokens. The structural validity of the token - will have already been checked before this method is called. - - """ - if belongs_to: - token_data = token['access']['token'] - if ('tenant' not in token_data or - token_data['tenant']['id'] != belongs_to): - raise exception.Unauthorized() - - def issue_v2_token(self, token_ref, roles_ref=None, catalog_ref=None): - token_id, token_data = self.driver.issue_v2_token( - token_ref, roles_ref, catalog_ref) - - if self._needs_persistence: - data = dict(key=token_id, - id=token_id, - expires=token_data['access']['token']['expires'], - user=token_ref['user'], - tenant=token_ref['tenant'], - metadata=token_ref['metadata'], - token_data=token_data, - bind=token_ref.get('bind'), - trust_id=token_ref['metadata'].get('trust_id'), - token_version=self.V2) - self._create_token(token_id, data) - - return token_id, token_data - - def issue_v3_token(self, user_id, method_names, expires_at=None, - project_id=None, domain_id=None, auth_context=None, - trust=None, metadata_ref=None, include_catalog=True, - parent_audit_id=None): - token_id, token_data = self.driver.issue_v3_token( - user_id, method_names, expires_at, project_id, domain_id, - auth_context, trust, metadata_ref, include_catalog, - parent_audit_id) - - if metadata_ref is None: - metadata_ref = {} - - if 'project' in token_data['token']: - # project-scoped token, fill in the v2 token data - # all we care are the role IDs - - # FIXME(gyee): is there really a need to store roles in metadata? 
- role_ids = [r['id'] for r in token_data['token']['roles']] - metadata_ref = {'roles': role_ids} - - if trust: - metadata_ref.setdefault('trust_id', trust['id']) - metadata_ref.setdefault('trustee_user_id', - trust['trustee_user_id']) - - data = dict(key=token_id, - id=token_id, - expires=token_data['token']['expires_at'], - user=token_data['token']['user'], - tenant=token_data['token'].get('project'), - metadata=metadata_ref, - token_data=token_data, - trust_id=trust['id'] if trust else None, - token_version=self.V3) - if self._needs_persistence: - self._create_token(token_id, data) - return token_id, token_data - - def invalidate_individual_token_cache(self, token_id): - # NOTE(morganfainberg): invalidate takes the exact same arguments as - # the normal method, this means we need to pass "self" in (which gets - # stripped off). - - # FIXME(morganfainberg): Does this cache actually need to be - # invalidated? We maintain a cached revocation list, which should be - # consulted before accepting a token as valid. For now we will - # do the explicit individual token invalidation. 
- - self._validate_token.invalidate(self, token_id) - self._validate_v2_token.invalidate(self, token_id) - self._validate_v3_token.invalidate(self, token_id) - - def revoke_token(self, token_id, revoke_chain=False): - revoke_by_expires = False - project_id = None - domain_id = None - - token_ref = token_model.KeystoneToken( - token_id=token_id, - token_data=self.validate_token(token_id)) - - user_id = token_ref.user_id - expires_at = token_ref.expires - audit_id = token_ref.audit_id - audit_chain_id = token_ref.audit_chain_id - if token_ref.project_scoped: - project_id = token_ref.project_id - if token_ref.domain_scoped: - domain_id = token_ref.domain_id - - if audit_id is None and not revoke_chain: - LOG.debug('Received token with no audit_id.') - revoke_by_expires = True - - if audit_chain_id is None and revoke_chain: - LOG.debug('Received token with no audit_chain_id.') - revoke_by_expires = True - - if revoke_by_expires: - self.revoke_api.revoke_by_expiration(user_id, expires_at, - project_id=project_id, - domain_id=domain_id) - elif revoke_chain: - self.revoke_api.revoke_by_audit_chain_id(audit_chain_id, - project_id=project_id, - domain_id=domain_id) - else: - self.revoke_api.revoke_by_audit_id(audit_id) - - if CONF.token.revoke_by_id and self._needs_persistence: - self._persistence.delete_token(token_id=token_id) - - def list_revoked_tokens(self): - return self._persistence.list_revoked_tokens() - - def _trust_deleted_event_callback(self, service, resource_type, operation, - payload): - if CONF.token.revoke_by_id: - trust_id = payload['resource_info'] - trust = self.trust_api.get_trust(trust_id, deleted=True) - self._persistence.delete_tokens(user_id=trust['trustor_user_id'], - trust_id=trust_id) - - def _delete_user_tokens_callback(self, service, resource_type, operation, - payload): - if CONF.token.revoke_by_id: - user_id = payload['resource_info'] - self._persistence.delete_tokens_for_user(user_id) - - def _delete_domain_tokens_callback(self, service, 
resource_type, - operation, payload): - if CONF.token.revoke_by_id: - domain_id = payload['resource_info'] - self._persistence.delete_tokens_for_domain(domain_id=domain_id) - - def _delete_user_project_tokens_callback(self, service, resource_type, - operation, payload): - if CONF.token.revoke_by_id: - user_id = payload['resource_info']['user_id'] - project_id = payload['resource_info']['project_id'] - self._persistence.delete_tokens_for_user(user_id=user_id, - project_id=project_id) - - def _delete_project_tokens_callback(self, service, resource_type, - operation, payload): - if CONF.token.revoke_by_id: - project_id = payload['resource_info'] - self._persistence.delete_tokens_for_users( - self.assignment_api.list_user_ids_for_project(project_id), - project_id=project_id) - - def _delete_user_oauth_consumer_tokens_callback(self, service, - resource_type, operation, - payload): - if CONF.token.revoke_by_id: - user_id = payload['resource_info']['user_id'] - consumer_id = payload['resource_info']['consumer_id'] - self._persistence.delete_tokens(user_id=user_id, - consumer_id=consumer_id) - - -@six.add_metaclass(abc.ABCMeta) -class Provider(object): - """Interface description for a Token provider.""" - - @abc.abstractmethod - def needs_persistence(self): - """Determine if the token should be persisted. - - If the token provider requires that the token be persisted to a - backend this should return True, otherwise return False. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_token_version(self, token_data): - """Return the version of the given token data. - - If the given token data is unrecognizable, - UnsupportedTokenVersionException is raised. - - :param token_data: token_data - :type token_data: dict - :returns: token version string - :raises keystone.exception.UnsupportedTokenVersionException: - If the token version is not expected. 
- """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def issue_v2_token(self, token_ref, roles_ref=None, catalog_ref=None): - """Issue a V2 token. - - :param token_ref: token data to generate token from - :type token_ref: dict - :param roles_ref: optional roles list - :type roles_ref: dict - :param catalog_ref: optional catalog information - :type catalog_ref: dict - :returns: (token_id, token_data) - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def issue_v3_token(self, user_id, method_names, expires_at=None, - project_id=None, domain_id=None, auth_context=None, - trust=None, metadata_ref=None, include_catalog=True, - parent_audit_id=None): - """Issue a V3 Token. - - :param user_id: identity of the user - :type user_id: string - :param method_names: names of authentication methods - :type method_names: list - :param expires_at: optional time the token will expire - :type expires_at: string - :param project_id: optional project identity - :type project_id: string - :param domain_id: optional domain identity - :type domain_id: string - :param auth_context: optional context from the authorization plugins - :type auth_context: dict - :param trust: optional trust reference - :type trust: dict - :param metadata_ref: optional metadata reference - :type metadata_ref: dict - :param include_catalog: optional, include the catalog in token data - :type include_catalog: boolean - :param parent_audit_id: optional, the audit id of the parent token - :type parent_audit_id: string - :returns: (token_id, token_data) - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def validate_v2_token(self, token_ref): - """Validate the given V2 token and return the token data. - - Must raise Unauthorized exception if unable to validate token. 
- - :param token_ref: the token reference - :type token_ref: dict - :returns: token data - :raises keystone.exception.TokenNotFound: If the token doesn't exist. - - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def validate_non_persistent_token(self, token_id): - """Validate a given non-persistent token id and return the token_data. - - :param token_id: the token id - :type token_id: string - :returns: token data - :raises keystone.exception.TokenNotFound: When the token is invalid - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def validate_v3_token(self, token_ref): - """Validate the given V3 token and return the token_data. - - :param token_ref: the token reference - :type token_ref: dict - :returns: token data - :raises keystone.exception.TokenNotFound: If the token doesn't exist. - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def _get_token_id(self, token_data): - """Generate the token_id based upon the data in token_data. - - :param token_data: token information - :type token_data: dict - :returns: token identifier - :rtype: six.text_type - """ - raise exception.NotImplemented() # pragma: no cover diff --git a/keystone-moon/keystone/token/providers/__init__.py b/keystone-moon/keystone/token/providers/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/token/providers/common.py b/keystone-moon/keystone/token/providers/common.py deleted file mode 100644 index 94729178..00000000 --- a/keystone-moon/keystone/token/providers/common.py +++ /dev/null @@ -1,808 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_config import cfg -from oslo_log import log -from oslo_serialization import jsonutils -import six -from six.moves.urllib import parse - -from keystone.common import controller as common_controller -from keystone.common import dependency -from keystone.common import utils -from keystone import exception -from keystone.federation import constants as federation_constants -from keystone.i18n import _, _LE -from keystone import token -from keystone.token import provider - - -LOG = log.getLogger(__name__) -CONF = cfg.CONF - - -@dependency.requires('catalog_api', 'resource_api', 'assignment_api') -class V2TokenDataHelper(object): - """Creates V2 token data.""" - - def v3_to_v2_token(self, v3_token_data): - """Convert v3 token data into v2.0 token data. - - This method expects a dictionary generated from - V3TokenDataHelper.get_token_data() and converts it to look like a v2.0 - token dictionary. - - :param v3_token_data: dictionary formatted for v3 tokens - :returns: dictionary formatted for v2 tokens - :raises keystone.exception.Unauthorized: If a specific token type is - not supported in v2. - - """ - token_data = {} - # Build v2 token - v3_token = v3_token_data['token'] - - # NOTE(lbragstad): Version 2.0 tokens don't know about any domain other - # than the default domain specified in the configuration. 
- domain_id = v3_token.get('domain', {}).get('id') - if domain_id and CONF.identity.default_domain_id != domain_id: - msg = ('Unable to validate domain-scoped tokens outside of the ' - 'default domain') - raise exception.Unauthorized(msg) - - token = {} - token['expires'] = v3_token.get('expires_at') - token['issued_at'] = v3_token.get('issued_at') - token['audit_ids'] = v3_token.get('audit_ids') - - if 'project' in v3_token: - # v3 token_data does not contain all tenant attributes - tenant = self.resource_api.get_project( - v3_token['project']['id']) - # Drop domain specific fields since v2 calls are not domain-aware. - token['tenant'] = common_controller.V2Controller.v3_to_v2_project( - tenant) - token_data['token'] = token - - # Build v2 user - v3_user = v3_token['user'] - - user = common_controller.V2Controller.v3_to_v2_user(v3_user) - - if 'OS-TRUST:trust' in v3_token: - msg = ('Unable to validate trust-scoped tokens using version v2.0 ' - 'API.') - raise exception.Unauthorized(msg) - - if 'OS-OAUTH1' in v3_token: - msg = ('Unable to validate Oauth tokens using the version v2.0 ' - 'API.') - raise exception.Unauthorized(msg) - - # Set user roles - user['roles'] = [] - role_ids = [] - for role in v3_token.get('roles', []): - role_ids.append(role.pop('id')) - user['roles'].append(role) - user['roles_links'] = [] - - token_data['user'] = user - - # Get and build v2 service catalog - token_data['serviceCatalog'] = [] - if 'tenant' in token: - catalog_ref = self.catalog_api.get_catalog( - user['id'], token['tenant']['id']) - if catalog_ref: - token_data['serviceCatalog'] = self.format_catalog(catalog_ref) - - # Build v2 metadata - metadata = {} - metadata['roles'] = role_ids - # Setting is_admin to keep consistency in v2 response - metadata['is_admin'] = 0 - token_data['metadata'] = metadata - - return {'access': token_data} - - @classmethod - def format_token(cls, token_ref, roles_ref=None, catalog_ref=None, - trust_ref=None): - audit_info = None - user_ref = 
token_ref['user'] - metadata_ref = token_ref['metadata'] - if roles_ref is None: - roles_ref = [] - expires = token_ref.get('expires', provider.default_expire_time()) - if expires is not None: - if not isinstance(expires, six.text_type): - expires = utils.isotime(expires) - - token_data = token_ref.get('token_data') - if token_data: - token_audit = token_data.get( - 'access', token_data).get('token', {}).get('audit_ids') - audit_info = token_audit - - if audit_info is None: - audit_info = provider.audit_info(token_ref.get('parent_audit_id')) - - o = {'access': {'token': {'id': token_ref['id'], - 'expires': expires, - 'issued_at': utils.isotime(subsecond=True), - 'audit_ids': audit_info - }, - 'user': {'id': user_ref['id'], - 'name': user_ref['name'], - 'username': user_ref['name'], - 'roles': roles_ref, - 'roles_links': metadata_ref.get('roles_links', - []) - } - } - } - if 'bind' in token_ref: - o['access']['token']['bind'] = token_ref['bind'] - if 'tenant' in token_ref and token_ref['tenant']: - token_ref['tenant']['enabled'] = True - o['access']['token']['tenant'] = token_ref['tenant'] - if catalog_ref is not None: - o['access']['serviceCatalog'] = V2TokenDataHelper.format_catalog( - catalog_ref) - if metadata_ref: - if 'is_admin' in metadata_ref: - o['access']['metadata'] = {'is_admin': - metadata_ref['is_admin']} - else: - o['access']['metadata'] = {'is_admin': 0} - if 'roles' in metadata_ref: - o['access']['metadata']['roles'] = metadata_ref['roles'] - if CONF.trust.enabled and trust_ref: - o['access']['trust'] = {'trustee_user_id': - trust_ref['trustee_user_id'], - 'id': trust_ref['id'], - 'trustor_user_id': - trust_ref['trustor_user_id'], - 'impersonation': - trust_ref['impersonation'] - } - return o - - @classmethod - def format_catalog(cls, catalog_ref): - """Munge catalogs from internal to output format. - - Internal catalogs look like:: - - {$REGION: { - {$SERVICE: { - $key1: $value1, - ... 
- } - } - } - - The legacy api wants them to look like:: - - [{'name': $SERVICE[name], - 'type': $SERVICE, - 'endpoints': [{ - 'tenantId': $tenant_id, - ... - 'region': $REGION, - }], - 'endpoints_links': [], - }] - - """ - if not catalog_ref: - return [] - - services = {} - for region, region_ref in catalog_ref.items(): - for service, service_ref in region_ref.items(): - new_service_ref = services.get(service, {}) - new_service_ref['name'] = service_ref.pop('name') - new_service_ref['type'] = service - new_service_ref['endpoints_links'] = [] - service_ref['region'] = region - - endpoints_ref = new_service_ref.get('endpoints', []) - endpoints_ref.append(service_ref) - - new_service_ref['endpoints'] = endpoints_ref - services[service] = new_service_ref - - return list(services.values()) - - -@dependency.requires('assignment_api', 'catalog_api', 'federation_api', - 'identity_api', 'resource_api', 'role_api', 'trust_api') -class V3TokenDataHelper(object): - """Token data helper.""" - - def __init__(self): - # Keep __init__ around to ensure dependency injection works. 
- super(V3TokenDataHelper, self).__init__() - - def _get_filtered_domain(self, domain_id): - domain_ref = self.resource_api.get_domain(domain_id) - return {'id': domain_ref['id'], 'name': domain_ref['name']} - - def _get_filtered_project(self, project_id): - project_ref = self.resource_api.get_project(project_id) - filtered_project = { - 'id': project_ref['id'], - 'name': project_ref['name']} - if project_ref['domain_id'] is not None: - filtered_project['domain'] = ( - self._get_filtered_domain(project_ref['domain_id'])) - else: - # Projects acting as a domain do not have a domain_id attribute - filtered_project['domain'] = None - return filtered_project - - def _populate_scope(self, token_data, domain_id, project_id): - if 'domain' in token_data or 'project' in token_data: - # scope already exist, no need to populate it again - return - - if domain_id: - token_data['domain'] = self._get_filtered_domain(domain_id) - if project_id: - token_data['project'] = self._get_filtered_project(project_id) - - def _populate_is_admin_project(self, token_data): - # TODO(ayoung): Support the ability for a project acting as a domain - # to be the admin project once the rest of the code for projects - # acting as domains is merged. 
Code will likely be: - # (r.admin_project_name == None and project['is_domain'] == True - # and project['name'] == r.admin_project_domain_name) - project = token_data['project'] - r = CONF.resource - if (project['name'] == r.admin_project_name and - project['domain']['name'] == r.admin_project_domain_name): - token_data['is_admin_project'] = True - - def _get_roles_for_user(self, user_id, domain_id, project_id): - roles = [] - if domain_id: - roles = self.assignment_api.get_roles_for_user_and_domain( - user_id, domain_id) - if project_id: - roles = self.assignment_api.get_roles_for_user_and_project( - user_id, project_id) - return [self.role_api.get_role(role_id) for role_id in roles] - - def populate_roles_for_groups(self, token_data, group_ids, - project_id=None, domain_id=None, - user_id=None): - """Populate roles basing on provided groups and project/domain - - Used for ephemeral users with dynamically assigned groups. - This method does not return anything, yet it modifies token_data in - place. - - :param token_data: a dictionary used for building token response - :param group_ids: list of group IDs a user is a member of - :param project_id: project ID to scope to - :param domain_id: domain ID to scope to - :param user_id: user ID - - :raises keystone.exception.Unauthorized: when no roles were found for a - (group_ids, project_id) or (group_ids, domain_id) pairs. - - """ - def check_roles(roles, user_id, project_id, domain_id): - # User was granted roles so simply exit this function. - if roles: - return - if project_id: - msg = _('User %(user_id)s has no access ' - 'to project %(project_id)s') % { - 'user_id': user_id, - 'project_id': project_id} - elif domain_id: - msg = _('User %(user_id)s has no access ' - 'to domain %(domain_id)s') % { - 'user_id': user_id, - 'domain_id': domain_id} - # Since no roles were found a user is not authorized to - # perform any operations. Raise an exception with - # appropriate error message. 
- raise exception.Unauthorized(msg) - - roles = self.assignment_api.get_roles_for_groups(group_ids, - project_id, - domain_id) - check_roles(roles, user_id, project_id, domain_id) - token_data['roles'] = roles - - def _populate_user(self, token_data, user_id, trust): - if 'user' in token_data: - # no need to repopulate user if it already exists - return - - user_ref = self.identity_api.get_user(user_id) - if CONF.trust.enabled and trust and 'OS-TRUST:trust' not in token_data: - trustor_user_ref = (self.identity_api.get_user( - trust['trustor_user_id'])) - try: - self.identity_api.assert_user_enabled(trust['trustor_user_id']) - except AssertionError: - raise exception.Forbidden(_('Trustor is disabled.')) - if trust['impersonation']: - user_ref = trustor_user_ref - token_data['OS-TRUST:trust'] = ( - { - 'id': trust['id'], - 'trustor_user': {'id': trust['trustor_user_id']}, - 'trustee_user': {'id': trust['trustee_user_id']}, - 'impersonation': trust['impersonation'] - }) - filtered_user = { - 'id': user_ref['id'], - 'name': user_ref['name'], - 'domain': self._get_filtered_domain(user_ref['domain_id'])} - token_data['user'] = filtered_user - - def _populate_oauth_section(self, token_data, access_token): - if access_token: - access_token_id = access_token['id'] - consumer_id = access_token['consumer_id'] - token_data['OS-OAUTH1'] = ({'access_token_id': access_token_id, - 'consumer_id': consumer_id}) - - def _populate_roles(self, token_data, user_id, domain_id, project_id, - trust, access_token): - if 'roles' in token_data: - # no need to repopulate roles - return - - if access_token: - filtered_roles = [] - authed_role_ids = jsonutils.loads(access_token['role_ids']) - all_roles = self.role_api.list_roles() - for role in all_roles: - for authed_role in authed_role_ids: - if authed_role == role['id']: - filtered_roles.append({'id': role['id'], - 'name': role['name']}) - token_data['roles'] = filtered_roles - return - - if CONF.trust.enabled and trust: - # If 
redelegated_trust_id is set, then we must traverse the - # trust_chain in order to determine who the original trustor is. We - # need to do this because the user ID of the original trustor helps - # us determine scope in the redelegated context. - if trust.get('redelegated_trust_id'): - trust_chain = self.trust_api.get_trust_pedigree(trust['id']) - token_user_id = trust_chain[-1]['trustor_user_id'] - else: - token_user_id = trust['trustor_user_id'] - - token_project_id = trust['project_id'] - # trusts do not support domains yet - token_domain_id = None - else: - token_user_id = user_id - token_project_id = project_id - token_domain_id = domain_id - - if token_domain_id or token_project_id: - filtered_roles = [] - if CONF.trust.enabled and trust: - # First expand out any roles that were in the trust to include - # any implied roles, whether global or domain specific - refs = [{'role_id': role['id']} for role in trust['roles']] - effective_trust_roles = ( - self.assignment_api.add_implied_roles(refs)) - # Now get the current role assignments for the trustor, - # including any domain specific roles. 
- assignment_list = self.assignment_api.list_role_assignments( - user_id=token_user_id, - project_id=token_project_id, - effective=True, strip_domain_roles=False) - current_effective_trustor_roles = ( - list(set([x['role_id'] for x in assignment_list]))) - # Go through each of the effective trust roles, making sure the - # trustor still has them, if any have been removed, then we - # will treat the trust as invalid - for trust_role in effective_trust_roles: - - match_roles = [x for x in current_effective_trustor_roles - if x == trust_role['role_id']] - if match_roles: - role = self.role_api.get_role(match_roles[0]) - if role['domain_id'] is None: - filtered_roles.append(role) - else: - raise exception.Forbidden( - _('Trustee has no delegated roles.')) - else: - for role in self._get_roles_for_user(token_user_id, - token_domain_id, - token_project_id): - filtered_roles.append({'id': role['id'], - 'name': role['name']}) - - # user has no project or domain roles, therefore access denied - if not filtered_roles: - if token_project_id: - msg = _('User %(user_id)s has no access ' - 'to project %(project_id)s') % { - 'user_id': user_id, - 'project_id': token_project_id} - else: - msg = _('User %(user_id)s has no access ' - 'to domain %(domain_id)s') % { - 'user_id': user_id, - 'domain_id': token_domain_id} - LOG.debug(msg) - raise exception.Unauthorized(msg) - - token_data['roles'] = filtered_roles - - def _populate_service_catalog(self, token_data, user_id, - domain_id, project_id, trust): - if 'catalog' in token_data: - # no need to repopulate service catalog - return - - if CONF.trust.enabled and trust: - user_id = trust['trustor_user_id'] - if project_id or domain_id: - service_catalog = self.catalog_api.get_v3_catalog( - user_id, project_id) - token_data['catalog'] = service_catalog - - def _populate_service_providers(self, token_data): - if 'service_providers' in token_data: - return - - service_providers = self.federation_api.get_enabled_service_providers() - if 
service_providers: - token_data['service_providers'] = service_providers - - def _populate_token_dates(self, token_data, expires=None, trust=None, - issued_at=None): - if not expires: - expires = provider.default_expire_time() - if not isinstance(expires, six.string_types): - expires = utils.isotime(expires, subsecond=True) - token_data['expires_at'] = expires - token_data['issued_at'] = (issued_at or - utils.isotime(subsecond=True)) - - def _populate_audit_info(self, token_data, audit_info=None): - if audit_info is None or isinstance(audit_info, six.string_types): - token_data['audit_ids'] = provider.audit_info(audit_info) - elif isinstance(audit_info, list): - token_data['audit_ids'] = audit_info - else: - msg = (_('Invalid audit info data type: %(data)s (%(type)s)') % - {'data': audit_info, 'type': type(audit_info)}) - LOG.error(msg) - raise exception.UnexpectedError(msg) - - def get_token_data(self, user_id, method_names, domain_id=None, - project_id=None, expires=None, trust=None, token=None, - include_catalog=True, bind=None, access_token=None, - issued_at=None, audit_info=None): - token_data = {'methods': method_names} - - # We've probably already written these to the token - if token: - for x in ('roles', 'user', 'catalog', 'project', 'domain'): - if x in token: - token_data[x] = token[x] - - if bind: - token_data['bind'] = bind - - self._populate_scope(token_data, domain_id, project_id) - if token_data.get('project'): - self._populate_is_admin_project(token_data) - self._populate_user(token_data, user_id, trust) - self._populate_roles(token_data, user_id, domain_id, project_id, trust, - access_token) - self._populate_audit_info(token_data, audit_info) - - if include_catalog: - self._populate_service_catalog(token_data, user_id, domain_id, - project_id, trust) - self._populate_service_providers(token_data) - self._populate_token_dates(token_data, expires=expires, trust=trust, - issued_at=issued_at) - self._populate_oauth_section(token_data, access_token) - 
return {'token': token_data} - - -@dependency.requires('catalog_api', 'identity_api', 'oauth_api', - 'resource_api', 'role_api', 'trust_api') -class BaseProvider(provider.Provider): - def __init__(self, *args, **kwargs): - super(BaseProvider, self).__init__(*args, **kwargs) - self.v3_token_data_helper = V3TokenDataHelper() - self.v2_token_data_helper = V2TokenDataHelper() - - def get_token_version(self, token_data): - if token_data and isinstance(token_data, dict): - if 'token_version' in token_data: - if token_data['token_version'] in token.provider.VERSIONS: - return token_data['token_version'] - # FIXME(morganfainberg): deprecate the following logic in future - # revisions. It is better to just specify the token_version in - # the token_data itself. This way we can support future versions - # that might have the same fields. - if 'access' in token_data: - return token.provider.V2 - if 'token' in token_data and 'methods' in token_data['token']: - return token.provider.V3 - raise exception.UnsupportedTokenVersionException() - - def issue_v2_token(self, token_ref, roles_ref=None, - catalog_ref=None): - if token_ref.get('bind') and not self._supports_bind_authentication: - msg = _('The configured token provider does not support bind ' - 'authentication.') - raise exception.NotImplemented(message=msg) - - metadata_ref = token_ref['metadata'] - trust_ref = None - if CONF.trust.enabled and metadata_ref and 'trust_id' in metadata_ref: - trust_ref = self.trust_api.get_trust(metadata_ref['trust_id']) - - token_data = self.v2_token_data_helper.format_token( - token_ref, roles_ref, catalog_ref, trust_ref) - token_id = self._get_token_id(token_data) - token_data['access']['token']['id'] = token_id - return token_id, token_data - - def _is_mapped_token(self, auth_context): - return (federation_constants.IDENTITY_PROVIDER in auth_context and - federation_constants.PROTOCOL in auth_context) - - def issue_v3_token(self, user_id, method_names, expires_at=None, - project_id=None, 
domain_id=None, auth_context=None, - trust=None, metadata_ref=None, include_catalog=True, - parent_audit_id=None): - if auth_context and auth_context.get('bind'): - # NOTE(lbragstad): Check if the token provider being used actually - # supports bind authentication methods before proceeding. - if not self._supports_bind_authentication: - raise exception.NotImplemented(_( - 'The configured token provider does not support bind ' - 'authentication.')) - - # for V2, trust is stashed in metadata_ref - if (CONF.trust.enabled and not trust and metadata_ref and - 'trust_id' in metadata_ref): - trust = self.trust_api.get_trust(metadata_ref['trust_id']) - - if CONF.trust.enabled and trust: - if user_id != trust['trustee_user_id']: - raise exception.Forbidden(_('User is not a trustee.')) - - token_ref = None - if auth_context and self._is_mapped_token(auth_context): - token_ref = self._handle_mapped_tokens( - auth_context, project_id, domain_id) - - access_token = None - if 'oauth1' in method_names: - access_token_id = auth_context['access_token_id'] - access_token = self.oauth_api.get_access_token(access_token_id) - - token_data = self.v3_token_data_helper.get_token_data( - user_id, - method_names, - domain_id=domain_id, - project_id=project_id, - expires=expires_at, - trust=trust, - bind=auth_context.get('bind') if auth_context else None, - token=token_ref, - include_catalog=include_catalog, - access_token=access_token, - audit_info=parent_audit_id) - - token_id = self._get_token_id(token_data) - return token_id, token_data - - def _handle_mapped_tokens(self, auth_context, project_id, domain_id): - user_id = auth_context['user_id'] - group_ids = auth_context['group_ids'] - idp = auth_context[federation_constants.IDENTITY_PROVIDER] - protocol = auth_context[federation_constants.PROTOCOL] - token_data = { - 'user': { - 'id': user_id, - 'name': parse.unquote(user_id), - federation_constants.FEDERATION: { - 'groups': [{'id': x} for x in group_ids], - 'identity_provider': {'id': 
idp}, - 'protocol': {'id': protocol} - }, - 'domain': { - 'id': CONF.federation.federated_domain_name, - 'name': CONF.federation.federated_domain_name - } - } - } - - if project_id or domain_id: - self.v3_token_data_helper.populate_roles_for_groups( - token_data, group_ids, project_id, domain_id, user_id) - - return token_data - - def _verify_token_ref(self, token_ref): - """Verify and return the given token_ref.""" - if not token_ref: - raise exception.Unauthorized() - return token_ref - - def _assert_is_not_federation_token(self, token_ref): - """Make sure we aren't using v2 auth on a federation token.""" - token_data = token_ref.get('token_data') - if (token_data and self.get_token_version(token_data) == - token.provider.V3): - if 'OS-FEDERATION' in token_data['token']['user']: - msg = _('Attempting to use OS-FEDERATION token with V2 ' - 'Identity Service, use V3 Authentication') - raise exception.Unauthorized(msg) - - def _assert_default_domain(self, token_ref): - """Make sure we are operating on default domain only.""" - if (token_ref.get('token_data') and - self.get_token_version(token_ref.get('token_data')) == - token.provider.V3): - # this is a V3 token - msg = _('Non-default domain is not supported') - # domain scoping is prohibited - if token_ref['token_data']['token'].get('domain'): - raise exception.Unauthorized( - _('Domain scoped token is not supported')) - # if token is scoped to trust, both trustor and trustee must - # be in the default domain. 
Furthermore, the delegated project - # must also be in the default domain - metadata_ref = token_ref['metadata'] - if CONF.trust.enabled and 'trust_id' in metadata_ref: - trust_ref = self.trust_api.get_trust(metadata_ref['trust_id']) - trustee_user_ref = self.identity_api.get_user( - trust_ref['trustee_user_id']) - if (trustee_user_ref['domain_id'] != - CONF.identity.default_domain_id): - raise exception.Unauthorized(msg) - trustor_user_ref = self.identity_api.get_user( - trust_ref['trustor_user_id']) - if (trustor_user_ref['domain_id'] != - CONF.identity.default_domain_id): - raise exception.Unauthorized(msg) - project_ref = self.resource_api.get_project( - trust_ref['project_id']) - if (project_ref['domain_id'] != - CONF.identity.default_domain_id): - raise exception.Unauthorized(msg) - - def validate_v2_token(self, token_ref): - try: - self._assert_is_not_federation_token(token_ref) - self._assert_default_domain(token_ref) - # FIXME(gyee): performance or correctness? Should we return the - # cached token or reconstruct it? Obviously if we are going with - # the cached token, any role, project, or domain name changes - # will not be reflected. One may argue that with PKI tokens, - # we are essentially doing cached token validation anyway. - # Lets go with the cached token strategy. Since token - # management layer is now pluggable, one can always provide - # their own implementation to suit their needs. 
- token_data = token_ref.get('token_data') - if (self.get_token_version(token_data) != token.provider.V2): - # Validate the V3 token as V2 - token_data = self.v2_token_data_helper.v3_to_v2_token( - token_data) - - trust_id = token_data['access'].get('trust', {}).get('id') - if trust_id: - msg = ('Unable to validate trust-scoped tokens using version ' - 'v2.0 API.') - raise exception.Unauthorized(msg) - - return token_data - except exception.ValidationError: - LOG.exception(_LE('Failed to validate token')) - token_id = token_ref['token_data']['access']['token']['id'] - raise exception.TokenNotFound(token_id=token_id) - - def validate_non_persistent_token(self, token_id): - try: - (user_id, methods, audit_ids, domain_id, project_id, trust_id, - federated_info, access_token_id, created_at, expires_at) = ( - self.token_formatter.validate_token(token_id)) - except exception.ValidationError as e: - raise exception.TokenNotFound(e) - - token_dict = None - trust_ref = None - if federated_info: - # NOTE(lbragstad): We need to rebuild information about the - # federated token as well as the federated token roles. This is - # because when we validate a non-persistent token, we don't have a - # token reference to pull the federated token information out of. - # As a result, we have to extract it from the token itself and - # rebuild the federated context. These private methods currently - # live in the keystone.token.providers.fernet.Provider() class. 
- token_dict = self._rebuild_federated_info(federated_info, user_id) - if project_id or domain_id: - self._rebuild_federated_token_roles(token_dict, federated_info, - user_id, project_id, - domain_id) - if trust_id: - trust_ref = self.trust_api.get_trust(trust_id) - - access_token = None - if access_token_id: - access_token = self.oauth_api.get_access_token(access_token_id) - - return self.v3_token_data_helper.get_token_data( - user_id, - method_names=methods, - domain_id=domain_id, - project_id=project_id, - issued_at=created_at, - expires=expires_at, - trust=trust_ref, - token=token_dict, - access_token=access_token, - audit_info=audit_ids) - - def validate_v3_token(self, token_ref): - # FIXME(gyee): performance or correctness? Should we return the - # cached token or reconstruct it? Obviously if we are going with - # the cached token, any role, project, or domain name changes - # will not be reflected. One may argue that with PKI tokens, - # we are essentially doing cached token validation anyway. - # Lets go with the cached token strategy. Since token - # management layer is now pluggable, one can always provide - # their own implementation to suit their needs. 
- - trust_id = token_ref.get('trust_id') - if trust_id: - # token trust validation - self.trust_api.get_trust(trust_id) - - token_data = token_ref.get('token_data') - if not token_data or 'token' not in token_data: - # token ref is created by V2 API - project_id = None - project_ref = token_ref.get('tenant') - if project_ref: - project_id = project_ref['id'] - - issued_at = token_ref['token_data']['access']['token']['issued_at'] - audit = token_ref['token_data']['access']['token'].get('audit_ids') - - token_data = self.v3_token_data_helper.get_token_data( - token_ref['user']['id'], - ['password', 'token'], - project_id=project_id, - bind=token_ref.get('bind'), - expires=token_ref['expires'], - issued_at=issued_at, - audit_info=audit) - return token_data diff --git a/keystone-moon/keystone/token/providers/fernet/__init__.py b/keystone-moon/keystone/token/providers/fernet/__init__.py deleted file mode 100644 index 953ef624..00000000 --- a/keystone-moon/keystone/token/providers/fernet/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone.token.providers.fernet.core import * # noqa diff --git a/keystone-moon/keystone/token/providers/fernet/core.py b/keystone-moon/keystone/token/providers/fernet/core.py deleted file mode 100644 index ff6fe9cc..00000000 --- a/keystone-moon/keystone/token/providers/fernet/core.py +++ /dev/null @@ -1,211 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_config import cfg - -from keystone.common import dependency -from keystone.common import utils as ks_utils -from keystone.federation import constants as federation_constants -from keystone.token.providers import common -from keystone.token.providers.fernet import token_formatters as tf - - -CONF = cfg.CONF - - -@dependency.requires('trust_api', 'oauth_api') -class Provider(common.BaseProvider): - def __init__(self, *args, **kwargs): - super(Provider, self).__init__(*args, **kwargs) - - self.token_formatter = tf.TokenFormatter() - - def needs_persistence(self): - """Should the token be written to a backend.""" - return False - - def issue_v2_token(self, *args, **kwargs): - token_id, token_data = super(Provider, self).issue_v2_token( - *args, **kwargs) - self._build_issued_at_info(token_id, token_data) - return token_id, token_data - - def issue_v3_token(self, *args, **kwargs): - token_id, token_data = super(Provider, self).issue_v3_token( - *args, **kwargs) - self._build_issued_at_info(token_id, token_data) - return token_id, token_data - - def _build_issued_at_info(self, token_id, 
token_data): - # NOTE(roxanaghe, lbragstad): We must use the creation time that - # Fernet builds into it's token. The Fernet spec details that the - # token creation time is built into the token, outside of the payload - # provided by Keystone. This is the reason why we don't pass the - # issued_at time in the payload. This also means that we shouldn't - # return a token reference with a creation time that we created - # when Fernet uses a different creation time. We should use the - # creation time provided by Fernet because it's the creation time - # that we have to rely on when we validate the token. - fernet_creation_datetime_obj = self.token_formatter.creation_time( - token_id) - if token_data.get('access'): - token_data['access']['token']['issued_at'] = ks_utils.isotime( - at=fernet_creation_datetime_obj, subsecond=True) - else: - token_data['token']['issued_at'] = ks_utils.isotime( - at=fernet_creation_datetime_obj, subsecond=True) - - def _build_federated_info(self, token_data): - """Extract everything needed for federated tokens. - - This dictionary is passed to federated token formatters, which unpack - the values and build federated Fernet tokens. - - """ - token_data = token_data['token'] - try: - user = token_data['user'] - federation = user[federation_constants.FEDERATION] - idp_id = federation['identity_provider']['id'] - protocol_id = federation['protocol']['id'] - except KeyError: - # The token data doesn't have federated info, so we aren't dealing - # with a federated token and no federated info to build. - return - - group_ids = federation.get('groups') - - return {'group_ids': group_ids, - 'idp_id': idp_id, - 'protocol_id': protocol_id} - - def _rebuild_federated_info(self, federated_dict, user_id): - """Format federated information into the token reference. - - The federated_dict is passed back from the federated token formatters. 
- The responsibility of this method is to format the information passed - back from the token formatter into the token reference before - constructing the token data from the V3TokenDataHelper. - - """ - g_ids = federated_dict['group_ids'] - idp_id = federated_dict['idp_id'] - protocol_id = federated_dict['protocol_id'] - - federated_info = { - 'groups': g_ids, - 'identity_provider': {'id': idp_id}, - 'protocol': {'id': protocol_id} - } - - token_dict = { - 'user': { - federation_constants.FEDERATION: federated_info, - 'id': user_id, - 'name': user_id, - 'domain': {'id': CONF.federation.federated_domain_name, - 'name': CONF.federation.federated_domain_name, }, - } - } - - return token_dict - - def _rebuild_federated_token_roles(self, token_dict, federated_dict, - user_id, project_id, domain_id): - """Populate roles based on (groups, project/domain) pair. - - We must populate roles from (groups, project/domain) as ephemeral users - don't exist in the backend. Upon success, a ``roles`` key will be added - to ``token_dict``. 
- - :param token_dict: dictionary with data used for building token - :param federated_dict: federated information such as identity provider - protocol and set of group IDs - :param user_id: user ID - :param project_id: project ID the token is being scoped to - :param domain_id: domain ID the token is being scoped to - - """ - group_ids = [x['id'] for x in federated_dict['group_ids']] - self.v3_token_data_helper.populate_roles_for_groups( - token_dict, group_ids, project_id, domain_id, user_id) - - def _extract_v2_token_data(self, token_data): - user_id = token_data['access']['user']['id'] - expires_at = token_data['access']['token']['expires'] - audit_ids = token_data['access']['token'].get('audit_ids') - methods = ['password'] - if len(audit_ids) > 1: - methods.append('token') - project_id = token_data['access']['token'].get('tenant', {}).get('id') - domain_id = None - trust_id = None - access_token_id = None - federated_info = None - return (user_id, expires_at, audit_ids, methods, domain_id, project_id, - trust_id, access_token_id, federated_info) - - def _extract_v3_token_data(self, token_data): - """Extract information from a v3 token reference.""" - user_id = token_data['token']['user']['id'] - expires_at = token_data['token']['expires_at'] - audit_ids = token_data['token']['audit_ids'] - methods = token_data['token'].get('methods') - domain_id = token_data['token'].get('domain', {}).get('id') - project_id = token_data['token'].get('project', {}).get('id') - trust_id = token_data['token'].get('OS-TRUST:trust', {}).get('id') - access_token_id = token_data['token'].get('OS-OAUTH1', {}).get( - 'access_token_id') - federated_info = self._build_federated_info(token_data) - - return (user_id, expires_at, audit_ids, methods, domain_id, project_id, - trust_id, access_token_id, federated_info) - - def _get_token_id(self, token_data): - """Generate the token_id based upon the data in token_data. 
- - :param token_data: token information - :type token_data: dict - :rtype: six.text_type - - """ - # NOTE(lbragstad): Only v2.0 token responses include an 'access' - # attribute. - if token_data.get('access'): - (user_id, expires_at, audit_ids, methods, domain_id, project_id, - trust_id, access_token_id, federated_info) = ( - self._extract_v2_token_data(token_data)) - else: - (user_id, expires_at, audit_ids, methods, domain_id, project_id, - trust_id, access_token_id, federated_info) = ( - self._extract_v3_token_data(token_data)) - - return self.token_formatter.create_token( - user_id, - expires_at, - audit_ids, - methods=methods, - domain_id=domain_id, - project_id=project_id, - trust_id=trust_id, - federated_info=federated_info, - access_token_id=access_token_id - ) - - @property - def _supports_bind_authentication(self): - """Return if the token provider supports bind authentication methods. - - :returns: False - - """ - return False diff --git a/keystone-moon/keystone/token/providers/fernet/token_formatters.py b/keystone-moon/keystone/token/providers/fernet/token_formatters.py deleted file mode 100644 index dfdd06e8..00000000 --- a/keystone-moon/keystone/token/providers/fernet/token_formatters.py +++ /dev/null @@ -1,677 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import base64 -import datetime -import struct -import uuid - -from cryptography import fernet -import msgpack -from oslo_config import cfg -from oslo_log import log -from oslo_utils import timeutils -from six.moves import map -from six.moves import urllib - -from keystone.auth import plugins as auth_plugins -from keystone.common import utils as ks_utils -from keystone import exception -from keystone.i18n import _, _LI -from keystone.token import provider -from keystone.token.providers.fernet import utils - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - -# Fernet byte indexes as as computed by pypi/keyless_fernet and defined in -# https://github.com/fernet/spec -TIMESTAMP_START = 1 -TIMESTAMP_END = 9 - - -class TokenFormatter(object): - """Packs and unpacks payloads into tokens for transport.""" - - @property - def crypto(self): - """Return a cryptography instance. - - You can extend this class with a custom crypto @property to provide - your own token encoding / decoding. For example, using a different - cryptography library (e.g. ``python-keyczar``) or to meet arbitrary - security requirements. - - This @property just needs to return an object that implements - ``encrypt(plaintext)`` and ``decrypt(ciphertext)``. - - """ - keys = utils.load_keys() - - if not keys: - raise exception.KeysNotFound() - - fernet_instances = [fernet.Fernet(key) for key in keys] - return fernet.MultiFernet(fernet_instances) - - def pack(self, payload): - """Pack a payload for transport as a token. - - :type payload: six.binary_type - :rtype: six.text_type - - """ - # base64 padding (if any) is not URL-safe - return self.crypto.encrypt(payload).rstrip(b'=').decode('utf-8') - - def unpack(self, token): - """Unpack a token, and validate the payload. - - :type token: six.text_type - :rtype: six.binary_type - - """ - # TODO(lbragstad): Restore padding on token before decoding it. - # Initially in Kilo, Fernet tokens were returned to the user with - # padding appended to the token. 
Later in Liberty this padding was - # removed and restored in the Fernet provider. The following if - # statement ensures that we can validate tokens with and without token - # padding, in the event of an upgrade and the tokens that are issued - # throughout the upgrade. Remove this if statement when Mitaka opens - # for development and exclusively use the restore_padding() class - # method. - if token.endswith('%3D'): - token = urllib.parse.unquote(token) - else: - token = TokenFormatter.restore_padding(token) - - try: - return self.crypto.decrypt(token.encode('utf-8')) - except fernet.InvalidToken: - raise exception.ValidationError( - _('This is not a recognized Fernet token %s') % token) - - @classmethod - def restore_padding(cls, token): - """Restore padding based on token size. - - :param token: token to restore padding on - :type token: six.text_type - :returns: token with correct padding - - """ - # Re-inflate the padding - mod_returned = len(token) % 4 - if mod_returned: - missing_padding = 4 - mod_returned - token += '=' * missing_padding - return token - - @classmethod - def creation_time(cls, fernet_token): - """Returns the creation time of a valid Fernet token. 
- - :type fernet_token: six.text_type - - """ - fernet_token = TokenFormatter.restore_padding(fernet_token) - # fernet_token is six.text_type - - # Fernet tokens are base64 encoded, so we need to unpack them first - # urlsafe_b64decode() requires six.binary_type - token_bytes = base64.urlsafe_b64decode(fernet_token.encode('utf-8')) - - # slice into the byte array to get just the timestamp - timestamp_bytes = token_bytes[TIMESTAMP_START:TIMESTAMP_END] - - # convert those bytes to an integer - # (it's a 64-bit "unsigned long long int" in C) - timestamp_int = struct.unpack(">Q", timestamp_bytes)[0] - - # and with an integer, it's trivial to produce a datetime object - created_at = datetime.datetime.utcfromtimestamp(timestamp_int) - - return created_at - - def create_token(self, user_id, expires_at, audit_ids, methods=None, - domain_id=None, project_id=None, trust_id=None, - federated_info=None, access_token_id=None): - """Given a set of payload attributes, generate a Fernet token.""" - for payload_class in PAYLOAD_CLASSES: - if payload_class.create_arguments_apply( - project_id=project_id, domain_id=domain_id, - trust_id=trust_id, federated_info=federated_info, - access_token_id=access_token_id): - break - - version = payload_class.version - payload = payload_class.assemble( - user_id, methods, project_id, domain_id, expires_at, audit_ids, - trust_id, federated_info, access_token_id - ) - - versioned_payload = (version,) + payload - serialized_payload = msgpack.packb(versioned_payload) - token = self.pack(serialized_payload) - - # NOTE(lbragstad): We should warn against Fernet tokens that are over - # 255 characters in length. This is mostly due to persisting the tokens - # in a backend store of some kind that might have a limit of 255 - # characters. Even though Keystone isn't storing a Fernet token - # anywhere, we can't say it isn't being stored somewhere else with - # those kind of backend constraints. 
- if len(token) > 255: - LOG.info(_LI('Fernet token created with length of %d ' - 'characters, which exceeds 255 characters'), - len(token)) - - return token - - def validate_token(self, token): - """Validates a Fernet token and returns the payload attributes. - - :type token: six.text_type - - """ - serialized_payload = self.unpack(token) - versioned_payload = msgpack.unpackb(serialized_payload) - version, payload = versioned_payload[0], versioned_payload[1:] - - for payload_class in PAYLOAD_CLASSES: - if version == payload_class.version: - (user_id, methods, project_id, domain_id, expires_at, - audit_ids, trust_id, federated_info, access_token_id) = ( - payload_class.disassemble(payload)) - break - else: - # If the token_format is not recognized, raise ValidationError. - raise exception.ValidationError(_( - 'This is not a recognized Fernet payload version: %s') % - version) - - # rather than appearing in the payload, the creation time is encoded - # into the token format itself - created_at = TokenFormatter.creation_time(token) - created_at = ks_utils.isotime(at=created_at, subsecond=True) - expires_at = timeutils.parse_isotime(expires_at) - expires_at = ks_utils.isotime(at=expires_at, subsecond=True) - - return (user_id, methods, audit_ids, domain_id, project_id, trust_id, - federated_info, access_token_id, created_at, expires_at) - - -class BasePayload(object): - # each payload variant should have a unique version - version = None - - @classmethod - def create_arguments_apply(cls, **kwargs): - """Check the arguments to see if they apply to this payload variant. - - :returns: True if the arguments indicate that this payload class is - needed for the token otherwise returns False. - :rtype: bool - - """ - raise NotImplementedError() - - @classmethod - def assemble(cls, user_id, methods, project_id, domain_id, expires_at, - audit_ids, trust_id, federated_info, access_token_id): - """Assemble the payload of a token. 
- - :param user_id: identifier of the user in the token request - :param methods: list of authentication methods used - :param project_id: ID of the project to scope to - :param domain_id: ID of the domain to scope to - :param expires_at: datetime of the token's expiration - :param audit_ids: list of the token's audit IDs - :param trust_id: ID of the trust in effect - :param federated_info: dictionary containing group IDs, the identity - provider ID, protocol ID, and federated domain - ID - :param access_token_id: ID of the secret in OAuth1 authentication - :returns: the payload of a token - - """ - raise NotImplementedError() - - @classmethod - def disassemble(cls, payload): - """Disassemble an unscoped payload into the component data. - - The tuple consists of:: - - (user_id, methods, project_id, domain_id, expires_at_str, - audit_ids, trust_id, federated_info, access_token_id) - - * ``methods`` are the auth methods. - * federated_info is a dict contains the group IDs, the identity - provider ID, the protocol ID, and the federated domain ID - - Fields will be set to None if they didn't apply to this payload type. - - :param payload: this variant of payload - :returns: a tuple of the payloads component data - - """ - raise NotImplementedError() - - @classmethod - def convert_uuid_hex_to_bytes(cls, uuid_string): - """Compress UUID formatted strings to bytes. - - :param uuid_string: uuid string to compress to bytes - :returns: a byte representation of the uuid - - """ - uuid_obj = uuid.UUID(uuid_string) - return uuid_obj.bytes - - @classmethod - def convert_uuid_bytes_to_hex(cls, uuid_byte_string): - """Generate uuid.hex format based on byte string. - - :param uuid_byte_string: uuid string to generate from - :returns: uuid hex formatted string - - """ - uuid_obj = uuid.UUID(bytes=uuid_byte_string) - return uuid_obj.hex - - @classmethod - def _convert_time_string_to_float(cls, time_string): - """Convert a time formatted string to a float. 
- - :param time_string: time formatted string - :returns: a timestamp as a float - - """ - time_object = timeutils.parse_isotime(time_string) - return (timeutils.normalize_time(time_object) - - datetime.datetime.utcfromtimestamp(0)).total_seconds() - - @classmethod - def _convert_float_to_time_string(cls, time_float): - """Convert a floating point timestamp to a string. - - :param time_float: integer representing timestamp - :returns: a time formatted strings - - """ - time_object = datetime.datetime.utcfromtimestamp(time_float) - return ks_utils.isotime(time_object, subsecond=True) - - @classmethod - def attempt_convert_uuid_hex_to_bytes(cls, value): - """Attempt to convert value to bytes or return value. - - :param value: value to attempt to convert to bytes - :returns: tuple containing boolean indicating whether user_id was - stored as bytes and uuid value as bytes or the original value - - """ - try: - return (True, cls.convert_uuid_hex_to_bytes(value)) - except ValueError: - # this might not be a UUID, depending on the situation (i.e. 
- # federation) - return (False, value) - - -class UnscopedPayload(BasePayload): - version = 0 - - @classmethod - def create_arguments_apply(cls, **kwargs): - return True - - @classmethod - def assemble(cls, user_id, methods, project_id, domain_id, expires_at, - audit_ids, trust_id, federated_info, access_token_id): - b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id) - methods = auth_plugins.convert_method_list_to_integer(methods) - expires_at_int = cls._convert_time_string_to_float(expires_at) - b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes, - audit_ids)) - return (b_user_id, methods, expires_at_int, b_audit_ids) - - @classmethod - def disassemble(cls, payload): - (is_stored_as_bytes, user_id) = payload[0] - if is_stored_as_bytes: - user_id = cls.convert_uuid_bytes_to_hex(user_id) - methods = auth_plugins.convert_integer_to_method_list(payload[1]) - expires_at_str = cls._convert_float_to_time_string(payload[2]) - audit_ids = list(map(provider.base64_encode, payload[3])) - project_id = None - domain_id = None - trust_id = None - federated_info = None - access_token_id = None - return (user_id, methods, project_id, domain_id, expires_at_str, - audit_ids, trust_id, federated_info, access_token_id) - - -class DomainScopedPayload(BasePayload): - version = 1 - - @classmethod - def create_arguments_apply(cls, **kwargs): - return kwargs['domain_id'] - - @classmethod - def assemble(cls, user_id, methods, project_id, domain_id, expires_at, - audit_ids, trust_id, federated_info, access_token_id): - b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id) - methods = auth_plugins.convert_method_list_to_integer(methods) - try: - b_domain_id = cls.convert_uuid_hex_to_bytes(domain_id) - except ValueError: - # the default domain ID is configurable, and probably isn't a UUID - if domain_id == CONF.identity.default_domain_id: - b_domain_id = domain_id - else: - raise - expires_at_int = cls._convert_time_string_to_float(expires_at) - b_audit_ids = 
list(map(provider.random_urlsafe_str_to_bytes, - audit_ids)) - return (b_user_id, methods, b_domain_id, expires_at_int, b_audit_ids) - - @classmethod - def disassemble(cls, payload): - (is_stored_as_bytes, user_id) = payload[0] - if is_stored_as_bytes: - user_id = cls.convert_uuid_bytes_to_hex(user_id) - methods = auth_plugins.convert_integer_to_method_list(payload[1]) - try: - domain_id = cls.convert_uuid_bytes_to_hex(payload[2]) - except ValueError: - # the default domain ID is configurable, and probably isn't a UUID - if payload[2] == CONF.identity.default_domain_id: - domain_id = payload[2] - else: - raise - expires_at_str = cls._convert_float_to_time_string(payload[3]) - audit_ids = list(map(provider.base64_encode, payload[4])) - project_id = None - trust_id = None - federated_info = None - access_token_id = None - return (user_id, methods, project_id, domain_id, expires_at_str, - audit_ids, trust_id, federated_info, access_token_id) - - -class ProjectScopedPayload(BasePayload): - version = 2 - - @classmethod - def create_arguments_apply(cls, **kwargs): - return kwargs['project_id'] - - @classmethod - def assemble(cls, user_id, methods, project_id, domain_id, expires_at, - audit_ids, trust_id, federated_info, access_token_id): - b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id) - methods = auth_plugins.convert_method_list_to_integer(methods) - b_project_id = cls.attempt_convert_uuid_hex_to_bytes(project_id) - expires_at_int = cls._convert_time_string_to_float(expires_at) - b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes, - audit_ids)) - return (b_user_id, methods, b_project_id, expires_at_int, b_audit_ids) - - @classmethod - def disassemble(cls, payload): - (is_stored_as_bytes, user_id) = payload[0] - if is_stored_as_bytes: - user_id = cls.convert_uuid_bytes_to_hex(user_id) - methods = auth_plugins.convert_integer_to_method_list(payload[1]) - (is_stored_as_bytes, project_id) = payload[2] - if is_stored_as_bytes: - project_id = 
cls.convert_uuid_bytes_to_hex(project_id) - expires_at_str = cls._convert_float_to_time_string(payload[3]) - audit_ids = list(map(provider.base64_encode, payload[4])) - domain_id = None - trust_id = None - federated_info = None - access_token_id = None - return (user_id, methods, project_id, domain_id, expires_at_str, - audit_ids, trust_id, federated_info, access_token_id) - - -class TrustScopedPayload(BasePayload): - version = 3 - - @classmethod - def create_arguments_apply(cls, **kwargs): - return kwargs['trust_id'] - - @classmethod - def assemble(cls, user_id, methods, project_id, domain_id, expires_at, - audit_ids, trust_id, federated_info, access_token_id): - b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id) - methods = auth_plugins.convert_method_list_to_integer(methods) - b_project_id = cls.attempt_convert_uuid_hex_to_bytes(project_id) - b_trust_id = cls.convert_uuid_hex_to_bytes(trust_id) - expires_at_int = cls._convert_time_string_to_float(expires_at) - b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes, - audit_ids)) - - return (b_user_id, methods, b_project_id, expires_at_int, b_audit_ids, - b_trust_id) - - @classmethod - def disassemble(cls, payload): - (is_stored_as_bytes, user_id) = payload[0] - if is_stored_as_bytes: - user_id = cls.convert_uuid_bytes_to_hex(user_id) - methods = auth_plugins.convert_integer_to_method_list(payload[1]) - (is_stored_as_bytes, project_id) = payload[2] - if is_stored_as_bytes: - project_id = cls.convert_uuid_bytes_to_hex(project_id) - expires_at_str = cls._convert_float_to_time_string(payload[3]) - audit_ids = list(map(provider.base64_encode, payload[4])) - trust_id = cls.convert_uuid_bytes_to_hex(payload[5]) - domain_id = None - federated_info = None - access_token_id = None - return (user_id, methods, project_id, domain_id, expires_at_str, - audit_ids, trust_id, federated_info, access_token_id) - - -class FederatedUnscopedPayload(BasePayload): - version = 4 - - @classmethod - def 
create_arguments_apply(cls, **kwargs): - return kwargs['federated_info'] - - @classmethod - def pack_group_id(cls, group_dict): - return cls.attempt_convert_uuid_hex_to_bytes(group_dict['id']) - - @classmethod - def unpack_group_id(cls, group_id_in_bytes): - (is_stored_as_bytes, group_id) = group_id_in_bytes - if is_stored_as_bytes: - group_id = cls.convert_uuid_bytes_to_hex(group_id) - return {'id': group_id} - - @classmethod - def assemble(cls, user_id, methods, project_id, domain_id, expires_at, - audit_ids, trust_id, federated_info, access_token_id): - b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id) - methods = auth_plugins.convert_method_list_to_integer(methods) - b_group_ids = list(map(cls.pack_group_id, - federated_info['group_ids'])) - b_idp_id = cls.attempt_convert_uuid_hex_to_bytes( - federated_info['idp_id']) - protocol_id = federated_info['protocol_id'] - expires_at_int = cls._convert_time_string_to_float(expires_at) - b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes, - audit_ids)) - - return (b_user_id, methods, b_group_ids, b_idp_id, protocol_id, - expires_at_int, b_audit_ids) - - @classmethod - def disassemble(cls, payload): - (is_stored_as_bytes, user_id) = payload[0] - if is_stored_as_bytes: - user_id = cls.convert_uuid_bytes_to_hex(user_id) - methods = auth_plugins.convert_integer_to_method_list(payload[1]) - group_ids = list(map(cls.unpack_group_id, payload[2])) - (is_stored_as_bytes, idp_id) = payload[3] - if is_stored_as_bytes: - idp_id = cls.convert_uuid_bytes_to_hex(idp_id) - protocol_id = payload[4] - expires_at_str = cls._convert_float_to_time_string(payload[5]) - audit_ids = list(map(provider.base64_encode, payload[6])) - federated_info = dict(group_ids=group_ids, idp_id=idp_id, - protocol_id=protocol_id) - project_id = None - domain_id = None - trust_id = None - access_token_id = None - return (user_id, methods, project_id, domain_id, expires_at_str, - audit_ids, trust_id, federated_info, access_token_id) - - -class 
FederatedScopedPayload(FederatedUnscopedPayload): - version = None - - @classmethod - def assemble(cls, user_id, methods, project_id, domain_id, expires_at, - audit_ids, trust_id, federated_info, access_token_id): - b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id) - methods = auth_plugins.convert_method_list_to_integer(methods) - b_scope_id = cls.attempt_convert_uuid_hex_to_bytes( - project_id or domain_id) - b_group_ids = list(map(cls.pack_group_id, - federated_info['group_ids'])) - b_idp_id = cls.attempt_convert_uuid_hex_to_bytes( - federated_info['idp_id']) - protocol_id = federated_info['protocol_id'] - expires_at_int = cls._convert_time_string_to_float(expires_at) - b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes, - audit_ids)) - - return (b_user_id, methods, b_scope_id, b_group_ids, b_idp_id, - protocol_id, expires_at_int, b_audit_ids) - - @classmethod - def disassemble(cls, payload): - (is_stored_as_bytes, user_id) = payload[0] - if is_stored_as_bytes: - user_id = cls.convert_uuid_bytes_to_hex(user_id) - methods = auth_plugins.convert_integer_to_method_list(payload[1]) - (is_stored_as_bytes, scope_id) = payload[2] - if is_stored_as_bytes: - scope_id = cls.convert_uuid_bytes_to_hex(scope_id) - project_id = ( - scope_id - if cls.version == FederatedProjectScopedPayload.version else None) - domain_id = ( - scope_id - if cls.version == FederatedDomainScopedPayload.version else None) - group_ids = list(map(cls.unpack_group_id, payload[3])) - (is_stored_as_bytes, idp_id) = payload[4] - if is_stored_as_bytes: - idp_id = cls.convert_uuid_bytes_to_hex(idp_id) - protocol_id = payload[5] - expires_at_str = cls._convert_float_to_time_string(payload[6]) - audit_ids = list(map(provider.base64_encode, payload[7])) - federated_info = dict(idp_id=idp_id, protocol_id=protocol_id, - group_ids=group_ids) - trust_id = None - access_token_id = None - return (user_id, methods, project_id, domain_id, expires_at_str, - audit_ids, trust_id, federated_info, 
access_token_id) - - -class FederatedProjectScopedPayload(FederatedScopedPayload): - version = 5 - - @classmethod - def create_arguments_apply(cls, **kwargs): - return kwargs['project_id'] and kwargs['federated_info'] - - -class FederatedDomainScopedPayload(FederatedScopedPayload): - version = 6 - - @classmethod - def create_arguments_apply(cls, **kwargs): - return kwargs['domain_id'] and kwargs['federated_info'] - - -class OauthScopedPayload(BasePayload): - version = 7 - - @classmethod - def create_arguments_apply(cls, **kwargs): - return kwargs['access_token_id'] - - @classmethod - def assemble(cls, user_id, methods, project_id, domain_id, expires_at, - audit_ids, trust_id, federated_info, access_token_id): - b_user_id = cls.attempt_convert_uuid_hex_to_bytes(user_id) - methods = auth_plugins.convert_method_list_to_integer(methods) - b_project_id = cls.attempt_convert_uuid_hex_to_bytes(project_id) - expires_at_int = cls._convert_time_string_to_float(expires_at) - b_audit_ids = list(map(provider.random_urlsafe_str_to_bytes, - audit_ids)) - b_access_token_id = cls.attempt_convert_uuid_hex_to_bytes( - access_token_id) - return (b_user_id, methods, b_project_id, b_access_token_id, - expires_at_int, b_audit_ids) - - @classmethod - def disassemble(cls, payload): - (is_stored_as_bytes, user_id) = payload[0] - if is_stored_as_bytes: - user_id = cls.convert_uuid_bytes_to_hex(user_id) - methods = auth_plugins.convert_integer_to_method_list(payload[1]) - (is_stored_as_bytes, project_id) = payload[2] - if is_stored_as_bytes: - project_id = cls.convert_uuid_bytes_to_hex(project_id) - (is_stored_as_bytes, access_token_id) = payload[3] - if is_stored_as_bytes: - access_token_id = cls.convert_uuid_bytes_to_hex(access_token_id) - expires_at_str = cls._convert_float_to_time_string(payload[4]) - audit_ids = list(map(provider.base64_encode, payload[5])) - domain_id = None - trust_id = None - federated_info = None - - return (user_id, methods, project_id, domain_id, expires_at_str, - 
audit_ids, trust_id, federated_info, access_token_id) - - -# For now, the order of the classes in the following list is important. This -# is because the way they test that the payload applies to them in -# the create_arguments_apply method requires that the previous ones rejected -# the payload arguments. For example, UnscopedPayload must be last since it's -# the catch-all after all the other payloads have been checked. -# TODO(blk-u): Clean up the create_arguments_apply methods so that they don't -# depend on the previous classes then these can be in any order. -PAYLOAD_CLASSES = [ - OauthScopedPayload, - TrustScopedPayload, - FederatedProjectScopedPayload, - FederatedDomainScopedPayload, - FederatedUnscopedPayload, - ProjectScopedPayload, - DomainScopedPayload, - UnscopedPayload, -] diff --git a/keystone-moon/keystone/token/providers/fernet/utils.py b/keystone-moon/keystone/token/providers/fernet/utils.py deleted file mode 100644 index 1c3552d4..00000000 --- a/keystone-moon/keystone/token/providers/fernet/utils.py +++ /dev/null @@ -1,270 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import os -import stat - -from cryptography import fernet -from oslo_config import cfg -from oslo_log import log - -from keystone.i18n import _LE, _LW, _LI - - -LOG = log.getLogger(__name__) - -CONF = cfg.CONF - - -def validate_key_repository(requires_write=False): - """Validate permissions on the key repository directory.""" - # NOTE(lbragstad): We shouldn't need to check if the directory was passed - # in as None because we don't set allow_no_values to True. - - # ensure current user has sufficient access to the key repository - is_valid = (os.access(CONF.fernet_tokens.key_repository, os.R_OK) and - os.access(CONF.fernet_tokens.key_repository, os.X_OK)) - if requires_write: - is_valid = (is_valid and - os.access(CONF.fernet_tokens.key_repository, os.W_OK)) - - if not is_valid: - LOG.error( - _LE('Either [fernet_tokens] key_repository does not exist or ' - 'Keystone does not have sufficient permission to access it: ' - '%s'), CONF.fernet_tokens.key_repository) - else: - # ensure the key repository isn't world-readable - stat_info = os.stat(CONF.fernet_tokens.key_repository) - if(stat_info.st_mode & stat.S_IROTH or - stat_info.st_mode & stat.S_IXOTH): - LOG.warning(_LW( - '[fernet_tokens] key_repository is world readable: %s'), - CONF.fernet_tokens.key_repository) - - return is_valid - - -def _convert_to_integers(id_value): - """Cast user and group system identifiers to integers.""" - # NOTE(lbragstad) os.chown() will raise a TypeError here if - # keystone_user_id and keystone_group_id are not integers. Let's - # cast them to integers if we can because it's possible to pass non-integer - # values into the fernet_setup utility. - try: - id_int = int(id_value) - except ValueError as e: - msg = _LE('Unable to convert Keystone user or group ID. 
Error: %s') - LOG.error(msg, e) - raise - - return id_int - - -def create_key_directory(keystone_user_id=None, keystone_group_id=None): - """If the configured key directory does not exist, attempt to create it.""" - if not os.access(CONF.fernet_tokens.key_repository, os.F_OK): - LOG.info(_LI( - '[fernet_tokens] key_repository does not appear to exist; ' - 'attempting to create it')) - - try: - os.makedirs(CONF.fernet_tokens.key_repository, 0o700) - except OSError: - LOG.error(_LE( - 'Failed to create [fernet_tokens] key_repository: either it ' - 'already exists or you don\'t have sufficient permissions to ' - 'create it')) - - if keystone_user_id and keystone_group_id: - os.chown( - CONF.fernet_tokens.key_repository, - keystone_user_id, - keystone_group_id) - elif keystone_user_id or keystone_group_id: - LOG.warning(_LW( - 'Unable to change the ownership of [fernet_tokens] ' - 'key_repository without a keystone user ID and keystone group ' - 'ID both being provided: %s') % - CONF.fernet_tokens.key_repository) - - -def _create_new_key(keystone_user_id, keystone_group_id): - """Securely create a new encryption key. - - Create a new key that is readable by the Keystone group and Keystone user. - """ - key = fernet.Fernet.generate_key() # key is bytes - - # This ensures the key created is not world-readable - old_umask = os.umask(0o177) - if keystone_user_id and keystone_group_id: - old_egid = os.getegid() - old_euid = os.geteuid() - os.setegid(keystone_group_id) - os.seteuid(keystone_user_id) - elif keystone_user_id or keystone_group_id: - LOG.warning(_LW( - 'Unable to change the ownership of the new key without a keystone ' - 'user ID and keystone group ID both being provided: %s') % - CONF.fernet_tokens.key_repository) - # Determine the file name of the new key - key_file = os.path.join(CONF.fernet_tokens.key_repository, '0') - try: - with open(key_file, 'w') as f: - f.write(key.decode('utf-8')) # convert key to str for the file. 
- finally: - # After writing the key, set the umask back to it's original value. Do - # the same with group and user identifiers if a Keystone group or user - # was supplied. - os.umask(old_umask) - if keystone_user_id and keystone_group_id: - os.seteuid(old_euid) - os.setegid(old_egid) - - LOG.info(_LI('Created a new key: %s'), key_file) - - -def initialize_key_repository(keystone_user_id=None, keystone_group_id=None): - """Create a key repository and bootstrap it with a key. - - :param keystone_user_id: User ID of the Keystone user. - :param keystone_group_id: Group ID of the Keystone user. - - """ - # make sure we have work to do before proceeding - if os.access(os.path.join(CONF.fernet_tokens.key_repository, '0'), - os.F_OK): - LOG.info(_LI('Key repository is already initialized; aborting.')) - return - - # bootstrap an existing key - _create_new_key(keystone_user_id, keystone_group_id) - - # ensure that we end up with a primary and secondary key - rotate_keys(keystone_user_id, keystone_group_id) - - -def rotate_keys(keystone_user_id=None, keystone_group_id=None): - """Create a new primary key and revoke excess active keys. - - :param keystone_user_id: User ID of the Keystone user. - :param keystone_group_id: Group ID of the Keystone user. - - Key rotation utilizes the following behaviors: - - - The highest key number is used as the primary key (used for encryption). - - All keys can be used for decryption. - - New keys are always created as key "0," which serves as a placeholder - before promoting it to be the primary key. - - This strategy allows you to safely perform rotation on one node in a - cluster, before syncing the results of the rotation to all other nodes - (during both key rotation and synchronization, all nodes must recognize all - primary keys). 
- - """ - # read the list of key files - key_files = dict() - for filename in os.listdir(CONF.fernet_tokens.key_repository): - path = os.path.join(CONF.fernet_tokens.key_repository, str(filename)) - if os.path.isfile(path): - try: - key_id = int(filename) - except ValueError: # nosec : name isn't a number, ignore the file. - pass - else: - key_files[key_id] = path - - LOG.info(_LI('Starting key rotation with %(count)s key files: %(list)s'), { - 'count': len(key_files), - 'list': list(key_files.values())}) - - # determine the number of the new primary key - current_primary_key = max(key_files.keys()) - LOG.info(_LI('Current primary key is: %s'), current_primary_key) - new_primary_key = current_primary_key + 1 - LOG.info(_LI('Next primary key will be: %s'), new_primary_key) - - # promote the next primary key to be the primary - os.rename( - os.path.join(CONF.fernet_tokens.key_repository, '0'), - os.path.join(CONF.fernet_tokens.key_repository, str(new_primary_key))) - key_files.pop(0) - key_files[new_primary_key] = os.path.join( - CONF.fernet_tokens.key_repository, - str(new_primary_key)) - LOG.info(_LI('Promoted key 0 to be the primary: %s'), new_primary_key) - - # add a new key to the rotation, which will be the *next* primary - _create_new_key(keystone_user_id, keystone_group_id) - - max_active_keys = CONF.fernet_tokens.max_active_keys - # check for bad configuration - if max_active_keys < 1: - LOG.warning(_LW( - '[fernet_tokens] max_active_keys must be at least 1 to maintain a ' - 'primary key.')) - max_active_keys = 1 - - # purge excess keys - - # Note that key_files doesn't contain the new active key that was created, - # only the old active keys. - keys = sorted(key_files.keys(), reverse=True) - while len(keys) > (max_active_keys - 1): - index_to_purge = keys.pop() - key_to_purge = key_files[index_to_purge] - LOG.info(_LI('Excess key to purge: %s'), key_to_purge) - os.remove(key_to_purge) - - -def load_keys(): - """Load keys from disk into a list. 
- - The first key in the list is the primary key used for encryption. All - other keys are active secondary keys that can be used for decrypting - tokens. - - """ - if not validate_key_repository(): - return [] - - # build a dictionary of key_number:encryption_key pairs - keys = dict() - for filename in os.listdir(CONF.fernet_tokens.key_repository): - path = os.path.join(CONF.fernet_tokens.key_repository, str(filename)) - if os.path.isfile(path): - with open(path, 'r') as key_file: - try: - key_id = int(filename) - except ValueError: # nosec : filename isn't a number, ignore - # this file since it's not a key. - pass - else: - keys[key_id] = key_file.read() - - if len(keys) != CONF.fernet_tokens.max_active_keys: - # If there haven't been enough key rotations to reach max_active_keys, - # or if the configured value of max_active_keys has changed since the - # last rotation, then reporting the discrepancy might be useful. Once - # the number of keys matches max_active_keys, this log entry is too - # repetitive to be useful. - LOG.info(_LI( - 'Loaded %(count)d encryption keys (max_active_keys=%(max)d) from: ' - '%(dir)s'), { - 'count': len(keys), - 'max': CONF.fernet_tokens.max_active_keys, - 'dir': CONF.fernet_tokens.key_repository}) - - # return the encryption_keys, sorted by key number, descending - return [keys[x] for x in sorted(keys.keys(), reverse=True)] diff --git a/keystone-moon/keystone/token/providers/pki.py b/keystone-moon/keystone/token/providers/pki.py deleted file mode 100644 index 6a5a2999..00000000 --- a/keystone-moon/keystone/token/providers/pki.py +++ /dev/null @@ -1,66 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Keystone PKI Token Provider""" - -from keystoneclient.common import cms -from oslo_config import cfg -from oslo_log import log -from oslo_log import versionutils -from oslo_serialization import jsonutils - -from keystone.common import environment -from keystone.common import utils -from keystone import exception -from keystone.i18n import _, _LE -from keystone.token.providers import common - - -CONF = cfg.CONF - -LOG = log.getLogger(__name__) - - -@versionutils.deprecated( - as_of=versionutils.deprecated.MITAKA, - what='the PKI token provider', - in_favor_of='the Fernet or UUID token providers') -class Provider(common.BaseProvider): - def _get_token_id(self, token_data): - try: - # force conversion to a string as the keystone client cms code - # produces unicode. This can be removed if the client returns - # str() - # TODO(ayoung): Make to a byte_str for Python3 - token_json = jsonutils.dumps(token_data, cls=utils.PKIEncoder) - token_id = str(cms.cms_sign_token(token_json, - CONF.signing.certfile, - CONF.signing.keyfile)) - return token_id - except environment.subprocess.CalledProcessError: - LOG.exception(_LE('Unable to sign token')) - raise exception.UnexpectedError(_( - 'Unable to sign token.')) - - @property - def _supports_bind_authentication(self): - """Return if the token provider supports bind authentication methods. 
- - :returns: True - """ - return True - - def needs_persistence(self): - """Should the token be written to a backend.""" - return True diff --git a/keystone-moon/keystone/token/providers/pkiz.py b/keystone-moon/keystone/token/providers/pkiz.py deleted file mode 100644 index 3e78d2e4..00000000 --- a/keystone-moon/keystone/token/providers/pkiz.py +++ /dev/null @@ -1,64 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Keystone Compressed PKI Token Provider""" - -from keystoneclient.common import cms -from oslo_config import cfg -from oslo_log import log -from oslo_log import versionutils -from oslo_serialization import jsonutils - -from keystone.common import environment -from keystone.common import utils -from keystone import exception -from keystone.i18n import _ -from keystone.token.providers import common - - -CONF = cfg.CONF - -LOG = log.getLogger(__name__) -ERROR_MESSAGE = _('Unable to sign token.') - - -@versionutils.deprecated( - as_of=versionutils.deprecated.MITAKA, - what='the PKIZ token provider', - in_favor_of='the Fernet or UUID token providers') -class Provider(common.BaseProvider): - def _get_token_id(self, token_data): - try: - # force conversion to a string as the keystone client cms code - # produces unicode. 
This can be removed if the client returns - # str() - # TODO(ayoung): Make to a byte_str for Python3 - token_json = jsonutils.dumps(token_data, cls=utils.PKIEncoder) - token_id = str(cms.pkiz_sign(token_json, - CONF.signing.certfile, - CONF.signing.keyfile)) - return token_id - except environment.subprocess.CalledProcessError: - LOG.exception(ERROR_MESSAGE) - raise exception.UnexpectedError(ERROR_MESSAGE) - - @property - def _supports_bind_authentication(self): - """Return if the token provider supports bind authentication methods. - - :returns: True - """ - return True - - def needs_persistence(self): - """Should the token be written to a backend.""" - return True diff --git a/keystone-moon/keystone/token/providers/uuid.py b/keystone-moon/keystone/token/providers/uuid.py deleted file mode 100644 index f9a91617..00000000 --- a/keystone-moon/keystone/token/providers/uuid.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Keystone UUID Token Provider""" - -from __future__ import absolute_import - -import uuid - -from keystone.token.providers import common - - -class Provider(common.BaseProvider): - def __init__(self, *args, **kwargs): - super(Provider, self).__init__(*args, **kwargs) - - def _get_token_id(self, token_data): - return uuid.uuid4().hex - - @property - def _supports_bind_authentication(self): - """Return if the token provider supports bind authentication methods. 
- - :returns: True - """ - return True - - def needs_persistence(self): - """Should the token be written to a backend.""" - return True diff --git a/keystone-moon/keystone/token/routers.py b/keystone-moon/keystone/token/routers.py deleted file mode 100644 index bcd40ee4..00000000 --- a/keystone-moon/keystone/token/routers.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -from keystone.common import wsgi -from keystone.token import controllers - - -class Router(wsgi.ComposableRouter): - def add_routes(self, mapper): - token_controller = controllers.Auth() - mapper.connect('/tokens', - controller=token_controller, - action='authenticate', - conditions=dict(method=['POST'])) - mapper.connect('/tokens/revoked', - controller=token_controller, - action='revocation_list', - conditions=dict(method=['GET'])) - mapper.connect('/tokens/{token_id}', - controller=token_controller, - action='validate_token', - conditions=dict(method=['GET'])) - # NOTE(morganfainberg): For policy enforcement reasons, the - # ``validate_token_head`` method is still used for HEAD requests. - # The controller method makes the same call as the validate_token - # call and lets wsgi.render_response remove the body data. 
- mapper.connect('/tokens/{token_id}', - controller=token_controller, - action='validate_token_head', - conditions=dict(method=['HEAD'])) - mapper.connect('/tokens/{token_id}', - controller=token_controller, - action='delete_token', - conditions=dict(method=['DELETE'])) - mapper.connect('/tokens/{token_id}/endpoints', - controller=token_controller, - action='endpoints', - conditions=dict(method=['GET'])) - - # Certificates used to verify auth tokens - mapper.connect('/certificates/ca', - controller=token_controller, - action='ca_cert', - conditions=dict(method=['GET'])) - - mapper.connect('/certificates/signing', - controller=token_controller, - action='signing_cert', - conditions=dict(method=['GET'])) diff --git a/keystone-moon/keystone/token/utils.py b/keystone-moon/keystone/token/utils.py deleted file mode 100644 index 96a09246..00000000 --- a/keystone-moon/keystone/token/utils.py +++ /dev/null @@ -1,27 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystoneclient.common import cms -from oslo_config import cfg - - -def generate_unique_id(token_id): - """Return a unique ID for a token. - - The returned value is useful as the primary key of a database table, - memcache store, or other lookup table. - - :returns: Given a PKI token, returns it's hashed value. Otherwise, - returns the passed-in value (such as a UUID token ID or an - existing hash). 
- """ - return cms.cms_hash_token(token_id, mode=cfg.CONF.token.hash_algorithm) diff --git a/keystone-moon/keystone/trust/__init__.py b/keystone-moon/keystone/trust/__init__.py deleted file mode 100644 index bd7297ea..00000000 --- a/keystone-moon/keystone/trust/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.trust import controllers # noqa -from keystone.trust.core import * # noqa diff --git a/keystone-moon/keystone/trust/backends/__init__.py b/keystone-moon/keystone/trust/backends/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/trust/backends/sql.py b/keystone-moon/keystone/trust/backends/sql.py deleted file mode 100644 index cb8446b3..00000000 --- a/keystone-moon/keystone/trust/backends/sql.py +++ /dev/null @@ -1,183 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import time - -from oslo_utils import timeutils -from six.moves import range - -from keystone.common import sql -from keystone import exception -from keystone import trust - - -# The maximum number of iterations that will be attempted for optimistic -# locking on consuming a limited-use trust. -MAXIMUM_CONSUME_ATTEMPTS = 10 - - -class TrustModel(sql.ModelBase, sql.DictBase): - __tablename__ = 'trust' - attributes = ['id', 'trustor_user_id', 'trustee_user_id', - 'project_id', 'impersonation', 'expires_at', - 'remaining_uses', 'deleted_at'] - id = sql.Column(sql.String(64), primary_key=True) - # user id of owner - trustor_user_id = sql.Column(sql.String(64), nullable=False,) - # user_id of user allowed to consume this preauth - trustee_user_id = sql.Column(sql.String(64), nullable=False) - project_id = sql.Column(sql.String(64)) - impersonation = sql.Column(sql.Boolean, nullable=False) - deleted_at = sql.Column(sql.DateTime) - expires_at = sql.Column(sql.DateTime) - remaining_uses = sql.Column(sql.Integer, nullable=True) - extra = sql.Column(sql.JsonBlob()) - __table_args__ = (sql.UniqueConstraint( - 'trustor_user_id', 'trustee_user_id', 'project_id', - 'impersonation', 'expires_at', - name='duplicate_trust_constraint'),) - - -class TrustRole(sql.ModelBase): - __tablename__ = 'trust_role' - attributes = ['trust_id', 'role_id'] - trust_id = sql.Column(sql.String(64), primary_key=True, nullable=False) - role_id = sql.Column(sql.String(64), primary_key=True, nullable=False) - - -class Trust(trust.TrustDriverV8): - @sql.handle_conflicts(conflict_type='trust') - def create_trust(self, trust_id, trust, roles): - with sql.session_for_write() as session: - ref = TrustModel.from_dict(trust) - ref['id'] = trust_id - if ref.get('expires_at') and ref['expires_at'].tzinfo is not None: - ref['expires_at'] = timeutils.normalize_time(ref['expires_at']) - session.add(ref) - added_roles = [] - for role in roles: - trust_role = TrustRole() - trust_role.trust_id = trust_id - 
trust_role.role_id = role['id'] - added_roles.append({'id': role['id']}) - session.add(trust_role) - trust_dict = ref.to_dict() - trust_dict['roles'] = added_roles - return trust_dict - - def _add_roles(self, trust_id, session, trust_dict): - roles = [] - for role in session.query(TrustRole).filter_by(trust_id=trust_id): - roles.append({'id': role.role_id}) - trust_dict['roles'] = roles - - @sql.handle_conflicts(conflict_type='trust') - def consume_use(self, trust_id): - - for attempt in range(MAXIMUM_CONSUME_ATTEMPTS): - with sql.session_for_write() as session: - try: - query_result = (session.query(TrustModel.remaining_uses). - filter_by(id=trust_id). - filter_by(deleted_at=None).one()) - except sql.NotFound: - raise exception.TrustNotFound(trust_id=trust_id) - - remaining_uses = query_result.remaining_uses - - if remaining_uses is None: - # unlimited uses, do nothing - break - elif remaining_uses > 0: - # NOTE(morganfainberg): use an optimistic locking method - # to ensure we only ever update a trust that has the - # expected number of remaining uses. - rows_affected = ( - session.query(TrustModel). - filter_by(id=trust_id). - filter_by(deleted_at=None). - filter_by(remaining_uses=remaining_uses). - update({'remaining_uses': (remaining_uses - 1)}, - synchronize_session=False)) - if rows_affected == 1: - # Successfully consumed a single limited-use trust. - # Since trust_id is the PK on the Trust table, there is - # no case we should match more than 1 row in the - # update. We either update 1 row or 0 rows. - break - else: - raise exception.TrustUseLimitReached(trust_id=trust_id) - # NOTE(morganfainberg): Ensure we have a yield point for eventlet - # here. This should cost us nothing otherwise. This can be removed - # if/when oslo_db cleanly handles yields on db calls. - time.sleep(0) - else: - # NOTE(morganfainberg): In the case the for loop is not prematurely - # broken out of, this else block is executed. 
This means the trust - # was not unlimited nor was it consumed (we hit the maximum - # iteration limit). This is just an indicator that we were unable - # to get the optimistic lock rather than silently failing or - # incorrectly indicating a trust was consumed. - raise exception.TrustConsumeMaximumAttempt(trust_id=trust_id) - - def get_trust(self, trust_id, deleted=False): - with sql.session_for_read() as session: - query = session.query(TrustModel).filter_by(id=trust_id) - if not deleted: - query = query.filter_by(deleted_at=None) - ref = query.first() - if ref is None: - raise exception.TrustNotFound(trust_id=trust_id) - if ref.expires_at is not None and not deleted: - now = timeutils.utcnow() - if now > ref.expires_at: - raise exception.TrustNotFound(trust_id=trust_id) - # Do not return trusts that can't be used anymore - if ref.remaining_uses is not None and not deleted: - if ref.remaining_uses <= 0: - raise exception.TrustNotFound(trust_id=trust_id) - trust_dict = ref.to_dict() - - self._add_roles(trust_id, session, trust_dict) - return trust_dict - - @sql.handle_conflicts(conflict_type='trust') - def list_trusts(self): - with sql.session_for_read() as session: - trusts = session.query(TrustModel).filter_by(deleted_at=None) - return [trust_ref.to_dict() for trust_ref in trusts] - - @sql.handle_conflicts(conflict_type='trust') - def list_trusts_for_trustee(self, trustee_user_id): - with sql.session_for_read() as session: - trusts = (session.query(TrustModel). - filter_by(deleted_at=None). - filter_by(trustee_user_id=trustee_user_id)) - return [trust_ref.to_dict() for trust_ref in trusts] - - @sql.handle_conflicts(conflict_type='trust') - def list_trusts_for_trustor(self, trustor_user_id): - with sql.session_for_read() as session: - trusts = (session.query(TrustModel). - filter_by(deleted_at=None). 
- filter_by(trustor_user_id=trustor_user_id)) - return [trust_ref.to_dict() for trust_ref in trusts] - - @sql.handle_conflicts(conflict_type='trust') - def delete_trust(self, trust_id): - with sql.session_for_write() as session: - trust_ref = session.query(TrustModel).get(trust_id) - if not trust_ref: - raise exception.TrustNotFound(trust_id=trust_id) - trust_ref.deleted_at = timeutils.utcnow() diff --git a/keystone-moon/keystone/trust/controllers.py b/keystone-moon/keystone/trust/controllers.py deleted file mode 100644 index 00581304..00000000 --- a/keystone-moon/keystone/trust/controllers.py +++ /dev/null @@ -1,270 +0,0 @@ -# Copyright 2013 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import uuid - -from oslo_utils import timeutils -import six - -from keystone import assignment -from keystone.common import controller -from keystone.common import dependency -from keystone.common import utils -from keystone.common import validation -from keystone import exception -from keystone.i18n import _ -from keystone import notifications -from keystone.trust import schema - - -def _trustor_trustee_only(trust, user_id): - if (user_id != trust.get('trustee_user_id') and - user_id != trust.get('trustor_user_id')): - raise exception.Forbidden() - - -def _admin_trustor_only(context, trust, user_id): - if user_id != trust.get('trustor_user_id') and not context['is_admin']: - raise exception.Forbidden() - - -@dependency.requires('assignment_api', 'identity_api', 'resource_api', - 'role_api', 'token_provider_api', 'trust_api') -class TrustV3(controller.V3Controller): - collection_name = "trusts" - member_name = "trust" - - @classmethod - def base_url(cls, context, path=None): - """Construct a path and pass it to V3Controller.base_url method.""" - # NOTE(stevemar): Overriding path to /OS-TRUST/trusts so that - # V3Controller.base_url handles setting the self link correctly. 
- path = '/OS-TRUST/' + cls.collection_name - return super(TrustV3, cls).base_url(context, path=path) - - def _get_user_id(self, context): - try: - token_ref = utils.get_token_ref(context) - except exception.Unauthorized: - return None - return token_ref.user_id - - def get_trust(self, context, trust_id): - user_id = self._get_user_id(context) - trust = self.trust_api.get_trust(trust_id) - _trustor_trustee_only(trust, user_id) - self._fill_in_roles(context, trust, - self.role_api.list_roles()) - return TrustV3.wrap_member(context, trust) - - def _fill_in_roles(self, context, trust, all_roles): - if trust.get('expires_at') is not None: - trust['expires_at'] = (utils.isotime - (trust['expires_at'], - subsecond=True)) - - if 'roles' not in trust: - trust['roles'] = [] - trust_full_roles = [] - for trust_role in trust['roles']: - if isinstance(trust_role, six.string_types): - trust_role = {'id': trust_role} - matching_roles = [x for x in all_roles - if x['id'] == trust_role['id']] - if matching_roles: - full_role = assignment.controllers.RoleV3.wrap_member( - context, matching_roles[0])['role'] - trust_full_roles.append(full_role) - trust['roles'] = trust_full_roles - trust['roles_links'] = { - 'self': (self.base_url(context) + "/%s/roles" % trust['id']), - 'next': None, - 'previous': None} - - def _normalize_role_list(self, trust, all_roles): - trust_roles = [] - all_role_names = {r['name']: r for r in all_roles} - for role in trust.get('roles', []): - if 'id' in role: - trust_roles.append({'id': role['id']}) - elif 'name' in role: - rolename = role['name'] - if rolename in all_role_names: - trust_roles.append({'id': - all_role_names[rolename]['id']}) - else: - raise exception.RoleNotFound(_("role %s is not defined") % - rolename) - else: - raise exception.ValidationError(attribute='id or name', - target='roles') - return trust_roles - - @controller.protected() - @validation.validated(schema.trust_create, 'trust') - def create_trust(self, context, trust): - """Create 
a new trust. - - The user creating the trust must be the trustor. - - """ - auth_context = context.get('environment', - {}).get('KEYSTONE_AUTH_CONTEXT', {}) - - # Check if delegated via trust - if auth_context.get('is_delegated_auth'): - # Redelegation case - src_trust_id = auth_context['trust_id'] - if not src_trust_id: - raise exception.Forbidden( - _('Redelegation allowed for delegated by trust only')) - - redelegated_trust = self.trust_api.get_trust(src_trust_id) - else: - redelegated_trust = None - - if trust.get('project_id'): - self._require_role(trust) - self._require_user_is_trustor(context, trust) - self._require_trustee_exists(trust['trustee_user_id']) - all_roles = self.role_api.list_roles() - # Normalize roles - normalized_roles = self._normalize_role_list(trust, all_roles) - trust['roles'] = normalized_roles - self._require_trustor_has_role_in_project(trust) - trust['expires_at'] = self._parse_expiration_date( - trust.get('expires_at')) - trust_id = uuid.uuid4().hex - initiator = notifications._get_request_audit_info(context) - new_trust = self.trust_api.create_trust(trust_id, trust, - normalized_roles, - redelegated_trust, - initiator) - self._fill_in_roles(context, new_trust, all_roles) - return TrustV3.wrap_member(context, new_trust) - - def _require_trustee_exists(self, trustee_user_id): - self.identity_api.get_user(trustee_user_id) - - def _require_user_is_trustor(self, context, trust): - user_id = self._get_user_id(context) - if user_id != trust.get('trustor_user_id'): - raise exception.Forbidden( - _("The authenticated user should match the trustor.")) - - def _require_role(self, trust): - if not trust.get('roles'): - raise exception.Forbidden( - _('At least one role should be specified.')) - - def _get_trustor_roles(self, trust): - original_trust = trust.copy() - while original_trust.get('redelegated_trust_id'): - original_trust = self.trust_api.get_trust( - original_trust['redelegated_trust_id']) - - if not self._attribute_is_empty(trust, 
'project_id'): - self.resource_api.get_project(original_trust['project_id']) - # Get a list of roles including any domain specific roles - assignment_list = self.assignment_api.list_role_assignments( - user_id=original_trust['trustor_user_id'], - project_id=original_trust['project_id'], - effective=True, strip_domain_roles=False) - return list(set([x['role_id'] for x in assignment_list])) - else: - return [] - - def _require_trustor_has_role_in_project(self, trust): - trustor_roles = self._get_trustor_roles(trust) - for trust_role in trust['roles']: - matching_roles = [x for x in trustor_roles - if x == trust_role['id']] - if not matching_roles: - raise exception.RoleNotFound(role_id=trust_role['id']) - - def _parse_expiration_date(self, expiration_date): - if expiration_date is None: - return None - if not expiration_date.endswith('Z'): - expiration_date += 'Z' - try: - expiration_time = timeutils.parse_isotime(expiration_date) - except ValueError: - raise exception.ValidationTimeStampError() - if timeutils.is_older_than(expiration_time, 0): - raise exception.ValidationExpirationError() - return expiration_time - - def _check_role_for_trust(self, context, trust_id, role_id): - """Checks if a role has been assigned to a trust.""" - trust = self.trust_api.get_trust(trust_id) - user_id = self._get_user_id(context) - _trustor_trustee_only(trust, user_id) - if not any(role['id'] == role_id for role in trust['roles']): - raise exception.RoleNotFound(role_id=role_id) - - @controller.protected() - def list_trusts(self, context): - query = context['query_string'] - trusts = [] - if not query: - self.assert_admin(context) - trusts += self.trust_api.list_trusts() - if 'trustor_user_id' in query: - user_id = query['trustor_user_id'] - calling_user_id = self._get_user_id(context) - if user_id != calling_user_id: - raise exception.Forbidden() - trusts += (self.trust_api. 
- list_trusts_for_trustor(user_id)) - if 'trustee_user_id' in query: - user_id = query['trustee_user_id'] - calling_user_id = self._get_user_id(context) - if user_id != calling_user_id: - raise exception.Forbidden() - trusts += self.trust_api.list_trusts_for_trustee(user_id) - for trust in trusts: - # get_trust returns roles, list_trusts does not - # It seems in some circumstances, roles does not - # exist in the query response, so check first - if 'roles' in trust: - del trust['roles'] - if trust.get('expires_at') is not None: - trust['expires_at'] = (utils.isotime - (trust['expires_at'], - subsecond=True)) - return TrustV3.wrap_collection(context, trusts) - - @controller.protected() - def delete_trust(self, context, trust_id): - trust = self.trust_api.get_trust(trust_id) - user_id = self._get_user_id(context) - _admin_trustor_only(context, trust, user_id) - initiator = notifications._get_request_audit_info(context) - self.trust_api.delete_trust(trust_id, initiator) - - @controller.protected() - def list_roles_for_trust(self, context, trust_id): - trust = self.get_trust(context, trust_id)['trust'] - user_id = self._get_user_id(context) - _trustor_trustee_only(trust, user_id) - return {'roles': trust['roles'], - 'links': trust['roles_links']} - - @controller.protected() - def get_role_for_trust(self, context, trust_id, role_id): - """Get a role that has been assigned to a trust.""" - self._check_role_for_trust(context, trust_id, role_id) - role = self.role_api.get_role(role_id) - return assignment.controllers.RoleV3.wrap_member(context, role) diff --git a/keystone-moon/keystone/trust/core.py b/keystone-moon/keystone/trust/core.py deleted file mode 100644 index 43069deb..00000000 --- a/keystone-moon/keystone/trust/core.py +++ /dev/null @@ -1,251 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Main entry point into the Trust service.""" - -import abc - -from oslo_config import cfg -import six -from six.moves import zip - -from keystone.common import dependency -from keystone.common import manager -from keystone import exception -from keystone.i18n import _ -from keystone import notifications - - -CONF = cfg.CONF - - -@dependency.requires('identity_api') -@dependency.provider('trust_api') -class Manager(manager.Manager): - """Default pivot point for the Trust backend. - - See :mod:`keystone.common.manager.Manager` for more details on how this - dynamically calls the backend. 
- - """ - - driver_namespace = 'keystone.trust' - - _TRUST = "OS-TRUST:trust" - - def __init__(self): - super(Manager, self).__init__(CONF.trust.driver) - - @staticmethod - def _validate_redelegation(redelegated_trust, trust): - # Validate against: - # 0 < redelegation_count <= max_redelegation_count - max_redelegation_count = CONF.trust.max_redelegation_count - redelegation_depth = redelegated_trust.get('redelegation_count', 0) - if not (0 < redelegation_depth <= max_redelegation_count): - raise exception.Forbidden( - _('Remaining redelegation depth of %(redelegation_depth)d' - ' out of allowed range of [0..%(max_count)d]') % - {'redelegation_depth': redelegation_depth, - 'max_count': max_redelegation_count}) - - # remaining_uses is None - remaining_uses = trust.get('remaining_uses') - if remaining_uses is not None: - raise exception.Forbidden( - _('Field "remaining_uses" is set to %(value)s' - ' while it must not be set in order to redelegate a trust'), - value=remaining_uses) - - # expiry times - trust_expiry = trust.get('expires_at') - redelegated_expiry = redelegated_trust['expires_at'] - if trust_expiry: - # redelegated trust is from backend and has no tzinfo - if redelegated_expiry < trust_expiry.replace(tzinfo=None): - raise exception.Forbidden( - _('Requested expiration time is more ' - 'than redelegated trust can provide')) - else: - trust['expires_at'] = redelegated_expiry - - # trust roles is a subset of roles of the redelegated trust - parent_roles = set(role['id'] - for role in redelegated_trust['roles']) - if not all(role['id'] in parent_roles for role in trust['roles']): - raise exception.Forbidden( - _('Some of requested roles are not in redelegated trust')) - - def get_trust_pedigree(self, trust_id): - trust = self.driver.get_trust(trust_id) - trust_chain = [trust] - while trust and trust.get('redelegated_trust_id'): - trust = self.driver.get_trust(trust['redelegated_trust_id']) - trust_chain.append(trust) - - return trust_chain - - def 
get_trust(self, trust_id, deleted=False): - trust = self.driver.get_trust(trust_id, deleted) - - if trust and trust.get('redelegated_trust_id') and not deleted: - trust_chain = self.get_trust_pedigree(trust_id) - - for parent, child in zip(trust_chain[1:], trust_chain): - self._validate_redelegation(parent, child) - try: - self.identity_api.assert_user_enabled( - parent['trustee_user_id']) - except (AssertionError, exception.NotFound): - raise exception.Forbidden( - _('One of the trust agents is disabled or deleted')) - - return trust - - def create_trust(self, trust_id, trust, roles, redelegated_trust=None, - initiator=None): - """Create a new trust. - - :returns: a new trust - """ - # Default for initial trust in chain is max_redelegation_count - max_redelegation_count = CONF.trust.max_redelegation_count - requested_count = trust.get('redelegation_count') - redelegatable = (trust.pop('allow_redelegation', False) - and requested_count != 0) - if not redelegatable: - trust['redelegation_count'] = requested_count = 0 - remaining_uses = trust.get('remaining_uses') - if remaining_uses is not None and remaining_uses <= 0: - msg = _('remaining_uses must be a positive integer or null.') - raise exception.ValidationError(msg) - else: - # Validate requested redelegation depth - if requested_count and requested_count > max_redelegation_count: - raise exception.Forbidden( - _('Requested redelegation depth of %(requested_count)d ' - 'is greater than allowed %(max_count)d') % - {'requested_count': requested_count, - 'max_count': max_redelegation_count}) - # Decline remaining_uses - if trust.get('remaining_uses') is not None: - raise exception.ValidationError( - _('remaining_uses must not be set if redelegation is ' - 'allowed')) - - if redelegated_trust: - trust['redelegated_trust_id'] = redelegated_trust['id'] - remaining_count = redelegated_trust['redelegation_count'] - 1 - - # Validate depth consistency - if (redelegatable and requested_count and - requested_count != 
remaining_count): - msg = _('Modifying "redelegation_count" upon redelegation is ' - 'forbidden. Omitting this parameter is advised.') - raise exception.Forbidden(msg) - trust.setdefault('redelegation_count', remaining_count) - - # Check entire trust pedigree validity - pedigree = self.get_trust_pedigree(redelegated_trust['id']) - for t in pedigree: - self._validate_redelegation(t, trust) - - trust.setdefault('redelegation_count', max_redelegation_count) - ref = self.driver.create_trust(trust_id, trust, roles) - - notifications.Audit.created(self._TRUST, trust_id, initiator=initiator) - - return ref - - def delete_trust(self, trust_id, initiator=None): - """Remove a trust. - - :raises keystone.exception.TrustNotFound: If the trust doesn't exist. - - Recursively remove given and redelegated trusts - """ - trust = self.driver.get_trust(trust_id) - trusts = self.driver.list_trusts_for_trustor( - trust['trustor_user_id']) - - for t in trusts: - if t.get('redelegated_trust_id') == trust_id: - # recursive call to make sure all notifications are sent - try: - self.delete_trust(t['id']) - except exception.TrustNotFound: # nosec - # if trust was deleted by concurrent process - # consistency must not suffer - pass - - # end recursion - self.driver.delete_trust(trust_id) - - notifications.Audit.deleted(self._TRUST, trust_id, initiator) - - -@six.add_metaclass(abc.ABCMeta) -class TrustDriverV8(object): - - @abc.abstractmethod - def create_trust(self, trust_id, trust, roles): - """Create a new trust. - - :returns: a new trust - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def get_trust(self, trust_id, deleted=False): - """Get a trust by the trust id. 
- - :param trust_id: the trust identifier - :type trust_id: string - :param deleted: return the trust even if it is deleted, expired, or - has no consumptions left - :type deleted: bool - """ - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_trusts(self): - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_trusts_for_trustee(self, trustee): - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def list_trusts_for_trustor(self, trustor): - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def delete_trust(self, trust_id): - raise exception.NotImplemented() # pragma: no cover - - @abc.abstractmethod - def consume_use(self, trust_id): - """Consume one use of a trust. - - One use of a trust is consumed when the trust was created with a - limitation on its uses, provided there are still uses available. - - :raises keystone.exception.TrustUseLimitReached: If no remaining uses - for trust. - :raises keystone.exception.TrustNotFound: If the trust doesn't exist. - """ - raise exception.NotImplemented() # pragma: no cover - - -Driver = manager.create_legacy_driver(TrustDriverV8) diff --git a/keystone-moon/keystone/trust/routers.py b/keystone-moon/keystone/trust/routers.py deleted file mode 100644 index 3a6243cc..00000000 --- a/keystone-moon/keystone/trust/routers.py +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. -"""WSGI Routers for the Trust service.""" - -import functools - -from keystone.common import json_home -from keystone.common import wsgi -from keystone.trust import controllers - - -_build_resource_relation = functools.partial( - json_home.build_v3_extension_resource_relation, extension_name='OS-TRUST', - extension_version='1.0') - -TRUST_ID_PARAMETER_RELATION = json_home.build_v3_extension_parameter_relation( - 'OS-TRUST', '1.0', 'trust_id') - - -class Routers(wsgi.RoutersBase): - - def append_v3_routers(self, mapper, routers): - trust_controller = controllers.TrustV3() - - self._add_resource( - mapper, trust_controller, - path='/OS-TRUST/trusts', - get_action='list_trusts', - post_action='create_trust', - rel=_build_resource_relation(resource_name='trusts')) - self._add_resource( - mapper, trust_controller, - path='/OS-TRUST/trusts/{trust_id}', - get_action='get_trust', - delete_action='delete_trust', - rel=_build_resource_relation(resource_name='trust'), - path_vars={ - 'trust_id': TRUST_ID_PARAMETER_RELATION, - }) - self._add_resource( - mapper, trust_controller, - path='/OS-TRUST/trusts/{trust_id}/roles', - get_action='list_roles_for_trust', - rel=_build_resource_relation(resource_name='trust_roles'), - path_vars={ - 'trust_id': TRUST_ID_PARAMETER_RELATION, - }) - self._add_resource( - mapper, trust_controller, - path='/OS-TRUST/trusts/{trust_id}/roles/{role_id}', - get_head_action='get_role_for_trust', - rel=_build_resource_relation(resource_name='trust_role'), - path_vars={ - 'trust_id': TRUST_ID_PARAMETER_RELATION, - 'role_id': json_home.Parameters.ROLE_ID, - }) diff --git a/keystone-moon/keystone/trust/schema.py b/keystone-moon/keystone/trust/schema.py deleted file mode 100644 index 673b786b..00000000 --- a/keystone-moon/keystone/trust/schema.py +++ /dev/null @@ -1,49 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# 
not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from keystone.common import validation -from keystone.common.validation import parameter_types - - -_trust_properties = { - # NOTE(lbragstad): These are set as external_id_string because they have - # the ability to be read as LDAP user identifiers, which could be something - # other than uuid. - 'trustor_user_id': parameter_types.external_id_string, - 'trustee_user_id': parameter_types.external_id_string, - 'impersonation': parameter_types.boolean, - 'project_id': validation.nullable(parameter_types.id_string), - 'remaining_uses': { - 'type': ['integer', 'null'], - 'minimum': 1 - }, - 'expires_at': { - 'type': ['null', 'string'] - }, - 'allow_redelegation': { - 'type': ['boolean', 'null'] - }, - 'redelegation_count': { - 'type': ['integer', 'null'], - 'minimum': 0 - }, - # TODO(lbragstad): Need to find a better way to do this. We should be - # checking that a role is a list of IDs and/or names. 
- 'roles': validation.add_array_type(parameter_types.id_string) -} - -trust_create = { - 'type': 'object', - 'properties': _trust_properties, - 'required': ['trustor_user_id', 'trustee_user_id', 'impersonation'], - 'additionalProperties': True -} diff --git a/keystone-moon/keystone/v2_crud/__init__.py b/keystone-moon/keystone/v2_crud/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/v2_crud/admin_crud.py b/keystone-moon/keystone/v2_crud/admin_crud.py deleted file mode 100644 index 86ccfcd8..00000000 --- a/keystone-moon/keystone/v2_crud/admin_crud.py +++ /dev/null @@ -1,240 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from keystone import assignment -from keystone import catalog -from keystone.common import extension -from keystone.common import wsgi -from keystone import identity -from keystone import resource - - -extension.register_admin_extension( - 'OS-KSADM', { - 'name': 'OpenStack Keystone Admin', - 'namespace': 'http://docs.openstack.org/identity/api/ext/' - 'OS-KSADM/v1.0', - 'alias': 'OS-KSADM', - 'updated': '2013-07-11T17:14:00-00:00', - 'description': 'OpenStack extensions to Keystone v2.0 API ' - 'enabling Administrative Operations.', - 'links': [ - { - 'rel': 'describedby', - 'type': 'text/html', - 'href': 'http://developer.openstack.org/' - 'api-ref-identity-v2-ext.html', - } - ]}) - - -class Router(wsgi.ComposableRouter): - """Previously known as the OS-KSADM extension. - - Provides a bunch of CRUD operations for internal data types. - - """ - - def add_routes(self, mapper): - tenant_controller = resource.controllers.Tenant() - assignment_tenant_controller = ( - assignment.controllers.TenantAssignment()) - user_controller = identity.controllers.User() - role_controller = assignment.controllers.Role() - assignment_role_controller = assignment.controllers.RoleAssignmentV2() - service_controller = catalog.controllers.Service() - endpoint_controller = catalog.controllers.Endpoint() - - # Tenant Operations - mapper.connect( - '/tenants', - controller=tenant_controller, - action='create_project', - conditions=dict(method=['POST'])) - mapper.connect( - '/tenants/{tenant_id}', - controller=tenant_controller, - action='update_project', - conditions=dict(method=['PUT', 'POST'])) - mapper.connect( - '/tenants/{tenant_id}', - controller=tenant_controller, - action='delete_project', - conditions=dict(method=['DELETE'])) - mapper.connect( - '/tenants/{tenant_id}/users', - controller=assignment_tenant_controller, - action='get_project_users', - conditions=dict(method=['GET'])) - - # User Operations - mapper.connect( - '/users', - controller=user_controller, - 
action='get_users', - conditions=dict(method=['GET'])) - mapper.connect( - '/users', - controller=user_controller, - action='create_user', - conditions=dict(method=['POST'])) - # NOTE(termie): not in diablo - mapper.connect( - '/users/{user_id}', - controller=user_controller, - action='update_user', - conditions=dict(method=['PUT'])) - mapper.connect( - '/users/{user_id}', - controller=user_controller, - action='delete_user', - conditions=dict(method=['DELETE'])) - - # COMPAT(diablo): the copy with no OS-KSADM is from diablo - mapper.connect( - '/users/{user_id}/password', - controller=user_controller, - action='set_user_password', - conditions=dict(method=['PUT'])) - mapper.connect( - '/users/{user_id}/OS-KSADM/password', - controller=user_controller, - action='set_user_password', - conditions=dict(method=['PUT'])) - - # COMPAT(diablo): the copy with no OS-KSADM is from diablo - mapper.connect( - '/users/{user_id}/tenant', - controller=user_controller, - action='update_user', - conditions=dict(method=['PUT'])) - mapper.connect( - '/users/{user_id}/OS-KSADM/tenant', - controller=user_controller, - action='update_user', - conditions=dict(method=['PUT'])) - - # COMPAT(diablo): the copy with no OS-KSADM is from diablo - mapper.connect( - '/users/{user_id}/enabled', - controller=user_controller, - action='set_user_enabled', - conditions=dict(method=['PUT'])) - mapper.connect( - '/users/{user_id}/OS-KSADM/enabled', - controller=user_controller, - action='set_user_enabled', - conditions=dict(method=['PUT'])) - - # User Roles - mapper.connect( - '/users/{user_id}/roles/OS-KSADM/{role_id}', - controller=assignment_role_controller, - action='add_role_to_user', - conditions=dict(method=['PUT'])) - mapper.connect( - '/users/{user_id}/roles/OS-KSADM/{role_id}', - controller=assignment_role_controller, - action='remove_role_from_user', - conditions=dict(method=['DELETE'])) - - # COMPAT(diablo): User Roles - mapper.connect( - '/users/{user_id}/roleRefs', - 
controller=assignment_role_controller, - action='get_role_refs', - conditions=dict(method=['GET'])) - mapper.connect( - '/users/{user_id}/roleRefs', - controller=assignment_role_controller, - action='create_role_ref', - conditions=dict(method=['POST'])) - mapper.connect( - '/users/{user_id}/roleRefs/{role_ref_id}', - controller=assignment_role_controller, - action='delete_role_ref', - conditions=dict(method=['DELETE'])) - - # User-Tenant Roles - mapper.connect( - '/tenants/{tenant_id}/users/{user_id}/roles/OS-KSADM/{role_id}', - controller=assignment_role_controller, - action='add_role_to_user', - conditions=dict(method=['PUT'])) - mapper.connect( - '/tenants/{tenant_id}/users/{user_id}/roles/OS-KSADM/{role_id}', - controller=assignment_role_controller, - action='remove_role_from_user', - conditions=dict(method=['DELETE'])) - - # Service Operations - mapper.connect( - '/OS-KSADM/services', - controller=service_controller, - action='get_services', - conditions=dict(method=['GET'])) - mapper.connect( - '/OS-KSADM/services', - controller=service_controller, - action='create_service', - conditions=dict(method=['POST'])) - mapper.connect( - '/OS-KSADM/services/{service_id}', - controller=service_controller, - action='delete_service', - conditions=dict(method=['DELETE'])) - mapper.connect( - '/OS-KSADM/services/{service_id}', - controller=service_controller, - action='get_service', - conditions=dict(method=['GET'])) - - # Endpoint Templates - mapper.connect( - '/endpoints', - controller=endpoint_controller, - action='get_endpoints', - conditions=dict(method=['GET'])) - mapper.connect( - '/endpoints', - controller=endpoint_controller, - action='create_endpoint', - conditions=dict(method=['POST'])) - mapper.connect( - '/endpoints/{endpoint_id}', - controller=endpoint_controller, - action='delete_endpoint', - conditions=dict(method=['DELETE'])) - - # Role Operations - mapper.connect( - '/OS-KSADM/roles', - controller=role_controller, - action='create_role', - 
conditions=dict(method=['POST'])) - mapper.connect( - '/OS-KSADM/roles', - controller=role_controller, - action='get_roles', - conditions=dict(method=['GET'])) - mapper.connect( - '/OS-KSADM/roles/{role_id}', - controller=role_controller, - action='get_role', - conditions=dict(method=['GET'])) - mapper.connect( - '/OS-KSADM/roles/{role_id}', - controller=role_controller, - action='delete_role', - conditions=dict(method=['DELETE'])) diff --git a/keystone-moon/keystone/v2_crud/user_crud.py b/keystone-moon/keystone/v2_crud/user_crud.py deleted file mode 100644 index 9da7f31f..00000000 --- a/keystone-moon/keystone/v2_crud/user_crud.py +++ /dev/null @@ -1,134 +0,0 @@ -# Copyright 2012 Red Hat, Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import copy -import uuid - -from oslo_log import log - -from keystone.common import dependency -from keystone.common import extension -from keystone.common import wsgi -from keystone import exception -from keystone import identity -from keystone.models import token_model - - -LOG = log.getLogger(__name__) - - -extension.register_public_extension( - 'OS-KSCRUD', { - 'name': 'OpenStack Keystone User CRUD', - 'namespace': 'http://docs.openstack.org/identity/api/ext/' - 'OS-KSCRUD/v1.0', - 'alias': 'OS-KSCRUD', - 'updated': '2013-07-07T12:00:0-00:00', - 'description': 'OpenStack extensions to Keystone v2.0 API ' - 'enabling User Operations.', - 'links': [ - { - 'rel': 'describedby', - 'type': 'text/html', - 'href': 'http://developer.openstack.org/' - 'api-ref-identity-v2-ext.html', - } - ]}) - - -@dependency.requires('catalog_api', 'identity_api', 'resource_api', - 'token_provider_api') -class UserController(identity.controllers.User): - def set_user_password(self, context, user_id, user): - token_id = context.get('token_id') - original_password = user.get('original_password') - - token_data = self.token_provider_api.validate_token(token_id) - token_ref = token_model.KeystoneToken(token_id=token_id, - token_data=token_data) - - if token_ref.user_id != user_id: - raise exception.Forbidden('Token belongs to another user') - if original_password is None: - raise exception.ValidationError(target='user', - attribute='original password') - - try: - user_ref = self.identity_api.authenticate( - context, - user_id=token_ref.user_id, - password=original_password) - if not user_ref.get('enabled', True): - # NOTE(dolph): why can't you set a disabled user's password? 
- raise exception.Unauthorized('User is disabled') - except AssertionError: - raise exception.Unauthorized() - - update_dict = {'password': user['password'], 'id': user_id} - - admin_context = copy.copy(context) - admin_context['is_admin'] = True - super(UserController, self).set_user_password(admin_context, - user_id, - update_dict) - - # Issue a new token based upon the original token data. This will - # always be a V2.0 token. - - # TODO(morganfainberg): Add a mechanism to issue a new token directly - # from a token model so that this code can go away. This is likely - # not the norm as most cases do not need to yank apart a token to - # issue a new one. - new_token_ref = {} - metadata_ref = {} - roles_ref = None - - new_token_ref['user'] = user_ref - if token_ref.bind: - new_token_ref['bind'] = token_ref.bind - if token_ref.project_id: - new_token_ref['tenant'] = self.resource_api.get_project( - token_ref.project_id) - if token_ref.role_names: - roles_ref = [dict(name=value) - for value in token_ref.role_names] - if token_ref.role_ids: - metadata_ref['roles'] = token_ref.role_ids - if token_ref.trust_id: - metadata_ref['trust'] = { - 'id': token_ref.trust_id, - 'trustee_user_id': token_ref.trustee_user_id} - new_token_ref['metadata'] = metadata_ref - new_token_ref['id'] = uuid.uuid4().hex - - catalog_ref = self.catalog_api.get_catalog(user_id, - token_ref.project_id) - - new_token_id, new_token_data = self.token_provider_api.issue_v2_token( - token_ref=new_token_ref, roles_ref=roles_ref, - catalog_ref=catalog_ref) - LOG.debug('TOKEN_REF %s', new_token_data) - return new_token_data - - -class Router(wsgi.ComposableRouter): - """Provides a subset of CRUD operations for internal data types.""" - - def add_routes(self, mapper): - user_controller = UserController() - - mapper.connect('/OS-KSCRUD/users/{user_id}', - controller=user_controller, - action='set_user_password', - conditions=dict(method=['PATCH'])) diff --git a/keystone-moon/keystone/version/__init__.py 
b/keystone-moon/keystone/version/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/keystone-moon/keystone/version/controllers.py b/keystone-moon/keystone/version/controllers.py deleted file mode 100644 index 2a7bacdf..00000000 --- a/keystone-moon/keystone/version/controllers.py +++ /dev/null @@ -1,215 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -from oslo_serialization import jsonutils -import webob - -from keystone.common import extension -from keystone.common import json_home -from keystone.common import wsgi -from keystone import exception - - -MEDIA_TYPE_JSON = 'application/vnd.openstack.identity-%s+json' - -_VERSIONS = [] - -# NOTE(blk-u): latest_app will be set by keystone.version.service.loadapp(). It -# gets set to the application that was just loaded. In the case of keystone-all -# loadapp() gets called twice, once for the public app and once for the admin -# app. In the case of httpd/keystone, loadapp() gets called once for the public -# app if this is the public instance or loadapp() gets called for the admin app -# if it's the admin instance. -# This is used to fetch the /v3 JSON Home response. The /v3 JSON Home response -# is the same whether it's the admin or public service so either admin or -# public works. -latest_app = None - - -def request_v3_json_home(new_prefix): - if 'v3' not in _VERSIONS: - # No V3 support, so return an empty JSON Home document. 
- return {'resources': {}} - - req = webob.Request.blank( - '/v3', headers={'Accept': 'application/json-home'}) - v3_json_home_str = req.get_response(latest_app).body - v3_json_home = jsonutils.loads(v3_json_home_str) - json_home.translate_urls(v3_json_home, new_prefix) - - return v3_json_home - - -class Extensions(wsgi.Application): - """Base extensions controller to be extended by public and admin API's.""" - - # extend in subclass to specify the set of extensions - @property - def extensions(self): - return None - - def get_extensions_info(self, context): - return {'extensions': {'values': list(self.extensions.values())}} - - def get_extension_info(self, context, extension_alias): - try: - return {'extension': self.extensions[extension_alias]} - except KeyError: - raise exception.NotFound(target=extension_alias) - - -class AdminExtensions(Extensions): - @property - def extensions(self): - return extension.ADMIN_EXTENSIONS - - -class PublicExtensions(Extensions): - @property - def extensions(self): - return extension.PUBLIC_EXTENSIONS - - -def register_version(version): - _VERSIONS.append(version) - - -class MimeTypes(object): - JSON = 'application/json' - JSON_HOME = 'application/json-home' - - -def v3_mime_type_best_match(context): - - # accept_header is a WebOb MIMEAccept object so supports best_match. 
- accept_header = context['accept_header'] - - if not accept_header: - return MimeTypes.JSON - - SUPPORTED_TYPES = [MimeTypes.JSON, MimeTypes.JSON_HOME] - return accept_header.best_match(SUPPORTED_TYPES) - - -class Version(wsgi.Application): - - def __init__(self, version_type, routers=None): - self.endpoint_url_type = version_type - self._routers = routers - - super(Version, self).__init__() - - def _get_identity_url(self, context, version): - """Returns a URL to keystone's own endpoint.""" - url = self.base_url(context, self.endpoint_url_type) - return '%s/%s/' % (url, version) - - def _get_versions_list(self, context): - """The list of versions is dependent on the context.""" - versions = {} - if 'v2.0' in _VERSIONS: - versions['v2.0'] = { - 'id': 'v2.0', - 'status': 'stable', - 'updated': '2014-04-17T00:00:00Z', - 'links': [ - { - 'rel': 'self', - 'href': self._get_identity_url(context, 'v2.0'), - }, { - 'rel': 'describedby', - 'type': 'text/html', - 'href': 'http://docs.openstack.org/' - } - ], - 'media-types': [ - { - 'base': 'application/json', - 'type': MEDIA_TYPE_JSON % 'v2.0' - } - ] - } - - if 'v3' in _VERSIONS: - versions['v3'] = { - 'id': 'v3.6', - 'status': 'stable', - 'updated': '2016-04-04T00:00:00Z', - 'links': [ - { - 'rel': 'self', - 'href': self._get_identity_url(context, 'v3'), - } - ], - 'media-types': [ - { - 'base': 'application/json', - 'type': MEDIA_TYPE_JSON % 'v3' - } - ] - } - - return versions - - def get_versions(self, context): - - req_mime_type = v3_mime_type_best_match(context) - if req_mime_type == MimeTypes.JSON_HOME: - v3_json_home = request_v3_json_home('/v3') - return wsgi.render_response( - body=v3_json_home, - headers=(('Content-Type', MimeTypes.JSON_HOME),)) - - versions = self._get_versions_list(context) - return wsgi.render_response(status=(300, 'Multiple Choices'), body={ - 'versions': { - 'values': list(versions.values()) - } - }) - - def get_version_v2(self, context): - versions = self._get_versions_list(context) - if 
'v2.0' in _VERSIONS: - return wsgi.render_response(body={ - 'version': versions['v2.0'] - }) - else: - raise exception.VersionNotFound(version='v2.0') - - def _get_json_home_v3(self): - - def all_resources(): - for router in self._routers: - for resource in router.v3_resources: - yield resource - - return { - 'resources': dict(all_resources()) - } - - def get_version_v3(self, context): - versions = self._get_versions_list(context) - if 'v3' in _VERSIONS: - req_mime_type = v3_mime_type_best_match(context) - - if req_mime_type == MimeTypes.JSON_HOME: - return wsgi.render_response( - body=self._get_json_home_v3(), - headers=(('Content-Type', MimeTypes.JSON_HOME),)) - - return wsgi.render_response(body={ - 'version': versions['v3'] - }) - else: - raise exception.VersionNotFound(version='v3') diff --git a/keystone-moon/keystone/version/routers.py b/keystone-moon/keystone/version/routers.py deleted file mode 100644 index 5da4951c..00000000 --- a/keystone-moon/keystone/version/routers.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -""" -The only types of routers in this file should be ``ComposingRouters``. - -The routers for the backends should be in the backend-specific router modules. 
-For example, the ``ComposableRouter`` for ``identity`` belongs in:: - - keystone.identity.routers - -""" - - -from keystone.common import wsgi -from keystone.version import controllers - - -class Extension(wsgi.ComposableRouter): - def __init__(self, is_admin=True): - if is_admin: - self.controller = controllers.AdminExtensions() - else: - self.controller = controllers.PublicExtensions() - - def add_routes(self, mapper): - extensions_controller = self.controller - mapper.connect('/extensions', - controller=extensions_controller, - action='get_extensions_info', - conditions=dict(method=['GET'])) - mapper.connect('/extensions/{extension_alias}', - controller=extensions_controller, - action='get_extension_info', - conditions=dict(method=['GET'])) - - -class VersionV2(wsgi.ComposableRouter): - def __init__(self, description): - self.description = description - - def add_routes(self, mapper): - version_controller = controllers.Version(self.description) - mapper.connect('/', - controller=version_controller, - action='get_version_v2') - - -class VersionV3(wsgi.ComposableRouter): - def __init__(self, description, routers): - self.description = description - self._routers = routers - - def add_routes(self, mapper): - version_controller = controllers.Version(self.description, - routers=self._routers) - mapper.connect('/', - controller=version_controller, - action='get_version_v3') - - -class Versions(wsgi.ComposableRouter): - def __init__(self, description): - self.description = description - - def add_routes(self, mapper): - version_controller = controllers.Version(self.description) - mapper.connect('/', - controller=version_controller, - action='get_versions') diff --git a/keystone-moon/keystone/version/service.py b/keystone-moon/keystone/version/service.py deleted file mode 100644 index b0ed3b76..00000000 --- a/keystone-moon/keystone/version/service.py +++ /dev/null @@ -1,161 +0,0 @@ -# Copyright 2012 OpenStack Foundation -# -# Licensed under the Apache License, Version 
2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -import functools -import sys - -from oslo_config import cfg -from oslo_log import log -from paste import deploy -import routes - -from keystone.assignment import routers as assignment_routers -from keystone.auth import routers as auth_routers -from keystone.catalog import routers as catalog_routers -from keystone.common import wsgi -from keystone.credential import routers as credential_routers -from keystone.endpoint_policy import routers as endpoint_policy_routers -from keystone.federation import routers as federation_routers -from keystone.i18n import _LW -from keystone.identity import routers as identity_routers -from keystone.oauth1 import routers as oauth1_routers -from keystone.policy import routers as policy_routers -from keystone.resource import routers as resource_routers -from keystone.revoke import routers as revoke_routers -from keystone.token import _simple_cert as simple_cert_ext -from keystone.token import routers as token_routers -from keystone.trust import routers as trust_routers -from keystone.v2_crud import admin_crud -from keystone.v2_crud import user_crud -from keystone.version import controllers -from keystone.version import routers - - -CONF = cfg.CONF -LOG = log.getLogger(__name__) - - -def loadapp(conf, name): - # NOTE(blk-u): Save the application being loaded in the controllers module. - # This is similar to how public_app_factory() and v3_app_factory() - # register the version with the controllers module. 
- controllers.latest_app = deploy.loadapp(conf, name=name) - return controllers.latest_app - - -def fail_gracefully(f): - """Logs exceptions and aborts.""" - @functools.wraps(f) - def wrapper(*args, **kw): - try: - return f(*args, **kw) - except Exception as e: - LOG.debug(e, exc_info=True) - - # exception message is printed to all logs - LOG.critical(e) - sys.exit(1) - - return wrapper - - -def warn_local_conf(f): - @functools.wraps(f) - def wrapper(*args, **local_conf): - if local_conf: - LOG.warning(_LW('\'local conf\' from PasteDeploy INI is being ' - 'ignored.')) - return f(*args, **local_conf) - return wrapper - - -@fail_gracefully -@warn_local_conf -def public_app_factory(global_conf, **local_conf): - controllers.register_version('v2.0') - return wsgi.ComposingRouter(routes.Mapper(), - [assignment_routers.Public(), - token_routers.Router(), - user_crud.Router(), - routers.VersionV2('public'), - routers.Extension(False)]) - - -@fail_gracefully -@warn_local_conf -def admin_app_factory(global_conf, **local_conf): - controllers.register_version('v2.0') - return wsgi.ComposingRouter(routes.Mapper(), - [identity_routers.Admin(), - assignment_routers.Admin(), - token_routers.Router(), - resource_routers.Admin(), - admin_crud.Router(), - routers.VersionV2('admin'), - routers.Extension()]) - - -@fail_gracefully -@warn_local_conf -def public_version_app_factory(global_conf, **local_conf): - return wsgi.ComposingRouter(routes.Mapper(), - [routers.Versions('public')]) - - -@fail_gracefully -@warn_local_conf -def admin_version_app_factory(global_conf, **local_conf): - return wsgi.ComposingRouter(routes.Mapper(), - [routers.Versions('admin')]) - - -@fail_gracefully -@warn_local_conf -def v3_app_factory(global_conf, **local_conf): - controllers.register_version('v3') - mapper = routes.Mapper() - sub_routers = [] - _routers = [] - - # NOTE(dstanek): Routers should be ordered by their frequency of use in - # a live system. This is due to the routes implementation. 
The most - # frequently used routers should appear first. - all_api_routers = [auth_routers, - assignment_routers, - catalog_routers, - credential_routers, - identity_routers, - policy_routers, - resource_routers, - revoke_routers, - federation_routers, - oauth1_routers, - # TODO(morganfainberg): Remove the simple_cert router - # when PKI and PKIZ tokens are removed. - simple_cert_ext] - - if CONF.trust.enabled: - all_api_routers.append(trust_routers) - - if CONF.endpoint_policy.enabled: - all_api_routers.append(endpoint_policy_routers) - - for api_routers in all_api_routers: - routers_instance = api_routers.Routers() - _routers.append(routers_instance) - routers_instance.append_v3_routers(mapper, sub_routers) - - # Add in the v3 version api - sub_routers.append(routers.VersionV3('public', _routers)) - return wsgi.ComposingRouter(mapper, sub_routers) -- cgit 1.2.3-korg