# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from __future__ import absolute_import
from __future__ import print_function

import os
import sys
import uuid

from oslo_config import cfg
from oslo_log import log
from oslo_log import versionutils
from oslo_serialization import jsonutils
import pbr.version

from keystone.common import config
from keystone.common import driver_hints
from keystone.common import openssl
from keystone.common import sql
from keystone.common.sql import migration_helpers
from keystone.common import utils
from keystone import exception
from keystone.federation import idp
from keystone.federation import utils as mapping_engine
from keystone.i18n import _, _LW, _LI
from keystone.server import backends
from keystone import token


CONF = cfg.CONF
LOG = log.getLogger(__name__)


class BaseApp(object):

    name = None

    @classmethod
    def add_argument_parser(cls, subparsers):
        parser = subparsers.add_parser(cls.name, help=cls.__doc__)
        parser.set_defaults(cmd_class=cls)
        return parser


class BootStrap(BaseApp):
    """Perform the basic bootstrap process"""

    name = "bootstrap"

    def __init__(self):
        self.load_backends()
        self.project_id = uuid.uuid4().hex
        self.role_id = uuid.uuid4().hex
        self.service_id = None
        self.service_name = None
        self.username = None
        self.project_name = None
        self.role_name = None
        self.password = None
        self.public_url = None
        self.internal_url = None
        self.admin_url = None
        self.region_id = None
        self.endpoints = {}

    @classmethod
    def add_argument_parser(cls, subparsers):
        parser = super(BootStrap, cls).add_argument_parser(subparsers)
        parser.add_argument('--bootstrap-username', default='admin',
                            metavar='OS_BOOTSTRAP_USERNAME',
                            help=('The username of the initial keystone '
                                  'user created during the bootstrap '
                                  'process.'))
        # NOTE(morganfainberg): See below for the environment variables that
        # can be used in lieu of the command-line arguments.
        parser.add_argument('--bootstrap-password', default=None,
                            metavar='OS_BOOTSTRAP_PASSWORD',
                            help='The bootstrap user password.')
        parser.add_argument('--bootstrap-project-name', default='admin',
                            metavar='OS_BOOTSTRAP_PROJECT_NAME',
                            help=('The initial project created during the '
                                  'keystone bootstrap process.'))
        parser.add_argument('--bootstrap-role-name', default='admin',
                            metavar='OS_BOOTSTRAP_ROLE_NAME',
                            help=('The initial role name created during the '
                                  'keystone bootstrap process.'))
        parser.add_argument('--bootstrap-service-name', default='keystone',
                            metavar='OS_BOOTSTRAP_SERVICE_NAME',
                            help=('The name of the initial identity service '
                                  'created during the keystone bootstrap '
                                  'process.'))
        parser.add_argument('--bootstrap-admin-url',
                            metavar='OS_BOOTSTRAP_ADMIN_URL',
                            help=('The initial identity admin url created '
                                  'during the keystone bootstrap process. '
                                  'e.g. http://127.0.0.1:35357/v2.0'))
        parser.add_argument('--bootstrap-public-url',
                            metavar='OS_BOOTSTRAP_PUBLIC_URL',
                            help=('The initial identity public url created '
                                  'during the keystone bootstrap process. '
                                  'e.g. http://127.0.0.1:5000/v2.0'))
        parser.add_argument('--bootstrap-internal-url',
                            metavar='OS_BOOTSTRAP_INTERNAL_URL',
                            help=('The initial identity internal url created '
                                  'during the keystone bootstrap process. '
                                  'e.g. http://127.0.0.1:5000/v2.0'))
        parser.add_argument('--bootstrap-region-id',
                            metavar='OS_BOOTSTRAP_REGION_ID',
                            help=('The initial region_id that endpoints will '
                                  'be placed in during the keystone bootstrap '
                                  'process.'))
        return parser

    def load_backends(self):
        drivers = backends.load_backends()
        self.resource_manager = drivers['resource_api']
        self.identity_manager = drivers['identity_api']
        self.assignment_manager = drivers['assignment_api']
        self.catalog_manager = drivers['catalog_api']
        self.role_manager = drivers['role_api']

    def _get_config(self):
        self.username = (
            os.environ.get('OS_BOOTSTRAP_USERNAME') or
            CONF.command.bootstrap_username)
        self.project_name = (
            os.environ.get('OS_BOOTSTRAP_PROJECT_NAME') or
            CONF.command.bootstrap_project_name)
        self.role_name = (
            os.environ.get('OS_BOOTSTRAP_ROLE_NAME') or
            CONF.command.bootstrap_role_name)
        self.password = (
            os.environ.get('OS_BOOTSTRAP_PASSWORD') or
            CONF.command.bootstrap_password)
        self.service_name = (
            os.environ.get('OS_BOOTSTRAP_SERVICE_NAME') or
            CONF.command.bootstrap_service_name)
        self.admin_url = (
            os.environ.get('OS_BOOTSTRAP_ADMIN_URL') or
            CONF.command.bootstrap_admin_url)
        self.public_url = (
            os.environ.get('OS_BOOTSTRAP_PUBLIC_URL') or
            CONF.command.bootstrap_public_url)
        self.internal_url = (
            os.environ.get('OS_BOOTSTRAP_INTERNAL_URL') or
            CONF.command.bootstrap_internal_url)
        self.region_id = (
            os.environ.get('OS_BOOTSTRAP_REGION_ID') or
            CONF.command.bootstrap_region_id)

    def do_bootstrap(self):
        """Perform the bootstrap actions.

        Create bootstrap user, project, and role so that CMS, humans, or
        scripts can continue to perform initial setup (domains, projects,
        services, endpoints, etc) of Keystone when standing up a new
        deployment.
        """
        self._get_config()

        if self.password is None:
            print(_('Either --bootstrap-password argument or '
                    'OS_BOOTSTRAP_PASSWORD must be set.'))
            raise ValueError

        # NOTE(morganfainberg): Ensure the default domain is in-fact created
        default_domain = {
            'id': CONF.identity.default_domain_id,
            'name': 'Default',
            'enabled': True,
            'description': 'The default domain'
        }
        try:
            self.resource_manager.create_domain(
                domain_id=default_domain['id'],
                domain=default_domain)
            LOG.info(_LI('Created domain %s'), default_domain['id'])
        except exception.Conflict:
            # NOTE(morganfainberg): Domain already exists, continue on.
            LOG.info(_LI('Domain %s already exists, skipping creation.'),
                     default_domain['id'])

        try:
            self.resource_manager.create_project(
                project_id=self.project_id,
                project={'enabled': True,
                         'id': self.project_id,
                         'domain_id': default_domain['id'],
                         'description': 'Bootstrap project for initializing '
                                        'the cloud.',
                         'name': self.project_name}
            )
            LOG.info(_LI('Created project %s'), self.project_name)
        except exception.Conflict:
            LOG.info(_LI('Project %s already exists, skipping creation.'),
                     self.project_name)
            project = self.resource_manager.get_project_by_name(
                self.project_name, default_domain['id'])
            self.project_id = project['id']

        # NOTE(morganfainberg): Do not create the user if it already exists.
        try:
            user = self.identity_manager.get_user_by_name(self.username,
                                                          default_domain['id'])
            LOG.info(_LI('User %s already exists, skipping creation.'),
                     self.username)
        except exception.UserNotFound:
            user = self.identity_manager.create_user(
                user_ref={'name': self.username,
                          'enabled': True,
                          'domain_id': default_domain['id'],
                          'password': self.password
                          }
            )
            LOG.info(_LI('Created user %s'), self.username)

        # NOTE(morganfainberg): Do not create the role if it already exists.
        try:
            self.role_manager.create_role(
                role_id=self.role_id,
                role={'name': self.role_name,
                      'id': self.role_id},
            )
            LOG.info(_LI('Created Role %s'), self.role_name)
        except exception.Conflict:
            LOG.info(_LI('Role %s exists, skipping creation.'), self.role_name)
            # NOTE(davechen): There is no backend method to get the role
            # by name, so build the hints to list the roles and filter by
            # name instead.
            hints = driver_hints.Hints()
            hints.add_filter('name', self.role_name)
            role = self.role_manager.list_roles(hints)
            self.role_id = role[0]['id']

        # NOTE(morganfainberg): Handle the case that the role assignment has
        # already occurred.
        try:
            self.assignment_manager.add_role_to_user_and_project(
                user_id=user['id'],
                tenant_id=self.project_id,
                role_id=self.role_id
            )
            LOG.info(_LI('Granted %(role)s on %(project)s to user'
                         ' %(username)s.'),
                     {'role': self.role_name,
                      'project': self.project_name,
                      'username': self.username})
        except exception.Conflict:
            LOG.info(_LI('User %(username)s already has %(role)s on '
                         '%(project)s.'),
                     {'username': self.username,
                      'role': self.role_name,
                      'project': self.project_name})

        if self.region_id:
            try:
                self.catalog_manager.create_region(
                    region_ref={'id': self.region_id}
                )
                LOG.info(_LI('Created Region %s'), self.region_id)
            except exception.Conflict:
                LOG.info(_LI('Region %s exists, skipping creation.'),
                         self.region_id)

        if self.public_url or self.admin_url or self.internal_url:
            hints = driver_hints.Hints()
            hints.add_filter('type', 'identity')
            services = self.catalog_manager.list_services(hints)

            if services:
                service_ref = services[0]

                hints = driver_hints.Hints()
                hints.add_filter('service_id', service_ref['id'])
                if self.region_id:
                    hints.add_filter('region_id', self.region_id)

                endpoints = self.catalog_manager.list_endpoints(hints)
            else:
                service_ref = {'id': uuid.uuid4().hex,
                               'name': self.service_name,
                               'type': 'identity',
                               'enabled': True}

                self.catalog_manager.create_service(
                    service_id=service_ref['id'],
                    service_ref=service_ref)

                endpoints = []

            self.service_id = service_ref['id']

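            # Index any endpoints that already exist by interface so that only
            # the missing interfaces are created below.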
            available_interfaces = {e['interface']: e for e in endpoints}
            expected_endpoints = {'public': self.public_url,
                                  'internal': self.internal_url,
                                  'admin': self.admin_url}

            for interface, url in expected_endpoints.items():
                if not url:
                    # not specified to bootstrap command
                    continue

                try:
                    endpoint_ref = available_interfaces[interface]
                except KeyError:
                    endpoint_ref = {'id': uuid.uuid4().hex,
                                    'interface': interface,
                                    'url': url,
                                    'service_id': self.service_id,
                                    'enabled': True}

                    if self.region_id:
                        endpoint_ref['region_id'] = self.region_id

                    self.catalog_manager.create_endpoint(
                        endpoint_id=endpoint_ref['id'],
                        endpoint_ref=endpoint_ref)

                    LOG.info(_LI('Created %(interface)s endpoint %(url)s'),
                             {'interface': interface, 'url': url})
                else:
                    # NOTE(jamielennox): electing not to update existing
                    # endpoints here. There may be call to do so in future.
                    LOG.info(_LI('Skipping %s endpoint as already created'),
                             interface)

                self.endpoints[interface] = endpoint_ref['id']

    @classmethod
    def main(cls):
        klass = cls()
        klass.do_bootstrap()


class DbSync(BaseApp):
    """Sync the database."""

    name = 'db_sync'
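
    # Example usage (assuming a [database]/connection option is configured in
    # keystone.conf):
    #   keystone-manage db_sync              # migrate to the latest version
    #   keystone-manage db_sync <version>    # migrate up to a specific version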

    @classmethod
    def add_argument_parser(cls, subparsers):
        parser = super(DbSync, cls).add_argument_parser(subparsers)
        parser.add_argument('version', default=None, nargs='?',
                            help=('Migrate the database up to a specified '
                                  'version. If not provided, db_sync will '
                                  'migrate the database to the latest known '
                                  'version. Schema downgrades are not '
                                  'supported.'))
        parser.add_argument('--extension', default=None,
                            help=('Migrate the database for the specified '
                                  'extension. If not provided, db_sync will '
                                  'migrate the common repository.'))

        return parser

    @staticmethod
    def main():
        version = CONF.command.version
        extension = CONF.command.extension
        migration_helpers.sync_database_to_version(extension, version)


class DbVersion(BaseApp):
    """Print the current migration version of the database."""

    name = 'db_version'

    @classmethod
    def add_argument_parser(cls, subparsers):
        parser = super(DbVersion, cls).add_argument_parser(subparsers)
        parser.add_argument('--extension', default=None,
                            help=('Print the migration version of the '
                                  'database for the specified extension. If '
                                  'not provided, print it for the common '
                                  'repository.'))

    @staticmethod
    def main():
        extension = CONF.command.extension
        migration_helpers.print_db_version(extension)


class BasePermissionsSetup(BaseApp):
    """Common user/group setup for file permissions."""

    @classmethod
    def add_argument_parser(cls, subparsers):
        parser = super(BasePermissionsSetup,
                       cls).add_argument_parser(subparsers)
        running_as_root = (os.geteuid() == 0)
        parser.add_argument('--keystone-user', required=running_as_root)
        parser.add_argument('--keystone-group', required=running_as_root)
        return parser

    @staticmethod
    def get_user_group():
        keystone_user_id = None
        keystone_group_id = None

        try:
            a = CONF.command.keystone_user
            if a:
                keystone_user_id = utils.get_unix_user(a)[0]
        except KeyError:
            raise ValueError("Unknown user '%s' in --keystone-user" % a)

        try:
            a = CONF.command.keystone_group
            if a:
                keystone_group_id = utils.get_unix_group(a)[0]
        except KeyError:
            raise ValueError("Unknown group '%s' in --keystone-group" % a)

        return keystone_user_id, keystone_group_id


class BaseCertificateSetup(BasePermissionsSetup):
    """Provides common options for certificate setup."""

    @classmethod
    def add_argument_parser(cls, subparsers):
        parser = super(BaseCertificateSetup,
                       cls).add_argument_parser(subparsers)
        parser.add_argument('--rebuild', default=False, action='store_true',
                            help=('Rebuild certificate files: erase previous '
                                  'files and regenerate them.'))
        return parser


class PKISetup(BaseCertificateSetup):
    """Set up Key pairs and certificates for token signing and verification.

    This is NOT intended for production use, see Keystone Configuration
    documentation for details. As of the Mitaka release, this command has
    been DEPRECATED and may be removed in the 'O' release.
    """

    name = 'pki_setup'

    @classmethod
    def main(cls):
        versionutils.report_deprecated_feature(
            LOG,
            _LW("keystone-manage pki_setup is deprecated as of Mitaka in "
                "favor of not using PKI tokens and may be removed in 'O' "
                "release."))
        LOG.warning(_LW('keystone-manage pki_setup is not recommended for '
                        'production use.'))
        keystone_user_id, keystone_group_id = cls.get_user_group()
        conf_pki = openssl.ConfigurePKI(keystone_user_id, keystone_group_id,
                                        rebuild=CONF.command.rebuild)
        conf_pki.run()


class SSLSetup(BaseCertificateSetup):
    """Create key pairs and certificates for HTTPS connections.

    This is NOT intended for production use, see Keystone Configuration
    documentation for details.
    """

    name = 'ssl_setup'

    @classmethod
    def main(cls):
        LOG.warning(_LW('keystone-manage ssl_setup is not recommended for '
                        'production use.'))
        keystone_user_id, keystone_group_id = cls.get_user_group()
        conf_ssl = openssl.ConfigureSSL(keystone_user_id, keystone_group_id,
                                        rebuild=CONF.command.rebuild)
        conf_ssl.run()


class FernetSetup(BasePermissionsSetup):
    """Setup a key repository for Fernet tokens.

    This also creates a primary key used for both creating and validating
    Fernet tokens. To improve security, you should rotate your keys (using
    keystone-manage fernet_rotate, for example).

    """

    name = 'fernet_setup'
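
    # Example usage (the user/group names below are typical placeholders); the
    # key repository location is taken from [fernet_tokens]/key_repository in
    # keystone.conf:
    #   keystone-manage fernet_setup --keystone-user keystone \
    #       --keystone-group keystone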

    @classmethod
    def main(cls):
        from keystone.token.providers.fernet import utils as fernet

        keystone_user_id, keystone_group_id = cls.get_user_group()
        fernet.create_key_directory(keystone_user_id, keystone_group_id)
        if fernet.validate_key_repository(requires_write=True):
            fernet.initialize_key_repository(
                keystone_user_id, keystone_group_id)


class FernetRotate(BasePermissionsSetup):
    """Rotate Fernet encryption keys.

    This assumes you have already run keystone-manage fernet_setup.

    A new primary key is placed into rotation, which is used for new tokens.
    The old primary key is demoted to secondary, which can then still be used
    for validating tokens. Excess secondary keys (beyond [fernet_tokens]
    max_active_keys) are revoked. Revoked keys are permanently deleted. A new
    staged key will be created and used to validate tokens. The next time key
    rotation takes place, the staged key will be put into rotation as the
    primary key.

    Rotating keys too frequently, or with [fernet_tokens] max_active_keys set
    too low, will cause tokens to become invalid prior to their expiration.

    """

    name = 'fernet_rotate'
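
    # Example usage (the user/group names below are typical placeholders):
    #   keystone-manage fernet_rotate --keystone-user keystone \
    #       --keystone-group keystone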

    @classmethod
    def main(cls):
        from keystone.token.providers.fernet import utils as fernet

        keystone_user_id, keystone_group_id = cls.get_user_group()
        if fernet.validate_key_repository(requires_write=True):
            fernet.rotate_keys(keystone_user_id, keystone_group_id)


class TokenFlush(BaseApp):
    """Flush expired tokens from the backend."""

    name = 'token_flush'
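
    # Typically run periodically (e.g. from cron) so the token table does not
    # grow without bound:
    #   keystone-manage token_flush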

    @classmethod
    def main(cls):
        token_manager = token.persistence.PersistenceManager()
        token_manager.flush_expired_tokens()


class MappingPurge(BaseApp):
    """Purge the mapping table."""

    name = 'mapping_purge'
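
    # Example usage; at least one option is required, and --all cannot be
    # combined with the other filters:
    #   keystone-manage mapping_purge --all
    #   keystone-manage mapping_purge --domain-name <domain>
    #   keystone-manage mapping_purge --type user --local-id <local id>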

    @classmethod
    def add_argument_parser(cls, subparsers):
        parser = super(MappingPurge, cls).add_argument_parser(subparsers)
        parser.add_argument('--all', default=False, action='store_true',
                            help=('Purge all mappings.'))
        parser.add_argument('--domain-name', default=None,
                            help=('Purge any mappings for the domain '
                                  'specified.'))
        parser.add_argument('--public-id', default=None,
                            help=('Purge the mapping for the Public ID '
                                  'specified.'))
        parser.add_argument('--local-id', default=None,
                            help=('Purge the mappings for the Local ID '
                                  'specified.'))
        parser.add_argument('--type', default=None, choices=['user', 'group'],
                            help=('Purge any mappings for the type '
                                  'specified.'))
        return parser

    @staticmethod
    def main():
        def validate_options():
            # NOTE(henry-nash): It would be nice to use the argparse automated
            # checking for this validation, but the only way I can see doing
            # that is to make the default (i.e. if no optional parameters
            # are specified) to purge all mappings - and that sounds too
            # dangerous as a default.  So we use it in a slightly
            # unconventional way, where all parameters are optional, but you
            # must specify at least one.
            if (CONF.command.all is False and
                CONF.command.domain_name is None and
                CONF.command.public_id is None and
                CONF.command.local_id is None and
                    CONF.command.type is None):
                raise ValueError(_('At least one option must be provided'))

            if (CONF.command.all is True and
                (CONF.command.domain_name is not None or
                 CONF.command.public_id is not None or
                 CONF.command.local_id is not None or
                 CONF.command.type is not None)):
                raise ValueError(_('--all option cannot be mixed with '
                                   'other options'))

        def get_domain_id(name):
            try:
                return resource_manager.get_domain_by_name(name)['id']
            except KeyError:
                raise ValueError(_("Unknown domain '%(name)s' specified by "
                                   "--domain-name") % {'name': name})

        validate_options()
        drivers = backends.load_backends()
        resource_manager = drivers['resource_api']
        mapping_manager = drivers['id_mapping_api']

        # Now that we have validated the options, we know that at least one
        # option has been specified, and if it was the --all option then this
        # was the only option specified.
        #
        # The mapping dict is used to filter which mappings are purged, so
        # leaving it empty means purge them all
        mapping = {}
        if CONF.command.domain_name is not None:
            mapping['domain_id'] = get_domain_id(CONF.command.domain_name)
        if CONF.command.public_id is not None:
            mapping['public_id'] = CONF.command.public_id
        if CONF.command.local_id is not None:
            mapping['local_id'] = CONF.command.local_id
        if CONF.command.type is not None:
            mapping['type'] = CONF.command.type

        mapping_manager.purge_mappings(mapping)


DOMAIN_CONF_FHEAD = 'keystone.'
DOMAIN_CONF_FTAIL = '.conf'


def _domain_config_finder(conf_dir):
    """Return a generator of all domain config files found in a directory.

    Domain configs match the filename pattern of
    'keystone.<domain_name>.conf'.

    :returns: generator yielding (filename, domain_name) tuples
    """
    LOG.info(_LI('Scanning %r for domain config files'), conf_dir)
    for r, d, f in os.walk(conf_dir):
        for fname in f:
            if (fname.startswith(DOMAIN_CONF_FHEAD) and
                    fname.endswith(DOMAIN_CONF_FTAIL)):
                if fname.count('.') >= 2:
                    domain_name = fname[len(DOMAIN_CONF_FHEAD):
                                        -len(DOMAIN_CONF_FTAIL)]
                    yield (os.path.join(r, fname), domain_name)
                    continue

            LOG.warning(_LW('Ignoring file (%s) while scanning '
                            'domain config directory'), fname)


class DomainConfigUploadFiles(object):

    def __init__(self, domain_config_finder=_domain_config_finder):
        super(DomainConfigUploadFiles, self).__init__()
        self.load_backends()
        self._domain_config_finder = domain_config_finder

    def load_backends(self):
        drivers = backends.load_backends()
        self.resource_manager = drivers['resource_api']
        self.domain_config_manager = drivers['domain_config_api']

    def valid_options(self):
        """Validate the options, returning True if they are indeed valid.

        It would be nice to use the argparse automated checking for this
        validation, but the only way I can see doing that is to make the
        default (i.e. if no optional parameters are specified) to upload
        all configuration files - and that sounds too dangerous as a
        default. So we use it in a slightly unconventional way, where all
        parameters are optional, but you must specify at least one.

        """
        if (CONF.command.all is False and
                CONF.command.domain_name is None):
            print(_('At least one option must be provided, use either '
                    '--all or --domain-name'))
            raise ValueError

        if (CONF.command.all is True and
                CONF.command.domain_name is not None):
            print(_('The --all option cannot be used with '
                    'the --domain-name option'))
            raise ValueError

    def upload_config_to_database(self, file_name, domain_name):
        """Upload a single config file to the database.

        :param file_name: the file containing the config options
        :param domain_name: the domain name

        :raises ValueError: the domain does not exist or already has domain
            specific configurations defined.
        :raises Exceptions from oslo config: there is an issue with options
            defined in the config file or its format.

        The caller of this method should catch the errors raised and handle
        them appropriately so that the best user experience can be provided,
        both when a user has asked for a specific config file to be uploaded
        and when uploading all config files in a directory.

        """
        try:
            domain_ref = (
                self.resource_manager.get_domain_by_name(domain_name))
        except exception.DomainNotFound:
            print(_('Invalid domain name: %(domain)s found in config file '
                    'name: %(file)s - ignoring this file.') % {
                        'domain': domain_name,
                        'file': file_name})
            raise ValueError

        if self.domain_config_manager.get_config_with_sensitive_info(
                domain_ref['id']):
            print(_('Domain: %(domain)s already has a configuration '
                    'defined - ignoring file: %(file)s.') % {
                        'domain': domain_name,
                        'file': file_name})
            raise ValueError

        sections = {}
        try:
            parser = cfg.ConfigParser(file_name, sections)
            parser.parse()
        except Exception:
            # We explicitly don't try and differentiate the error cases, in
            # order to keep the code in this tool more robust as oslo.config
            # changes.
            print(_('Error parsing configuration file for domain: %(domain)s, '
                    'file: %(file)s.') % {
                        'domain': domain_name,
                        'file': file_name})
            raise

        # Each option is parsed into a list of values; keep only the first
        # entry for each option.
        for group in sections:
            for option in sections[group]:
                sections[group][option] = sections[group][option][0]
        self.domain_config_manager.create_config(domain_ref['id'], sections)

    def upload_configs_to_database(self, file_name, domain_name):
        """Upload configs from file and load into database.

        This method will be called repeatedly for all the config files in the
        config directory. To provide a better UX, we differentiate the error
        handling in this case (versus when the user has asked for a single
        config file to be uploaded).

        """
        try:
            self.upload_config_to_database(file_name, domain_name)
        except ValueError:  # nosec
            # We've already given all the info we can in a message, so carry
            # on to the next one
            pass
        except Exception:
            # Some other error occurred relating to this specific config file
            # or domain. Since we are trying to upload all the config files,
            # we'll continue and hide this exception. However, we tell the
            # user how to get more info about this error by re-running with
            # just the domain at fault. When we run in single-domain mode we
            # will NOT hide the exception.
            print(_('To get more detailed information on this error, re-run '
                    'this command for the specific domain, i.e.: '
                    'keystone-manage domain_config_upload --domain-name %s') %
                  domain_name)

    def read_domain_configs_from_files(self):
        """Read configs from file(s) and load into database.

        The command line parameters have already been parsed and the CONF
        command option will have been set. It is either set to the name of an
        explicit domain, or it's None to indicate that we want all domain
        config files.

        """
        domain_name = CONF.command.domain_name
        conf_dir = CONF.identity.domain_config_dir
        if not os.path.exists(conf_dir):
            print(_('Unable to locate domain config directory: %s') % conf_dir)
            raise ValueError

        if domain_name:
            # Request is to upload the configs for just one domain
            fname = DOMAIN_CONF_FHEAD + domain_name + DOMAIN_CONF_FTAIL
            self.upload_config_to_database(
                os.path.join(conf_dir, fname), domain_name)
            return

        for filename, domain_name in self._domain_config_finder(conf_dir):
            self.upload_configs_to_database(filename, domain_name)

    def run(self):
        # First off, let's just check we can talk to the domain database
        try:
            self.resource_manager.list_domains(driver_hints.Hints())
        except Exception:
            # It is likely that there is some SQL or other backend error
            # related to its setup.
            print(_('Unable to access the keystone database, please check it '
                    'is configured correctly.'))
            raise

        try:
            self.valid_options()
            self.read_domain_configs_from_files()
        except ValueError:
            # We will already have printed out a nice message, so indicate
            # to caller the non-success error code to be used.
            return 1


class DomainConfigUpload(BaseApp):
    """Upload the domain specific configuration files to the database."""

    name = 'domain_config_upload'
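
    # Example usage; files are read from [identity]/domain_config_dir and must
    # be named keystone.<domain_name>.conf:
    #   keystone-manage domain_config_upload --all
    #   keystone-manage domain_config_upload --domain-name <domain>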

    @classmethod
    def add_argument_parser(cls, subparsers):
        parser = super(DomainConfigUpload, cls).add_argument_parser(subparsers)
        parser.add_argument('--all', default=False, action='store_true',
                            help='Upload contents of all domain specific '
                                 'configuration files. Either use this option '
                                 'or use the --domain-name option to choose a '
                                 'specific domain.')
        parser.add_argument('--domain-name', default=None,
                            help='Upload contents of the specific '
                                 'configuration file for the given domain. '
                                 'Either use this option or use the --all '
                                 'option to upload contents for all domains.')
        return parser

    @staticmethod
    def main():
        dcu = DomainConfigUploadFiles()
        status = dcu.run()
        if status is not None:
            sys.exit(status)


class SamlIdentityProviderMetadata(BaseApp):
    """Generate Identity Provider metadata."""

    name = 'saml_idp_metadata'
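
    # The generated XML is printed to stdout, so it is typically redirected to
    # the metadata file referenced by keystone's SAML/federation configuration:
    #   keystone-manage saml_idp_metadata > <path to the IdP metadata file>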

    @staticmethod
    def main():
        metadata = idp.MetadataGenerator().generate_metadata()
        print(metadata.to_string())


class MappingEngineTester(BaseApp):
    """Execute mapping engine locally."""

    name = 'mapping_engine'
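
    # Example usage with hypothetical file names; --rules points at a JSON
    # mapping document and --input at a file of 'NAME: value' assertion lines:
    #   keystone-manage mapping_engine --rules rules.json --input assertion.txt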

    @staticmethod
    def read_rules(path):
        try:
            with open(path) as file:
                return jsonutils.load(file)
        except ValueError as e:
            raise SystemExit(_('Error while parsing rules '
                               '%(path)s: %(err)s') % {'path': path, 'err': e})

    @staticmethod
    def read_file(path):
        try:
            with open(path) as file:
                return file.read().strip()
        except IOError as e:
            raise SystemExit(_("Error while opening file "
                               "%(path)s: %(err)s") % {'path': path, 'err': e})

    @staticmethod
    def normalize_assertion(assertion):
        def split(line):
            try:
                k, v = line.split(':', 1)
                return k.strip(), v.strip()
            except ValueError as e:
                msg = _("Error while parsing line: '%(line)s': %(err)s")
                raise SystemExit(msg % {'line': line, 'err': e})
        assertion = assertion.split('\n')
        assertion_dict = {}
        prefix = CONF.command.prefix
        for line in assertion:
            k, v = split(line)
            if prefix:
                if k.startswith(prefix):
                    assertion_dict[k] = v
            else:
                assertion_dict[k] = v
        return assertion_dict

    @staticmethod
    def normalize_rules(rules):
        if isinstance(rules, list):
            return {'rules': rules}
        else:
            return rules

    @classmethod
    def main(cls):
        if not CONF.command.engine_debug:
            mapping_engine.LOG.logger.setLevel('WARN')

        rules = MappingEngineTester.read_rules(CONF.command.rules)
        rules = MappingEngineTester.normalize_rules(rules)
        mapping_engine.validate_mapping_structure(rules)

        assertion = MappingEngineTester.read_file(CONF.command.input)
        assertion = MappingEngineTester.normalize_assertion(assertion)
        rp = mapping_engine.RuleProcessor(rules['rules'])
        print(jsonutils.dumps(rp.process(assertion), indent=2))

    @classmethod
    def add_argument_parser(cls, subparsers):
        parser = super(MappingEngineTester,
                       cls).add_argument_parser(subparsers)

        parser.add_argument('--rules', default=None, required=True,
                            help=("Path to the file with "
                                  "rules to be executed. "
                                  "Content must be a proper JSON structure, "
                                  "with a top-level key 'rules' and "
                                  "corresponding value being a list."))
        parser.add_argument('--input', default=None, required=True,
                            help=("Path to the file with input attributes. "
                                  "The content consists of ':' separated "
                                  "parameter names and their values. "
                                  "There is only one key-value pair per line. "
                                  "A ';' in the value is a separator and then "
                                  "a value is treated as a list. Example:\n "
                                  "EMAIL: me@example.com\n"
                                  "LOGIN: me\n"
                                  "GROUPS: group1;group2;group3"))
        parser.add_argument('--prefix', default=None,
                            help=("A prefix used for each environment "
                                  "variable in the assertion. For example, "
                                  "all environment variables may have the "
                                  "prefix ASDF_."))
        parser.add_argument('--engine-debug',
                            default=False, action="store_true",
                            help=("Enable debug messages from the mapping "
                                  "engine."))


CMDS = [
    BootStrap,
    DbSync,
    DbVersion,
    DomainConfigUpload,
    FernetRotate,
    FernetSetup,
    MappingPurge,
    MappingEngineTester,
    PKISetup,
    SamlIdentityProviderMetadata,
    SSLSetup,
    TokenFlush,
]


def add_command_parsers(subparsers):
    for cmd in CMDS:
        cmd.add_argument_parser(subparsers)


command_opt = cfg.SubCommandOpt('command',
                                title='Commands',
                                help='Available commands',
                                handler=add_command_parsers)


def main(argv=None, config_files=None):
    CONF.register_cli_opt(command_opt)

    config.configure()
    sql.initialize()
    config.set_default_for_default_log_levels()

    CONF(args=argv[1:],
         project='keystone',
         version=pbr.version.VersionInfo('keystone').version_string(),
         usage='%(prog)s [' + '|'.join([cmd.name for cmd in CMDS]) + ']',
         default_config_files=config_files)
    config.setup_logging()
    CONF.command.cmd_class.main()