Diffstat (limited to 'tools/yaml-validate.py')
-rwxr-xr-x | tools/yaml-validate.py | 111 |
1 files changed, 100 insertions, 11 deletions
diff --git a/tools/yaml-validate.py b/tools/yaml-validate.py
index 92d76d23..233ec185 100755
--- a/tools/yaml-validate.py
+++ b/tools/yaml-validate.py
@@ -20,8 +20,15 @@ import yaml
 
 required_params = ['EndpointMap', 'ServiceNetMap', 'DefaultPasswords',
                    'RoleName', 'RoleParameters']
+# NOTE(bnemec): The duplication in this list is intentional.  For the
+# transition to generated environments we have two copies of these files,
+# so they need to be listed twice.  Once the deprecated version can be removed
+# the duplicate entries can be as well.
 envs_containing_endpoint_map = ['tls-endpoints-public-dns.yaml',
                                 'tls-endpoints-public-ip.yaml',
+                                'tls-everywhere-endpoints-dns.yaml',
+                                'tls-endpoints-public-dns.yaml',
+                                'tls-endpoints-public-ip.yaml',
                                 'tls-everywhere-endpoints-dns.yaml']
 ENDPOINT_MAP_FILE = 'endpoint_map.yaml'
 REQUIRED_DOCKER_SECTIONS = ['service_name', 'docker_config', 'puppet_config',
@@ -32,6 +39,24 @@ OPTIONAL_DOCKER_SECTIONS = ['docker_puppet_tasks', 'upgrade_tasks',
 REQUIRED_DOCKER_PUPPET_CONFIG_SECTIONS = ['config_volume', 'step_config',
                                           'config_image']
 OPTIONAL_DOCKER_PUPPET_CONFIG_SECTIONS = [ 'puppet_tags' ]
+# Mapping of parameter names to a list of the fields we should _not_ enforce
+# consistency across files on.  This should only contain parameters whose
+# definition we cannot change for backwards compatibility reasons.  New
+# parameters to the templates should not be added to this list.
+PARAMETER_DEFINITION_EXCLUSIONS = {'ManagementNetCidr': ['default'],
+                                   'ManagementAllocationPools': ['default'],
+                                   'ExternalNetCidr': ['default'],
+                                   'ExternalAllocationPools': ['default'],
+                                   'StorageNetCidr': ['default'],
+                                   'StorageAllocationPools': ['default'],
+                                   'StorageMgmtNetCidr': ['default'],
+                                   'StorageMgmtAllocationPools': ['default'],
+                                   }
+
+PREFERRED_CAMEL_CASE = {
+    'ec2api': 'Ec2Api',
+    'haproxy': 'HAProxy',
+}
 
 
 def exit_usage():
@@ -39,6 +64,11 @@ def exit_usage():
     sys.exit(1)
 
 
+def to_camel_case(string):
+    return PREFERRED_CAMEL_CASE.get(string, ''.join(s.capitalize() or '_' for
+                                                    s in string.split('_')))
+
+
 def get_base_endpoint_map(filename):
     try:
         tpl = yaml.load(open(filename).read())
@@ -163,6 +193,13 @@ def validate_docker_service(filename, tpl):
                           % (key, filename))
                     return 1
 
+            config_volume = puppet_config.get('config_volume')
+            expected_config_image_parameter = "Docker%sConfigImage" % to_camel_case(config_volume)
+            if config_volume and not expected_config_image_parameter in tpl.get('parameters', []):
+                print('ERROR: Missing %s heat parameter for %s config_volume.'
+                      % (expected_config_image_parameter, config_volume))
+                return 1
+
     if 'parameters' in tpl:
         for param in required_params:
             if param not in tpl['parameters']:
@@ -204,7 +241,30 @@ def validate_service(filename, tpl):
     return 0
 
 
-def validate(filename):
+def validate(filename, param_map):
+    """Validate a Heat template
+
+    :param filename: The path to the file to validate
+    :param param_map: A dict which will be populated with the details of the
+                      parameters in the template.  The dict will have the
+                      following structure:
+
+                          {'ParameterName': [
+                              {'filename': ./file1.yaml,
+                               'data': {'description': '',
+                                        'type': string,
+                                        'default': '',
+                                        ...}
+                               },
+                              {'filename': ./file2.yaml,
+                               'data': {'description': '',
+                                        'type': string,
+                                        'default': '',
+                                        ...}
+                               },
+                              ...
+                          ]}
+    """
     print('Validating %s' % filename)
     retval = 0
     try:
@@ -219,12 +279,10 @@ def validate(filename):
 
     # qdr aliases rabbitmq service to provide alternative messaging backend
     if (filename.startswith('./puppet/services/') and
-            filename not in ['./puppet/services/services.yaml',
-                             './puppet/services/qdr.yaml']):
+            filename not in ['./puppet/services/qdr.yaml']):
         retval = validate_service(filename, tpl)
 
-    if (filename.startswith('./docker/services/') and
-            filename != './docker/services/services.yaml'):
+    if filename.startswith('./docker/services/'):
         retval = validate_docker_service(filename, tpl)
 
     if filename.endswith('hyperconverged-ceph.yaml'):
@@ -235,7 +293,9 @@ def validate(filename):
         return 1
     # yaml is OK, now walk the parameters and output a warning for unused ones
     if 'heat_template_version' in tpl:
-        for p in tpl.get('parameters', {}):
+        for p, data in tpl.get('parameters', {}).items():
+            definition = {'data': data, 'filename': filename}
+            param_map.setdefault(p, []).append(definition)
             if p in required_params:
                 continue
             str_p = '\'%s\'' % p
@@ -255,6 +315,7 @@ exit_val = 0
 failed_files = []
 base_endpoint_map = None
 env_endpoint_maps = list()
+param_map = {}
 
 for base_path in path_args:
     if os.path.isdir(base_path):
@@ -262,7 +323,7 @@ for base_path in path_args:
             for f in files:
                 if f.endswith('.yaml') and not f.endswith('.j2.yaml'):
                     file_path = os.path.join(subdir, f)
-                    failed = validate(file_path)
+                    failed = validate(file_path, param_map)
                     if failed:
                         failed_files.append(file_path)
                     exit_val |= failed
@@ -273,7 +334,7 @@ for base_path in path_args:
                     if env_endpoint_map:
                         env_endpoint_maps.append(env_endpoint_map)
     elif os.path.isfile(base_path) and base_path.endswith('.yaml'):
-        failed = validate(base_path)
+        failed = validate(base_path, param_map)
         if failed:
             failed_files.append(base_path)
         exit_val |= failed
@@ -294,9 +355,9 @@ if base_endpoint_map and \
         else:
             print("%s matches base endpoint map" % env_endpoint_map['file'])
 else:
-    print("ERROR: Can't validate endpoint maps since a file is missing. "
-          "If you meant to delete one of these files you should update this "
-          "tool as well.")
+    print("ERROR: Did not find expected number of environments containing the "
+          "EndpointMap parameter.  If you meant to add or remove one of these "
+          "environments then you also need to update this tool.")
     if not base_endpoint_map:
         failed_files.append(ENDPOINT_MAP_FILE)
     if len(env_endpoint_maps) != len(envs_containing_endpoint_map):
@@ -305,6 +366,34 @@ else:
         failed_files.extend(set(envs_containing_endpoint_map) - matched_files)
         exit_val |= 1
 
+# Validate that duplicate parameters defined in multiple files all have the
+# same definition.
+mismatch_count = 0
+for p, defs in param_map.items():
+    # Nothing to validate if the parameter is only defined once
+    if len(defs) == 1:
+        continue
+    check_data = [d['data'] for d in defs]
+    # Override excluded fields so they don't affect the result
+    exclusions = PARAMETER_DEFINITION_EXCLUSIONS.get(p, [])
+    ex_dict = {}
+    for field in exclusions:
+        ex_dict[field] = 'IGNORED'
+    for d in check_data:
+        d.update(ex_dict)
+    # If all items in the list are not == the first, then the check fails
+    if check_data.count(check_data[0]) != len(check_data):
+        mismatch_count += 1
+        # TODO(bnemec): Make this a hard failure once all the templates have
+        # been fixed.
+        #exit_val |= 1
+        #failed_files.extend([d['filename'] for d in defs])
+        print('Mismatched parameter definitions found for "%s"' % p)
+        print('Definitions found:')
+        for d in defs:
+            print('  %s:\n    %s' % (d['filename'], d['data']))
+print('Mismatched parameter definitions: %d' % mismatch_count)
+
 if failed_files:
     print('Validation failed on:')
     for f in failed_files:
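For reference, a minimal sketch (not part of the patch) of how the new to_camel_case() helper determines the heat parameter name that validate_docker_service() now requires; the 'nova_libvirt' config_volume is only an illustrative input:

# PREFERRED_CAMEL_CASE and to_camel_case() are copied from the patch above.
PREFERRED_CAMEL_CASE = {
    'ec2api': 'Ec2Api',
    'haproxy': 'HAProxy',
}

def to_camel_case(string):
    return PREFERRED_CAMEL_CASE.get(string, ''.join(s.capitalize() or '_' for
                                                    s in string.split('_')))

# Regular names are CamelCased piece by piece...
assert to_camel_case('nova_libvirt') == 'NovaLibvirt'
# ...while entries in PREFERRED_CAMEL_CASE override the generic rule.
assert to_camel_case('haproxy') == 'HAProxy'
# The docker service check then looks for a parameter named
# "Docker%sConfigImage" % to_camel_case(config_volume), e.g.
# DockerNovaLibvirtConfigImage for a 'nova_libvirt' config_volume.
assert 'Docker%sConfigImage' % to_camel_case('nova_libvirt') == 'DockerNovaLibvirtConfigImage'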
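Likewise, a small sketch (condensed from the loop added at the end of the script) of how the duplicate-definition check behaves on a hand-built param_map; the parameter name and file paths here are hypothetical:

PARAMETER_DEFINITION_EXCLUSIONS = {'ManagementNetCidr': ['default']}

# Two definitions of the same parameter that differ only in their default.
param_map = {
    'ExampleParam': [
        {'filename': './puppet/services/one.yaml',
         'data': {'type': 'string', 'default': ''}},
        {'filename': './docker/services/two.yaml',
         'data': {'type': 'string', 'default': 'changed'}},
    ],
}

for p, defs in param_map.items():
    check_data = [d['data'] for d in defs]
    # Fields listed in PARAMETER_DEFINITION_EXCLUSIONS are overwritten with a
    # fixed value so they cannot cause a mismatch (the ex_dict/update logic in
    # the patch); 'ExampleParam' has no exclusions, so its defaults are compared.
    for field in PARAMETER_DEFINITION_EXCLUSIONS.get(p, []):
        for d in check_data:
            d[field] = 'IGNORED'
    if check_data.count(check_data[0]) != len(check_data):
        print('Mismatched parameter definitions found for "%s"' % p)  # fires here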