Diffstat (limited to 'deploy')
-rw-r--r--  deploy/README.templater                                    371
-rw-r--r--  deploy/cloud/configure_nodes.py                             54
-rw-r--r--  deploy/config/plugins/fuel-nfvkvm_0.9.0.yaml                34
-rw-r--r--  deploy/deploy-config.py                                    250
-rw-r--r--  deploy/scenario/ha_nfv-kvm_heat_ceilometer_scenario.yaml     8
-rwxr-xr-x  deploy/templater.py                                        185
6 files changed, 795 insertions, 107 deletions
diff --git a/deploy/README.templater b/deploy/README.templater
new file mode 100644
index 000000000..b5d52ab9d
--- /dev/null
+++ b/deploy/README.templater
@@ -0,0 +1,371 @@
+##############################################################################
+# Copyright (c) 2016 Ericsson AB and others.
+# peter.barabas@ericsson.com
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+======== TEMPLATING SUPPORT IN YAML CONFIGURATION FILES ========
+
+deploy/templater.py makes it possible to generate configuration files from
+templates. It takes two input YAML files and an output file as arguments: the
+first input is the dictionary (called the base file), in which values are
+looked up; the second is the template, in which the substitutions take place.
+Templater writes the result to the output file, given as the third argument.
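+
+For example, assuming a base file named dea_base.yaml and a template named
+dea_template.yaml (illustrative names only), templater.py could be invoked as:
+
+    python deploy/templater.py dea_base.yaml dea_template.yaml dea.yaml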
+
+
+======== SYNTAX OF TEMPLATE FILES ========
+
+A template file can contain any valid YAML data and template variables, whose
+syntax is described below:
+
+1. Single value references
+
+ %{title}
+
+ %{environment/net_segment_type}
+
+ Either a root element, or a path can be specified.
+
+2. YAML sections
+
+ %{nodes}
+
+ %{network/networking_parameters}
+
+ Either a root element, or a path can be specified.
+
+3. Interface lookup for network
+
+ %{interface(storage)}
+
+ Specify a network type as argument to interface().
+
+4. Interface lookup for network and role
+
+ %{interface(public,compute)}
+
+ Specify a network type and a role as arguments to interface().
+
+5. File inclusion
+
+ %{include(templates/interfaces.yaml)}
+
+ Filename with absolute or relative path.
+
+
+======== EXAMPLES ========
+
+Base YAML file (excerpt):
+
+title: Deployment Environment Adapter (DEA)
+version: 1.1
+created: Wed Mar 30 08:16:04 2016
+environment:
+ name: vCity
+ net_segment_type: tun
+wanted_release: Liberty on Ubuntu 14.04
+nodes:
+- id: 1
+ interfaces: interfaces_1
+ role: ceph-osd,compute
+ transformations: transformations_1
+- id: 2
+ interfaces: interfaces_1
+ role: ceph-osd,compute
+ transformations: transformations_1
+- id: 3
+ interfaces: interfaces_1
+ role: ceph-osd,compute
+ transformations: transformations_1
+- id: 4
+ interfaces: interfaces_2
+ role: controller,mongo
+ transformations: transformations_2
+- id: 5
+ interfaces: interfaces_2
+ role: controller,mongo
+ transformations: transformations_2
+- id: 6
+ interfaces: interfaces_2
+ role: controller,mongo
+ transformations: transformations_2
+interfaces_1:
+ ens3:
+ - fuelweb_admin
+ - management
+ ens4:
+ - storage
+ ens5:
+ - private
+ ens6:
+ - public
+interfaces_2:
+ ens3:
+ - fuelweb_admin
+ - management
+ ens4:
+ - storage
+ - private
+ - public
+network:
+ networks:
+ - cidr: 172.16.0.0/24
+ gateway: 172.16.0.1
+ ip_ranges:
+ - - 172.16.0.2
+ - 172.16.0.126
+ meta:
+ cidr: 172.16.0.0/24
+ configurable: true
+ floating_range_var: floating_ranges
+ ip_range:
+ - 172.16.0.2
+ - 172.16.0.126
+ map_priority: 1
+ name: public
+ notation: ip_ranges
+ render_addr_mask: public
+ render_type: null
+ use_gateway: true
+ vips:
+ - haproxy
+ - vrouter
+ vlan_start: null
+ name: public
+ vlan_start: null
+ - cidr: 192.168.1.0/24
+ gateway: null
+ ip_ranges:
+ - - 192.168.1.1
+ - 192.168.1.254
+ meta:
+ cidr: 192.168.1.0/24
+ configurable: true
+ map_priority: 2
+ name: storage
+ notation: cidr
+ render_addr_mask: storage
+ render_type: cidr
+ use_gateway: false
+ vlan_start: 102
+ name: storage
+ vlan_start: 102
+
+
+--- Example 1 ---
+
+Template file:
+
+deployment-scenario-metadata:
+ title: %{title}
+ version: 0.1
+dea-override-config:
+ environment:
+ net_segment_type: %{environment/net_segment_type}
+ nodes:
+ %{nodes}
+
+
+Result:
+
+deployment-scenario-metadata:
+ title: Deployment Environment Adapter (DEA)
+ version: 0.1
+dea-override-config:
+ environment:
+ net_segment_type: tun
+ nodes:
+ - id: 1
+ interfaces: interfaces_1
+ role: ceph-osd,compute
+ transformations: transformations_1
+ - id: 2
+ interfaces: interfaces_1
+ role: ceph-osd,compute
+ transformations: transformations_1
+ - id: 3
+ interfaces: interfaces_1
+ role: ceph-osd,compute
+ transformations: transformations_1
+ - id: 4
+ interfaces: interfaces_2
+ role: controller,mongo
+ transformations: transformations_2
+ - id: 5
+ interfaces: interfaces_2
+ role: controller,mongo
+ transformations: transformations_2
+ - id: 6
+ interfaces: interfaces_2
+ role: controller,mongo
+ transformations: transformations_2
+
+
+--- Example 2 ---
+
+Template file:
+
+dea-override-config:
+ network:
+ networks:
+ %{network/networks}
+
+
+Result:
+
+dea-override-config:
+ network:
+ networks:
+ - cidr: 172.16.0.0/24
+ gateway: 172.16.0.1
+ ip_ranges:
+ - - 172.16.0.2
+ - 172.16.0.126
+ meta:
+ cidr: 172.16.0.0/24
+ configurable: true
+ floating_range_var: floating_ranges
+ ip_range:
+ - 172.16.0.2
+ - 172.16.0.126
+ map_priority: 1
+ name: public
+ notation: ip_ranges
+ render_addr_mask: public
+ render_type: null
+ use_gateway: true
+ vips:
+ - haproxy
+ - vrouter
+ vlan_start: null
+ name: public
+ vlan_start: null
+ - cidr: 192.168.1.0/24
+ gateway: null
+ ip_ranges:
+ - - 192.168.1.1
+ - 192.168.1.254
+ meta:
+ cidr: 192.168.1.0/24
+ configurable: true
+ map_priority: 2
+ name: storage
+ notation: cidr
+ render_addr_mask: storage
+ render_type: cidr
+ use_gateway: false
+ vlan_start: 102
+ name: storage
+ vlan_start: 102
+
+
+--- Example 3 ---
+
+Template file:
+
+storage_if: %{interface(storage)}
+compute_private_if: %{interface(private,compute)}
+# Management interface of a mongo node
+mongo_mgmt_if: %{interface(management,mongo)}
+controller_private_if: %{interface(private,controller)}
+
+
+Result:
+
+storage_if: ens4
+compute_private_if: ens5
+# Management interface of a mongo node
+mongo_mgmt_if: ens3
+controller_private_if: ens4
+
+
+--- Example 4 ---
+
+Template file:
+
+version: 1.1
+created: Mon Jun 13 19:39:35 2016
+comment: None
+%{include(environment.yaml)}
+
+
+environment.yaml:
+
+environment:
+ name: F9-NOSDN-NOFEATURE-VXLAN-BAREMETAL
+ net_segment_type: tun
+
+
+Result:
+
+version: 1.1
+created: Mon Jun 13 19:39:35 2016
+comment: None
+environment:
+ name: F9-NOSDN-NOFEATURE-VXLAN-BAREMETAL
+ net_segment_type: tun
+
+
+--- Example 5 ---
+
+Template file (excerpt):
+
+settings:
+ editable:
+ access:
+ email:
+ description: Email address for Administrator
+ label: Email
+ regex:
+ error: Invalid email
+ source: ^\S+@\S+$
+ type: text
+ value: admin@localhost
+ weight: 40
+# ...
+# lines omitted for brevity
+ %{include(templates/cgroups.yaml)}
+
+
+cgroups.yaml:
+
+ cgroups:
+ metadata:
+ always_editable: true
+ group: general
+      label: Cgroups configuration for services
+ restrictions:
+ - action: hide
+ condition: 'true'
+ weight: 90
+
+
+Result:
+
+settings:
+ editable:
+ access:
+ email:
+ description: Email address for Administrator
+ label: Email
+ regex:
+ error: Invalid email
+ source: ^\S+@\S+$
+ type: text
+ value: admin@localhost
+ weight: 40
+# ...
+# again, lines omitted for brevity
+ cgroups:
+ metadata:
+ always_editable: true
+ group: general
+      label: Cgroups configuration for services
+ restrictions:
+ - action: hide
+ condition: 'true'
+ weight: 90
+
diff --git a/deploy/cloud/configure_nodes.py b/deploy/cloud/configure_nodes.py
index b4875cc6a..20ecc1724 100644
--- a/deploy/cloud/configure_nodes.py
+++ b/deploy/cloud/configure_nodes.py
@@ -7,10 +7,12 @@
# http://www.apache.org/licenses/LICENSE-2.0
###############################################################################
+import copy
+import glob
+import io
+import six
import yaml
-import io
-import glob
from common import (
exec_cmd,
@@ -47,6 +49,11 @@ class ConfigureNodes(object):
# need it for the network config.
self.download_deployment_config()
for node_id, roles_blade in self.node_id_roles_dict.iteritems():
+ # Modify node attributes
+ self.download_attributes(node_id)
+ self.modify_node_attributes(node_id, roles_blade)
+ self.upload_attributes(node_id)
+ # Modify interfaces configuration
self.download_interface_config(node_id)
self.modify_node_interface(node_id, roles_blade)
self.modify_node_network_schemes(node_id, roles_blade)
@@ -93,6 +100,35 @@ class ConfigureNodes(object):
exec_cmd('fuel node --env %s --node %s --network --upload '
'--dir %s' % (self.env_id, node_id, self.yaml_config_dir))
+ def download_attributes(self, node_id):
+ log('Download attributes for node %s' % node_id)
+ exec_cmd('fuel node --env %s --node %s --attributes --download '
+ '--dir %s' % (self.env_id, node_id, self.yaml_config_dir))
+
+ def upload_attributes(self, node_id):
+ log('Upload attributes for node %s' % node_id)
+ exec_cmd('fuel node --env %s --node %s --attributes --upload '
+ '--dir %s' % (self.env_id, node_id, self.yaml_config_dir))
+
+ def modify_node_attributes(self, node_id, roles_blade):
+ log('Modify attributes for node {0}'.format(node_id))
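+        # The DEA node entry may reference an attribute override section via
+        # its 'attributes' property; if present, that section is merged into
+        # the attributes.yaml downloaded from Fuel.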
+ dea_key = self.dea.get_node_property(roles_blade[1], 'attributes')
+ if not dea_key:
+ # Node attributes are not overridden. Nothing to do.
+ return
+ new_attributes = self.dea.get_property(dea_key)
+ attributes_yaml = ('%s/node_%s/attributes.yaml'
+ % (self.yaml_config_dir, node_id))
+ check_file_exists(attributes_yaml)
+ backup('%s/node_%s' % (self.yaml_config_dir, node_id))
+
+ with open(attributes_yaml) as stream:
+ attributes = yaml.load(stream)
+ result_attributes = self._merge_dicts(attributes, new_attributes)
+
+ with open(attributes_yaml, 'w') as stream:
+ yaml.dump(result_attributes, stream, default_flow_style=False)
+
def modify_node_interface(self, node_id, roles_blade):
log('Modify interface config for node %s' % node_id)
interface_yaml = ('%s/node_%s/interfaces.yaml'
@@ -122,3 +158,17 @@ class ConfigureNodes(object):
with io.open(interface_yaml, 'w') as stream:
yaml.dump(interfaces, stream, default_flow_style=False)
+
+ def _merge_dicts(self, dict1, dict2):
+ """Recursively merge dictionaries."""
+ result = copy.deepcopy(dict1)
+ for k, v in six.iteritems(dict2):
+ if isinstance(result.get(k), list) and isinstance(v, list):
+ result[k].extend(v)
+ continue
+ if isinstance(result.get(k), dict) and isinstance(v, dict):
+ result[k] = self._merge_dicts(result[k], v)
+ continue
+ result[k] = copy.deepcopy(v)
+ return result
+
diff --git a/deploy/config/plugins/fuel-nfvkvm_0.9.0.yaml b/deploy/config/plugins/fuel-nfvkvm_0.9.0.yaml
new file mode 100644
index 000000000..85f3221f7
--- /dev/null
+++ b/deploy/config/plugins/fuel-nfvkvm_0.9.0.yaml
@@ -0,0 +1,34 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+# jonas.bjurel@ericsson.com
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+plugin-config-metadata:
+  title: NFV KVM Fuel plugin configuration template
+ version: 0.9.0
+ created: 27.07.2016
+ comment: None
+
+fuel-plugin-kvm:
+ metadata:
+ #chosen_id: Assigned during installation
+ class: plugin
+ default: false
+ enabled: true
+ label: fuel-plugin-kvm
+ toggleable: true
+ versions:
+ - metadata:
+ always_editable: false
+ #plugin_id: 1 Assigned during installation
+ plugin_version: 0.9.0
+ use_kvm:
+ label: 'EXPERIMENTAL: KVM enhancements for NFV'
+ type: checkbox
+ value: true
+ weight: 20
+ weight: 70
diff --git a/deploy/deploy-config.py b/deploy/deploy-config.py
index 65d51b228..d87103b6c 100644
--- a/deploy/deploy-config.py
+++ b/deploy/deploy-config.py
@@ -21,6 +21,7 @@
# 2) deployment-scenario dha-override-config section
###############################################################################
+
import os
import yaml
import sys
@@ -44,24 +45,38 @@ from common import (
ArgParser,
)
+
def parse_arguments():
parser = ArgParser(prog='python %s' % __file__)
parser.add_argument('-dha', dest='dha_uri', action='store',
- default=False, help='dha configuration file FQDN URI', required=True)
+ default=False,
+ help='dha configuration file FQDN URI',
+ required=True)
parser.add_argument('-deab', dest='dea_base_uri', action='store',
- default=False, help='dea base configuration FQDN URI', required=True)
- parser.add_argument('-deao', dest='dea_pod_override_uri', action='store',
- default=False, help='dea POD override configuration FQDN URI',
+ default=False,
+ help='dea base configuration FQDN URI',
required=True)
- parser.add_argument('-scenario-base-uri', dest='scenario_base_uri', action='store',
- default=False, help='Deploymen scenario base directory URI',
+ parser.add_argument('-deao', dest='dea_pod_override_uri',
+ action='store',
+ default=False,
+ help='dea POD override configuration FQDN URI',
+ required=True)
+ parser.add_argument('-scenario-base-uri',
+ dest='scenario_base_uri',
+ action='store',
+ default=False,
+ help='Deployment scenario base directory URI',
required=True)
parser.add_argument('-scenario', dest='scenario', action='store',
- default=False, help='Deploymen scenario short-name (priority), or base file name (in the absense of a shortname defenition)',
+ default=False,
+                        help=('Deployment scenario short-name (priority), '
+                              'or base file name (in the absence of a '
+                              'short-name definition)'),
required=True)
parser.add_argument('-plugins', dest='plugins_uri', action='store',
- default=False, help='Plugin configurations directory URI',
+ default=False,
+ help='Plugin configurations directory URI',
required=True)
parser.add_argument('-output', dest='output_path', action='store',
default=False,
@@ -78,6 +93,7 @@ def parse_arguments():
'output_path': args.output_path}
return kwargs
+
def warning(msg):
red = '\033[0;31m'
NC = '\033[0m'
@@ -85,10 +101,12 @@ def warning(msg):
'msg': msg,
'NC': NC})
+
def setup_yaml():
- represent_dict_order = lambda self, data: self.represent_mapping('tag:yaml.org,2002:map', data.items())
+ represent_dict_order = lambda self, data: self.represent_mapping('tag:yaml.org,2002:map', data.items())
yaml.add_representer(collections.OrderedDict, represent_dict_order)
+
def sha_uri(uri):
response = urllib2.urlopen(uri)
data = response.read()
@@ -96,34 +114,33 @@ def sha_uri(uri):
sha1.update(data)
return sha1.hexdigest()
+
def merge_fuel_plugin_version_list(list1, list2):
final_list = []
# When the plugin version in not there in list1 it will
# not be copied
for e_l1 in list1:
- plugin_version = e_l1.get('metadata',
- {'plugin_version', None}).get('plugin_version')
+ plugin_version = e_l1.get('metadata', {}).get('plugin_version')
plugin_version_found = False
for e_l2 in list2:
- if plugin_version == e_l2.get('metadata',
- {'plugin_version',
- None}).get('plugin_version'):
- final_list.append(dict(mergedicts(e_l1, e_l2)))
+ if plugin_version == e_l2.get('metadata', {}).get('plugin_version'):
+ final_list.append(dict(merge_dicts(e_l1, e_l2)))
plugin_version_found = True
if not plugin_version_found:
final_list.append(e_l1)
return final_list
+
def merge_lists(list1, list2):
if list1 and list2:
if isinstance(list1[0], dict):
if 'plugin_version' in list1[0].get('metadata', {}):
return merge_fuel_plugin_version_list(list1, list2)
else:
- warning("Lists with dictionary inside are not merge able! "
- "List2 will overwrite List1. "
- "List1: %s; List2: %s"
- % (list1, list2))
+ warning("Lists with dictionary inside are not mergeable! "
+ "List2 will overwrite List1. "
+ "List1: %s\nList2: %s"
+ % (list1, list2))
return list2
else:
return list2
@@ -132,11 +149,12 @@ def merge_lists(list1, list2):
else:
return list2
-def mergedicts(dict1, dict2):
- for k in set(dict1.keys()).union(dict2.keys()):
+
+def merge_dicts(dict1, dict2):
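+    # Generator yielding (key, value) pairs of the recursive merge; callers
+    # materialize it with dict(merge_dicts(dict1, dict2)).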
+ for k in set(dict1).union(dict2):
if k in dict1 and k in dict2:
if isinstance(dict1[k], dict) and isinstance(dict2[k], dict):
- yield (k, dict(mergedicts(dict1[k], dict2[k])))
+ yield (k, dict(merge_dicts(dict1[k], dict2[k])))
elif isinstance(dict1[k], list) and isinstance(dict2[k], list):
yield (k, merge_lists(dict1[k], dict2[k]))
else:
@@ -149,16 +167,17 @@ def mergedicts(dict1, dict2):
else:
yield (k, dict2[k])
+
setup_yaml()
kwargs = parse_arguments()
# Generate final dea.yaml by merging following config files/fragments in revers priority order:
# "dea-base", "dea-pod-override", "deplyment-scenario/module-config-override"
# and "deployment-scenario/dea-override"
-print 'Generating final dea.yaml configuration....'
+print('Generating final dea.yaml configuration....')
# Fetch dea-base, extract and purge meta-data
-print 'Parsing dea-base from: ' + kwargs["dea_base_uri"] + "...."
+print('Parsing dea-base from: ' + kwargs["dea_base_uri"] + "....")
response = urllib2.urlopen(kwargs["dea_base_uri"])
dea_base_conf = yaml.load(response.read())
dea_base_title = dea_base_conf['dea-base-config-metadata']['title']
@@ -170,7 +189,7 @@ dea_base_conf.pop('dea-base-config-metadata')
final_dea_conf = dea_base_conf
# Fetch dea-pod-override, extract and purge meta-data, merge with previous dea data structure
-print 'Parsing the dea-pod-override from: ' + kwargs["dea_pod_override_uri"] + "...."
+print('Parsing the dea-pod-override from: ' + kwargs["dea_pod_override_uri"] + "....")
response = urllib2.urlopen(kwargs["dea_pod_override_uri"])
dea_pod_override_conf = yaml.load(response.read())
if dea_pod_override_conf:
@@ -179,19 +198,21 @@ if dea_pod_override_conf:
dea_pod_creation = dea_pod_override_conf['dea-pod-override-config-metadata']['created']
dea_pod_sha = sha_uri(kwargs["dea_pod_override_uri"])
dea_pod_comment = dea_pod_override_conf['dea-pod-override-config-metadata']['comment']
- print 'Merging dea-base and dea-pod-override configuration ....'
+ print('Merging dea-base and dea-pod-override configuration ....')
dea_pod_override_conf.pop('dea-pod-override-config-metadata')
if dea_pod_override_conf:
- final_dea_conf = dict(mergedicts(final_dea_conf, dea_pod_override_conf))
+ final_dea_conf = dict(merge_dicts(final_dea_conf, dea_pod_override_conf))
# Fetch deployment-scenario, extract and purge meta-data, merge deployment-scenario/
# dea-override-configith previous dea data structure
-print 'Parsing deployment-scenario from: ' + kwargs["scenario"] + "...."
+print('Parsing deployment-scenario from: ' + kwargs["scenario"] + "....")
response = urllib2.urlopen(kwargs["scenario_base_uri"] + "/scenario.yaml")
scenario_short_translation_conf = yaml.load(response.read())
if kwargs["scenario"] in scenario_short_translation_conf:
- scenario_uri = kwargs["scenario_base_uri"] + "/" + scenario_short_translation_conf[kwargs["scenario"]]['configfile']
+ scenario_uri = (kwargs["scenario_base_uri"]
+ + "/"
+ + scenario_short_translation_conf[kwargs["scenario"]]['configfile'])
else:
scenario_uri = kwargs["scenario_base_uri"] + "/" + kwargs["scenario"]
response = urllib2.urlopen(scenario_uri)
@@ -205,14 +226,15 @@ if deploy_scenario_conf:
deploy_scenario_comment = deploy_scenario_conf['deployment-scenario-metadata']['comment']
deploy_scenario_conf.pop('deployment-scenario-metadata')
else:
- print "Deployment scenario file not found or is empty"
- print "Cannot continue, exiting ...."
+ print("Deployment scenario file not found or is empty")
+ print("Cannot continue, exiting ....")
sys.exit(1)
dea_scenario_override_conf = deploy_scenario_conf["dea-override-config"]
if dea_scenario_override_conf:
- print 'Merging dea-base-, dea-pod-override- and deployment-scenario configuration into final dea.yaml configuration....'
- final_dea_conf = dict(mergedicts(final_dea_conf, dea_scenario_override_conf))
+ print('Merging dea-base-, dea-pod-override- and deployment-scenario '
+ 'configuration into final dea.yaml configuration....')
+ final_dea_conf = dict(merge_dicts(final_dea_conf, dea_scenario_override_conf))
# Fetch plugin-configuration configuration files, extract and purge meta-data,
# merge/append with previous dea data structure, override plugin-configuration with
@@ -226,15 +248,32 @@ module_shas = []
module_comments = []
if deploy_scenario_conf["stack-extensions"]:
for module in deploy_scenario_conf["stack-extensions"]:
- print 'Loading configuration for module: ' + module["module"] + ' and merging it to final dea.yaml configuration....'
- response = urllib2.urlopen(kwargs["plugins_uri"] + '/' + module["module-config-name"] + '_' + module["module-config-version"] + '.yaml')
+ print('Loading configuration for module: '
+ + module["module"]
+ + ' and merging it to final dea.yaml configuration....')
+ response = urllib2.urlopen(kwargs["plugins_uri"]
+ + '/'
+ + module["module-config-name"]
+ + '_'
+ + module["module-config-version"]
+ + '.yaml')
module_conf = yaml.load(response.read())
modules.append(module["module"])
- module_uris.append(kwargs["plugins_uri"] + '/' + module["module-config-name"] + '_' + module["module-config-version"] + '.yaml')
+ module_uris.append(kwargs["plugins_uri"]
+ + '/'
+ + module["module-config-name"]
+ + '_'
+ + module["module-config-version"]
+ + '.yaml')
module_titles.append(str(module_conf['plugin-config-metadata']['title']))
module_versions.append(str(module_conf['plugin-config-metadata']['version']))
module_creations.append(str(module_conf['plugin-config-metadata']['created']))
- module_shas.append(sha_uri(kwargs["plugins_uri"] + '/' + module["module-config-name"] + '_' + module["module-config-version"] + '.yaml'))
+ module_shas.append(sha_uri(kwargs["plugins_uri"]
+ + '/'
+ + module["module-config-name"]
+ + '_'
+ + module["module-config-version"]
+ + '.yaml'))
module_comments.append(str(module_conf['plugin-config-metadata']['comment']))
module_conf.pop('plugin-config-metadata')
final_dea_conf['settings']['editable'].update(module_conf)
@@ -244,59 +283,62 @@ if deploy_scenario_conf["stack-extensions"]:
dea_scenario_module_override_conf['settings'] = {}
dea_scenario_module_override_conf['settings']['editable'] = {}
dea_scenario_module_override_conf['settings']['editable'][module["module"]] = scenario_module_override_conf
- final_dea_conf = dict(mergedicts(final_dea_conf, dea_scenario_module_override_conf))
+ final_dea_conf = dict(merge_dicts(final_dea_conf, dea_scenario_module_override_conf))
# Dump final dea.yaml including configuration management meta-data to argument provided
# directory
if not os.path.exists(kwargs["output_path"]):
os.makedirs(kwargs["output_path"])
-print 'Dumping final dea.yaml to ' + kwargs["output_path"] + '/dea.yaml....'
+print('Dumping final dea.yaml to ' + kwargs["output_path"] + '/dea.yaml....')
with open(kwargs["output_path"] + '/dea.yaml', "w") as f:
- f.write("title: DEA.yaml file automatically generated from the configuration files stated in the \"configuration-files\" fragment below\n")
- f.write("version: " + str(calendar.timegm(time.gmtime())) + "\n")
- f.write("created: " + str(time.strftime("%d/%m/%Y")) + " " + str(time.strftime("%H:%M:%S")) + "\n")
- f.write("comment: none\n")
-
- f.write("configuration-files:\n")
- f.write(" dea-base:\n")
- f.write(" uri: " + kwargs["dea_base_uri"] + "\n")
- f.write(" title: " + str(dea_base_title) + "\n")
- f.write(" version: " + str(dea_base_version) + "\n")
- f.write(" created: " + str(dea_base_creation) + "\n")
- f.write(" sha1: " + str(dea_base_sha) + "\n")
- f.write(" comment: " + str(dea_base_comment) + "\n")
-
- f.write(" pod-override:\n")
- f.write(" uri: " + kwargs["dea_pod_override_uri"] + "\n")
- f.write(" title: " + str(dea_pod_title) + "\n")
- f.write(" version: " + str(dea_pod_version) + "\n")
- f.write(" created: " + str(dea_pod_creation) + "\n")
- f.write(" sha1: " + str(dea_pod_sha) + "\n")
- f.write(" comment: " + str(dea_pod_comment) + "\n")
-
- f.write(" deployment-scenario:\n")
- f.write(" uri: " + str(scenario_uri) + "\n")
- f.write(" title: " + str(deploy_scenario_title) + "\n")
- f.write(" version: " + str(deploy_scenario_version) + "\n")
- f.write(" created: " + str(deploy_scenario_creation) + "\n")
- f.write(" sha1: " + str(deploy_scenario_sha) + "\n")
- f.write(" comment: " + str(deploy_scenario_comment) + "\n")
+ f.write("\n".join([("title: DEA.yaml file automatically generated from the"
+ 'configuration files stated in the "configuration-files"'
+ "fragment below"),
+ "version: " + str(calendar.timegm(time.gmtime())),
+ "created: " + str(time.strftime("%d/%m/%Y")) + " "
+ + str(time.strftime("%H:%M:%S")),
+ "comment: none\n"]))
+
+ f.write("\n".join(["configuration-files:",
+ " dea-base:",
+ " uri: " + kwargs["dea_base_uri"],
+ " title: " + str(dea_base_title),
+ " version: " + str(dea_base_version),
+ " created: " + str(dea_base_creation),
+ " sha1: " + str(dea_base_sha),
+ " comment: " + str(dea_base_comment) + "\n"]))
+
+ f.write("\n".join([" pod-override:",
+ " uri: " + kwargs["dea_pod_override_uri"],
+ " title: " + str(dea_pod_title),
+ " version: " + str(dea_pod_version),
+ " created: " + str(dea_pod_creation),
+ " sha1: " + str(dea_pod_sha),
+ " comment: " + str(dea_pod_comment) + "\n"]))
+
+ f.write("\n".join([" deployment-scenario:",
+ " uri: " + str(scenario_uri),
+ " title: " + str(deploy_scenario_title),
+ " version: " + str(deploy_scenario_version),
+ " created: " + str(deploy_scenario_creation),
+ " sha1: " + str(deploy_scenario_sha),
+ " comment: " + str(deploy_scenario_comment) + "\n"]))
f.write(" plugin-modules:\n")
- for k in range(0,len(modules)):
- f.write(" - module: " + modules[k] + "\n")
- f.write(" uri: " + module_uris[k] + "\n")
- f.write(" title: " + module_titles[k] + "\n")
- f.write(" version: " + module_versions[k] + "\n")
- f.write(" created: " + module_creations[k] + "\n")
- f.write(" sha-1: " + module_shas[k] + "\n")
- f.write(" comment: " + module_comments[k] + "\n")
+ for k, _ in enumerate(modules):
+ f.write("\n".join([" - module: " + modules[k],
+ " uri: " + module_uris[k],
+ " title: " + module_titles[k],
+ " version: " + module_versions[k],
+ " created: " + module_creations[k],
+ " sha-1: " + module_shas[k],
+ " comment: " + module_comments[k] + "\n"]))
yaml.dump(final_dea_conf, f, default_flow_style=False)
# Load POD dha and override it with "deployment-scenario/dha-override-config" section
-print 'Generating final dha.yaml configuration....'
-print 'Parsing dha-pod yaml configuration....'
+print('Generating final dha.yaml configuration....')
+print('Parsing dha-pod yaml configuration....')
response = urllib2.urlopen(kwargs["dha_uri"])
dha_pod_conf = yaml.load(response.read())
dha_pod_title = dha_pod_conf['dha-pod-config-metadata']['title']
@@ -312,33 +354,39 @@ dha_scenario_override_conf = deploy_scenario_conf["dha-override-config"]
# is no way to programatically override a physical environment:
# wireing, IPMI set-up, etc.
# For Physical environments, dha.yaml overrides will be silently ignored
-if dha_scenario_override_conf and (final_dha_conf['adapter'] == 'libvirt' or final_dha_conf['adapter'] == 'esxi' or final_dha_conf['adapter'] == 'vbox'):
- print 'Merging dha-pod and deployment-scenario override information to final dha.yaml configuration....'
- final_dha_conf = dict(mergedicts(final_dha_conf, dha_scenario_override_conf))
+if dha_scenario_override_conf and (final_dha_conf['adapter'] == 'libvirt'
+ or final_dha_conf['adapter'] == 'esxi'
+ or final_dha_conf['adapter'] == 'vbox'):
+ print('Merging dha-pod and deployment-scenario override information to final dha.yaml configuration....')
+ final_dha_conf = dict(merge_dicts(final_dha_conf, dha_scenario_override_conf))
# Dump final dha.yaml to argument provided directory
-print 'Dumping final dha.yaml to ' + kwargs["output_path"] + '/dha.yaml....'
+print('Dumping final dha.yaml to ' + kwargs["output_path"] + '/dha.yaml....')
with open(kwargs["output_path"] + '/dha.yaml', "w") as f:
- f.write("title: DHA.yaml file automatically generated from the configuration files stated in the \"configuration-files\" fragment below\n")
- f.write("version: " + str(calendar.timegm(time.gmtime())) + "\n")
- f.write("created: " + str(time.strftime("%d/%m/%Y")) + " " + str(time.strftime("%H:%M:%S")) + "\n")
- f.write("comment: none\n")
+ f.write("\n".join([("title: DHA.yaml file automatically generated from"
+ "the configuration files stated in the"
+ '"configuration-files" fragment below'),
+ "version: " + str(calendar.timegm(time.gmtime())),
+ "created: " + str(time.strftime("%d/%m/%Y")) + " "
+ + str(time.strftime("%H:%M:%S")),
+ "comment: none\n"]))
f.write("configuration-files:\n")
- f.write(" dha-pod-configuration:\n")
- f.write(" uri: " + kwargs["dha_uri"] + "\n")
- f.write(" title: " + str(dha_pod_title) + "\n")
- f.write(" version: " + str(dha_pod_version) + "\n")
- f.write(" created: " + str(dha_pod_creation) + "\n")
- f.write(" sha-1: " + str(dha_pod_sha) + "\n")
- f.write(" comment: " + str(dha_pod_comment) + "\n")
-
- f.write(" deployment-scenario:\n")
- f.write(" uri: " + str(scenario_uri) + "\n")
- f.write(" title: " + str(deploy_scenario_title) + "\n")
- f.write(" version: " + str(deploy_scenario_version) + "\n")
- f.write(" created: " + str(deploy_scenario_creation) + "\n")
- f.write(" sha-1: " + str(deploy_scenario_sha) + "\n")
- f.write(" comment: " + str(deploy_scenario_comment) + "\n")
+ f.write("\n".join([" dha-pod-configuration:",
+ " uri: " + kwargs["dha_uri"],
+ " title: " + str(dha_pod_title),
+ " version: " + str(dha_pod_version),
+ " created: " + str(dha_pod_creation),
+ " sha-1: " + str(dha_pod_sha),
+ " comment: " + str(dha_pod_comment) + "\n"]))
+
+ f.write("\n".join([" deployment-scenario:",
+ " uri: " + str(scenario_uri),
+ " title: " + str(deploy_scenario_title),
+ " version: " + str(deploy_scenario_version),
+ " created: " + str(deploy_scenario_creation),
+ " sha-1: " + str(deploy_scenario_sha),
+ " comment: " + str(deploy_scenario_comment) + "\n"]))
+
yaml.dump(final_dha_conf, f, default_flow_style=False)
diff --git a/deploy/scenario/ha_nfv-kvm_heat_ceilometer_scenario.yaml b/deploy/scenario/ha_nfv-kvm_heat_ceilometer_scenario.yaml
index 2941a726e..f9863d11e 100644
--- a/deploy/scenario/ha_nfv-kvm_heat_ceilometer_scenario.yaml
+++ b/deploy/scenario/ha_nfv-kvm_heat_ceilometer_scenario.yaml
@@ -22,8 +22,8 @@
# deployment configuration meta-data
deployment-scenario-metadata:
title: NFV KVM HA deployment
- version: 0.0.5
- created: Mar 18 2016
+ version: 0.0.6
+ created: 27.07.2016
comment: Ceph cannot be assigned to compute, ceph is collocated with one of the controllers
##############################################################################
@@ -32,9 +32,9 @@ deployment-scenario-metadata:
# <module-config-base-uri>/<module-config-name>_<module-config-version>.yaml
# It does so by copying the config file to the local plugin config directory
stack-extensions:
- - module: fuel-plugin-qemu
+ - module: fuel-plugin-kvm
module-config-name: fuel-nfvkvm
- module-config-version: 0.0.2
+ module-config-version: 0.9.0
module-config-override:
# Module config overrides
diff --git a/deploy/templater.py b/deploy/templater.py
new file mode 100755
index 000000000..6b41e1f3c
--- /dev/null
+++ b/deploy/templater.py
@@ -0,0 +1,185 @@
+#!/usr/bin/env python
+###############################################################################
+# Copyright (c) 2016 Ericsson AB and others.
+# peter.barabas@ericsson.com
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+###############################################################################
+
+
+import io
+import re
+import yaml
+from common import (
+ err,
+ ArgParser,
+)
+
+
+TAG_START = '%{'
+TAG_END = '}'
+DELIMITER = '/'
+
+
+class Templater(object):
+ def __init__(self, base_file, template_file, output_file):
+ self.template_file = template_file
+ self.output_file = output_file
+ self.base = self.load_yaml(base_file)
+
+ def load_yaml(self, filename):
+ try:
+ with io.open(filename) as yaml_file:
+ return yaml.load(yaml_file)
+ except Exception as error:
+ err('Error opening YAML file: %s' % error)
+
+ def save_yaml(self, filename, content):
+ try:
+ with io.open(filename, 'w') as yaml_file:
+ yaml_file.write(content)
+ except Exception as error:
+ err('Error writing YAML file: %s' % error)
+
+ def get_indent(self, line):
+ return len(line) - len(line.lstrip(' '))
+
+ def format_fragment(self, fragment, indent):
+ result = ''
+ is_first_line = True
+
+ for line in fragment.splitlines():
+ # Skip indenting the first line as it is already indented
+ if is_first_line:
+ line += '\n'
+ is_first_line = False
+ else:
+ line = ' ' * indent + line + '\n'
+
+ result += line
+
+ return result.rstrip('\n')
+
+ def format_substitution(self, string):
+ if isinstance(string, basestring):
+ return string
+ else:
+ return yaml.dump(string, default_flow_style=False)
+
+ def parse_interface_tag(self, tag):
+ # Remove 'interface(' prefix, trailing ')' and split arguments
+ args = tag[len('interface('):].rstrip(')').split(',')
+
+ if len(args) == 1 and not args[0]:
+ err('No arguments for interface().')
+ elif len(args) == 2 and (not args[0] or not args[1]):
+ err('Empty argument for interface().')
+ elif len(args) > 2:
+ err('Too many arguments for interface().')
+ else:
+ return args
+
+ def get_interface_from_network(self, interfaces, network):
+ nics = self.base[interfaces]
+ for nic in nics:
+ if network in nics[nic]:
+ return nic
+
+ err('Network not found: %s' % network)
+
+ def get_role_interfaces(self, role):
+ nodes = self.base['nodes']
+ for node in nodes:
+ if role in node['role']:
+ return node['interfaces']
+
+ err('Role not found: %s' % role)
+
+ def lookup_interface(self, args):
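+        # args is [network] or [network, role]; without a role, the first
+        # node's interface set is used, otherwise that of the first node
+        # carrying the given role.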
+ nodes = self.base['nodes']
+
+ if len(args) == 1:
+ interfaces = nodes[0]['interfaces']
+ if len(args) == 2:
+ interfaces = self.get_role_interfaces(args[1])
+
+ return self.get_interface_from_network(interfaces, args[0])
+
+ def parse_include_tag(self, tag):
+ # Remove 'include(' prefix and trailing ')'
+ filename = tag[len('include('):].rstrip(')')
+
+ if not filename:
+ err('No argument for include().')
+
+ return filename
+
+ def include_file(self, filename):
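+        # Load the included file and re-dump it so it is inserted as plain
+        # block-style YAML.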
+ fragment = self.load_yaml(filename)
+ return yaml.dump(fragment, default_flow_style=False)
+
+ def parse_tag(self, tag, indent):
+ fragment = ''
+
+ if 'interface(' in tag:
+ args = self.parse_interface_tag(tag)
+ fragment = self.lookup_interface(args)
+ elif 'include(' in tag:
+ filename = self.parse_include_tag(tag)
+ fragment = self.include_file(filename)
+ else:
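+            # Plain value or section reference, e.g. %{title} or
+            # %{network/networking_parameters}: walk the path through the
+            # base dictionary.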
+ path = tag.split(DELIMITER)
+ fragment = self.base
+ for i in path:
+ if i in fragment:
+ fragment = fragment.get(i)
+ else:
+ err('Error: key "%s" does not exist in base YAML file' % i)
+
+ fragment = self.format_substitution(fragment)
+
+ return self.format_fragment(fragment, indent)
+
+ def run(self):
+ result = ''
+
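+        # Match template tags of the form %{...}; the captured tag body is
+        # handed to parse_tag().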
+ regex = re.compile(re.escape(TAG_START) + r'([a-z].+)' + re.escape(TAG_END),
+ flags=re.IGNORECASE)
+ with io.open(self.template_file) as f:
+ for line in f:
+ indent = self.get_indent(line)
+ result += re.sub(regex,
+ lambda match: self.parse_tag(match.group(1), indent),
+ line)
+
+ self.save_yaml(self.output_file, result)
+
+
+def parse_arguments():
+ description = '''Process 'template_file' using 'base_file' as source for
+template variable substitution and write the results to 'output_file'.'''
+
+ parser = ArgParser(prog='python %s' % __file__,
+ description=description)
+ parser.add_argument('base_file',
+ help='Base YAML filename')
+ parser.add_argument('template_file',
+ help='Fragment filename')
+ parser.add_argument('output_file',
+ help='Output filename')
+
+ args = parser.parse_args()
+    return args.base_file, args.template_file, args.output_file
+
+
+def main():
+ base_file, template_file, output_file = parse_arguments()
+
+ templater = Templater(base_file, template_file, output_file)
+ templater.run()
+
+
+if __name__ == '__main__':
+ main()