author     Alexandru Avadanii <Alexandru.Avadanii@enea.com>  2017-09-05 13:25:08 +0000
committer  Gerrit Code Review <gerrit@opnfv.org>             2017-09-05 13:25:08 +0000
commit     a1a413ad65c31ebf5dc42924f7ed04ab02a04872 (patch)
tree       bfe49085fe03cc6578ca9cd00ea82802bb57d1be /deploy/cloud/configure_nodes.py
parent     14d7bf43d3790a0a5fb69c9eff0e93b9fd63c5ba (diff)
parent     1b89628e4571a65245a743e4a85d38438a119b3d (diff)
Merge "build, deploy: Remove obsolete Fuel@Openstack code"
Diffstat (limited to 'deploy/cloud/configure_nodes.py')
-rw-r--r--  deploy/cloud/configure_nodes.py  194
1 file changed, 0 insertions, 194 deletions
diff --git a/deploy/cloud/configure_nodes.py b/deploy/cloud/configure_nodes.py
deleted file mode 100644
index a50973af6..000000000
--- a/deploy/cloud/configure_nodes.py
+++ /dev/null
@@ -1,194 +0,0 @@
-###############################################################################
-# Copyright (c) 2015 Ericsson AB and others.
-# szilard.cserey@ericsson.com
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-###############################################################################
-
-import copy
-import io
-
-import six
-import yaml
-
-from common import (
- exec_cmd,
- check_file_exists,
- log,
- backup,
-)
-
-
-class ConfigureNodes(object):
-
- def __init__(self, yaml_config_dir, env_id, node_id_roles_dict, dea):
- self.yaml_config_dir = yaml_config_dir
- self.env_id = env_id
- self.node_id_roles_dict = node_id_roles_dict
- self.dea = dea
-
- def config_nodes(self):
- log('Configure nodes')
-
- # Assign nodes to environment with given roles
- for node_id, roles_blade in self.node_id_roles_dict.iteritems():
- exec_cmd('fuel node set --node-id %s --role %s --env %s'
- % (node_id, roles_blade[0], self.env_id))
-
- for node_id, roles_blade in self.node_id_roles_dict.iteritems():
- # Modify interfaces configuration
- self.download_interface_config(node_id)
- self.modify_node_interface(node_id, roles_blade)
- self.upload_interface_config(node_id)
- # Modify node attributes
- self.download_attributes(node_id)
- self.modify_node_attributes(node_id, roles_blade)
- self.upload_attributes(node_id)
-
- # Currently not used, we use default deployment facts
- # which are generated by fuel based on type segmentation
- # and network to nic assignment
- #
- # Download our modified deployment configuration, which includes our
- # changes to network topology etc.
- #self.download_deployment_config()
- #for node_id, roles_blade in self.node_id_roles_dict.iteritems():
- # self.modify_node_network_schemes(node_id, roles_blade)
- #self.upload_deployment_config()
-
- def modify_node_network_schemes(self, node_id, roles_blade):
- log('Modify network transformations for node %s' % node_id)
- type = self.dea.get_node_property(roles_blade[1], 'transformations')
- transformations = self.dea.get_property(type)
- deployment_dir = '%s/deployment_%s' % (
- self.yaml_config_dir, self.env_id)
- backup(deployment_dir)
- node_file = ('%s/%s.yaml' % (deployment_dir, node_id))
- with io.open(node_file) as stream:
- node = yaml.load(stream)
-
- node['network_scheme'].update(transformations)
-
- with io.open(node_file, 'w') as stream:
- yaml.dump(node, stream, default_flow_style=False)
-
- def download_deployment_config(self):
- log('Download deployment config for environment %s' % self.env_id)
- exec_cmd('fuel deployment --env %s --default --dir %s'
- % (self.env_id, self.yaml_config_dir))
-
- def upload_deployment_config(self):
- log('Upload deployment config for environment %s' % self.env_id)
- exec_cmd('fuel deployment --env %s --upload --dir %s'
- % (self.env_id, self.yaml_config_dir))
-
- def download_interface_config(self, node_id):
- log('Download interface config for node %s' % node_id)
- exec_cmd('fuel node --env %s --node %s --network --download '
- '--dir %s' % (self.env_id, node_id, self.yaml_config_dir))
-
- def upload_interface_config(self, node_id):
- log('Upload interface config for node %s' % node_id)
- exec_cmd('fuel node --env %s --node %s --network --upload '
- '--dir %s' % (self.env_id, node_id, self.yaml_config_dir))
-
- def download_attributes(self, node_id):
- log('Download attributes for node %s' % node_id)
- exec_cmd('fuel node --env %s --node %s --attributes --download '
- '--dir %s' % (self.env_id, node_id, self.yaml_config_dir))
-
- def upload_attributes(self, node_id):
- log('Upload attributes for node %s' % node_id)
- exec_cmd('fuel node --env %s --node %s --attributes --upload '
- '--dir %s' % (self.env_id, node_id, self.yaml_config_dir))
-
- def modify_node_attributes(self, node_id, roles_blade):
- log('Modify attributes for node {0}'.format(node_id))
- dea_key = self.dea.get_node_property(roles_blade[1], 'attributes')
- if not dea_key:
- # Node attributes are not overridden. Nothing to do.
- return
- new_attributes = self.dea.get_property(dea_key)
- attributes_yaml = ('%s/node_%s/attributes.yaml'
- % (self.yaml_config_dir, node_id))
- check_file_exists(attributes_yaml)
- backup('%s/node_%s' % (self.yaml_config_dir, node_id))
-
- with open(attributes_yaml) as stream:
- attributes = yaml.load(stream)
- result_attributes = self._merge_dicts(attributes, new_attributes)
-
- with open(attributes_yaml, 'w') as stream:
- yaml.dump(result_attributes, stream, default_flow_style=False)
-
- # interface configuration can
-    # look like this:
- #
- # interfaces_dpdk:
- # ens3:
- # - fuelweb_admin
- # ens4:
- # - storage
- # - management
- # ens5:
- # - interface_properties:
- # dpdk:
- # enabled:
- # value: true
- # - private
- # ens6:
- # - public
- def modify_node_interface(self, node_id, roles_blade):
- log('Modify interface config for node %s' % node_id)
- interface_yaml = ('%s/node_%s/interfaces.yaml'
- % (self.yaml_config_dir, node_id))
- check_file_exists(interface_yaml)
- backup('%s/node_%s' % (self.yaml_config_dir, node_id))
-
- with io.open(interface_yaml) as stream:
- interfaces = yaml.load(stream)
-
- net_name_id = {}
- for interface in interfaces:
- for network in interface['assigned_networks']:
- net_name_id[network['name']] = network['id']
-
- type = self.dea.get_node_property(roles_blade[1], 'interfaces')
- interface_config = self.dea.get_property(type)
-
- for interface in interfaces:
- interface['assigned_networks'] = []
- if interface['name'] in interface_config:
- for prop in interface_config[interface['name']]:
- net = {}
- # net name
- if isinstance(prop, six.string_types):
- net['id'] = net_name_id[prop]
- net['name'] = prop
- interface['assigned_networks'].append(net)
- # network properties
- elif isinstance(prop, dict):
- if 'interface_properties' not in prop:
- log('Interface configuration contains unknown dict: %s' % prop)
- continue
- interface['attributes'] = self._merge_dicts(
- interface.get('attributes', {}),
- prop.get('interface_properties', {}))
-
- with io.open(interface_yaml, 'w') as stream:
- yaml.dump(interfaces, stream, default_flow_style=False)
-
- def _merge_dicts(self, dict1, dict2):
- """Recursively merge dictionaries."""
- result = copy.deepcopy(dict1)
- for k, v in six.iteritems(dict2):
- if isinstance(result.get(k), list) and isinstance(v, list):
- result[k].extend(v)
- continue
- if isinstance(result.get(k), dict) and isinstance(v, dict):
- result[k] = self._merge_dicts(result[k], v)
- continue
- result[k] = copy.deepcopy(v)
- return result
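
For reference, the _merge_dicts helper at the end of the deleted module is a plain recursive dictionary merge used to overlay DEA overrides onto the YAML files that the fuel CLI downloads: lists are concatenated, nested dicts are merged key by key, and any other value from the override replaces the base value. Below is a minimal standalone Python 3 sketch of that behaviour; the name merge_dicts and the sample attribute data are illustrative only and are not part of the Fuel code base.

# Illustrative sketch of the recursive merge; not taken verbatim from the deleted file.
import copy


def merge_dicts(base, override):
    """Recursively merge `override` into a deep copy of `base`.

    Lists are concatenated, nested dicts are merged key by key,
    and any other value in `override` replaces the one in `base`.
    """
    result = copy.deepcopy(base)
    for key, value in override.items():
        if isinstance(result.get(key), list) and isinstance(value, list):
            result[key].extend(value)
        elif isinstance(result.get(key), dict) and isinstance(value, dict):
            result[key] = merge_dicts(result[key], value)
        else:
            result[key] = copy.deepcopy(value)
    return result


if __name__ == '__main__':
    # Hypothetical attribute override, in the spirit of the DEA-driven
    # modify_node_attributes() call above.
    attributes = {'editable': {'storage': {'osd_pool_size': {'value': '2'}}}}
    override = {'editable': {'storage': {'osd_pool_size': {'value': '3'}}}}
    print(merge_dicts(attributes, override))
    # -> {'editable': {'storage': {'osd_pool_size': {'value': '3'}}}}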