author     Koren Lev <korenlev@gmail.com>           2017-07-27 15:04:07 +0000
committer  Gerrit Code Review <gerrit@opnfv.org>    2017-07-27 15:04:07 +0000
commit     162c03ef301396072c1934e7e7e0c40a841b4fe2 (patch)
tree       7a2a2781949252436450ff5832962785061a774a /app/utils
parent     b88c78e3cf2bef22aa2f1c4d0bf305e303bc15f0 (diff)
parent     7e83d0876ddb84a45e130eeba28bc40ef53c074b (diff)
Merge "Calipso initial release for OPNFV"
Diffstat (limited to 'app/utils')
-rw-r--r--  app/utils/__init__.py                         10
-rw-r--r--  app/utils/binary_converter.py                 27
-rw-r--r--  app/utils/config_file.py                      48
-rw-r--r--  app/utils/constants.py                        37
-rw-r--r--  app/utils/deep_merge.py                       77
-rw-r--r--  app/utils/dict_naming_converter.py            40
-rw-r--r--  app/utils/exceptions.py                       13
-rw-r--r--  app/utils/inventory_mgr.py                   445
-rw-r--r--  app/utils/logging/__init__.py                 10
-rw-r--r--  app/utils/logging/console_logger.py           21
-rw-r--r--  app/utils/logging/file_logger.py              23
-rw-r--r--  app/utils/logging/full_logger.py              47
-rw-r--r--  app/utils/logging/logger.py                   99
-rw-r--r--  app/utils/logging/message_logger.py           21
-rw-r--r--  app/utils/logging/mongo_logging_handler.py    53
-rw-r--r--  app/utils/metadata_parser.py                  83
-rw-r--r--  app/utils/mongo_access.py                    137
-rw-r--r--  app/utils/singleton.py                        16
-rw-r--r--  app/utils/special_char_converter.py           32
-rw-r--r--  app/utils/ssh_conn.py                         94
-rw-r--r--  app/utils/ssh_connection.py                  217
-rw-r--r--  app/utils/string_utils.py                     59
-rw-r--r--  app/utils/util.py                            172
23 files changed, 1781 insertions, 0 deletions
diff --git a/app/utils/__init__.py b/app/utils/__init__.py
new file mode 100644
index 0000000..1e85a2a
--- /dev/null
+++ b/app/utils/__init__.py
@@ -0,0 +1,10 @@
+###############################################################################
+# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
+# and others #
+# #
+# All rights reserved. This program and the accompanying materials #
+# are made available under the terms of the Apache License, Version 2.0 #
+# which accompanies this distribution, and is available at #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+###############################################################################
+
diff --git a/app/utils/binary_converter.py b/app/utils/binary_converter.py
new file mode 100644
index 0000000..70d4e40
--- /dev/null
+++ b/app/utils/binary_converter.py
@@ -0,0 +1,27 @@
+###############################################################################
+# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
+# and others #
+# #
+# All rights reserved. This program and the accompanying materials #
+# are made available under the terms of the Apache License, Version 2.0 #
+# which accompanies this distribution, and is available at #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+###############################################################################
+from utils.logging.console_logger import ConsoleLogger
+
+
+class BinaryConverter:
+
+ def __init__(self):
+ super().__init__()
+ self.log = ConsoleLogger()
+
+ def binary2str(self, txt):
+ if not isinstance(txt, bytes):
+ return str(txt)
+ try:
+ s = txt.decode("utf-8")
+        except (TypeError, UnicodeDecodeError):
+ s = str(txt)
+ return s
+
diff --git a/app/utils/config_file.py b/app/utils/config_file.py
new file mode 100644
index 0000000..1982acc
--- /dev/null
+++ b/app/utils/config_file.py
@@ -0,0 +1,48 @@
+###############################################################################
+# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
+# and others #
+# #
+# All rights reserved. This program and the accompanying materials #
+# are made available under the terms of the Apache License, Version 2.0 #
+# which accompanies this distribution, and is available at #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+###############################################################################
+import os
+
+
+class ConfigFile:
+
+ def __init__(self, file_path):
+ super().__init__()
+ if not os.path.isfile(file_path):
+ raise ValueError("config file doesn't exist in "
+ "the system: {0}"
+ .format(file_path))
+ self.config_file = file_path
+
+ def read_config(self):
+ params = {}
+ try:
+ with open(self.config_file) as f:
+ for line in f:
+ line = line.strip()
+ if line.startswith("#") or " " not in line:
+ continue
+ index = line.index(" ")
+ key = line[: index].strip()
+ value = line[index + 1:].strip()
+ if value:
+ params[key] = value
+ except Exception as e:
+ raise e
+ return params
+
+ @staticmethod
+ def get(file_name):
+        # config file is taken from app/config by default;
+        # use the first PYTHONPATH entry to locate the
+        # config directory
+ python_path = os.environ['PYTHONPATH']
+ if os.pathsep in python_path:
+ python_path = python_path.split(os.pathsep)[0]
+ return python_path + '/config/' + file_name
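
A minimal sketch of the config file format that read_config() expects: one
"key value" pair per line, with '#' lines and lines lacking a space skipped.
The file path and values below are only illustrative:

    # contents of a hypothetical /local_dir/calipso_mongo_access.conf:
    #
    #   # lines starting with '#' are ignored
    #   server 10.0.0.1
    #   user calipso
    #   auth_db calipso

    from utils.config_file import ConfigFile

    conf = ConfigFile('/local_dir/calipso_mongo_access.conf')
    params = conf.read_config()
    # -> {'server': '10.0.0.1', 'user': 'calipso', 'auth_db': 'calipso'}
    # note: all values are returned as strings
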
diff --git a/app/utils/constants.py b/app/utils/constants.py
new file mode 100644
index 0000000..7aa0343
--- /dev/null
+++ b/app/utils/constants.py
@@ -0,0 +1,37 @@
+###############################################################################
+# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
+# and others #
+# #
+# All rights reserved. This program and the accompanying materials #
+# are made available under the terms of the Apache License, Version 2.0 #
+# which accompanies this distribution, and is available at #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+###############################################################################
+from enum import Enum
+
+
+class StringEnum(Enum):
+ def __str__(self):
+ return str(self.value)
+
+ def __repr__(self):
+ return repr(self.value)
+
+
+class ScanStatus(StringEnum):
+ PENDING = "pending"
+ RUNNING = "running"
+ COMPLETED = "completed"
+ FAILED = "failed"
+
+
+class OperationalStatus(StringEnum):
+ STOPPED = "stopped"
+ RUNNING = "running"
+ ERROR = "error"
+
+
+class EnvironmentFeatures(StringEnum):
+ SCANNING = "scanning"
+ MONITORING = "monitoring"
+ LISTENING = "listening"
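
StringEnum overrides __str__/__repr__ so members render as their plain string
values, matching the strings stored in the DB; a quick sketch:

    from utils.constants import ScanStatus, EnvironmentFeatures

    print(str(ScanStatus.COMPLETED))              # "completed"
    print(EnvironmentFeatures.MONITORING.value)   # "monitoring"
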
diff --git a/app/utils/deep_merge.py b/app/utils/deep_merge.py
new file mode 100644
index 0000000..acb54ff
--- /dev/null
+++ b/app/utils/deep_merge.py
@@ -0,0 +1,77 @@
+###############################################################################
+# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
+# and others #
+# #
+# All rights reserved. This program and the accompanying materials #
+# are made available under the terms of the Apache License, Version 2.0 #
+# which accompanies this distribution, and is available at #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+###############################################################################
+"""
+Do a deep merge of dictionaries,
+recursively merging them with boltons.iterutils.remap.
+
+Taken from:
+https://gist.github.com/mahmoud/db02d16ac89fa401b968
+
+
+This is an extension of the technique first detailed here:
+http://sedimental.org/remap.html#add_common_keys
+In short, it calls remap on each container, back to front,
+using the accumulating previous values as the default for
+the current iteration.
+"""
+
+from boltons.iterutils import remap, get_path, default_enter, default_visit
+
+
+def remerge(target_list, sourced=False):
+ """
+ Takes a list of containers (e.g., dicts) and merges them using
+ boltons.iterutils.remap. Containers later in the list take
+ precedence (last-wins).
+    By default, returns a new, merged top-level container. With the
+    *sourced* option, `remerge` expects a list of (*name*, *container*)
+    pairs and, along with the merged result, returns a source map: a
+    dict mapping each path to the name of the container it came from.
+ """
+
+ if not sourced:
+ target_list = [(id(t), t) for t in target_list]
+
+ ret = None
+ source_map = {}
+
+ def remerge_enter(path, key, value):
+ new_parent, new_items = default_enter(path, key, value)
+ if ret and not path and key is None:
+ new_parent = ret
+ try:
+ cur_val = get_path(ret, path + (key,))
+ except KeyError:
+ pass
+ else:
+ # TODO: type check?
+ new_parent = cur_val
+
+ if isinstance(value, list):
+ # lists are purely additive.
+ # See https://github.com/mahmoud/boltons/issues/81
+ new_parent.extend(value)
+ new_items = []
+
+ return new_parent, new_items
+
+ for t_name, target in target_list:
+ if sourced:
+ def remerge_visit(path, key, value):
+ source_map[path + (key,)] = t_name
+ return True
+ else:
+ remerge_visit = default_visit
+
+ ret = remap(target, enter=remerge_enter, visit=remerge_visit)
+
+ if not sourced:
+ return ret
+ return ret, source_map
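
A minimal remerge() sketch with hypothetical data: later containers win for
scalar values, nested dicts are merged recursively, and lists are concatenated:

    from utils.deep_merge import remerge

    defaults = {'server': 'localhost',
                'mongo': {'port': 27017, 'db': 'calipso'},
                'tags': ['a']}
    overrides = {'server': '10.0.0.1',
                 'mongo': {'port': 28017},
                 'tags': ['b']}

    merged = remerge([defaults, overrides])
    # {'server': '10.0.0.1',
    #  'mongo': {'port': 28017, 'db': 'calipso'},
    #  'tags': ['a', 'b']}
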
diff --git a/app/utils/dict_naming_converter.py b/app/utils/dict_naming_converter.py
new file mode 100644
index 0000000..91fea2e
--- /dev/null
+++ b/app/utils/dict_naming_converter.py
@@ -0,0 +1,40 @@
+###############################################################################
+# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
+# and others #
+# #
+# All rights reserved. This program and the accompanying materials #
+# are made available under the terms of the Apache License, Version 2.0 #
+# which accompanies this distribution, and is available at #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+###############################################################################
+from bson.objectid import ObjectId
+
+
+class DictNamingConverter:
+
+ # Convert a nested dictionary from one convention to another.
+ # Args:
+ # d (dict): dictionary (nested or not) to be converted.
+ # cf (func): convert function - takes the string in one convention,
+ # returns it in the other one.
+ # Returns:
+ # Dictionary with the new keys.
+ @staticmethod
+ def change_dict_naming_convention(d, cf):
+ new = {}
+ if not d:
+ return d
+ if isinstance(d, str):
+ return d
+ if isinstance(d, ObjectId):
+ return d
+ for k, v in d.items():
+ new_v = v
+ if isinstance(v, dict):
+ new_v = DictNamingConverter.change_dict_naming_convention(v, cf)
+ elif isinstance(v, list):
+ new_v = list()
+ for x in v:
+ new_v.append(DictNamingConverter.change_dict_naming_convention(x, cf))
+ new[cf(k)] = new_v
+ return new
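
change_dict_naming_convention() is the helper MongoAccess uses with its
encode_dots/decode_dots functions; a short sketch with a hypothetical
snake_case-to-camelCase converter (nested dicts and lists are handled
recursively):

    from utils.dict_naming_converter import DictNamingConverter

    def to_camel(s):
        head, *rest = s.split('_')
        return head + ''.join(word.capitalize() for word in rest)

    doc = {'object_name': 'node-1',
           'host_details': {'ip_address': '10.0.0.1'}}
    out = DictNamingConverter.change_dict_naming_convention(doc, to_camel)
    # {'objectName': 'node-1', 'hostDetails': {'ipAddress': '10.0.0.1'}}
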
diff --git a/app/utils/exceptions.py b/app/utils/exceptions.py
new file mode 100644
index 0000000..07e46dc
--- /dev/null
+++ b/app/utils/exceptions.py
@@ -0,0 +1,13 @@
+###############################################################################
+# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
+# and others #
+# #
+# All rights reserved. This program and the accompanying materials #
+# are made available under the terms of the Apache License, Version 2.0 #
+# which accompanies this distribution, and is available at #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+###############################################################################
+
+
+class ScanArgumentsError(ValueError):
+ pass
diff --git a/app/utils/inventory_mgr.py b/app/utils/inventory_mgr.py
new file mode 100644
index 0000000..2fe2894
--- /dev/null
+++ b/app/utils/inventory_mgr.py
@@ -0,0 +1,445 @@
+###############################################################################
+# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
+# and others #
+# #
+# All rights reserved. This program and the accompanying materials #
+# are made available under the terms of the Apache License, Version 2.0 #
+# which accompanies this distribution, and is available at #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+###############################################################################
+from datetime import datetime
+
+import bson
+
+from utils.constants import EnvironmentFeatures
+from utils.logging.console_logger import ConsoleLogger
+from utils.mongo_access import MongoAccess
+from utils.singleton import Singleton
+
+
+def inv_initialization_required(func):
+ def decorated(self, *args, **kwargs):
+ if self.inventory_collection is None:
+ raise TypeError("Inventory collection is not set.")
+ return func(self, *args, **kwargs)
+ return decorated
+
+
+class InventoryMgr(MongoAccess, metaclass=Singleton):
+
+ def __init__(self):
+ super().__init__()
+ self.log = ConsoleLogger()
+ self.inventory_collection = None
+ self.inventory_collection_name = None
+ self.collections = {}
+ self.monitoring_setup_manager = None
+
+ def set_collection(self, collection_type: str = None,
+ use_default_name: bool = False):
+ # do not allow setting the collection more than once
+ if not self.collections.get(collection_type):
+ collection_name = collection_type \
+ if use_default_name \
+ else self.get_coll_name(collection_type)
+
+ self.log.info("Using {} collection: {}"
+ .format(collection_type, collection_name))
+
+ self.collections[collection_type] = MongoAccess.db[collection_name]
+
+ def set_inventory_collection(self, collection_name: str = None):
+ if not self.inventory_collection:
+ if not collection_name:
+ collection_name = "inventory"
+
+ self.log.info("Using inventory collection: {}"
+ .format(collection_name))
+
+ collection = MongoAccess.db[collection_name]
+ self.collections["inventory"] = collection
+ self.inventory_collection = collection
+ self.inventory_collection_name = collection_name
+
+ def get_coll_name(self, coll_name):
+ if not self.inventory_collection_name:
+ raise TypeError("inventory_collection_name is not set")
+
+ return self.inventory_collection_name.replace("inventory", coll_name) \
+ if self.inventory_collection_name.startswith("inventory") \
+ else self.inventory_collection_name + "_" + coll_name
+
+ def set_collections(self, inventory_collection: str = None):
+ self.set_inventory_collection(inventory_collection)
+ self.set_collection("links")
+ self.set_collection("link_types")
+ self.set_collection("clique_types")
+ self.set_collection("clique_constraints")
+ self.set_collection("cliques")
+ self.set_collection("monitoring_config")
+ self.set_collection("constants", use_default_name=True)
+ self.set_collection("scans")
+ self.set_collection("messages")
+ self.set_collection("monitoring_config_templates",
+ use_default_name=True)
+ self.set_collection("environments_config")
+ self.set_collection("supported_environments")
+
+ def clear(self, scan_plan):
+ if scan_plan.inventory_only:
+ collections = {"inventory"}
+ elif scan_plan.links_only:
+ collections = {"links"}
+ elif scan_plan.cliques_only:
+ collections = {"cliques"}
+ else:
+ collections = {"inventory", "links", "cliques", "monitoring_config"}
+
+ env_cond = {} if scan_plan.clear_all else {"environment": scan_plan.env}
+
+ for collection_name in collections:
+ collection = self.collections[collection_name]
+ self.log.info("clearing collection: " + collection.full_name)
+ # delete docs from the collection,
+ # either all or just for the specified environment
+ collection.delete_many(env_cond)
+
+ # return single match
+ def get_by_id(self, environment, item_id):
+ return self.find({
+ "environment": environment,
+ "id": item_id
+ }, get_single=True)
+
+ # return matches for ID in list of values
+ def get_by_ids(self, environment, ids_list):
+ return self.find({
+ "environment": environment,
+ "id": {"$in": ids_list}
+ })
+
+ def get_by_field(self, environment, item_type, field_name, field_value,
+ get_single=False):
+ if field_value:
+ return self.find({"environment": environment,
+ "type": item_type,
+ field_name: field_value},
+ get_single=get_single)
+ else:
+ return self.find({"environment": environment,
+ "type": item_type},
+ get_single=get_single)
+
+ def get(self, environment, item_type, item_id, get_single=False):
+ return self.get_by_field(environment, item_type, "id", item_id,
+ get_single=get_single)
+
+ def get_children(self, environment, item_type, parent_id):
+ if parent_id:
+ if not item_type:
+ return self.find({"environment": environment,
+ "parent_id": parent_id})
+ else:
+ return self.find({"environment": environment,
+ "type": item_type,
+ "parent_id": parent_id})
+ else:
+ return self.find({"environment": environment,
+ "type": item_type})
+
+ def get_single(self, environment, item_type, item_id):
+ matches = self.find({"environment": environment,
+ "type": item_type,
+ "id": item_id})
+ if len(matches) > 1:
+ raise ValueError("Found multiple matches for item: " +
+ "type=" + item_type + ", id=" + item_id)
+ if len(matches) == 0:
+ raise ValueError("No matches for item: " +
+ "type=" + item_type + ", id=" + item_id)
+ return matches[0]
+
+ # item must contain properties 'environment', 'type' and 'id'
+ def set(self, item, collection=None):
+ col = collection
+ mongo_id = None
+ projects = None
+ if "_id" in item:
+ mongo_id = item.pop("_id", None)
+
+ if not collection or collection == self.collections['inventory']:
+ # make sure we have environment, type & id
+ self.check(item, "environment")
+ self.check(item, "type")
+ self.check(item, "id")
+
+ item["last_scanned"] = datetime.now()
+            projects = item.pop("projects", [])
+
+ obj_name = item["name_path"]
+ obj_name = obj_name[obj_name.rindex('/') + 1:]
+
+ if 'object_name' not in item:
+ item['object_name'] = obj_name
+
+ self.set_collections() # make sure we have all collections set
+ if not col:
+ col = self.collections['inventory']
+
+ find_tuple = {"environment": item["environment"],
+ "type": item["type"], "id": item["id"]}
+ else:
+ find_tuple = {'_id': bson.ObjectId(mongo_id)}
+ doc = col.find_one(find_tuple)
+ if not doc:
+ raise ValueError('set(): could not find document with _id=' +
+ mongo_id)
+
+ col.update_one(find_tuple,
+ {'$set': self.encode_mongo_keys(item)},
+ upsert=True)
+ if mongo_id:
+ # restore original mongo ID of document, in case we need to use it
+ item['_id'] = mongo_id
+ if projects:
+ col.update_one(find_tuple,
+ {'$addToSet': {"projects": {'$each': projects}}},
+ upsert=True)
+
+ @staticmethod
+ def check(obj, field_name):
+ arg = obj[field_name]
+ if not arg or not str(arg).rstrip():
+ raise ValueError("Inventory item - " +
+ "the following field is not defined: " +
+ field_name)
+
+ # note: to use general find, call find_items(),
+ # which also does process_results
+ @inv_initialization_required
+ def find(self, search, projection=None, collection=None, get_single=False):
+ coll = self.inventory_collection if not collection \
+ else self.collections[collection]
+ if get_single is True:
+ return self.decode_object_id(
+ self.decode_mongo_keys(
+ coll.find_one(search, projection=projection)
+ )
+ )
+ else:
+ return list(
+ map(
+ self.decode_object_id,
+ map(
+ self.decode_mongo_keys,
+ coll.find(search, projection=projection))
+ )
+ )
+
+ def find_one(self, search, projection=None, collection=None) -> dict:
+ return self.find(search, projection, collection, True)
+
+ def find_items(self, search,
+ projection=None,
+ get_single=False,
+ collection=None):
+ return self.find(search, projection, collection, get_single)
+
+ # record a link between objects in the inventory, to be used in graphs
+ # returns - the new link document
+ # parameters -
+ # environment: name of environment
+ # host: name of host
+ # source: node mongo _id
+ # source_id: node id value of source node
+ # target: node mongo _id
+ # target_id: node id value of target node
+ # link_type: string showing types of connected objects, e.g. "instance-vnic"
+ # link_name: label for the link itself
+ # state: up/down
+ # link_weight: integer, position/priority for graph placement
+ # source_label, target_label: labels for the ends of the link (optional)
+ def create_link(self, env, host, src, source_id, target, target_id,
+ link_type, link_name, state, link_weight,
+ source_label="", target_label="",
+ extra_attributes=None):
+ s = bson.ObjectId(src)
+ t = bson.ObjectId(target)
+ link = {
+ "environment": env,
+ "host": host,
+ "source": s,
+ "source_id": source_id,
+ "target": t,
+ "target_id": target_id,
+ "link_type": link_type,
+ "link_name": link_name,
+ "state": state,
+ "link_weight": link_weight,
+ "source_label": source_label,
+ "target_label": target_label,
+ "attributes": extra_attributes if extra_attributes else {}
+ }
+ return self.write_link(link)
+
+ def write_link(self, link):
+ find_tuple = {
+ 'environment': link['environment'],
+ 'source_id': link['source_id'],
+ 'target_id': link['target_id']
+ }
+ if "_id" in link:
+ link.pop("_id", None)
+ link_encoded = self.encode_mongo_keys(link)
+ links_col = self.collections["links"]
+ result = links_col.update_one(find_tuple, {'$set': link_encoded},
+ upsert=True)
+ link['_id'] = result.upserted_id
+ return link
+
+ def values_replace_in_object(self, o, values_replacement):
+ for k in values_replacement.keys():
+ if k not in o:
+ continue
+ repl = values_replacement[k]
+ if 'from' not in repl or 'to' not in repl:
+ continue
+ o[k] = o[k].replace(repl['from'], repl['to'])
+ self.set(o)
+
+ # perform replacement of substring in values of objects in the inventory
+ # input:
+ # - search: dict with search parameters
+ # - values_replacement: dict,
+ # - keys: names of keys for which to replace the values
+ # - values: dict with "from" (value to be replaced) and "to" (new value)
+ @inv_initialization_required
+ def values_replace(self, search, values_replacement):
+ for doc in self.inventory_collection.find(search):
+ self.values_replace_in_object(doc, values_replacement)
+
+ def delete(self, coll, query_filter):
+ collection = self.collections[coll]
+ if not collection:
+ self.log.warn('delete(): collection not found - ' + coll)
+ return
+ result = collection.delete_many(query_filter)
+ count = result.deleted_count
+ self.log.info('delete(): ' + ('deleted ' + str(count) + ' documents'
+ if count else 'no matching documents'))
+ return count
+
+ def get_env_config(self, env: str):
+ return self.find_one(search={'name': env},
+ collection='environments_config')
+
+ def is_feature_supported(self, env: str, feature: EnvironmentFeatures)\
+ -> bool:
+ env_config = self.get_env_config(env)
+ if not env_config:
+ return False
+
+ # Workaround for mechanism_drivers field type
+ mechanism_driver = env_config['mechanism_drivers'][0] \
+ if isinstance(env_config['mechanism_drivers'], list) \
+ else env_config['mechanism_drivers']
+
+ full_env = {'environment.distribution': env_config['distribution'],
+ 'environment.type_drivers': env_config['type_drivers'],
+ 'environment.mechanism_drivers': mechanism_driver}
+ return self.is_feature_supported_in_env(full_env, feature)
+
+ def is_feature_supported_in_env(self, env_def: dict,
+ feature: EnvironmentFeatures) -> bool:
+
+ result = self.collections['supported_environments'].find_one(env_def)
+ if not result:
+ return False
+ features_in_env = result.get('features', {})
+ return features_in_env.get(feature.value) is True
+
+ def save_inventory_object(self, o: dict, parent: dict,
+ environment: str, type_to_fetch: dict = None) -> bool:
+ if not type_to_fetch:
+ type_to_fetch = {}
+
+ o["id"] = str(o["id"])
+ o["environment"] = environment
+ if type_to_fetch.get("type"):
+ o["type"] = type_to_fetch["type"]
+ o["show_in_tree"] = type_to_fetch.get("show_in_tree", True)
+
+ parent_id_path = parent.get("id_path", "/{}".format(environment))
+ parent_name_path = parent.get("name_path", "/{}".format(environment))
+
+ try:
+            # case of a dynamic folder added on demand
+ master_parent_type = o["master_parent_type"]
+ master_parent_id = o["master_parent_id"]
+ master_parent = self.get_by_id(environment, master_parent_id)
+ if not master_parent:
+ self.log.error("failed to find master parent " +
+ master_parent_id)
+ return False
+ folder_id_path = "/".join((master_parent["id_path"], o["parent_id"]))
+ folder_name_path = "/".join((master_parent["name_path"], o["parent_text"]))
+ folder = {
+ "environment": parent["environment"],
+ "parent_id": master_parent_id,
+ "parent_type": master_parent_type,
+ "id": o["parent_id"],
+ "id_path": folder_id_path,
+ "show_in_tree": True,
+ "name_path": folder_name_path,
+ "name": o["parent_id"],
+ "type": o["parent_type"],
+ "text": o["parent_text"]
+ }
+ # remove master_parent_type & master_parent_id after use,
+            # as they're there just to help create the dynamic folder
+ o.pop("master_parent_type", True)
+ o.pop("master_parent_id", True)
+ self.set(folder)
+ except KeyError:
+ pass
+
+ if o.get("text"):
+ o["name"] = o["text"]
+ elif not o.get("name"):
+ o["name"] = o["id"]
+
+ if "parent_id" not in o and parent:
+ parent_id = parent["id"]
+ o["parent_id"] = parent_id
+ o["parent_type"] = parent["type"]
+ elif "parent_id" in o and o["parent_id"] != parent["id"]:
+ # using alternate parent - fetch parent path from inventory
+ parent_obj = self.get_by_id(environment, o["parent_id"])
+ if parent_obj:
+ parent_id_path = parent_obj["id_path"]
+ parent_name_path = parent_obj["name_path"]
+ o["id_path"] = "/".join((parent_id_path, o["id"].strip()))
+ o["name_path"] = "/".join((parent_name_path, o["name"]))
+
+ # keep list of projects that an object is in
+ associated_projects = []
+ keys_to_remove = []
+ for k in o:
+ if k.startswith("in_project-"):
+ proj_name = k[k.index('-') + 1:]
+ associated_projects.append(proj_name)
+ keys_to_remove.append(k)
+ for k in keys_to_remove:
+ o.pop(k)
+ if len(associated_projects) > 0:
+ projects = o["projects"] if "projects" in o.keys() else []
+ projects.extend(associated_projects)
+ if projects:
+ o["projects"] = projects
+
+ if "create_object" not in o or o["create_object"]:
+ # add/update object in DB
+ self.set(o)
+ if self.is_feature_supported(environment, EnvironmentFeatures.MONITORING):
+ self.monitoring_setup_manager.create_setup(o)
+ return True
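
A hedged usage sketch of InventoryMgr, assuming a reachable MongoDB (per the
calipso_mongo_access.conf defaults) and an already-scanned environment; the
environment and object ids below are hypothetical:

    from utils.inventory_mgr import InventoryMgr

    inv = InventoryMgr()      # singleton; connects through MongoAccess
    inv.set_collections()     # default "inventory" plus related collections

    # single object by id within an environment
    node = inv.get_by_id('Mirantis-Liberty', 'node-1.cisco.com')

    # all instances under a given parent object
    instances = inv.get_children('Mirantis-Liberty', 'instance',
                                 parent_id='node-1.cisco.com-instances')
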
diff --git a/app/utils/logging/__init__.py b/app/utils/logging/__init__.py
new file mode 100644
index 0000000..1e85a2a
--- /dev/null
+++ b/app/utils/logging/__init__.py
@@ -0,0 +1,10 @@
+###############################################################################
+# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
+# and others #
+# #
+# All rights reserved. This program and the accompanying materials #
+# are made available under the terms of the Apache License, Version 2.0 #
+# which accompanies this distribution, and is available at #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+###############################################################################
+
diff --git a/app/utils/logging/console_logger.py b/app/utils/logging/console_logger.py
new file mode 100644
index 0000000..bb8b2ed
--- /dev/null
+++ b/app/utils/logging/console_logger.py
@@ -0,0 +1,21 @@
+###############################################################################
+# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
+# and others #
+# #
+# All rights reserved. This program and the accompanying materials #
+# are made available under the terms of the Apache License, Version 2.0 #
+# which accompanies this distribution, and is available at #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+###############################################################################
+import logging
+
+from utils.logging.logger import Logger
+
+
+class ConsoleLogger(Logger):
+
+ def __init__(self, level: str = Logger.default_level):
+ super().__init__(logger_name="{}-Console".format(self.PROJECT_NAME),
+ level=level)
+ self.add_handler(logging.StreamHandler())
+
diff --git a/app/utils/logging/file_logger.py b/app/utils/logging/file_logger.py
new file mode 100644
index 0000000..e205bc3
--- /dev/null
+++ b/app/utils/logging/file_logger.py
@@ -0,0 +1,23 @@
+###############################################################################
+# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
+# and others #
+# #
+# All rights reserved. This program and the accompanying materials #
+# are made available under the terms of the Apache License, Version 2.0 #
+# which accompanies this distribution, and is available at #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+###############################################################################
+import logging.handlers
+
+from utils.logging.logger import Logger
+
+
+class FileLogger(Logger):
+
+ LOG_DIRECTORY = "/local_dir/log/calipso/"
+
+ def __init__(self, log_file: str, level: str = Logger.default_level):
+ super().__init__(logger_name="{}-File".format(self.PROJECT_NAME),
+ level=level)
+ self.add_handler(logging.handlers.WatchedFileHandler(log_file))
+
diff --git a/app/utils/logging/full_logger.py b/app/utils/logging/full_logger.py
new file mode 100644
index 0000000..a88f00e
--- /dev/null
+++ b/app/utils/logging/full_logger.py
@@ -0,0 +1,47 @@
+###############################################################################
+# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
+# and others #
+# #
+# All rights reserved. This program and the accompanying materials #
+# are made available under the terms of the Apache License, Version 2.0 #
+# which accompanies this distribution, and is available at #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+###############################################################################
+import logging
+import logging.handlers
+
+from utils.logging.logger import Logger
+from utils.logging.mongo_logging_handler import MongoLoggingHandler
+
+
+class FullLogger(Logger):
+
+ def __init__(self, env: str = None, log_file: str = None,
+ level: str = Logger.default_level):
+ super().__init__(logger_name="{}-Full".format(self.PROJECT_NAME),
+ level=level)
+
+ # Console handler
+ self.add_handler(logging.StreamHandler())
+
+ # Message handler
+ self.add_handler(MongoLoggingHandler(env, self.level))
+
+ # File handler
+ if log_file:
+ self.add_handler(logging.handlers.WatchedFileHandler(log_file))
+
+ # Make sure we update MessageHandler with new env
+ def set_env(self, env):
+ super().set_env(env)
+
+ defined_handler = next(
+ filter(
+                lambda handler: isinstance(handler, MongoLoggingHandler),
+ self.log.handlers
+ ), None)
+
+ if defined_handler:
+ defined_handler.env = env
+ else:
+ self.add_handler(MongoLoggingHandler(env, self.level))
diff --git a/app/utils/logging/logger.py b/app/utils/logging/logger.py
new file mode 100644
index 0000000..bcf8287
--- /dev/null
+++ b/app/utils/logging/logger.py
@@ -0,0 +1,99 @@
+###############################################################################
+# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
+# and others #
+# #
+# All rights reserved. This program and the accompanying materials #
+# are made available under the terms of the Apache License, Version 2.0 #
+# which accompanies this distribution, and is available at #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+###############################################################################
+import logging
+from abc import ABC
+
+
+class Logger(ABC):
+ DEBUG = 'DEBUG'
+ INFO = 'INFO'
+ WARNING = 'WARNING'
+ ERROR = 'ERROR'
+ CRITICAL = 'CRITICAL'
+
+ PROJECT_NAME = 'CALIPSO'
+
+ levels = [DEBUG, INFO, WARNING, ERROR, CRITICAL]
+ log_format = '%(asctime)s %(levelname)s: %(message)s'
+ formatter = logging.Formatter(log_format)
+ default_level = INFO
+
+ def __init__(self, logger_name: str = PROJECT_NAME,
+ level: str = default_level):
+ super().__init__()
+ self.check_level(level)
+ self.log = logging.getLogger(logger_name)
+        logging.basicConfig(format=self.log_format,
+                            level=self.get_numeric_level(level))
+ self.log.propagate = False
+ self.set_loglevel(level)
+ self.env = None
+ self.level = level
+
+ def set_env(self, env):
+ self.env = env
+
+ @staticmethod
+ def check_level(level):
+ if level.upper() not in Logger.levels:
+ raise ValueError('Invalid log level: {}. Supported levels: ({})'
+ .format(level, ", ".join(Logger.levels)))
+
+ @staticmethod
+ def get_numeric_level(loglevel):
+ Logger.check_level(loglevel)
+ numeric_level = getattr(logging, loglevel.upper(), Logger.default_level)
+ if not isinstance(numeric_level, int):
+ raise ValueError('Invalid log level: {}'.format(loglevel))
+ return numeric_level
+
+ def set_loglevel(self, loglevel):
+ # assuming loglevel is bound to the string value obtained from the
+ # command line argument. Convert to upper case to allow the user to
+ # specify --log=DEBUG or --log=debug
+ numeric_level = self.get_numeric_level(loglevel)
+
+ for handler in self.log.handlers:
+ handler.setLevel(numeric_level)
+ self.log.setLevel(numeric_level)
+ self.level = loglevel
+
+ def _log(self, level, message, *args, exc_info=False, **kwargs):
+ self.log.log(level, message, *args, exc_info=exc_info, **kwargs)
+
+ def debug(self, message, *args, **kwargs):
+ self._log(logging.DEBUG, message, *args, **kwargs)
+
+ def info(self, message, *args, **kwargs):
+ self._log(logging.INFO, message, *args, **kwargs)
+
+ def warning(self, message, *args, **kwargs):
+ self._log(logging.WARNING, message, *args, **kwargs)
+
+ def warn(self, message, *args, **kwargs):
+ self.warning(message, *args, **kwargs)
+
+ def error(self, message, *args, **kwargs):
+ self._log(logging.ERROR, message, *args, **kwargs)
+
+ def exception(self, message, *args, **kwargs):
+ self._log(logging.ERROR, message, exc_info=True, *args, **kwargs)
+
+ def critical(self, message, *args, **kwargs):
+ self._log(logging.CRITICAL, message, *args, **kwargs)
+
+ def add_handler(self, handler):
+ handler_defined = handler.__class__ in map(lambda h: h.__class__,
+ self.log.handlers)
+
+ if not handler_defined:
+ handler.setLevel(self.level)
+ handler.setFormatter(self.formatter)
+ self.log.addHandler(handler)
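
Log levels are passed as strings (e.g. from a --loglevel CLI argument) and
validated by check_level(); a small sketch using the ConsoleLogger defined
above (the message and environment name are illustrative):

    from utils.logging.console_logger import ConsoleLogger

    log = ConsoleLogger(level='INFO')
    log.info('scan started for environment %s', 'Mirantis-Liberty')
    log.set_loglevel('DEBUG')    # any of DEBUG/INFO/WARNING/ERROR/CRITICAL
    log.debug('this now reaches the console handler')
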
diff --git a/app/utils/logging/message_logger.py b/app/utils/logging/message_logger.py
new file mode 100644
index 0000000..02e098f
--- /dev/null
+++ b/app/utils/logging/message_logger.py
@@ -0,0 +1,21 @@
+###############################################################################
+# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
+# and others #
+# #
+# All rights reserved. This program and the accompanying materials #
+# are made available under the terms of the Apache License, Version 2.0 #
+# which accompanies this distribution, and is available at #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+###############################################################################
+import logging
+
+from utils.logging.logger import Logger
+from utils.logging.mongo_logging_handler import MongoLoggingHandler
+
+
+class MessageLogger(Logger):
+
+    def __init__(self, env: str = None, level: str = Logger.default_level):
+ super().__init__(logger_name="{}-Message".format(self.PROJECT_NAME),
+ level=level)
+ self.add_handler(MongoLoggingHandler(env, self.level))
diff --git a/app/utils/logging/mongo_logging_handler.py b/app/utils/logging/mongo_logging_handler.py
new file mode 100644
index 0000000..b69270e
--- /dev/null
+++ b/app/utils/logging/mongo_logging_handler.py
@@ -0,0 +1,53 @@
+###############################################################################
+# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
+# and others #
+# #
+# All rights reserved. This program and the accompanying materials #
+# are made available under the terms of the Apache License, Version 2.0 #
+# which accompanies this distribution, and is available at #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+###############################################################################
+import datetime
+import logging
+
+from messages.message import Message
+from utils.inventory_mgr import InventoryMgr
+from utils.logging.logger import Logger
+from utils.string_utils import stringify_datetime
+
+
+class MongoLoggingHandler(logging.Handler):
+ """
+ Logging handler for MongoDB
+ """
+ SOURCE_SYSTEM = 'Calipso'
+
+ def __init__(self, env: str, level: str):
+ super().__init__(Logger.get_numeric_level(level))
+ self.str_level = level
+ self.env = env
+ self.inv = None
+
+ def emit(self, record):
+ # Try to invoke InventoryMgr for logging
+ if not self.inv:
+ try:
+ self.inv = InventoryMgr()
+            except Exception:
+ return
+
+ # make sure we do not try to log to DB when DB is not ready
+ if not (self.inv.is_db_ready()
+ and 'messages' in self.inv.collections):
+ return
+
+ # make ID from current timestamp
+ now = datetime.datetime.utcnow()
+ d = now - datetime.datetime(1970, 1, 1)
+ ts = stringify_datetime(now)
+ timestamp_id = '{}.{}.{}'.format(d.days, d.seconds, d.microseconds)
+ source = self.SOURCE_SYSTEM
+ message = Message(msg_id=timestamp_id, env=self.env, source=source,
+ msg=Logger.formatter.format(record), ts=ts,
+ level=record.levelname)
+        self.inv.collections['messages'].insert_one(message.get())
\ No newline at end of file
diff --git a/app/utils/metadata_parser.py b/app/utils/metadata_parser.py
new file mode 100644
index 0000000..1ed49ab
--- /dev/null
+++ b/app/utils/metadata_parser.py
@@ -0,0 +1,83 @@
+###############################################################################
+# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
+# and others #
+# #
+# All rights reserved. This program and the accompanying materials #
+# are made available under the terms of the Apache License, Version 2.0 #
+# which accompanies this distribution, and is available at #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+###############################################################################
+import json
+import os
+from abc import abstractmethod, ABCMeta
+
+from utils.util import get_extension
+
+
+class MetadataParser(metaclass=ABCMeta):
+
+ def __init__(self):
+ super().__init__()
+ self.errors = []
+
+ @abstractmethod
+ def get_required_fields(self) -> list:
+ pass
+
+ def validate_metadata(self, metadata: dict) -> bool:
+ if not isinstance(metadata, dict):
+ raise ValueError('metadata needs to be a valid dict')
+
+ # make sure metadata json contains all fields we need
+ required_fields = self.get_required_fields()
+ if not all([field in metadata for field in required_fields]):
+ raise ValueError("Metadata json should contain "
+ "all the following fields: {}"
+ .format(', '.join(required_fields)))
+ return True
+
+ @staticmethod
+ def _load_json_file(file_path: str):
+ with open(file_path) as data_file:
+ return json.load(data_file)
+
+ def _parse_json_file(self, file_path: str):
+ metadata = self._load_json_file(file_path)
+
+ # validate metadata correctness
+ if not self.validate_metadata(metadata):
+ return None
+
+ return metadata
+
+ @staticmethod
+ def check_metadata_file_ok(file_path: str):
+ extension = get_extension(file_path)
+ if extension != 'json':
+ raise ValueError("Extension '{}' is not supported. "
+ "Please provide a .json metadata file."
+ .format(extension))
+
+ if not os.path.isfile(file_path):
+ raise ValueError("Couldn't load metadata file. "
+ "Path '{}' doesn't exist or is not a file"
+ .format(file_path))
+
+ def parse_metadata_file(self, file_path: str) -> dict:
+ # reset errors in case same parser is used to read multiple inputs
+ self.errors = []
+ self.check_metadata_file_ok(file_path)
+
+ # Try to parse metadata file if it has one of the supported extensions
+ metadata = self._parse_json_file(file_path)
+ self.check_errors()
+ return metadata
+
+ def check_errors(self):
+ if self.errors:
+ raise ValueError("Errors encountered during "
+ "metadata file parsing:\n{}"
+ .format("\n".join(self.errors)))
+
+ def add_error(self, msg):
+ self.errors.append(msg)
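
MetadataParser is abstract: a subclass only declares its required top-level
fields, while parse_metadata_file() handles the extension/path checks and the
JSON loading. A hedged sketch with a hypothetical subclass, field names and
file path:

    from utils.metadata_parser import MetadataParser

    class ScannersMetadataParser(MetadataParser):
        def get_required_fields(self) -> list:
            return ['scanners', 'scanners_package']

    parser = ScannersMetadataParser()
    metadata = parser.parse_metadata_file('/local_dir/scanners.json')
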
diff --git a/app/utils/mongo_access.py b/app/utils/mongo_access.py
new file mode 100644
index 0000000..1425017
--- /dev/null
+++ b/app/utils/mongo_access.py
@@ -0,0 +1,137 @@
+###############################################################################
+# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
+# and others #
+# #
+# All rights reserved. This program and the accompanying materials #
+# are made available under the terms of the Apache License, Version 2.0 #
+# which accompanies this distribution, and is available at #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+###############################################################################
+import os
+
+from pymongo import MongoClient
+
+from utils.config_file import ConfigFile
+from utils.dict_naming_converter import DictNamingConverter
+from utils.logging.console_logger import ConsoleLogger
+from utils.logging.file_logger import FileLogger
+
+
+# Provides access to MongoDB using PyMongo library
+#
+# Notes on authentication:
+# default config file is calipso_mongo_access.conf
+# you can also specify name of file from CLI with --mongo_config
+
+
+class MongoAccess(DictNamingConverter):
+ client = None
+ db = None
+ default_conf_file = '/local_dir/calipso_mongo_access.conf'
+ config_file = None
+
+ DB_NAME = 'calipso'
+ LOG_FILENAME = 'mongo_access.log'
+ DEFAULT_LOG_FILE = os.path.join(os.path.abspath("."), LOG_FILENAME)
+
+ def __init__(self):
+ super().__init__()
+ self.log_file = os.path.join(FileLogger.LOG_DIRECTORY,
+ MongoAccess.LOG_FILENAME)
+
+ try:
+ self.log = FileLogger(self.log_file)
+ except OSError as e:
+ ConsoleLogger().warning("Couldn't use file {} for logging. "
+ "Using default location: {}.\n"
+ "Error: {}"
+ .format(self.log_file,
+ self.DEFAULT_LOG_FILE,
+ e))
+
+ self.log_file = self.DEFAULT_LOG_FILE
+ self.log = FileLogger(self.log_file)
+
+ self.connect_params = {}
+ self.mongo_connect(self.config_file)
+
+ def is_db_ready(self) -> bool:
+ return MongoAccess.client is not None
+
+ @staticmethod
+ def set_config_file(_conf_file):
+ MongoAccess.config_file = _conf_file
+
+ def mongo_connect(self, config_file_path=""):
+ if MongoAccess.client:
+ return
+
+ self.connect_params = {
+ "server": "localhost",
+ "port": 27017
+ }
+
+ if not config_file_path:
+ config_file_path = self.default_conf_file
+
+ try:
+ config_file = ConfigFile(config_file_path)
+ # read connection parameters from config file
+ config_params = config_file.read_config()
+ self.connect_params.update(config_params)
+ except Exception as e:
+ self.log.exception(e)
+ raise
+
+ self.prepare_connect_uri()
+ MongoAccess.client = MongoClient(
+ self.connect_params["server"],
+ self.connect_params["port"]
+ )
+ MongoAccess.db = getattr(MongoAccess.client,
+ config_params.get('auth_db', self.DB_NAME))
+ self.log.info('Connected to MongoDB')
+
+ def prepare_connect_uri(self):
+ params = self.connect_params
+ self.log.debug('connecting to MongoDb server: {}'
+ .format(params['server']))
+ uri = 'mongodb://'
+ if 'password' in params:
+ uri = uri + params['user'] + ':' + params['password'] + '@'
+ uri = uri + params['server']
+ if 'auth_db' in params:
+ uri = uri + '/' + params['auth_db']
+ self.connect_params['server'] = uri
+
+ @staticmethod
+ def update_document(collection, document, upsert=False):
+ if isinstance(collection, str):
+ collection = MongoAccess.db[collection]
+ doc_id = document.pop('_id')
+ collection.update_one({'_id': doc_id}, {'$set': document},
+ upsert=upsert)
+ document['_id'] = doc_id
+
+ @staticmethod
+ def encode_dots(s):
+ return s.replace(".", "[dot]")
+
+ @staticmethod
+ def decode_dots(s):
+ return s.replace("[dot]", ".")
+
+ # Mongo will not accept dot (".") in keys, or $ in start of keys
+ # $ in beginning of key does not happen in OpenStack,
+ # so need to translate only "." --> "[dot]"
+ @staticmethod
+ def encode_mongo_keys(item):
+ return MongoAccess.change_dict_naming_convention(item, MongoAccess.encode_dots)
+
+ @staticmethod
+ def decode_mongo_keys(item):
+ return MongoAccess.change_dict_naming_convention(item, MongoAccess.decode_dots)
+
+ @staticmethod
+ def decode_object_id(item: dict):
+ return dict(item, **{"_id": str(item["_id"])}) if item and "_id" in item else item
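
A hedged MongoAccess sketch: the optional config file uses the ConfigFile
"key value" format, and encode_mongo_keys()/decode_mongo_keys() translate
dots in document keys before writing to Mongo. The host, credentials and
sample document are hypothetical:

    # a hypothetical /local_dir/calipso_mongo_access.conf:
    #
    #   server 10.0.0.1
    #   user calipso
    #   password secret123
    #   auth_db calipso

    from utils.mongo_access import MongoAccess

    MongoAccess.set_config_file('/local_dir/calipso_mongo_access.conf')
    mongo = MongoAccess()      # client and db are shared, class-level

    doc = {'config.file.name': 'ml2_conf.ini'}
    encoded = MongoAccess.encode_mongo_keys(doc)
    # {'config[dot]file[dot]name': 'ml2_conf.ini'}
    decoded = MongoAccess.decode_mongo_keys(encoded)   # round-trips back
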
diff --git a/app/utils/singleton.py b/app/utils/singleton.py
new file mode 100644
index 0000000..fc1147f
--- /dev/null
+++ b/app/utils/singleton.py
@@ -0,0 +1,16 @@
+###############################################################################
+# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
+# and others #
+# #
+# All rights reserved. This program and the accompanying materials #
+# are made available under the terms of the Apache License, Version 2.0 #
+# which accompanies this distribution, and is available at #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+###############################################################################
+class Singleton(type):
+ _instances = {}
+
+ def __call__(cls, *args, **kwargs):
+ if cls not in cls._instances:
+ cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
+ return cls._instances[cls]
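
Singleton is a metaclass: the first call builds the instance and every later
call returns that same object (this is how InventoryMgr above stays a single
shared instance). A tiny sketch with a hypothetical class:

    from utils.singleton import Singleton

    class Registry(metaclass=Singleton):
        def __init__(self):
            self.items = []

    a = Registry()
    b = Registry()
    assert a is b    # the same instance is returned on every call
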
diff --git a/app/utils/special_char_converter.py b/app/utils/special_char_converter.py
new file mode 100644
index 0000000..fb469bb
--- /dev/null
+++ b/app/utils/special_char_converter.py
@@ -0,0 +1,32 @@
+###############################################################################
+# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
+# and others #
+# #
+# All rights reserved. This program and the accompanying materials #
+# are made available under the terms of the Apache License, Version 2.0 #
+# which accompanies this distribution, and is available at #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+###############################################################################
+import re
+
+
+class SpecialCharConverter:
+
+ translated_re = re.compile(r'---[.][.][0-9]+[.][.]---')
+
+ def encode_special_characters(self, s):
+ SPECIAL_CHARS = [':', '/']
+ for c in SPECIAL_CHARS:
+ if c in s:
+ s = s.replace(c, '---..' + str(ord(c)) + '..---')
+ return s
+
+ def decode_special_characters(self, s):
+ replaced = []
+ for m in re.finditer(self.translated_re, s):
+ match = m.group(0)
+ char_code = match[5:len(match)-5]
+ if char_code not in replaced:
+ replaced.append(char_code)
+ s = s.replace(match, chr(int(char_code)))
+ return s
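
encode_special_characters() replaces ':' and '/' with a '---..<ord>..---'
token so the value can be used where those characters are not allowed, and
decode_special_characters() reverses it; a quick round trip (the value is
illustrative):

    from utils.special_char_converter import SpecialCharConverter

    conv = SpecialCharConverter()
    encoded = conv.encode_special_characters('fa:16:3e/1')
    # 'fa---..58..---16---..58..---3e---..47..---1'
    assert conv.decode_special_characters(encoded) == 'fa:16:3e/1'
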
diff --git a/app/utils/ssh_conn.py b/app/utils/ssh_conn.py
new file mode 100644
index 0000000..d4b7954
--- /dev/null
+++ b/app/utils/ssh_conn.py
@@ -0,0 +1,94 @@
+###############################################################################
+# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
+# and others #
+# #
+# All rights reserved. This program and the accompanying materials #
+# are made available under the terms of the Apache License, Version 2.0 #
+# which accompanies this distribution, and is available at #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+###############################################################################
+import os
+
+from discover.configuration import Configuration
+from utils.inventory_mgr import InventoryMgr
+from utils.ssh_connection import SshConnection
+
+
+class SshConn(SshConnection):
+ config = None
+ ssh = None
+ connections = {}
+
+ max_call_count_per_con = 100
+ timeout = 15 # timeout for exec in seconds
+
+ def __init__(self, host_name, for_sftp=False):
+ self.config = Configuration()
+ self.env_config = self.config.get_env_config()
+ self.env = self.env_config['name']
+ self.conf = self.config.get('CLI')
+ self.gateway = None
+ self.host = None
+ self.host_conf = self.get_host_conf(host_name)
+ self.ssh = None
+ self.ftp = None
+ self.for_sftp = for_sftp
+ self.key = None
+ self.port = None
+ self.user = None
+ self.pwd = None
+ self.check_definitions()
+ super().__init__(self.host, self.user, _pwd=self.pwd, _key=self.key,
+ _port=self.port, for_sftp=for_sftp)
+ self.inv = InventoryMgr()
+ if host_name in self.connections and not self.ssh:
+ self.ssh = self.connections[host_name]
+
+ def get_host_conf(self, host_name):
+ if 'hosts' in self.conf:
+ if not host_name:
+ raise ValueError('SshConn(): host must be specified ' +
+ 'if multi-host CLI config is used')
+ if host_name not in self.conf['hosts']:
+ raise ValueError('host details missing: ' + host_name)
+ return self.conf['hosts'][host_name]
+ else:
+ return self.conf
+
+ def check_definitions(self):
+ try:
+ self.host = self.host_conf['host']
+ if self.host in self.connections:
+ self.ssh = self.connections[self.host]
+ except KeyError:
+ raise ValueError('Missing definition of host for CLI access')
+ try:
+ self.user = self.host_conf['user']
+ except KeyError:
+ raise ValueError('Missing definition of user for CLI access')
+ try:
+ self.key = self.host_conf['key']
+ if self.key and not os.path.exists(self.key):
+ raise ValueError('Key file not found: ' + self.key)
+ except KeyError:
+ pass
+ try:
+ self.pwd = self.host_conf['pwd']
+ except KeyError:
+ self.pwd = None
+ if not self.key and not self.pwd:
+ raise ValueError('Must specify key or password for CLI access')
+
+ gateway_hosts = {}
+
+ @staticmethod
+ def get_gateway_host(host):
+ if not SshConn.gateway_hosts.get(host):
+ ssh = SshConn(host)
+ gateway = ssh.exec('uname -n')
+ SshConn.gateway_hosts[host] = gateway.strip()
+ return SshConn.gateway_hosts[host]
+
+ def is_gateway_host(self, host):
+ gateway_host = self.get_gateway_host(host)
+ return host == gateway_host
diff --git a/app/utils/ssh_connection.py b/app/utils/ssh_connection.py
new file mode 100644
index 0000000..0fa197a
--- /dev/null
+++ b/app/utils/ssh_connection.py
@@ -0,0 +1,217 @@
+###############################################################################
+# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
+# and others #
+# #
+# All rights reserved. This program and the accompanying materials #
+# are made available under the terms of the Apache License, Version 2.0 #
+# which accompanies this distribution, and is available at #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+###############################################################################
+import os
+
+import paramiko
+
+from utils.binary_converter import BinaryConverter
+
+
+class SshConnection(BinaryConverter):
+ config = None
+ ssh = None
+ connections = {}
+ cli_connections = {}
+ sftp_connections = {}
+
+ max_call_count_per_con = 100
+ timeout = 15 # timeout for exec in seconds
+
+ DEFAULT_PORT = 22
+
+ def __init__(self, _host: str, _user: str, _pwd: str=None, _key: str = None,
+ _port: int = None, _call_count_limit: int=None,
+ for_sftp: bool = False):
+ super().__init__()
+ self.host = _host
+ self.ssh = None
+ self.ftp = None
+ self.for_sftp = for_sftp
+ self.key = _key
+ self.port = _port
+ self.user = _user
+ self.pwd = _pwd
+ self.check_definitions()
+ self.fetched_host_details = False
+ self.call_count = 0
+ self.call_count_limit = 0 if for_sftp \
+ else (SshConnection.max_call_count_per_con
+ if _call_count_limit is None else _call_count_limit)
+ if for_sftp:
+ self.sftp_connections[_host] = self
+ else:
+ self.cli_connections[_host] = self
+
+ def check_definitions(self):
+ if not self.host:
+ raise ValueError('Missing definition of host for CLI access')
+ if not self.user:
+ raise ValueError('Missing definition of user ' +
+ 'for CLI access to host {}'.format(self.host))
+ if self.key and not os.path.exists(self.key):
+ raise ValueError('Key file not found: ' + self.key)
+ if not self.key and not self.pwd:
+ raise ValueError('Must specify key or password ' +
+ 'for CLI access to host {}'.format(self.host))
+
+    @staticmethod
+    def get_ssh(host, for_sftp=False):
+        if for_sftp:
+            return SshConnection.sftp_connections.get(host)
+        return SshConnection.cli_connections.get(host)
+
+ @staticmethod
+ def get_connection(host, for_sftp=False):
+ key = ('sftp-' if for_sftp else '') + host
+ return SshConnection.connections.get(key)
+
+ def disconnect(self):
+ if self.ssh:
+ self.ssh.close()
+
+ @staticmethod
+ def disconnect_all():
+ for ssh in SshConnection.cli_connections.values():
+ ssh.disconnect()
+ SshConnection.cli_connections = {}
+ for ssh in SshConnection.sftp_connections.values():
+ ssh.disconnect()
+ SshConnection.sftp_connections = {}
+
+ def get_host(self):
+ return self.host
+
+ def get_user(self):
+ return self.user
+
+ def set_call_limit(self, _limit: int):
+ self.call_count_limit = _limit
+
+ def connect(self, reconnect=False) -> bool:
+ connection = self.get_connection(self.host, self.for_sftp)
+ if connection:
+ self.ssh = connection
+ if reconnect:
+ self.log.info("SshConnection: " +
+ "****** forcing reconnect: %s ******",
+ self.host)
+ elif self.call_count >= self.call_count_limit > 0:
+ self.log.info("SshConnection: ****** reconnecting: %s, " +
+ "due to call count: %s ******",
+ self.host, self.call_count)
+ else:
+ return True
+ connection.close()
+ self.ssh = None
+ self.ssh = paramiko.SSHClient()
+ connection_key = ('sftp-' if self.for_sftp else '') + self.host
+ SshConnection.connections[connection_key] = self.ssh
+ self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+ if self.key:
+ k = paramiko.RSAKey.from_private_key_file(self.key)
+ self.ssh.connect(hostname=self.host, username=self.user, pkey=k,
+ port=self.port if self.port is not None
+ else self.DEFAULT_PORT,
+ password=self.pwd, timeout=30)
+ else:
+ try:
+ port = self.port if self.port is not None else self.DEFAULT_PORT
+ self.ssh.connect(self.host,
+ username=self.user,
+ password=self.pwd,
+ port=port,
+ timeout=30)
+ except paramiko.ssh_exception.AuthenticationException:
+ self.log.error('Failed SSH connect to host {}, port={}'
+ .format(self.host, port))
+ self.ssh = None
+ self.call_count = 0
+ return self.ssh is not None
+
+ def exec(self, cmd):
+ if not self.connect():
+ return ''
+ self.call_count += 1
+ self.log.debug("call count: %s, running call:\n%s\n",
+ str(self.call_count), cmd)
+ stdin, stdout, stderr = self.ssh.exec_command(cmd, timeout=self.timeout)
+ stdin.close()
+ err = self.binary2str(stderr.read())
+ if err:
+ # ignore messages about loading plugin
+ err_lines = [l for l in err.splitlines()
+ if 'Loaded plugin: ' not in l]
+ if err_lines:
+ self.log.error("CLI access: \n" +
+ "Host: {}\nCommand: {}\nError: {}\n".
+ format(self.host, cmd, err))
+ stderr.close()
+ stdout.close()
+ return ""
+ ret = self.binary2str(stdout.read())
+ stderr.close()
+ stdout.close()
+ return ret
+
+ def copy_file(self, local_path, remote_path, mode=None):
+ if not self.connect():
+ return
+ if not self.ftp:
+ self.ftp = self.ssh.open_sftp()
+ try:
+ self.ftp.put(local_path, remote_path)
+ except IOError as e:
+ self.log.error('SFTP copy_file failed to copy file: ' +
+ 'local: ' + local_path +
+ ', remote host: ' + self.host +
+ ', error: ' + str(e))
+ return str(e)
+ try:
+ remote_file = self.ftp.file(remote_path, 'a+')
+ except IOError as e:
+ self.log.error('SFTP copy_file failed to open file after put(): ' +
+ 'local: ' + local_path +
+ ', remote host: ' + self.host +
+ ', error: ' + str(e))
+ return str(e)
+ try:
+ if mode:
+ remote_file.chmod(mode)
+ except IOError as e:
+            self.log.error('SFTP copy_file failed to chmod file: ' +
+                           'local: ' + local_path +
+                           ', remote host: ' + self.host +
+                           ', port: ' + str(self.port) +
+                           ', error: ' + str(e))
+ return str(e)
+ self.log.info('SFTP copy_file success: '
+ 'host={},port={},{} -> {}'.format(
+ str(self.host), str(self.port), str(local_path), str(remote_path)))
+ return ''
+
+ def copy_file_from_remote(self, remote_path, local_path):
+ if not self.connect():
+ return
+ if not self.ftp:
+ self.ftp = self.ssh.open_sftp()
+ try:
+ self.ftp.get(remote_path, local_path)
+ except IOError as e:
+ self.log.error('SFTP copy_file_from_remote failed to copy file: '
+ 'remote host: {}, '
+ 'remote_path: {}, local: {}, error: {}'
+ .format(self.host, remote_path, local_path, str(e)))
+ return str(e)
+ self.log.info('SFTP copy_file_from_remote success: host={},{} -> {}'.
+ format(self.host, remote_path, local_path))
+ return ''
+
+ def is_gateway_host(self, host):
+ return True
diff --git a/app/utils/string_utils.py b/app/utils/string_utils.py
new file mode 100644
index 0000000..1f51992
--- /dev/null
+++ b/app/utils/string_utils.py
@@ -0,0 +1,59 @@
+###############################################################################
+# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
+# and others #
+# #
+# All rights reserved. This program and the accompanying materials #
+# are made available under the terms of the Apache License, Version 2.0 #
+# which accompanies this distribution, and is available at #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+###############################################################################
+import json
+from datetime import datetime
+
+from bson import ObjectId
+
+
+def jsonify(obj, prettify=False):
+ if prettify:
+ return json.dumps(obj, sort_keys=True, indent=4, separators=(',', ': '))
+ else:
+ return json.dumps(obj)
+
+
+# stringify datetime object
+def stringify_datetime(dt):
+ return dt.strftime("%Y-%m-%dT%H:%M:%S.%f%z")
+
+
+# stringify ObjectId
+def stringify_object_id(object_id):
+ return str(object_id)
+
+
+stringify_map = {
+ ObjectId: stringify_object_id,
+ datetime: stringify_datetime
+}
+
+
+def stringify_object_values_by_type(obj, object_type):
+ if isinstance(obj, dict):
+ for key, value in obj.items():
+ if isinstance(value, object_type):
+ obj[key] = stringify_map[object_type](value)
+ else:
+ stringify_object_values_by_type(value, object_type)
+ elif isinstance(obj, list):
+ for index, value in enumerate(obj):
+ if isinstance(value, object_type):
+ obj[index] = stringify_map[object_type](value)
+ else:
+ stringify_object_values_by_type(value, object_type)
+
+
+# convert values of the specified types into strings,
+# e.g. convert all ObjectId values to strings
+# and all datetime objects to strings
+def stringify_object_values_by_types(obj, object_types):
+ for object_type in object_types:
+ stringify_object_values_by_type(obj, object_type)
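
stringify_object_values_by_types() walks a dict or list in place and converts
values of the given types to strings, so the result can be passed to
jsonify(); a small sketch with a hypothetical document:

    from datetime import datetime
    from bson import ObjectId

    from utils.string_utils import jsonify, stringify_object_values_by_types

    doc = {'_id': ObjectId('5977c1a38e64b6001a2b66f1'),
           'last_scanned': datetime(2017, 7, 27, 15, 4, 7)}
    stringify_object_values_by_types(doc, [ObjectId, datetime])
    print(jsonify(doc, prettify=True))
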
diff --git a/app/utils/util.py b/app/utils/util.py
new file mode 100644
index 0000000..4695879
--- /dev/null
+++ b/app/utils/util.py
@@ -0,0 +1,172 @@
+###############################################################################
+# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
+# and others #
+# #
+# All rights reserved. This program and the accompanying materials #
+# are made available under the terms of the Apache License, Version 2.0 #
+# which accompanies this distribution, and is available at #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+###############################################################################
+import importlib
+import signal
+from argparse import Namespace
+from typing import Dict, Callable
+
+import os
+import re
+
+from bson.objectid import ObjectId
+
+
+class SignalHandler:
+
+ def __init__(self, signals=(signal.SIGTERM, signal.SIGINT)):
+ super().__init__()
+ self.terminated = False
+ for sig in signals:
+ signal.signal(sig, self.handle)
+
+ def handle(self, signum, frame):
+ self.terminated = True
+
+
+class ClassResolver:
+ instances = {}
+
+ # convert class name in camel case to module file name in underscores
+ @staticmethod
+ def get_module_file_by_class_name(class_name):
+ s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', class_name)
+ module_file = re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
+ return module_file
+
+ # convert module file name in underscores to class name in camel case
+ @staticmethod
+ def get_class_name_by_module(module_name):
+ name_parts = [word.capitalize() for word in module_name.split('_')]
+ class_name = ''.join(name_parts)
+ return class_name
+
+
+ @staticmethod
+ def get_fully_qualified_class(class_name: str = None,
+ package_name: str = "discover",
+ module_name: str = None):
+ module_file = module_name if module_name \
+ else ClassResolver.get_module_file_by_class_name(class_name)
+ module_parts = [package_name, module_file]
+ module_name = ".".join(module_parts)
+ try:
+ class_module = importlib.import_module(module_name)
+ except ImportError:
+ raise ValueError('could not import module {}'.format(module_name))
+
+ clazz = getattr(class_module, class_name)
+ return clazz
+
+ @staticmethod
+ def prepare_class(class_name: str = None,
+ package_name: str = "discover",
+ module_name: str = None):
+ if not class_name and not module_name:
+ raise ValueError('class_name or module_name must be provided')
+ if not class_name:
+ class_name = ClassResolver.get_class_name_by_module(module_name)
+ if class_name in ClassResolver.instances:
+ return 'instance', ClassResolver.instances[class_name]
+ clazz = ClassResolver.get_fully_qualified_class(class_name, package_name,
+ module_name)
+ return 'class', clazz
+
+ @staticmethod
+ def get_instance_of_class(class_name: str = None,
+ package_name: str = "discover",
+ module_name: str = None):
+ val_type, clazz = \
+ ClassResolver.prepare_class(class_name=class_name,
+ package_name=package_name,
+ module_name=module_name)
+ if val_type == 'instance':
+ return clazz
+ instance = clazz()
+ ClassResolver.instances[class_name] = instance
+ return instance
+
+ @staticmethod
+ def get_instance_single_arg(arg: object,
+ class_name: str = None,
+ package_name: str = "discover",
+ module_name: str = None):
+ val_type, clazz = \
+ ClassResolver.prepare_class(class_name=class_name,
+ package_name=package_name,
+ module_name=module_name)
+ if val_type == 'instance':
+ return clazz
+ instance = clazz(arg)
+ ClassResolver.instances[class_name] = instance
+ return instance
+
+
+# when searching in the Mongo DB, ObjectId values
+# need to be generated from their string
+# representation before building the query
+def generate_object_ids(keys, obj):
+ for key in keys:
+ if key in obj:
+ o = obj.pop(key)
+ if o:
+ try:
+ o = ObjectId(o)
+ except Exception:
+ raise Exception("{0} is not a valid object id".
+ format(o))
+ obj[key] = o
+
+
+# Get arguments from CLI or another source
+# and convert them to dict to enforce uniformity.
+# Throws a TypeError if arguments can't be converted to dict.
+def setup_args(args: dict,
+ defaults: Dict[str, object],
+ get_cmd_args: Callable[[], Namespace] = None):
+ if defaults is None:
+ defaults = {}
+
+ if args is None and get_cmd_args is not None:
+ args = vars(get_cmd_args())
+ elif not isinstance(args, dict):
+ try:
+ args = dict(args)
+ except TypeError:
+ try:
+ args = vars(args)
+ except TypeError:
+ raise TypeError("Wrong arguments format")
+
+ return dict(defaults, **args)
+
+
+def encode_router_id(host_id: str, uuid: str):
+ return '-'.join([host_id, 'qrouter', uuid])
+
+
+def decode_router_id(router_id: str):
+ return router_id.split('qrouter-')[-1]
+
+
+def get_extension(file_path: str) -> str:
+ return os.path.splitext(file_path)[1][1:]
+
+
+def encode_aci_dn(object_id):
+ return object_id.replace("topology/", "").replace("/", "___").replace("-", "__")
+
+
+def decode_aci_dn(object_id):
+ return object_id.replace("___", "/").replace("__", "-")
+
+
+def get_object_path_part(path: str, part_name: str):
+ match = re.match(".*/{}/(.+?)/.*".format(part_name), path)
+ return match.group(1) if match else None
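
ClassResolver maps between CamelCase class names and snake_case module files
(and caches instances), while the router/ACI helpers do simple, reversible
string encodings. A few quick checks; the class, host and uuid names are only
illustrative:

    from utils.util import (ClassResolver, encode_router_id,
                            decode_router_id, get_object_path_part)

    ClassResolver.get_module_file_by_class_name('ApiAccess')   # 'api_access'
    ClassResolver.get_class_name_by_module('api_access')       # 'ApiAccess'

    rid = encode_router_id('node-1.cisco.com', 'uuid-1234')
    # 'node-1.cisco.com-qrouter-uuid-1234'
    decode_router_id(rid)                                       # 'uuid-1234'

    get_object_path_part('/env1/regions/RegionOne/nodes/node-1',
                         'regions')                             # 'RegionOne'
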