author     yayogev <yaronyogev@gmail.com>  2018-02-27 17:00:05 +0200
committer  yayogev <yaronyogev@gmail.com>  2018-02-27 17:00:05 +0200
commit     648a394f7a318443dfd82f790f83a79616c26905
tree       719508e9d99771c3de056a9c2914d461c35fb967
parent     100add41cfe2b987524b190c1c92771a3c4f1d5f
US3541 merge various fixes to OPNFV branch
Timestamp of last commit that was merged: 26-Jan-2018 16:25.

Change-Id: I7b0bf7885d7d0badb81c794a52c480b905d78459
Signed-off-by: yayogev <yaronyogev@gmail.com>
-rw-r--r--  INFO | 2
-rw-r--r--  app/discover/clique_finder.py | 114
-rw-r--r--  app/discover/configuration.py | 4
-rw-r--r--  app/discover/events/event_base.py | 3
-rw-r--r--  app/discover/events/event_instance_add.py | 2
-rw-r--r--  app/discover/events/event_interface_add.py | 6
-rw-r--r--  app/discover/events/event_port_add.py | 8
-rw-r--r--  app/discover/events/event_port_delete.py | 2
-rw-r--r--  app/discover/events/event_router_add.py | 4
-rw-r--r--  app/discover/events/event_router_update.py | 4
-rw-r--r--  app/discover/events/event_subnet_add.py | 6
-rw-r--r--  app/discover/events/event_subnet_update.py | 6
-rw-r--r--  app/discover/fetcher.py | 51
-rw-r--r--  app/discover/fetchers/api/api_fetch_availability_zones.py | 11
-rw-r--r--  app/discover/fetchers/api/api_fetch_network.py | 18
-rw-r--r--  app/discover/fetchers/api/api_fetch_networks.py | 15
-rw-r--r--  app/discover/fetchers/api/api_fetch_port.py | 8
-rw-r--r--  app/discover/fetchers/api/api_fetch_ports.py | 8
-rw-r--r--  app/discover/fetchers/api/api_fetch_project_hosts.py | 4
-rw-r--r--  app/discover/fetchers/cli/cli_access.py | 15
-rw-r--r--  app/discover/fetchers/cli/cli_fetch_host_pnics.py | 7
-rw-r--r--  app/discover/fetchers/cli/cli_fetch_vconnectors_ovs.py | 54
-rw-r--r--  app/discover/fetchers/cli/cli_fetch_vservice_vnics.py | 10
-rw-r--r--  app/discover/fetchers/db/db_fetch_oteps.py | 2
-rw-r--r--  app/discover/link_finders/find_links_for_vedges.py | 2
-rwxr-xr-x  app/discover/scan.py | 11
-rw-r--r--  app/discover/scan_manager.py | 4
-rw-r--r--  app/discover/scanner.py | 75
-rw-r--r--  app/install/calipso-installer.py | 19
-rw-r--r--  app/install/db/monitoring_config_templates.json | 5
-rw-r--r--  app/messages/message.py | 7
-rw-r--r--  app/monitoring/checks/check_instance_communictions.py | 4
-rwxr-xr-x  app/monitoring/checks/check_ping.py | 2
-rwxr-xr-x  app/monitoring/checks/check_pnic_ovs.py | 4
-rwxr-xr-x  app/monitoring/checks/check_pnic_vpp.py | 2
-rw-r--r--  app/monitoring/checks/check_vconnector.py | 2
-rwxr-xr-x  app/monitoring/checks/check_vedge_ovs.py | 2
-rwxr-xr-x  app/monitoring/checks/check_vedge_vpp.py | 2
-rwxr-xr-x  app/monitoring/checks/check_vnic_vconnector.py | 2
-rwxr-xr-x  app/monitoring/checks/check_vnic_vpp.py | 2
-rw-r--r--  app/monitoring/checks/check_vservice.py | 2
-rw-r--r--  app/monitoring/handlers/handle_otep.py | 2
-rw-r--r--  app/monitoring/handlers/monitoring_check_handler.py | 32
-rw-r--r--  app/monitoring/setup/monitoring_handler.py | 27
-rw-r--r--  app/test/api/responders_test/resource/test_clique_types.py | 3
-rw-r--r--  app/test/api/responders_test/test_data/clique_types.py | 18
-rw-r--r--  app/test/fetch/api_fetch/test_data/api_fetch_networks.py | 1
-rw-r--r--  app/test/fetch/api_fetch/test_data/api_fetch_ports.py | 3
-rw-r--r--  app/test/fetch/db_fetch/test_data/db_fetch_oteps.py | 2
-rw-r--r--  app/test/scan/test_data/scanner.py | 63
-rw-r--r--  app/test/scan/test_scanner.py | 19
-rwxr-xr-x  app/test/verify.sh | 14
-rw-r--r--  app/utils/dict_naming_converter.py | 35
-rw-r--r--  app/utils/inventory_mgr.py | 71
-rw-r--r--  app/utils/logging/console_logger.py | 1
-rw-r--r--  app/utils/logging/full_logger.py | 44
-rw-r--r--  app/utils/logging/logger.py | 7
-rw-r--r--  app/utils/logging/message_logger.py | 14
-rw-r--r--  app/utils/logging/mongo_logging_handler.py | 20
59 files changed, 583 insertions, 304 deletions
diff --git a/INFO b/INFO
index 83fdb69..3a83585 100644
--- a/INFO
+++ b/INFO
@@ -1,4 +1,4 @@
-Project: Virtual Infrastructure Network Assurance (Calipso)
+Project: Calipso - Virtual Infrastructure Networking Assurance
Project Creation Date: March 21st 2017
Project Category:
Lifecycle State:
diff --git a/app/discover/clique_finder.py b/app/discover/clique_finder.py
index 4d68eb4..4e04e7e 100644
--- a/app/discover/clique_finder.py
+++ b/app/discover/clique_finder.py
@@ -42,43 +42,74 @@ class CliqueFinder(Fetcher):
return self.links.find({'target': db_id})
def find_cliques(self):
- self.log.info("scanning for cliques")
+ self.log.info("Scanning for cliques")
clique_types = self.get_clique_types().values()
for clique_type in clique_types:
self.find_cliques_for_type(clique_type)
- self.log.info("finished scanning for cliques")
+ self.log.info("Finished scanning for cliques")
# Calculate priority score for clique type per environment and configuration
- def _get_priority_score(self, clique_type):
+ def get_priority_score(self, clique_type):
# environment-specific clique type takes precedence
- if self.env == clique_type['environment']:
- return 16
- if (self.env_config['distribution'] == clique_type.get('distribution')
- and
- self.env_config['distribution_version'] ==
- clique_type.get('distribution_version')):
- return 8
- if clique_type.get('mechanism_drivers') \
- in self.env_config['mechanism_drivers']:
- return 4
- if self.env_config['type_drivers'] == clique_type.get('type_drivers'):
- return 2
- if clique_type.get('environment', '') == 'ANY':
- # environment=ANY serves as fallback option, but it's not mandatory
- return 1
- else:
+ env = clique_type.get('environment')
+ config = self.env_config
+ # ECT - Clique Type with Environment name
+ if env:
+ if self.env == env:
+ return 2**6
+ if env == 'ANY':
+ # environment=ANY serves as fallback option
+ return 2**0
return 0
+ # NECT - Clique Type without Environment name
+ else:
+ env_type = clique_type.get('environment_type')
+ # TODO: remove backward compatibility ('if not env_type' check)
+ if env_type and env_type != config.get('environment_type'):
+ return 0
+
+ score = 0
+
+ distribution = clique_type.get('distribution')
+ if distribution:
+ if config['distribution'] != distribution:
+ return 0
+
+ score += 2**5
+
+ dv = clique_type.get('distribution_version')
+ if dv:
+ if dv != config['distribution_version']:
+ return 0
+ score += 2**4
+
+ mechanism_drivers = clique_type.get('mechanism_drivers')
+ if mechanism_drivers:
+ if mechanism_drivers not in config['mechanism_drivers']:
+ return 0
+ score += 2**3
+
+ type_drivers = clique_type.get('type_drivers')
+ if type_drivers:
+ if type_drivers != config['type_drivers']:
+ return 0
+ score += 2**2
+
+ # If no configuration is specified, this clique type
+ # is a fallback for its environment type
+ return max(score, 2**1)
# Get clique type with max priority
# for given focal point type
def _get_clique_type(self, clique_types):
- scored_clique_types = [{'score': self._get_priority_score(clique_type),
+ scored_clique_types = [{'score': self.get_priority_score(clique_type),
'clique_type': clique_type}
for clique_type in clique_types]
max_score = max(scored_clique_types, key=lambda t: t['score'])
if max_score['score'] == 0:
- self.log.warn('No matching clique types for focal point type: {}'
- .format(clique_types[0].get('focal_point_type')))
+ self.log.warn('No matching clique types '
+ 'for focal point type: {fp_type}'
+ .format(fp_type=clique_types[0].get('focal_point_type')))
return None
return max_score.get('clique_type')
@@ -143,8 +174,9 @@ class CliqueFinder(Fetcher):
clique["constraints"][c] = val
allow_implicit = clique_type.get('use_implicit_links', False)
for link_type in clique_type["link_types"]:
- self.check_link_type(clique, link_type, nodes_of_type,
- allow_implicit=allow_implicit)
+ if not self.check_link_type(clique, link_type, nodes_of_type,
+ allow_implicit=allow_implicit):
+ break
# after adding the links to the clique, create/update the clique
if not clique["links"]:
@@ -197,7 +229,7 @@ class CliqueFinder(Fetcher):
return CliqueFinder.link_type_reversed.get(link_type)
def check_link_type(self, clique, link_type, nodes_of_type,
- allow_implicit=False):
+ allow_implicit=False) -> bool:
# check if it's backwards
link_type_reversed = self.get_link_type_reversed(link_type)
# handle case of links like T<-->T
@@ -213,15 +245,16 @@ class CliqueFinder(Fetcher):
matches = self.links.find_one(link_search_condition)
use_reversed = True if matches else False
if self_linked or not use_reversed:
- self.check_link_type_forward(clique, link_type, nodes_of_type,
- allow_implicit=allow_implicit)
+ return self.check_link_type_forward(clique, link_type,
+ nodes_of_type,
+ allow_implicit=allow_implicit)
if self_linked or use_reversed:
- self.check_link_type_back(clique, link_type, nodes_of_type,
- allow_implicit=allow_implicit)
+ return self.check_link_type_back(clique, link_type, nodes_of_type,
+ allow_implicit=allow_implicit)
def check_link_type_for_direction(self, clique, link_type, nodes_of_type,
is_reversed=False,
- allow_implicit=False):
+ allow_implicit=False) -> bool:
if is_reversed:
link_type = self.get_link_type_reversed(link_type)
from_type = link_type[:link_type.index("-")]
@@ -230,7 +263,7 @@ class CliqueFinder(Fetcher):
other_side = 'target' if not is_reversed else 'source'
match_type = to_type if is_reversed else from_type
if match_type not in nodes_of_type.keys():
- return
+ return False
other_side_type = to_type if not is_reversed else from_type
nodes_to_add = set()
for match_point in nodes_of_type[match_type]:
@@ -245,6 +278,7 @@ class CliqueFinder(Fetcher):
nodes_of_type[other_side_type] = set()
nodes_of_type[other_side_type] = \
nodes_of_type[other_side_type] | nodes_to_add
+ return len(nodes_to_add) > 0
def find_matches_for_point(self, match_point, clique, link_type,
side_to_match, other_side,
@@ -271,13 +305,15 @@ class CliqueFinder(Fetcher):
return nodes_to_add
def check_link_type_forward(self, clique, link_type, nodes_of_type,
- allow_implicit=False):
- self.check_link_type_for_direction(clique, link_type, nodes_of_type,
- is_reversed=False,
- allow_implicit=allow_implicit)
+ allow_implicit=False) -> bool:
+ return self.check_link_type_for_direction(clique, link_type,
+ nodes_of_type,
+ is_reversed=False,
+ allow_implicit=allow_implicit)
def check_link_type_back(self, clique, link_type, nodes_of_type,
- allow_implicit=False):
- self.check_link_type_for_direction(clique, link_type, nodes_of_type,
- is_reversed=True,
- allow_implicit=allow_implicit)
+ allow_implicit=False) -> bool:
+ return self.check_link_type_for_direction(clique, link_type,
+ nodes_of_type,
+ is_reversed=True,
+ allow_implicit=allow_implicit)
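Note on the clique_finder.py change: the rewritten get_priority_score() weights each matching attribute with a distinct power of two, so a more specific clique type always outranks a less specific one. The following standalone sketch restates those rules (simplified: the environment_type guard is omitted, and the dict layout mirrors what the diff shows):

def priority_score(clique_type, env_name, env_config):
    env = clique_type.get('environment')
    if env:
        # clique type bound to an environment name
        if env == env_name:
            return 2 ** 6
        return 2 ** 0 if env == 'ANY' else 0   # 'ANY' is a weak fallback
    score = 0
    distribution = clique_type.get('distribution')
    if distribution:
        if env_config.get('distribution') != distribution:
            return 0                           # explicit mismatch disqualifies
        score += 2 ** 5
        dv = clique_type.get('distribution_version')
        if dv:
            if dv != env_config.get('distribution_version'):
                return 0
            score += 2 ** 4
    drivers = clique_type.get('mechanism_drivers')
    if drivers:
        if drivers not in env_config.get('mechanism_drivers', []):
            return 0
        score += 2 ** 3
    type_drivers = clique_type.get('type_drivers')
    if type_drivers:
        if type_drivers != env_config.get('type_drivers'):
            return 0
        score += 2 ** 2
    # a clique type with no configuration attributes is a fallback
    return max(score, 2 ** 1)

For example, a clique type matching distribution and distribution_version scores 2**5 + 2**4 = 48 and outranks one matching only mechanism_drivers (2**3 = 8), which in turn outranks the 'ANY' fallback (1).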
diff --git a/app/discover/configuration.py b/app/discover/configuration.py
index c7bc0c0..9ec8f96 100644
--- a/app/discover/configuration.py
+++ b/app/discover/configuration.py
@@ -47,6 +47,10 @@ class Configuration(metaclass=Singleton):
def get_env_name(self):
return self.env_name
+ def get_env_type(self):
+ return 'OpenStack' if 'environment_type' not in self.environment \
+ else self.environment['environment_type']
+
def update_env(self, values):
self.collection.update_one({"name": self.env_name},
{'$set': MongoAccess.encode_mongo_keys(values)})
diff --git a/app/discover/events/event_base.py b/app/discover/events/event_base.py
index 6b3b290..4b466e1 100644
--- a/app/discover/events/event_base.py
+++ b/app/discover/events/event_base.py
@@ -11,6 +11,7 @@ from abc import abstractmethod, ABC
from discover.fetcher import Fetcher
from utils.inventory_mgr import InventoryMgr
+from utils.origins import ScanOrigin, ScanOrigins
class EventResult:
@@ -23,6 +24,8 @@ class EventResult:
self.message = message
self.related_object = related_object
self.display_context = display_context
+ self.origin = ScanOrigin(origin_id=None,
+ origin_type=ScanOrigins.EVENT)
class EventBase(Fetcher, ABC):
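Note on the event_base.py change: EventResult now carries a ScanOrigin so that inventory and log entries produced by event handlers can be told apart from scheduled and manual scans. The real classes live in utils/origins.py, which is not part of this diff; the sketch below only illustrates the shape implied by the calls above (the enum values are assumptions, only the member names appear in the diff):

from enum import Enum
from typing import NamedTuple, Optional

class ScanOrigins(Enum):
    MANUAL = 'manual'
    SCHEDULED = 'scheduled'
    EVENT = 'event'

class ScanOrigin(NamedTuple):
    origin_id: Optional[str]
    origin_type: ScanOrigins

origin = ScanOrigin(origin_id=None, origin_type=ScanOrigins.EVENT)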
diff --git a/app/discover/events/event_instance_add.py b/app/discover/events/event_instance_add.py
index 4dd2b20..a8717a5 100644
--- a/app/discover/events/event_instance_add.py
+++ b/app/discover/events/event_instance_add.py
@@ -25,7 +25,7 @@ class EventInstanceAdd(EventBase):
# scan instance
scanner = Scanner()
- scanner.set_env(env)
+ scanner.setup(env=env, origin=self.origin)
scanner.scan("ScanInstancesRoot", instances_root,
limit_to_child_id=instance_id,
limit_to_child_type='instance')
diff --git a/app/discover/events/event_interface_add.py b/app/discover/events/event_interface_add.py
index e54bedb..f0ba569 100644
--- a/app/discover/events/event_interface_add.py
+++ b/app/discover/events/event_interface_add.py
@@ -30,7 +30,7 @@ class EventInterfaceAdd(EventBase):
def add_gateway_port(self, env, project, network_name, router_doc, host_id):
fetcher = CliFetchHostVservice()
- fetcher.set_env(env)
+ fetcher.setup(env=env, origin=self.origin)
router_id = router_doc['id']
router = fetcher.get_vservice(host_id, router_id)
device_id = decode_router_id(router_id)
@@ -101,7 +101,7 @@ class EventInterfaceAdd(EventBase):
# add router-interface port document.
if not ApiAccess.regions:
fetcher = ApiFetchRegions()
- fetcher.set_env(env)
+ fetcher.setup(env=env, origin=self.origin)
fetcher.get(project_id)
port_doc = EventSubnetAdd().add_port_document(env, port_id,
network_name=network_name)
@@ -134,7 +134,7 @@ class EventInterfaceAdd(EventBase):
# update vservice-vnic, vnic-network,
FindLinksForVserviceVnics().add_links(search={"parent_id": router_id})
scanner = Scanner()
- scanner.set_env(env)
+ scanner.setup(env=env, origin=self.origin)
scanner.scan_cliques()
self.log.info("Finished router-interface added.")
diff --git a/app/discover/events/event_port_add.py b/app/discover/events/event_port_add.py
index 9220015..e03db34 100644
--- a/app/discover/events/event_port_add.py
+++ b/app/discover/events/event_port_add.py
@@ -168,7 +168,7 @@ class EventPortAdd(EventBase):
"router": ('Gateways', router_name)}
fetcher = CliFetchVserviceVnics()
- fetcher.set_env(env)
+ fetcher.setup(env=env, origin=self.origin)
namespace = 'q{}-{}'.format(object_type, object_id)
vnic_documents = fetcher.handle_service(host['id'], namespace, enable_cache=False)
if not vnic_documents:
@@ -258,7 +258,7 @@ class EventPortAdd(EventBase):
# update instance
instance_fetcher = ApiFetchHostInstances()
- instance_fetcher.set_env(env)
+ instance_fetcher.setup(env=env, origin=self.origin)
instance_docs = instance_fetcher.get(host_id + '-')
instance = next(filter(lambda i: i['id'] == instance_id, instance_docs), None)
@@ -278,7 +278,7 @@ class EventPortAdd(EventBase):
# set ovs as default type.
vnic_fetcher = CliFetchInstanceVnics()
- vnic_fetcher.set_env(env)
+ vnic_fetcher.setup(env=env, origin=self.origin)
vnic_docs = vnic_fetcher.get(instance_id + '-')
vnic = next(filter(lambda vnic: vnic['mac_address'] == mac_address, vnic_docs), None)
@@ -298,7 +298,7 @@ class EventPortAdd(EventBase):
for fetcher in fetchers_implementing_add_links:
fetcher.add_links()
scanner = Scanner()
- scanner.set_env(env)
+ scanner.setup(env=env, origin=self.origin)
scanner.scan_cliques()
port_document = self.inv.get_by_id(env, port['id'])
diff --git a/app/discover/events/event_port_delete.py b/app/discover/events/event_port_delete.py
index 1e55870..937d8df 100644
--- a/app/discover/events/event_port_delete.py
+++ b/app/discover/events/event_port_delete.py
@@ -61,7 +61,7 @@ class EventPortDelete(EventDeleteBase):
# update instance mac address.
if port_doc['mac_address'] == instance_doc['mac_address']:
instance_fetcher = ApiFetchHostInstances()
- instance_fetcher.set_env(env)
+ instance_fetcher.setup(env=env, origin=self.origin)
host_id = port_doc['binding:host_id']
instance_id = port_doc['device_id']
instance_docs = instance_fetcher.get(host_id + '-')
diff --git a/app/discover/events/event_router_add.py b/app/discover/events/event_router_add.py
index 1fb2244..0f8bc05 100644
--- a/app/discover/events/event_router_add.py
+++ b/app/discover/events/event_router_add.py
@@ -100,7 +100,7 @@ class EventRouterAdd(EventBase):
host = self.inv.get_by_id(env, host_id)
fetcher = CliFetchHostVservice()
- fetcher.set_env(env)
+ fetcher.setup(env=env, origin=self.origin)
router_doc = fetcher.get_vservice(host_id, router_id)
gateway_info = router['external_gateway_info']
@@ -114,7 +114,7 @@ class EventRouterAdd(EventBase):
# scan links and cliques
FindLinksForVserviceVnics().add_links(search={"parent_id": router_id})
scanner = Scanner()
- scanner.set_env(env)
+ scanner.setup(env=env, origin=self.origin)
scanner.scan_cliques()
self.log.info("Finished router added.")
diff --git a/app/discover/events/event_router_update.py b/app/discover/events/event_router_update.py
index b63b224..f20f07e 100644
--- a/app/discover/events/event_router_update.py
+++ b/app/discover/events/event_router_update.py
@@ -60,7 +60,7 @@ class EventRouterUpdate(EventBase):
# add gw_port_id info and port document.
fetcher = CliFetchHostVservice()
- fetcher.set_env(env)
+ fetcher.setup(env=env, origin=self.origin)
router_vservice = fetcher.get_vservice(host_id, router_full_id)
if router_vservice.get('gw_port_id'):
router_doc['gw_port_id'] = router_vservice['gw_port_id']
@@ -74,7 +74,7 @@ class EventRouterUpdate(EventBase):
# update the cliques.
scanner = Scanner()
- scanner.set_env(env)
+ scanner.setup(env=env, origin=self.origin)
scanner.scan_cliques()
self.log.info("Finished router update.")
return EventResult(result=True,
diff --git a/app/discover/events/event_subnet_add.py b/app/discover/events/event_subnet_add.py
index 4126e0c..0a91803 100644
--- a/app/discover/events/event_subnet_add.py
+++ b/app/discover/events/event_subnet_add.py
@@ -29,7 +29,7 @@ class EventSubnetAdd(EventBase):
# document does not has project attribute. In this case, network_name should not be provided.
fetcher = ApiFetchPort()
- fetcher.set_env(env)
+ fetcher.setup(env=env, origin=self.origin)
ports = fetcher.get(port_id)
if ports:
@@ -133,7 +133,7 @@ class EventSubnetAdd(EventBase):
# update network
if not ApiAccess.regions:
fetcher = ApiFetchRegions()
- fetcher.set_env(env)
+ fetcher.setup(env=env, origin=self.origin)
fetcher.get(project_id)
self.log.info("add new subnet.")
@@ -146,7 +146,7 @@ class EventSubnetAdd(EventBase):
FindLinksForVserviceVnics().add_links(search={"parent_id": "qdhcp-%s-vnics" % network_id})
scanner = Scanner()
- scanner.set_env(env)
+ scanner.setup(env=env, origin=self.origin)
scanner.scan_cliques()
self.log.info("Finished subnet added.")
return EventResult(result=True,
diff --git a/app/discover/events/event_subnet_update.py b/app/discover/events/event_subnet_update.py
index 59b0afb..2c58e70 100644
--- a/app/discover/events/event_subnet_update.py
+++ b/app/discover/events/event_subnet_update.py
@@ -50,7 +50,7 @@ class EventSubnetUpdate(EventBase):
# make sure that self.regions is not empty.
if not ApiAccess.regions:
fetcher = ApiFetchRegions()
- fetcher.set_env(env)
+ fetcher.setup(env=env, origin=self.origin)
fetcher.get(project_id)
self.log.info("add port binding to DHCP server.")
@@ -69,12 +69,12 @@ class EventSubnetUpdate(EventBase):
# add link for vservice - vnic
FindLinksForVserviceVnics().add_links(search={"id": "qdhcp-%s" % network_id})
scanner = Scanner()
- scanner.set_env(env)
+ scanner.setup(env=env, origin=self.origin)
scanner.scan_cliques()
FindLinksForVserviceVnics(). \
add_links(search={"id": "qdhcp-%s" % network_id})
scanner = Scanner()
- scanner.set_env(env)
+ scanner.setup(env=env, origin=self.origin)
scanner.scan_cliques()
if subnet['enable_dhcp'] is False and subnets[key]['enable_dhcp']:
diff --git a/app/discover/fetcher.py b/app/discover/fetcher.py
index 8d7fdbb..707cd60 100644
--- a/app/discover/fetcher.py
+++ b/app/discover/fetcher.py
@@ -8,16 +8,21 @@
# http://www.apache.org/licenses/LICENSE-2.0 #
###############################################################################
from discover.configuration import Configuration
+from utils.origins import Origin
from utils.logging.full_logger import FullLogger
class Fetcher:
+ ENV_TYPE_KUBERNETES = 'Kubernetes'
+ ENV_TYPE_OPENSTACK = 'OpenStack'
+
def __init__(self):
super().__init__()
self.env = None
self.log = FullLogger()
self.configuration = None
+ self.origin = None
@staticmethod
def escape(string):
@@ -25,11 +30,55 @@ class Fetcher:
def set_env(self, env):
self.env = env
- self.log.set_env(env)
+ self.log.setup(env=env)
self.configuration = Configuration()
+ def setup(self, env, origin: Origin = None):
+ self.set_env(env=env)
+ if origin:
+ self.origin = origin
+ self.log.setup(origin=origin)
+
def get_env(self):
return self.env
def get(self, object_id):
return None
+
+ def set_folder_parent(self,
+ o: dict,
+ object_type: str =None,
+ master_parent_type: str =None,
+ master_parent_id: str =None,
+ parent_objects_name=None,
+ parent_type: str =None,
+ parent_id: str =None,
+ parent_text: str =None):
+ if object_type:
+ o['type'] = object_type
+ if not parent_objects_name:
+ parent_objects_name = '{}s'.format(object_type)
+ if not master_parent_type:
+ self.log.error('set_folder_parent: must specify: '
+ 'master_parent_type, master_parent_id, '
+ 'parent_type', 'parent_id')
+ return
+ if not parent_objects_name and not parent_type:
+ self.log.error('set_folder_parent: must specify: '
+ 'either parent_objects_name (e.g. "vedges") '
+ 'or parent_type and parent_id')
+ return
+ if parent_objects_name and not parent_type:
+ parent_type = '{}_folder'.format(parent_objects_name)
+ if parent_objects_name and not parent_id:
+ parent_id = '{}-{}'.format(master_parent_id, parent_objects_name)
+ o.update({
+ 'master_parent_type': master_parent_type,
+ 'master_parent_id': master_parent_id,
+ 'parent_type': parent_type,
+ 'parent_id': parent_id
+ })
+ if parent_text:
+ o['parent_text'] = parent_text
+ elif parent_objects_name:
+ o['parent_text'] = parent_objects_name.capitalize()
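Note on the fetcher.py change: the new setup() lets callers pass the environment and the scan origin in one call, and set_folder_parent() centralizes the parent/folder bookkeeping that the fetchers below previously spelled out field by field. A simplified standalone sketch of the defaulting it performs (error handling omitted):

def set_folder_parent(o, object_type=None, master_parent_type=None,
                      master_parent_id=None, parent_objects_name=None,
                      parent_type=None, parent_id=None, parent_text=None):
    # derive folder type/id from the object type unless given explicitly
    if object_type:
        o['type'] = object_type
        if not parent_objects_name:
            parent_objects_name = '{}s'.format(object_type)
    if parent_objects_name and not parent_type:
        parent_type = '{}_folder'.format(parent_objects_name)
    if parent_objects_name and not parent_id:
        parent_id = '{}-{}'.format(master_parent_id, parent_objects_name)
    o.update({'master_parent_type': master_parent_type,
              'master_parent_id': master_parent_id,
              'parent_type': parent_type,
              'parent_id': parent_id})
    if parent_text or parent_objects_name:
        o['parent_text'] = parent_text or parent_objects_name.capitalize()

doc = {'id': 'az-1', 'name': 'internal'}     # illustrative document
set_folder_parent(doc, object_type='availability_zone',
                  master_parent_type='region', master_parent_id='RegionOne',
                  parent_text='Availability Zones')
# doc['parent_type'] == 'availability_zones_folder'
# doc['parent_id']   == 'RegionOne-availability_zones'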
diff --git a/app/discover/fetchers/api/api_fetch_availability_zones.py b/app/discover/fetchers/api/api_fetch_availability_zones.py
index 196893b..ad9550e 100644
--- a/app/discover/fetchers/api/api_fetch_availability_zones.py
+++ b/app/discover/fetchers/api/api_fetch_availability_zones.py
@@ -28,7 +28,7 @@ class ApiFetchAvailabilityZones(ApiAccess):
# because the later does not inclde the "internal" zone in the results
endpoint = self.get_region_url_nover(region, "nova")
req_url = endpoint + "/v2/" + token["tenant"]["id"] + \
- "/os-availability-zone/detail"
+ "/os-availability-zone/detail"
headers = {
"X-Auth-Project-Id": project,
"X-Auth-Token": token["id"]
@@ -45,11 +45,10 @@ class ApiFetchAvailabilityZones(ApiAccess):
for doc in azs:
doc["id"] = doc["zoneName"]
doc["name"] = doc.pop("zoneName")
- doc["master_parent_type"] = "region"
- doc["master_parent_id"] = region
- doc["parent_type"] = "availability_zones_folder"
- doc["parent_id"] = region + "-availability_zones"
- doc["parent_text"] = "Availability Zones"
+ self.set_folder_parent(doc, object_type="availability_zone",
+ master_parent_type="region",
+ master_parent_id=region,
+ parent_text="Availability Zones")
doc["available"] = doc["zoneState"]["available"]
doc.pop("zoneState")
ret.append(doc)
diff --git a/app/discover/fetchers/api/api_fetch_network.py b/app/discover/fetchers/api/api_fetch_network.py
index 889b8a5..b253773 100644
--- a/app/discover/fetchers/api/api_fetch_network.py
+++ b/app/discover/fetchers/api/api_fetch_network.py
@@ -23,7 +23,8 @@ class ApiFetchNetwork(ApiAccess):
return []
ret = []
for region in self.regions:
- # TODO: refactor legacy code (Unresolved reference - self.get_for_region)
+ # TODO: refactor legacy code
+ # (Unresolved reference - self.get_for_region)
ret.extend(self.get_for_region(region, token, project_id))
return ret
@@ -37,7 +38,7 @@ class ApiFetchNetwork(ApiAccess):
"X-Auth-Token": token["id"]
}
response = self.get_url(req_url, headers)
- if not "network" in response:
+ if "network" not in response:
return []
network = response["network"]
subnets = network['subnets']
@@ -60,13 +61,12 @@ class ApiFetchNetwork(ApiAccess):
network["cidrs"] = cidrs
network["subnet_ids"] = subnet_ids
- network["master_parent_type"] = "project"
- network["master_parent_id"] = network["tenant_id"]
- network["parent_type"] = "networks_folder"
- network["parent_id"] = network["tenant_id"] + "-networks"
- network["parent_text"] = "Networks"
- # set the 'network' attribute for network objects to the name of network,
- # to allow setting constraint on network when creating network clique
+ self.set_folder_parent(network, object_type="network",
+ master_parent_type="project",
+ master_parent_id=network["tenant_id"])
+ # set the 'network' attribute for network objects to the name of
+ # network, to allow setting constraint on network when creating
+ # network clique
network['network'] = network["id"]
# get the project name
project = self.inv.get_by_id(self.get_env(), network["tenant_id"])
diff --git a/app/discover/fetchers/api/api_fetch_networks.py b/app/discover/fetchers/api/api_fetch_networks.py
index 4b70f65..f76517a 100644
--- a/app/discover/fetchers/api/api_fetch_networks.py
+++ b/app/discover/fetchers/api/api_fetch_networks.py
@@ -34,7 +34,7 @@ class ApiFetchNetworks(ApiAccess):
"X-Auth-Token": token["id"]
}
response = self.get_url(req_url, headers)
- if not "networks" in response:
+ if "networks" not in response:
return []
networks = response["networks"]
req_url = endpoint + "/v2.0/subnets"
@@ -46,7 +46,6 @@ class ApiFetchNetworks(ApiAccess):
for s in subnets:
subnets_hash[s["id"]] = s
for doc in networks:
- doc["master_parent_type"] = "project"
project_id = doc["tenant_id"]
if not project_id:
# find project ID of admin project
@@ -57,12 +56,12 @@ class ApiFetchNetworks(ApiAccess):
if not project:
self.log.error("failed to find admin project in DB")
project_id = project["id"]
- doc["master_parent_id"] = project_id
- doc["parent_type"] = "networks_folder"
- doc["parent_id"] = project_id + "-networks"
- doc["parent_text"] = "Networks"
- # set the 'network' attribute for network objects to the name of network,
- # to allow setting constraint on network when creating network clique
+ self.set_folder_parent(doc, object_type='network',
+ master_parent_id=project_id,
+ master_parent_type='project')
+ # set the 'network' attribute for network objects to the name of
+ # network, to allow setting constraint on network when creating
+ # network clique
doc['network'] = doc["id"]
# get the project name
project = self.inv.get_by_id(self.get_env(), project_id)
diff --git a/app/discover/fetchers/api/api_fetch_port.py b/app/discover/fetchers/api/api_fetch_port.py
index f8d9eeb..8de1452 100644
--- a/app/discover/fetchers/api/api_fetch_port.py
+++ b/app/discover/fetchers/api/api_fetch_port.py
@@ -43,11 +43,9 @@ class ApiFetchPort(ApiAccess):
return []
doc = response["port"]
- doc["master_parent_type"] = "network"
- doc["master_parent_id"] = doc["network_id"]
- doc["parent_type"] = "ports_folder"
- doc["parent_id"] = doc["network_id"] + "-ports"
- doc["parent_text"] = "Ports"
+ self.set_folder_parent(doc, object_type="port",
+ master_parent_type="network",
+ master_parent_id=doc["network_id"])
# get the project name
net = self.inv.get_by_id(self.get_env(), doc["network_id"])
if net:
diff --git a/app/discover/fetchers/api/api_fetch_ports.py b/app/discover/fetchers/api/api_fetch_ports.py
index f4c54a6..5e44c1b 100644
--- a/app/discover/fetchers/api/api_fetch_ports.py
+++ b/app/discover/fetchers/api/api_fetch_ports.py
@@ -38,11 +38,9 @@ class ApiFetchPorts(ApiAccess):
return []
ports = response["ports"]
for doc in ports:
- doc["master_parent_type"] = "network"
- doc["master_parent_id"] = doc["network_id"]
- doc["parent_type"] = "ports_folder"
- doc["parent_id"] = doc["network_id"] + "-ports"
- doc["parent_text"] = "Ports"
+ self.set_folder_parent(doc, object_type="port",
+ master_parent_type="network",
+ master_parent_id=doc["network_id"])
# get the project name
net = self.inv.get_by_id(self.get_env(), doc["network_id"])
if net:
diff --git a/app/discover/fetchers/api/api_fetch_project_hosts.py b/app/discover/fetchers/api/api_fetch_project_hosts.py
index 2aeb24f..1059600 100644
--- a/app/discover/fetchers/api/api_fetch_project_hosts.py
+++ b/app/discover/fetchers/api/api_fetch_project_hosts.py
@@ -11,11 +11,11 @@ import json
from discover.fetchers.api.api_access import ApiAccess
from discover.fetchers.db.db_access import DbAccess
-from discover.fetchers.cli.cli_access import CliAccess
+from discover.fetchers.cli.cli_fetch_host_details import CliFetchHostDetails
from utils.ssh_connection import SshError
-class ApiFetchProjectHosts(ApiAccess, DbAccess, CliAccess):
+class ApiFetchProjectHosts(ApiAccess, DbAccess, CliFetchHostDetails):
def __init__(self):
super(ApiFetchProjectHosts, self).__init__()
diff --git a/app/discover/fetchers/cli/cli_access.py b/app/discover/fetchers/cli/cli_access.py
index c77b22a..68b81c8 100644
--- a/app/discover/fetchers/cli/cli_access.py
+++ b/app/discover/fetchers/cli/cli_access.py
@@ -17,7 +17,7 @@ from utils.logging.console_logger import ConsoleLogger
from utils.ssh_conn import SshConn
-class CliAccess(BinaryConverter, Fetcher):
+class CliAccess(Fetcher, BinaryConverter):
connections = {}
ssh_cmd = "ssh -q -o StrictHostKeyChecking=no "
call_count_per_con = {}
@@ -71,8 +71,9 @@ class CliAccess(BinaryConverter, Fetcher):
self.cached_commands[cmd_path] = {"timestamp": curr_time, "result": ret}
return ret
- def run_fetch_lines(self, cmd, ssh_to_host="", enable_cache=True):
- out = self.run(cmd, ssh_to_host, enable_cache)
+ def run_fetch_lines(self, cmd, ssh_to_host="", enable_cache=True,
+ use_sudo=True):
+ out = self.run(cmd, ssh_to_host, enable_cache, use_sudo=use_sudo)
if not out:
return []
# first try to split lines by whitespace
@@ -236,7 +237,7 @@ class CliAccess(BinaryConverter, Fetcher):
self.find_matching_regexps(o, line, regexps)
for regexp_tuple in regexps:
name = regexp_tuple['name']
- if 'name' not in o and 'default' in regexp_tuple:
+ if name not in o and 'default' in regexp_tuple:
o[name] = regexp_tuple['default']
@staticmethod
@@ -247,4 +248,8 @@ class CliAccess(BinaryConverter, Fetcher):
regex = re.compile(regex)
matches = regex.search(line)
if matches and name not in o:
- o[name] = matches.group(1)
+ try:
+ o[name] = matches.group(1)
+ except IndexError as e:
+ self.log.error('failed to find group 1 in match, {}'
+ .format(str(regexp_tuple)))
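Note on the cli_access.py change: the first hunk fixes a bug where regexp defaults were keyed on the literal string 'name' instead of the regexp's own name, and the second guards against patterns without a capture group. A standalone sketch of the descriptor pattern involved (the regexps here are illustrative, not the ones used by the pNIC fetcher):

import re

regexps = [
    {'name': 'mac_address', 're': r'link/ether (\S+)', 'default': ''},
    {'name': 'state', 're': r'state (\S+)', 'default': 'UNKNOWN'},
]

def find_matching_regexps(o, line, descriptors):
    for d in descriptors:
        matches = re.search(d['re'], line)
        if matches and d['name'] not in o:
            o[d['name']] = matches.group(1)

o = {}
find_matching_regexps(o, 'link/ether fa:16:3e:aa:bb:cc brd ff:ff:ff:ff:ff:ff',
                      regexps)
for d in regexps:
    if d['name'] not in o and 'default' in d:   # the corrected per-name check
        o[d['name']] = d['default']
# o == {'mac_address': 'fa:16:3e:aa:bb:cc', 'state': 'UNKNOWN'}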
diff --git a/app/discover/fetchers/cli/cli_fetch_host_pnics.py b/app/discover/fetchers/cli/cli_fetch_host_pnics.py
index 26cd603..81d164d 100644
--- a/app/discover/fetchers/cli/cli_fetch_host_pnics.py
+++ b/app/discover/fetchers/cli/cli_fetch_host_pnics.py
@@ -27,8 +27,8 @@ class CliFetchHostPnics(CliAccess):
'description': 'IPv6 Address'}
]
- def get(self, id):
- host_id = id[:id.rindex("-")]
+ def get(self, parent_id):
+ host_id = parent_id[:parent_id.rindex("-")]
cmd = 'ls -l /sys/class/net | grep ^l | grep -v "/virtual/"'
host = self.inv.get_by_id(self.get_env(), host_id)
if not host:
@@ -39,7 +39,8 @@ class CliFetchHostPnics(CliAccess):
", host: " + str(host))
return []
host_types = host["host_type"]
- if "Network" not in host_types and "Compute" not in host_types:
+ accepted_host_types = ['Network', 'Compute', 'Kube-node']
+ if not [t for t in accepted_host_types if t in host_types]:
return []
interface_lines = self.run_fetch_lines(cmd, host_id)
interfaces = []
diff --git a/app/discover/fetchers/cli/cli_fetch_vconnectors_ovs.py b/app/discover/fetchers/cli/cli_fetch_vconnectors_ovs.py
index ff37569..ac04568 100644
--- a/app/discover/fetchers/cli/cli_fetch_vconnectors_ovs.py
+++ b/app/discover/fetchers/cli/cli_fetch_vconnectors_ovs.py
@@ -18,8 +18,8 @@ class CliFetchVconnectorsOvs(CliFetchVconnectors):
def get_vconnectors(self, host):
host_id = host['id']
- lines = self.run_fetch_lines("brctl show", host_id)
- headers = ["bridge_name", "bridge_id", "stp_enabled", "interfaces"]
+ lines = self.run_fetch_lines('brctl show', host_id)
+ headers = ['bridge_name', 'bridge_id', 'stp_enabled', 'interfaces']
headers_count = len(headers)
# since we hard-coded the headers list, remove the headers line
del lines[:1]
@@ -31,26 +31,32 @@ class CliFetchVconnectorsOvs(CliFetchVconnectors):
results = self.parse_cmd_result_with_whitespace(fixed_lines, headers, False)
ret = []
for doc in results:
- doc["name"] = doc.pop("bridge_name")
- doc["id"] = doc["name"] + "-" + doc.pop("bridge_id")
- doc["host"] = host_id
- doc["connector_type"] = "bridge"
- if "interfaces" in doc:
- interfaces = {}
- interface_names = doc["interfaces"].split(",")
- for interface_name in interface_names:
- # find MAC address for this interface from ports list
- port_id_prefix = interface_name[3:]
- port = self.inv.find_items({
- "environment": self.get_env(),
- "type": "port",
- "binding:host_id": host_id,
- "id": {"$regex": r"^" + re.escape(port_id_prefix)}
- }, get_single=True)
- mac_address = '' if not port else port['mac_address']
- interface = {'name': interface_name, 'mac_address': mac_address}
- interfaces[interface_name] = interface
- doc["interfaces"] = interfaces
- doc['interfaces_names'] = list(interfaces.keys())
- ret.append(doc)
+ doc['name'] = '{}-{}'.format(host_id, doc['bridge_name'])
+ doc['id'] = '{}-{}'.format(doc['name'], doc.pop('bridge_id'))
+ doc['host'] = host_id
+ doc['connector_type'] = 'bridge'
+ self.get_vconnector_interfaces(doc, host_id)
+ ret.append(doc)
return ret
+
+ def get_vconnector_interfaces(self, doc, host_id):
+ if 'interfaces' not in doc:
+ doc['interfaces'] = {}
+ doc['interfaces_names'] = []
+ return
+ interfaces = {}
+ interface_names = doc['interfaces'].split(',')
+ for interface_name in interface_names:
+ # find MAC address for this interface from ports list
+ port_id_prefix = interface_name[3:]
+ port = self.inv.find_items({
+ 'environment': self.get_env(),
+ 'type': 'port',
+ 'binding:host_id': host_id,
+ 'id': {'$regex': r'^' + re.escape(port_id_prefix)}
+ }, get_single=True)
+ mac_address = '' if not port else port['mac_address']
+ interface = {'name': interface_name, 'mac_address': mac_address}
+ interfaces[interface_name] = interface
+ doc['interfaces'] = interfaces
+ doc['interfaces_names'] = list(interfaces.keys())
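Note on the cli_fetch_vconnectors_ovs.py change: vConnector documents are now named with the host id as a prefix, since bridge names such as br-mesh repeat across hosts; db_fetch_oteps.py below is updated to reference the same prefixed name. A small sketch of the naming (the bridge_id value is illustrative):

host_id = 'node-5.cisco.com'
doc = {'bridge_name': 'br-mesh', 'bridge_id': '8000.000000000001'}
doc['name'] = '{}-{}'.format(host_id, doc['bridge_name'])
doc['id'] = '{}-{}'.format(doc['name'], doc.pop('bridge_id'))
# name == 'node-5.cisco.com-br-mesh'
# id   == 'node-5.cisco.com-br-mesh-8000.000000000001'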
diff --git a/app/discover/fetchers/cli/cli_fetch_vservice_vnics.py b/app/discover/fetchers/cli/cli_fetch_vservice_vnics.py
index 3bc3a5b..0129d3b 100644
--- a/app/discover/fetchers/cli/cli_fetch_vservice_vnics.py
+++ b/app/discover/fetchers/cli/cli_fetch_vservice_vnics.py
@@ -66,17 +66,15 @@ class CliFetchVserviceVnics(CliAccess):
master_parent_id = "{}-{}".format(host, service)
current = {
"id": host + "-" + name,
- "type": "vnic",
"vnic_type": "vservice_vnic",
"host": host,
"name": name,
- "master_parent_type": "vservice",
- "master_parent_id": master_parent_id,
- "parent_type": "vnics_folder",
- "parent_id": "{}-vnics".format(master_parent_id),
- "parent_text": "vNICs",
"lines": []
}
+ self.set_folder_parent(current, object_type="vnic",
+ master_parent_type="vservice",
+ master_parent_id=master_parent_id,
+ parent_text="vNICs")
interfaces.append(current)
self.handle_line(current, line_remainder)
else:
diff --git a/app/discover/fetchers/db/db_fetch_oteps.py b/app/discover/fetchers/db/db_fetch_oteps.py
index 85376ed..7721136 100644
--- a/app/discover/fetchers/db/db_fetch_oteps.py
+++ b/app/discover/fetchers/db/db_fetch_oteps.py
@@ -82,4 +82,4 @@ class DbFetchOteps(DbAccess, CliAccess, metaclass=Singleton):
interface = l.split(":")[1].strip()
if vconnector:
- doc["vconnector"] = vconnector
+ doc["vconnector"] = '{}-{}'.format(host_id, vconnector)
diff --git a/app/discover/link_finders/find_links_for_vedges.py b/app/discover/link_finders/find_links_for_vedges.py
index f9719b4..afabdbe 100644
--- a/app/discover/link_finders/find_links_for_vedges.py
+++ b/app/discover/link_finders/find_links_for_vedges.py
@@ -104,8 +104,6 @@ class FindLinksForVedges(FindLinks):
if "pnic" in vedge:
if pname != vedge["pnic"]:
return
- elif self.configuration.has_network_plugin('VPP'):
- pass
pnic = self.inv.find_items({
"environment": self.get_env(),
"type": "host_pnic",
diff --git a/app/discover/scan.py b/app/discover/scan.py
index 49f37ff..fb5e833 100755
--- a/app/discover/scan.py
+++ b/app/discover/scan.py
@@ -22,6 +22,7 @@ from discover.scan_error import ScanError
from discover.scanner import Scanner
from monitoring.setup.monitoring_setup_manager import MonitoringSetupManager
from utils.constants import EnvironmentFeatures
+from utils.origins import ScanOrigin, ScanOrigins
from utils.mongo_access import MongoAccess
from utils.exceptions import ScanArgumentsError
from utils.inventory_mgr import InventoryMgr
@@ -112,6 +113,7 @@ class ScanPlan:
class ScanController(Fetcher):
DEFAULTS = {
+ "_id": None,
"env": "",
"mongo_config": "",
"type": "",
@@ -126,7 +128,8 @@ class ScanController(Fetcher):
"cliques_only": False,
"monitoring_setup_only": False,
"clear": False,
- "clear_all": False
+ "clear_all": False,
+ "scheduled": False
}
def __init__(self):
@@ -274,9 +277,13 @@ class ScanController(Fetcher):
self.conf.use_env(env_name)
# generate ScanObject Class and instance.
+ origin = ScanOrigin(origin_id=args['_id'],
+ origin_type=ScanOrigins.SCHEDULED
+ if args["scheduled"]
+ else ScanOrigins.MANUAL)
scanner = Scanner()
scanner.log.set_loglevel(args['loglevel'])
- scanner.set_env(env_name)
+ scanner.setup(env=env_name, origin=origin)
scanner.found_errors[env_name] = False
# decide what scanning operations to do
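Note on the scan.py change: the scan controller now tags each run with a ScanOrigin — requests coming from the scheduler are marked SCHEDULED, runs started by hand MANUAL, while event handlers use EVENT (see the event_base.py note earlier). Reusing the ScanOrigin/ScanOrigins sketch from that note, the selection amounts to:

args = {'_id': 'scan-request-id-1', 'scheduled': True}   # illustrative values
origin = ScanOrigin(origin_id=args['_id'],
                    origin_type=ScanOrigins.SCHEDULED
                    if args['scheduled'] else ScanOrigins.MANUAL)
# scanner.setup(env=env_name, origin=origin) then propagates the origin to
# every fetcher, link finder and clique finder that the scan creates.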
diff --git a/app/discover/scan_manager.py b/app/discover/scan_manager.py
index 91dd06c..6e31bbd 100644
--- a/app/discover/scan_manager.py
+++ b/app/discover/scan_manager.py
@@ -103,7 +103,8 @@ class ScanManager(Manager):
def _build_scan_args(self, scan_request: dict):
args = {
- 'mongo_config': self.args.mongo_config
+ 'mongo_config': self.args.mongo_config,
+ 'scheduled': True if scan_request.get('interval') else False
}
def set_arg(name_from: str, name_to: str = None):
@@ -113,6 +114,7 @@ class ScanManager(Manager):
if val:
args[name_to] = val
+ set_arg("_id")
set_arg("object_id", "id")
set_arg("log_level", "loglevel")
set_arg("environment", "env")
diff --git a/app/discover/scanner.py b/app/discover/scanner.py
index 8aac40b..8d36baf 100644
--- a/app/discover/scanner.py
+++ b/app/discover/scanner.py
@@ -10,6 +10,7 @@
# base class for scanners
import json
+
import os
import queue
import traceback
@@ -27,9 +28,6 @@ from utils.ssh_connection import SshError
class Scanner(Fetcher):
- ENV_TYPE_OPENSTACK = 'OpenStack'
- ENV_TYPE_KUBERNETES = 'Kubernetes'
-
config = None
environment = None
env = None
@@ -92,11 +90,11 @@ class Scanner(Fetcher):
else basic_cond
if not env_cond:
env_cond = basic_cond
- if 'environment_type' not in env_cond:
+ if 'environment_type' not in env_cond.keys():
env_cond.update(basic_cond)
if not isinstance(env_cond, dict):
- self.log.warn('illegal environment_condition given '
- 'for type {}'.format(type_to_fetch['type']))
+ self.log.warn('Illegal environment_condition given '
+ 'for type {type}'.format(type=type_to_fetch['type']))
return True
conf = self.config.get_env_config()
if 'environment_type' not in conf:
@@ -104,14 +102,24 @@ class Scanner(Fetcher):
for attr, required_val in env_cond.items():
if attr == "mechanism_drivers":
if "mechanism_drivers" not in conf:
- self.log.warn('illegal environment configuration: '
+ self.log.warn('Illegal environment configuration: '
'missing mechanism_drivers')
return False
if not isinstance(required_val, list):
required_val = [required_val]
- return bool(set(required_val) & set(conf["mechanism_drivers"]))
- elif attr not in conf or conf[attr] != required_val:
+ value_ok = bool(set(required_val) &
+ set(conf["mechanism_drivers"]))
+ if not value_ok:
+ return False
+ elif attr not in conf:
return False
+ else:
+ if isinstance(required_val, list):
+ if conf[attr] not in required_val:
+ return False
+ else:
+ if conf[attr] != required_val:
+ return False
# no check failed
return True
@@ -132,18 +140,20 @@ class Scanner(Fetcher):
if not isinstance(fetcher, Fetcher):
type_to_fetch['fetcher'] = fetcher() # make it an instance
fetcher = type_to_fetch["fetcher"]
- fetcher.set_env(self.get_env())
+ fetcher.setup(env=self.get_env(), origin=self.origin)
# get children_scanner instance
children_scanner = type_to_fetch.get("children_scanner")
escaped_id = fetcher.escape(str(obj_id)) if obj_id else obj_id
self.log.info(
- "scanning : type=%s, parent: (type=%s, name=%s, id=%s)",
- type_to_fetch["type"],
- parent.get('type', 'environment'),
- parent.get('name', ''),
- escaped_id)
+ "Scanning: type={type}, "
+ "parent: (type={parent_type}, "
+ "name={parent_name}, "
+ "id={parent_id})".format(type=type_to_fetch["type"],
+ parent_type=parent.get('type', 'environment'),
+ parent_name=parent.get('name', ''),
+ parent_id=escaped_id))
# fetch OpenStack data from environment by CLI, API or MySQL
# or physical devices data from ACI API
@@ -154,18 +164,21 @@ class Scanner(Fetcher):
self.found_errors[self.get_env()] = True
return []
except Exception as e:
- self.log.error("Error while scanning : " +
- "fetcher=%s, " +
- "type=%s, " +
- "parent: (type=%s, name=%s, id=%s), " +
- "error: %s",
- fetcher.__class__.__name__,
- type_to_fetch["type"],
- "environment" if "type" not in parent
- else parent["type"],
- "" if "name" not in parent else parent["name"],
- escaped_id,
- e)
+ self.log.error(
+ "Error while scanning: fetcher={fetcher}, type={type}, "
+ "parent: (type={parent_type}, name={parent_name}, "
+ "id={parent_id}), "
+ "error: {error}".format(fetcher=fetcher.__class__.__name__,
+ type=type_to_fetch["type"],
+ parent_type="environment"
+ if "type" not in parent
+ else parent["type"],
+ parent_name=""
+ if "name" not in parent
+ else parent["name"],
+ parent_id=escaped_id,
+ error=e))
+
traceback.print_exc()
raise ScanError(str(e))
@@ -232,14 +245,16 @@ class Scanner(Fetcher):
self.log.info("Scan complete")
def scan_links(self):
- self.log.info("scanning for links")
+ self.log.info("Scanning for links")
for fetcher in self.link_finders:
- fetcher.set_env(self.get_env())
+ fetcher.setup(env=self.get_env(),
+ origin=self.origin)
fetcher.add_links()
def scan_cliques(self):
clique_scanner = CliqueFinder()
- clique_scanner.set_env(self.get_env())
+ clique_scanner.setup(env=self.get_env(),
+ origin=self.origin)
clique_scanner.find_cliques()
def deploy_monitoring_setup(self):
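Note on the scanner.py change: environment-condition matching now checks every attribute of the condition instead of returning early after mechanism_drivers, and accepts list values for any attribute. A standalone sketch of the resulting rules:

def matches(env_cond, conf):
    for attr, required in env_cond.items():
        if attr == 'mechanism_drivers':
            required = required if isinstance(required, list) else [required]
            if not set(required) & set(conf.get('mechanism_drivers', [])):
                return False
        elif attr not in conf:
            return False
        elif isinstance(required, list):
            if conf[attr] not in required:
                return False
        elif conf[attr] != required:
            return False
    return True

conf = {'environment_type': 'OpenStack', 'distribution': 'Mirantis',
        'mechanism_drivers': ['OVS']}
matches({'mechanism_drivers': 'OVS', 'distribution': ['Mirantis', 'RDO']}, conf)
# True: both attributes are satisfied
matches({'mechanism_drivers': 'VPP'}, conf)
# False: no intersection with the configured drivers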
diff --git a/app/install/calipso-installer.py b/app/install/calipso-installer.py
index 78bb927..ad2b4e0 100644
--- a/app/install/calipso-installer.py
+++ b/app/install/calipso-installer.py
@@ -16,7 +16,12 @@ import dockerpycreds
# note : not used, useful for docker api security if used
import time
import json
-
+import socket
+# by default, we want to use the docker0 interface ip address for inter-contatiner communications,
+# if hostname argument will not be provided as argument for the calipso-installer
+import os
+dockerip = os.popen('ip addr show docker0 | grep "\<inet\>" | awk \'{ print $2 }\' | awk -F "/" \'{ print $1 }\'')
+local_hostname = dockerip.read().replace("\n", "")
C_MONGO_CONFIG = "/local_dir/calipso_mongo_access.conf"
H_MONGO_CONFIG = "/home/calipso/calipso_mongo_access.conf"
@@ -161,7 +166,8 @@ def start_mongo(dbport, copy):
copy_file("clique_types")
copy_file("cliques")
copy_file("constants")
- copy_file("environments_config")
+ copy_file("environments_config"),
+ copy_file("environment_options"),
copy_file("inventory")
copy_file("link_types")
copy_file("links")
@@ -328,10 +334,10 @@ def container_stop(container_name):
# parser for getting optional command arguments:
parser = argparse.ArgumentParser()
parser.add_argument("--hostname",
- help="Hostname or IP address of the server "
- "(default=172.17.0.1)",
+ help="FQDN (ex:mysrv.cisco.com) or IP address of the Server"
+ "(default=docker0 interface ip address)",
type=str,
- default="172.17.0.1",
+ default=local_hostname,
required=False)
parser.add_argument("--webport",
help="Port for the Calipso WebUI "
@@ -401,6 +407,8 @@ parser.add_argument("--copy",
required=False)
args = parser.parse_args()
+print("\nrunning installer against host:", args.hostname, "\n")
+
if args.command == "start-all":
container = "all"
action = "start"
@@ -424,6 +432,7 @@ while container != "all" and container not in container_names:
if container == "q":
exit()
+
# starting the containers per arguments:
if action == "start":
# building /home/calipso/calipso_mongo_access.conf and
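Note on the calipso-installer.py change: --hostname now defaults to the first IPv4 address on the docker0 bridge (read with an ip/grep/awk pipeline through os.popen) instead of the hard-coded 172.17.0.1. The sketch below is an illustrative alternative reading of the same address with subprocess, not the installer's own code:

import subprocess

def docker0_ipv4(fallback='172.17.0.1'):
    # 'ip -4 addr show docker0' prints lines like 'inet 172.17.0.1/16 ...'
    out = subprocess.run(['ip', '-4', 'addr', 'show', 'docker0'],
                         capture_output=True, text=True).stdout
    for line in out.splitlines():
        line = line.strip()
        if line.startswith('inet '):
            return line.split()[1].split('/')[0]
    return fallback   # the previous hard-coded default

# print(docker0_ipv4())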
diff --git a/app/install/db/monitoring_config_templates.json b/app/install/db/monitoring_config_templates.json
index b5c47df..e0d59e7 100644
--- a/app/install/db/monitoring_config_templates.json
+++ b/app/install/db/monitoring_config_templates.json
@@ -315,14 +315,15 @@
"order" : "1",
"condition" : {
"mechanism_drivers" : [
- "OVS"
+ "OVS",
+ "Flannel"
]
},
"config" : {
"checks" : {
"{objtype}_{objid}" : {
"interval" : 15,
- "command" : "check_vconnector_ovs.py {name}",
+ "command" : "check_vconnector.py {name}",
"standalone" : true,
"type": "metric",
"subscribers" : [
diff --git a/app/messages/message.py b/app/messages/message.py
index e940054..eeef329 100644
--- a/app/messages/message.py
+++ b/app/messages/message.py
@@ -29,7 +29,8 @@ class Message:
object_type: str = None,
ts: datetime = None,
received_ts: datetime = None,
- finished_ts: datetime = None):
+ finished_ts: datetime = None,
+ **kwargs):
super().__init__()
if level and level.lower() in self.LEVELS:
@@ -48,6 +49,7 @@ class Message:
self.received_timestamp = received_ts
self.finished_timestamp = finished_ts
self.viewed = False
+ self.extra = kwargs
def get(self):
return {
@@ -62,5 +64,6 @@ class Message:
"timestamp": self.timestamp,
"received_timestamp": self.received_timestamp,
"finished_timestamp": self.finished_timestamp,
- "viewed": self.viewed
+ "viewed": self.viewed,
+ **self.extra
}
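Note on the message.py change: Message now accepts arbitrary extra keyword arguments and merges them back into the dict returned by get(), so callers can attach fields such as an origin tag without changing the schema. A standalone sketch of the idea:

class MessageSketch:
    # minimal stand-in; the real Message class keeps many more fields
    def __init__(self, level='info', **kwargs):
        self.level = level
        self.viewed = False
        self.extra = kwargs     # anything not in the fixed signature

    def get(self):
        return {'level': self.level, 'viewed': self.viewed, **self.extra}

doc = MessageSketch(level='info', origin_id='scan-request-id-1',
                    origin_type='scheduled').get()
# doc == {'level': 'info', 'viewed': False,
#         'origin_id': 'scan-request-id-1', 'origin_type': 'scheduled'}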
diff --git a/app/monitoring/checks/check_instance_communictions.py b/app/monitoring/checks/check_instance_communictions.py
index d3a94b7..4ce5165 100644
--- a/app/monitoring/checks/check_instance_communictions.py
+++ b/app/monitoring/checks/check_instance_communictions.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
###############################################################################
# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
# and others #
@@ -31,7 +31,7 @@ arp_mac_pos = arp_headers.index('HWaddress')
arp_flags_pos = arp_headers.index('Flags')
-def check_vnic_tuple(vnic_and_service: str):
+def check_vnic_tuple(vnic_and_service):
tuple_parts = vnic_and_service.split(',')
local_service_id = tuple_parts[0]
mac_address = tuple_parts[1]
diff --git a/app/monitoring/checks/check_ping.py b/app/monitoring/checks/check_ping.py
index 35e7234..fbf1304 100755
--- a/app/monitoring/checks/check_ping.py
+++ b/app/monitoring/checks/check_ping.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
###############################################################################
# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
# and others #
diff --git a/app/monitoring/checks/check_pnic_ovs.py b/app/monitoring/checks/check_pnic_ovs.py
index c26e42f..7cfa699 100755
--- a/app/monitoring/checks/check_pnic_ovs.py
+++ b/app/monitoring/checks/check_pnic_ovs.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
###############################################################################
# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
# and others #
@@ -15,7 +15,7 @@ import subprocess
from binary_converter import binary2str
-def nic_not_found(name: str, output: str):
+def nic_not_found(name, output):
print("Error finding NIC {}{}{}\n".format(name, ': ' if output else '',
output))
return 2
diff --git a/app/monitoring/checks/check_pnic_vpp.py b/app/monitoring/checks/check_pnic_vpp.py
index 942fdc2..3db4e49 100755
--- a/app/monitoring/checks/check_pnic_vpp.py
+++ b/app/monitoring/checks/check_pnic_vpp.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
###############################################################################
# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
# and others #
diff --git a/app/monitoring/checks/check_vconnector.py b/app/monitoring/checks/check_vconnector.py
index 237a195..961d7ad 100644
--- a/app/monitoring/checks/check_vconnector.py
+++ b/app/monitoring/checks/check_vconnector.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
###############################################################################
# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
# and others #
diff --git a/app/monitoring/checks/check_vedge_ovs.py b/app/monitoring/checks/check_vedge_ovs.py
index 849af66..33d3f71 100755
--- a/app/monitoring/checks/check_vedge_ovs.py
+++ b/app/monitoring/checks/check_vedge_ovs.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
###############################################################################
# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
# and others #
diff --git a/app/monitoring/checks/check_vedge_vpp.py b/app/monitoring/checks/check_vedge_vpp.py
index 346feae..94d5977 100755
--- a/app/monitoring/checks/check_vedge_vpp.py
+++ b/app/monitoring/checks/check_vedge_vpp.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
###############################################################################
# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
# and others #
diff --git a/app/monitoring/checks/check_vnic_vconnector.py b/app/monitoring/checks/check_vnic_vconnector.py
index 76efd0b..fc8721f 100755
--- a/app/monitoring/checks/check_vnic_vconnector.py
+++ b/app/monitoring/checks/check_vnic_vconnector.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
###############################################################################
# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
# and others #
diff --git a/app/monitoring/checks/check_vnic_vpp.py b/app/monitoring/checks/check_vnic_vpp.py
index 0f77ddd..22cc31d 100755
--- a/app/monitoring/checks/check_vnic_vpp.py
+++ b/app/monitoring/checks/check_vnic_vpp.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
###############################################################################
# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
# and others #
diff --git a/app/monitoring/checks/check_vservice.py b/app/monitoring/checks/check_vservice.py
index a95a46a..2a30a53 100644
--- a/app/monitoring/checks/check_vservice.py
+++ b/app/monitoring/checks/check_vservice.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
###############################################################################
# Copyright (c) 2017 Koren Lev (Cisco Systems), Yaron Yogev (Cisco Systems) #
# and others #
diff --git a/app/monitoring/handlers/handle_otep.py b/app/monitoring/handlers/handle_otep.py
index 0189625..b19baab 100644
--- a/app/monitoring/handlers/handle_otep.py
+++ b/app/monitoring/handlers/handle_otep.py
@@ -29,7 +29,7 @@ class HandleOtep(MonitoringCheckHandler):
self.log.error('Port not found: ' + port_id)
return 1
status = check_result['status']
- port['status'] = self.STATUS_LABEL[status]
+ port['status'] = self.get_label_for_status(status)
port['status_value'] = status
port['status_text'] = check_result['output']
diff --git a/app/monitoring/handlers/monitoring_check_handler.py b/app/monitoring/handlers/monitoring_check_handler.py
index c1f70fb..4902c3c 100644
--- a/app/monitoring/handlers/monitoring_check_handler.py
+++ b/app/monitoring/handlers/monitoring_check_handler.py
@@ -19,14 +19,13 @@ from messages.message import Message
from utils.inventory_mgr import InventoryMgr
from utils.logging.full_logger import FullLogger
from utils.special_char_converter import SpecialCharConverter
-from utils.string_utils import stringify_datetime
SOURCE_SYSTEM = 'Sensu'
ERROR_LEVEL = ['info', 'warn', 'error']
class MonitoringCheckHandler(SpecialCharConverter):
- STATUS_LABEL = ['OK', 'Warning', 'Error']
+ status_labels = {}
TIME_FORMAT = '%Y-%m-%d %H:%M:%S %Z'
def __init__(self, args):
@@ -39,9 +38,30 @@ class MonitoringCheckHandler(SpecialCharConverter):
self.inv = InventoryMgr()
self.inv.log.set_loglevel(args.loglevel)
self.inv.set_collections(args.inventory)
+ self.status_labels = self.get_status_labels()
except FileNotFoundError:
sys.exit(1)
+ def get_status_labels(self):
+ statuses_name_search = {'name': 'monitoring_check_statuses'}
+ labels_data = self.inv.find_one(search=statuses_name_search,
+ collection='constants')
+ if not isinstance(labels_data, dict) or 'data' not in labels_data:
+ return ''
+ labels = {}
+ for status_data in labels_data['data']:
+ if not isinstance(status_data, dict):
+ continue
+ val = int(status_data['value'])
+ label = status_data['label']
+ labels[val] = label
+ return labels
+
+ def get_label_for_status(self, status: int) -> str:
+ if status not in self.status_labels.keys():
+ return ''
+ return self.status_labels.get(status, '')
+
def doc_by_id(self, object_id):
doc = self.inv.get_by_id(self.env, object_id)
if not doc:
@@ -57,7 +77,10 @@ class MonitoringCheckHandler(SpecialCharConverter):
return doc
def set_doc_status(self, doc, status, status_text, timestamp):
- doc['status'] = self.STATUS_LABEL[status] if isinstance(status, int) \
+ doc['status_value'] = status if isinstance(status, int) \
+ else status
+ doc['status'] = self.get_label_for_status(status) \
+ if isinstance(status, int) \
else status
if status_text:
doc['status_text'] = status_text
@@ -83,7 +106,8 @@ class MonitoringCheckHandler(SpecialCharConverter):
obj_id = 'link_{}_{}'.format(doc['source_id'], doc['target_id']) \
if is_link \
else doc['id']
- obj_type = 'link_{}'.format(doc['link_type']) if is_link else doc['type']
+ obj_type = 'link_{}'.format(doc['link_type']) if is_link else \
+ doc['type']
display_context = obj_id if is_link \
else doc['network_id'] if doc['type'] == 'port' else doc['id']
level = error_level if error_level\
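Note on the monitoring_check_handler.py change: status labels are now read from a 'monitoring_check_statuses' document in the constants collection instead of a hard-coded list. The sketch below shows the document shape implied by get_status_labels(); the label texts are illustrative (they mirror the old STATUS_LABEL values):

labels_data = {'name': 'monitoring_check_statuses',
               'data': [{'value': '0', 'label': 'OK'},
                        {'value': '1', 'label': 'Warning'},
                        {'value': '2', 'label': 'Error'}]}

status_labels = {int(d['value']): d['label'] for d in labels_data['data']}

def get_label_for_status(status):
    return status_labels.get(status, '')    # unknown statuses map to ''

# get_label_for_status(1) == 'Warning'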
diff --git a/app/monitoring/setup/monitoring_handler.py b/app/monitoring/setup/monitoring_handler.py
index 903b8d8..0eeb668 100644
--- a/app/monitoring/setup/monitoring_handler.py
+++ b/app/monitoring/setup/monitoring_handler.py
@@ -105,13 +105,25 @@ class MonitoringHandler(MongoAccess, CliAccess, BinaryConverter):
if 'condition' not in doc:
return True
condition = doc['condition']
- if 'mechanism_drivers' not in condition:
- return True
- required_mechanism_drivers = condition['mechanism_drivers']
- if not isinstance(required_mechanism_drivers, list):
- required_mechanism_drivers = [required_mechanism_drivers]
- intersection = [val for val in required_mechanism_drivers
- if val in self.mechanism_drivers]
+ if not isinstance(condition, dict):
+ self.log.error('incorrect condition in monitoring ({}): '
+ 'condition must be a dict'
+ .format(doc.get(doc.get('type'), '')))
+ return False
+ for key, required_value in condition.items():
+ if not self.check_env_config(key, required_value):
+ return False
+ return True
+
+ def check_env_config(self, config_name, required_config_value):
+ required_config_values = required_config_value \
+ if isinstance(required_config_value, list) \
+ else [required_config_value]
+ conf_values = self.configuration.environment.get(config_name, [])
+ conf_values = conf_values if isinstance(conf_values, list) \
+ else [conf_values]
+ intersection = [val for val in required_config_values
+ if val in conf_values]
return bool(intersection)
def content_replace(self, content):
@@ -435,6 +447,7 @@ class MonitoringHandler(MongoAccess, CliAccess, BinaryConverter):
if '/*' in local_dir else local_dir
if local_dir_base.strip('/*') == remote_path.strip('/*'):
return # same directory - nothing to do
+ self.make_remote_dir(host, remote_path)
cmd = 'cp {} {}'.format(what_to_copy, remote_path)
self.run(cmd, ssh=ssh)
return
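Note on the monitoring_handler.py change: the condition check is generalized so a template's 'condition' may name any environment configuration key, and every key must intersect the configured value (the monitoring_config_templates.json change earlier adds 'Flannel' alongside 'OVS' under exactly this rule). A standalone sketch:

def check_env_config(env, config_name, required_value):
    required = required_value if isinstance(required_value, list) \
        else [required_value]
    actual = env.get(config_name, [])
    actual = actual if isinstance(actual, list) else [actual]
    return bool([v for v in required if v in actual])

env = {'mechanism_drivers': ['OVS'], 'environment_type': 'OpenStack'}
condition = {'mechanism_drivers': ['OVS', 'Flannel']}
all(check_env_config(env, k, v) for k, v in condition.items())
# True: 'OVS' appears in both lists, so the template applies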
diff --git a/app/test/api/responders_test/resource/test_clique_types.py b/app/test/api/responders_test/resource/test_clique_types.py
index 702bc87..5e52cea 100644
--- a/app/test/api/responders_test/resource/test_clique_types.py
+++ b/app/test/api/responders_test/resource/test_clique_types.py
@@ -12,7 +12,6 @@ import json
from test.api.responders_test.test_data import base
from test.api.test_base import TestBase
from test.api.responders_test.test_data import clique_types
-import unittest
from unittest.mock import patch
@@ -70,7 +69,6 @@ class TestCliqueTypes(TestBase):
expected_code=base.BAD_REQUEST_CODE
)
- @unittest.SkipTest
@patch(base.RESPONDER_BASE_READ)
def test_get_clique_type_with_correct_configuration(self, read):
self.validate_get_request(
@@ -272,7 +270,6 @@ class TestCliqueTypes(TestBase):
expected_code=base.BAD_REQUEST_CODE
)
- @unittest.SkipTest
@patch(base.RESPONDER_BASE_READ)
def test_post_clique_type_with_duplicate_configuration(self, read):
data = clique_types.CLIQUE_TYPES_WITH_SPECIFIC_CONFIGURATION[0]
diff --git a/app/test/api/responders_test/test_data/clique_types.py b/app/test/api/responders_test/test_data/clique_types.py
index 534b6f0..0791bdf 100644
--- a/app/test/api/responders_test/test_data/clique_types.py
+++ b/app/test/api/responders_test/test_data/clique_types.py
@@ -60,15 +60,15 @@ CLIQUE_TYPES_WITH_SPECIFIC_ID = [
get_payload(update={'id': CORRECT_ID})
]
-# CLIQUE_TYPES_WITH_SPECIFIC_CONFIGURATION = [
-# get_payload(update={'id': SAMPLE_IDS[0],
-# **TEST_CONFIGURATION},
-# delete=['environment'])
-# ]
+CLIQUE_TYPES_WITH_SPECIFIC_CONFIGURATION = [
+ get_payload(update={'id': SAMPLE_IDS[0],
+ **TEST_CONFIGURATION},
+ delete=['environment'])
+]
-# CLIQUE_TYPES_WITH_SPECIFIC_CONFIGURATION_RESPONSE = {
-# "clique_types": CLIQUE_TYPES_WITH_SPECIFIC_CONFIGURATION
-# }
+CLIQUE_TYPES_WITH_SPECIFIC_CONFIGURATION_RESPONSE = {
+ "clique_types": CLIQUE_TYPES_WITH_SPECIFIC_CONFIGURATION
+}
CLIQUE_TYPES_WITH_SPECIFIC_FOCAL_POINT_TYPE = [
get_payload(update={'id': _id,
@@ -144,4 +144,4 @@ CLIQUE_TYPE_WITH_WRONG_MECH_DRIVERS = get_payload(
CLIQUE_TYPE_WITH_WRONG_TYPE_DRIVERS = get_payload(
update={'type_drivers': WRONG_TYPE_DRIVER}
-)
+) \ No newline at end of file
diff --git a/app/test/fetch/api_fetch/test_data/api_fetch_networks.py b/app/test/fetch/api_fetch/test_data/api_fetch_networks.py
index 5079a92..38c60a3 100644
--- a/app/test/fetch/api_fetch/test_data/api_fetch_networks.py
+++ b/app/test/fetch/api_fetch/test_data/api_fetch_networks.py
@@ -21,6 +21,7 @@ NETWORKS_RESPONSE = {
NETWORKS_RESULT = [
{
+ "type": "network",
"id": "8673c48a-f137-4497-b25d-08b7b218fd17",
"subnets": {
"test23": {
diff --git a/app/test/fetch/api_fetch/test_data/api_fetch_ports.py b/app/test/fetch/api_fetch/test_data/api_fetch_ports.py
index fc0552c..bb1d89f 100644
--- a/app/test/fetch/api_fetch/test_data/api_fetch_ports.py
+++ b/app/test/fetch/api_fetch/test_data/api_fetch_ports.py
@@ -26,6 +26,7 @@ PORTS_RESULT_WITH_NET = [
"name": "fa:16:3e:d7:c5:16",
"network_id": "b6fd5175-4b22-4256-9b1a-9fc4b9dce1fe",
"tenant_id": "75c0eb79ff4a42b0ae4973c8375ddf40",
+ "type": "port",
"master_parent_type": "network",
"master_parent_id": "b6fd5175-4b22-4256-9b1a-9fc4b9dce1fe",
"parent_type": "ports_folder",
@@ -41,6 +42,7 @@ PORTS_RESULT_WITHOUT_NET = [
"name": "16620a58-c48c-4195-b9c1-779a8ba2e6f8",
"network_id": "b6fd5175-4b22-4256-9b1a-9fc4b9dce1fe",
"tenant_id": "75c0eb79ff4a42b0ae4973c8375ddf40",
+ "type": "port",
"master_parent_type": "network",
"master_parent_id": "b6fd5175-4b22-4256-9b1a-9fc4b9dce1fe",
"parent_type": "ports_folder",
@@ -56,6 +58,7 @@ PORTS_RESULT_WITH_PROJECT = [
"name": "fa:16:3e:d7:c5:16",
"network_id": "b6fd5175-4b22-4256-9b1a-9fc4b9dce1fe",
"tenant_id": "75c0eb79ff4a42b0ae4973c8375ddf40",
+ "type": "port",
"master_parent_type": "network",
"master_parent_id": "b6fd5175-4b22-4256-9b1a-9fc4b9dce1fe",
"parent_type": "ports_folder",
diff --git a/app/test/fetch/db_fetch/test_data/db_fetch_oteps.py b/app/test/fetch/db_fetch/test_data/db_fetch_oteps.py
index 6940c61..b0467a7 100644
--- a/app/test/fetch/db_fetch/test_data/db_fetch_oteps.py
+++ b/app/test/fetch/db_fetch/test_data/db_fetch_oteps.py
@@ -128,5 +128,5 @@ OTEP_WITH_CONNECTOR = {
"br-tun": {
}
},
- "vconnector": "br-mesh"
+ "vconnector": "node-5.cisco.com-br-mesh"
}
diff --git a/app/test/scan/test_data/scanner.py b/app/test/scan/test_data/scanner.py
index 500021d..ed2129f 100644
--- a/app/test/scan/test_data/scanner.py
+++ b/app/test/scan/test_data/scanner.py
@@ -10,7 +10,6 @@
import queue
from discover.fetchers.folder_fetcher import FolderFetcher
-
SCANNER_TYPE_FOR_ENV = "ScanEnvironment"
METADATA = {
@@ -64,7 +63,8 @@ TYPES_TO_FETCH = [
},
{
"type": "network_services_folder",
- "fetcher": FolderFetcher("network_services", "network", "Network vServices")
+ "fetcher": FolderFetcher("network_services", "network",
+ "Network vServices")
}
]
@@ -162,9 +162,6 @@ TYPES_TO_FETCHES_FOR_SCAN_AGGREGATE = [{
"fetcher": "DbFetchAggregateHosts"
}]
-
-
-
# id = 'RegionOne-aggregates'
# obj = self.inv.get_by_id(self.env, id)
obj = {'id': 'Mirantis-Liberty-Nvn'}
@@ -172,7 +169,6 @@ id_field = 'id'
child_id = '',
child_type = ''
-
child_data = [
{
'id_path': '/Mirantis-Liberty-Nvn/Mirantis-Liberty-Nvn-regions',
@@ -191,15 +187,23 @@ child_data = [
]
PARENT = {
- "environment" : "Mirantis-Liberty-Xiaocong",
- "id" : "node-6.cisco.com-vservices-dhcps",
- "name" : "node-6.cisco.com-vservices-dhcps",
- "object_name" : "DHCP servers",
- "parent_id" : "node-6.cisco.com-vservices",
- "parent_type" : "vservices_folder",
- "show_in_tree" : True,
- "text" : "DHCP servers",
- "type" : "vservice_dhcps_folder"
+ "environment": "Mirantis-Liberty-Xiaocong",
+ "id": "node-6.cisco.com-vservices-dhcps",
+ "id_path": "/Mirantis-Liberty-Xiaocong/Mirantis-Liberty-Xiaocong-regions"
+ "/RegionOne/RegionOne-availability_zones"
+ "/internal/node-6.cisco.com"
+ "/node-6.cisco.com-vservices/node-6.cisco.com-vservices-dhcps",
+ "name": "node-6.cisco.com-vservices-dhcps",
+ "name_path": "/Mirantis-Liberty-Xiaocong/Regions"
+ "/RegionOne/Availability Zones"
+ "/internal/node-6.cisco.com"
+ "/vServices/DHCP servers",
+ "object_name": "DHCP servers",
+ "parent_id": "node-6.cisco.com-vservices",
+ "parent_type": "vservices_folder",
+ "show_in_tree": True,
+ "text": "DHCP servers",
+ "type": "vservice_dhcps_folder"
}
PARENT_WITHOUT_ID = {
@@ -285,7 +289,6 @@ DB_RESULTS_WITHOUT_MASTER_PARENT_IN_DB = [
}
]
-
DICTIONARY_DB_RESULTS = {
"name": "Mirantis-Liberty-Xiaocong-regions",
"parent_type": "environment",
@@ -296,18 +299,22 @@ DICTIONARY_DB_RESULTS = {
}
MASTER_PARENT = {
- "create_object" : True,
- "environment" : "Mirantis-Liberty-Xiaocong",
- "id" : "node-6.cisco.com-vservices",
- "id_path" : "/Mirantis-Liberty/Mirantis-Liberty-regions/RegionOne/RegionOne-availability_zones/internal/node-6.cisco.com/node-6.cisco.com-vservices",
- "name" : "Vservices",
- "name_path" : "/Mirantis-Liberty/Regions/RegionOne/Availability Zones/internal/node-6.cisco.com/Vservices",
- "object_name" : "Vservices",
- "parent_id" : "node-6.cisco.com",
- "parent_type" : "host",
- "show_in_tree" : True,
- "text" : "Vservices",
- "type" : "vservices_folder"
+ "create_object": True,
+ "environment": "Mirantis-Liberty-Xiaocong",
+ "id": "node-6.cisco.com-vservices",
+ "id_path": "/Mirantis-Liberty/Mirantis-Liberty-regions"
+ "/RegionOne/RegionOne-availability_zones"
+ "/internal/node-6.cisco.com/node-6.cisco.com-vservices",
+ "name": "Vservices",
+ "name_path": "/Mirantis-Liberty/Regions"
+ "/RegionOne/Availability Zones"
+ "/internal/node-6.cisco.com/Vservices",
+ "object_name": "Vservices",
+ "parent_id": "node-6.cisco.com",
+ "parent_type": "host",
+ "show_in_tree": True,
+ "text": "Vservices",
+ "type": "vservices_folder"
}
CONFIGURATIONS_WITHOUT_MECHANISM_DRIVERS = {
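The multi-line id_path and name_path values in PARENT and MASTER_PARENT above rely on Python's implicit concatenation of adjacent string literals, so the pieces join into a single path string with nothing inserted between them; a small illustration using the id_path from MASTER_PARENT:

    # Adjacent string literals are concatenated at compile time; no '+' is needed.
    id_path = ("/Mirantis-Liberty/Mirantis-Liberty-regions"
               "/RegionOne/RegionOne-availability_zones"
               "/internal/node-6.cisco.com/node-6.cisco.com-vservices")
    assert id_path.startswith("/Mirantis-Liberty/Mirantis-Liberty-regions/RegionOne")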
diff --git a/app/test/scan/test_scanner.py b/app/test/scan/test_scanner.py
index e93a35b..bd1a0e3 100644
--- a/app/test/scan/test_scanner.py
+++ b/app/test/scan/test_scanner.py
@@ -188,9 +188,11 @@ class TestScanner(TestScan):
# store original method
original_set = self.scanner.inv.set
+ original_get_by_id = self.scanner.inv.get_by_id
# mock method
self.scanner.inv.set = MagicMock()
+ self.scanner.inv.get_by_id = MagicMock(return_value=PARENT)
self.scanner.scan_type(TYPE_TO_FETCH_FOR_ENVIRONMENT, PARENT, ID_FIELD)
self.assertIn("projects", DB_RESULTS_WITH_PROJECT[0],
@@ -199,43 +201,53 @@ class TestScanner(TestScan):
"Can't delete the project key in the object")
self.scanner.inv.set = original_set
+ self.scanner.inv.get_by_id = original_get_by_id
@patch("discover.fetchers.folder_fetcher.FolderFetcher.get")
def test_scan_type_without_create_object(self, fetcher_get):
fetcher_get.return_value = DB_RESULTS_WITHOUT_CREATE_OBJECT
original_set = self.scanner.inv.set
+ original_get_by_id = self.scanner.inv.get_by_id
self.scanner.inv.set = MagicMock()
+ self.scanner.inv.get_by_id = MagicMock(return_value=PARENT)
self.scanner.scan_type(TYPE_TO_FETCH_FOR_ENVIRONMENT, PARENT, ID_FIELD)
self.assertEqual(self.scanner.inv.set.call_count, 0,
"Set the object when the create object is false")
self.scanner.inv.set = original_set
+ self.scanner.inv.get_by_id = original_get_by_id
@patch("discover.fetchers.folder_fetcher.FolderFetcher.get")
def test_scan_type_with_create_object(self, fetcher_get):
fetcher_get.return_value = DB_RESULTS_WITH_CREATE_OBJECT
original_set = self.scanner.inv.set
+ original_get_by_id = self.scanner.inv.get_by_id
self.scanner.inv.set = MagicMock()
+ self.scanner.inv.get_by_id = MagicMock(return_value=PARENT)
+
self.scanner.scan_type(TYPE_TO_FETCH_FOR_ENVIRONMENT, PARENT, ID_FIELD)
self.assertEqual(self.scanner.inv.set.call_count, 1,
"Set the object when the create object is false")
self.scanner.inv.set = original_set
+ self.scanner.inv.get_by_id = original_get_by_id
@patch("discover.fetchers.folder_fetcher.FolderFetcher.get")
def test_scan_type_with_children_scanner(self, fetcher_get):
fetcher_get.return_value = DB_RESULTS_WITH_CREATE_OBJECT
original_set = self.scanner.inv.set
+ original_get_by_id = self.scanner.inv.get_by_id
original_queue_for_scan = self.scanner.queue_for_scan
self.scanner.inv.set = MagicMock()
+ self.scanner.inv.get_by_id = MagicMock(return_value=PARENT)
self.scanner.queue_for_scan = MagicMock()
self.scanner.scan_type(TYPE_TO_FETCH_FOR_ENVIRONMENT, PARENT, ID_FIELD)
@@ -244,6 +256,7 @@ class TestScanner(TestScan):
"Can't put children scanner in the queue")
self.scanner.inv.set = original_set
+ self.scanner.inv.get_by_id = original_get_by_id
self.scanner.queue_for_scan = original_queue_for_scan
@patch("discover.fetchers.folder_fetcher.FolderFetcher.get")
@@ -251,9 +264,11 @@ class TestScanner(TestScan):
fetcher_get.return_value = DB_RESULTS_WITH_CREATE_OBJECT
original_set = self.scanner.inv.set
+ original_get_by_id = self.scanner.inv.get_by_id
original_queue_for_scan = self.scanner.queue_for_scan
self.scanner.inv.set = MagicMock()
+ self.scanner.inv.get_by_id = MagicMock(return_value=PARENT)
self.scanner.queue_for_scan = MagicMock()
self.scanner.scan_type(TYPE_TO_FETCH_FOR_ENV_WITHOUT_CHILDREN_FETCHER,
@@ -263,6 +278,7 @@ class TestScanner(TestScan):
"Can't put children scanner in the queue")
self.scanner.inv.set = original_set
+ self.scanner.inv.get_by_id = original_get_by_id
self.scanner.queue_for_scan = original_queue_for_scan
@patch("discover.fetchers.folder_fetcher.FolderFetcher.get")
@@ -270,9 +286,11 @@ class TestScanner(TestScan):
fetcher_get.return_value = DB_RESULTS_WITH_CREATE_OBJECT
original_set = self.scanner.inv.set
+ original_get_by_id = self.scanner.inv.get_by_id
original_queue_for_scan = self.scanner.queue_for_scan
self.scanner.inv.set = MagicMock()
+ self.scanner.inv.get_by_id = MagicMock(return_value=PARENT)
self.scanner.queue_for_scan = MagicMock()
result = self.scanner.scan_type(TYPE_TO_FETCH_FOR_ENVIRONMENT, PARENT,
@@ -281,6 +299,7 @@ class TestScanner(TestScan):
self.assertNotEqual(result, [], "Can't get children form scan_type")
self.scanner.inv.set = original_set
+ self.scanner.inv.get_by_id = original_get_by_id
self.scanner.queue_for_scan = original_queue_for_scan
def test_scan_with_limit_to_child_type(self):
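The additions in these tests follow the same save/mock/restore pattern already used for inv.set, now extended to inv.get_by_id so scan_type() can resolve PARENT without a database. A condensed, self-contained illustration of the pattern (the SimpleNamespace stand-in for the inventory manager is hypothetical):

    from types import SimpleNamespace
    from unittest.mock import MagicMock

    inv = SimpleNamespace(set=lambda o: None, get_by_id=lambda env, obj_id: None)
    PARENT = {'id': 'node-6.cisco.com-vservices-dhcps'}

    original_set, original_get_by_id = inv.set, inv.get_by_id
    inv.set = MagicMock()
    inv.get_by_id = MagicMock(return_value=PARENT)   # lets the code under test resolve its parent
    # ... exercise the code under test here ...
    inv.set, inv.get_by_id = original_set, original_get_by_id   # restore the real methods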
diff --git a/app/test/verify.sh b/app/test/verify.sh
index bda298c..684195e 100755
--- a/app/test/verify.sh
+++ b/app/test/verify.sh
@@ -7,10 +7,16 @@
# are made available under the terms of the Apache License, Version 2.0 #
# which accompanies this distribution, and is available at #
# http://www.apache.org/licenses/LICENSE-2.0 #
-##############################################################################
-set -o errexit
-set -o nounset
-set -o pipefail
+###############################################################################
+#set -o errexit
+#set -o nounset
+#set -o pipefail
+
+#sudo yum install -y https://centos7.iuscommunity.org/ius-release.rpm
+#sudo yum -y install python35
+#sudo pip install virtualenv
+#virtualenv -p $(which python3.5) $WORKSPACE/venv
+#. $WORKSPACE/venv/bin/activate
PYTHONPATH=$PWD/app python3 -m unittest discover -s app/test/api
PYTHONPATH=$PWD/app python3 -m unittest discover -s app/test/event_based_scan
diff --git a/app/utils/dict_naming_converter.py b/app/utils/dict_naming_converter.py
index 91fea2e..d0f8d42 100644
--- a/app/utils/dict_naming_converter.py
+++ b/app/utils/dict_naming_converter.py
@@ -8,6 +8,7 @@
# http://www.apache.org/licenses/LICENSE-2.0 #
###############################################################################
from bson.objectid import ObjectId
+from datetime import datetime
class DictNamingConverter:
@@ -20,21 +21,33 @@ class DictNamingConverter:
# Returns:
# Dictionary with the new keys.
@staticmethod
- def change_dict_naming_convention(d, cf):
+ def change_dict_naming_convention(d, cf, level: int=0):
new = {}
+ change_convention = DictNamingConverter.change_dict_naming_convention
if not d:
return d
- if isinstance(d, str):
+ if isinstance(d, str) or isinstance(d, int) or isinstance(d, float) \
+ or isinstance(d, bool) or isinstance(d, datetime):
return d
if isinstance(d, ObjectId):
return d
- for k, v in d.items():
- new_v = v
- if isinstance(v, dict):
- new_v = DictNamingConverter.change_dict_naming_convention(v, cf)
- elif isinstance(v, list):
- new_v = list()
- for x in v:
- new_v.append(DictNamingConverter.change_dict_naming_convention(x, cf))
- new[cf(k)] = new_v
+ if isinstance(d, object) and not isinstance(d, dict):
+ for k in dir(d):
+ if k.startswith('_'):
+ continue
+ v = getattr(d, k)
+ if callable(v):
+ continue
+ new[cf(k)] = change_convention(v, cf, level+1)
+ if isinstance(d, dict):
+ for k, v in d.items():
+ new_v = v
+ if isinstance(v, dict):
+ new_v = change_convention(v, cf, level+1)
+ elif isinstance(v, list):
+ new_v = list()
+ for x in v:
+ list_val = change_convention(x, cf, level+1)
+ new_v.append(list_val)
+ new[cf(k)] = new_v
return new
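The converter now also recurses into plain objects (anything that is neither a scalar, a dict, nor a list) by walking their non-callable public attributes, and it passes int, float, bool and datetime values through unchanged. A usage sketch with a hypothetical camelCase-to-snake_case key function, assuming the app directory is on PYTHONPATH as in verify.sh:

    import re
    from utils.dict_naming_converter import DictNamingConverter

    def camel_to_snake(name):
        # hypothetical key-conversion function for this sketch
        return re.sub(r'(?<!^)(?=[A-Z])', '_', name).lower()

    doc = {'parentId': 'node-6', 'subNets': [{'cidrBlock': '10.0.0.0/24'}], 'showInTree': True}
    converted = DictNamingConverter.change_dict_naming_convention(doc, camel_to_snake)
    # converted == {'parent_id': 'node-6', 'sub_nets': [{'cidr_block': '10.0.0.0/24'}],
    #               'show_in_tree': True}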
diff --git a/app/utils/inventory_mgr.py b/app/utils/inventory_mgr.py
index bbc5542..97b6cd4 100644
--- a/app/utils/inventory_mgr.py
+++ b/app/utils/inventory_mgr.py
@@ -389,38 +389,9 @@ class InventoryMgr(MongoAccess, metaclass=Singleton):
parent_id_path = parent.get("id_path", "/{}".format(environment))
parent_name_path = parent.get("name_path", "/{}".format(environment))
- try:
- # case of dynamic folder added by need
- master_parent_type = o["master_parent_type"]
- master_parent_id = o["master_parent_id"]
- master_parent = self.get_by_id(environment, master_parent_id)
- if not master_parent:
- self.log.error("failed to find master parent " +
- master_parent_id)
+ if 'master_parent_type' in o:
+ if not self.create_parent_folder(o, parent):
return False
- folder_id_path = "/".join((master_parent["id_path"],
- o["parent_id"]))
- folder_name_path = "/".join((master_parent["name_path"],
- o["parent_text"]))
- folder = {
- "environment": parent["environment"],
- "parent_id": master_parent_id,
- "parent_type": master_parent_type,
- "id": o["parent_id"],
- "id_path": folder_id_path,
- "show_in_tree": True,
- "name_path": folder_name_path,
- "name": o["parent_id"],
- "type": o["parent_type"],
- "text": o["parent_text"]
- }
- # remove master_parent_type & master_parent_id after use,
- # as they're there just ro help create the dynamic folder
- o.pop("master_parent_type", True)
- o.pop("master_parent_id", True)
- self.set(folder)
- except KeyError:
- pass
if o.get("text"):
o["name"] = o["text"]
@@ -459,6 +430,42 @@ class InventoryMgr(MongoAccess, metaclass=Singleton):
if "create_object" not in o or o["create_object"]:
# add/update object in DB
self.set(o)
- if self.is_feature_supported(environment, EnvironmentFeatures.MONITORING):
+ if self.is_feature_supported(environment,
+ EnvironmentFeatures.MONITORING):
self.monitoring_setup_manager.create_setup(o)
return True
+
+ def create_parent_folder(self, o, parent) -> bool:
+ # case of dynamic folder added by need
+ master_parent_type = o["master_parent_type"]
+ master_parent_id = o["master_parent_id"]
+ env_path = '/{}'.format(parent['environment'])
+ master_parent = {'id_path': env_path, 'name_path': env_path} \
+ if master_parent_type == 'environment' \
+ else self.get_by_id(o['environment'], master_parent_id)
+ if not master_parent:
+ self.log.error("failed to find master parent " +
+ master_parent_id)
+ return False
+ folder_id_path = "/".join((master_parent['id_path'],
+ o["parent_id"]))
+ folder_name_path = "/".join((master_parent["name_path"],
+ o["parent_text"]))
+ folder = {
+ "environment": parent["environment"],
+ "parent_id": master_parent_id,
+ "parent_type": master_parent_type,
+ "id": o["parent_id"],
+ "id_path": folder_id_path,
+ "show_in_tree": True,
+ "name_path": folder_name_path,
+ "name": o["parent_id"],
+ "type": o["parent_type"],
+ "text": o["parent_text"]
+ }
+ # remove master_parent_type & master_parent_id after use,
+        # as they're there just to help create the dynamic folder
+ o.pop("master_parent_type", True)
+ o.pop("master_parent_id", True)
+ self.set(folder)
+ return True
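The extracted create_parent_folder() keeps the old dynamic-folder behaviour but adds one case: when master_parent_type is 'environment', a synthetic master parent rooted at '/<environment>' is used instead of a database lookup. A rough sketch of the resulting path construction, using sample values modelled on the test data in this patch:

    parent_env = 'Mirantis-Liberty-Xiaocong'
    o = {'master_parent_type': 'environment', 'master_parent_id': parent_env,
         'parent_id': 'node-6.cisco.com-vservices', 'parent_text': 'vServices',
         'parent_type': 'vservices_folder'}

    env_path = '/{}'.format(parent_env)
    master_parent = {'id_path': env_path, 'name_path': env_path}   # no DB lookup needed
    folder_id_path = '/'.join((master_parent['id_path'], o['parent_id']))
    folder_name_path = '/'.join((master_parent['name_path'], o['parent_text']))
    assert folder_id_path == '/Mirantis-Liberty-Xiaocong/node-6.cisco.com-vservices'
    assert folder_name_path == '/Mirantis-Liberty-Xiaocong/vServices'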
diff --git a/app/utils/logging/console_logger.py b/app/utils/logging/console_logger.py
index bb8b2ed..b1008e4 100644
--- a/app/utils/logging/console_logger.py
+++ b/app/utils/logging/console_logger.py
@@ -18,4 +18,3 @@ class ConsoleLogger(Logger):
super().__init__(logger_name="{}-Console".format(self.PROJECT_NAME),
level=level)
self.add_handler(logging.StreamHandler())
-
diff --git a/app/utils/logging/full_logger.py b/app/utils/logging/full_logger.py
index 411eceb..f6fe5fa 100644
--- a/app/utils/logging/full_logger.py
+++ b/app/utils/logging/full_logger.py
@@ -10,34 +10,62 @@
import logging
import logging.handlers
+from utils.origins import Origin
from utils.logging.logger import Logger
from utils.logging.mongo_logging_handler import MongoLoggingHandler
class FullLogger(Logger):
- def __init__(self, env: str = None, log_file: str = None,
- level: str = Logger.default_level):
+ def __init__(self, env: str = None, origin: Origin = None,
+ log_file: str = None, level: str = Logger.default_level):
super().__init__(logger_name="{}-Full".format(self.PROJECT_NAME),
level=level)
+ self.env = env
+ self.origin = origin
# Console handler
self.add_handler(logging.StreamHandler())
# Message handler
- self.add_handler(MongoLoggingHandler(env, self.level))
+ self.add_handler(MongoLoggingHandler(env=env, origin=origin,
+ level=self.level))
# File handler
if log_file:
self.add_handler(logging.handlers.WatchedFileHandler(log_file))
+ def _get_message_handler(self):
+ defined_handlers = [h for h in self.log.handlers
+ if isinstance(h, MongoLoggingHandler)]
+ return defined_handlers[0] if defined_handlers else None
+
# Make sure we update MessageHandler with new env
def set_env(self, env):
- super().set_env(env)
+ self.env = env
- defined_handler = [h for h in self.log.handlers
- if isinstance(h, MongoLoggingHandler)]
- if defined_handler:
- defined_handler[0].env = env
+ handler = self._get_message_handler()
+ if handler:
+ handler.env = env
else:
self.add_handler(MongoLoggingHandler(env, self.level))
+
+ def set_origin(self, origin: Origin):
+ self.origin = origin
+
+ handler = self._get_message_handler()
+ if handler:
+ handler.origin = origin
+ else:
+ self.add_handler(MongoLoggingHandler(env=self.env,
+ level=self.level,
+ origin=origin))
+
+ def setup(self, **kwargs):
+ env = kwargs.get('env')
+ if env and self.env != env:
+ self.set_env(env)
+
+ origin = kwargs.get('origin')
+ if origin and self.origin != origin:
+ self.set_origin(origin) \ No newline at end of file
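FullLogger.setup() lets callers refresh the environment and origin lazily, touching the Mongo handler only when a value actually changes. A hypothetical usage sketch, assuming the Calipso app directory is on PYTHONPATH; constructing a real Origin is omitted because its constructor is not shown in this patch:

    from utils.logging.full_logger import FullLogger

    logger = FullLogger(env='Mirantis-Liberty', log_file='/tmp/calipso-full.log')
    logger.setup(env='Mirantis-Liberty')   # unchanged env: handlers are left untouched
    logger.setup(env='Another-Env')        # changed env: set_env() updates the Mongo handler in place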
diff --git a/app/utils/logging/logger.py b/app/utils/logging/logger.py
index 316d3fd..9628040 100644
--- a/app/utils/logging/logger.py
+++ b/app/utils/logging/logger.py
@@ -34,11 +34,12 @@ class Logger(ABC):
level=level)
self.log.propagate = False
self.set_loglevel(level)
- self.env = None
self.level = level
- def set_env(self, env):
- self.env = env
+ # Subclasses should override this method
+ # to perform runtime changes to handlers, etc.
+ def setup(self, **kwargs):
+ pass
@staticmethod
def check_level(level):
diff --git a/app/utils/logging/message_logger.py b/app/utils/logging/message_logger.py
index 02e098f..d433a0f 100644
--- a/app/utils/logging/message_logger.py
+++ b/app/utils/logging/message_logger.py
@@ -18,4 +18,18 @@ class MessageLogger(Logger):
def __init__(self, env: str = None, level: str = None):
super().__init__(logger_name="{}-Message".format(self.PROJECT_NAME),
level=level)
+ self.env = env
self.add_handler(MongoLoggingHandler(env, self.level))
+
+ def set_env(self, env):
+ self.env = env
+
+ if self.log.handlers:
+ self.log.handlers[0].env = env
+ else:
+ self.add_handler(MongoLoggingHandler(env, self.level))
+
+ def setup(self, **kwargs):
+ env = kwargs.get('env')
+ if env and self.env != env:
+ self.set_env(env)
diff --git a/app/utils/logging/mongo_logging_handler.py b/app/utils/logging/mongo_logging_handler.py
index ffb6f85..3929e02 100644
--- a/app/utils/logging/mongo_logging_handler.py
+++ b/app/utils/logging/mongo_logging_handler.py
@@ -11,9 +11,9 @@ import datetime
import logging
from messages.message import Message
+from utils.origins import Origin
from utils.inventory_mgr import InventoryMgr
from utils.logging.logger import Logger
-from utils.string_utils import stringify_datetime
class MongoLoggingHandler(logging.Handler):
@@ -22,11 +22,12 @@ class MongoLoggingHandler(logging.Handler):
"""
SOURCE_SYSTEM = 'Calipso'
- def __init__(self, env: str, level: str):
+ def __init__(self, env: str, level: str, origin: Origin = None):
super().__init__(Logger.get_numeric_level(level))
self.str_level = level
self.env = env
self.inv = None
+ self.origin = origin
def emit(self, record):
# Try to invoke InventoryMgr for logging
@@ -46,7 +47,22 @@ class MongoLoggingHandler(logging.Handler):
d = now - datetime.datetime(1970, 1, 1)
timestamp_id = '{}.{}.{}'.format(d.days, d.seconds, d.microseconds)
source = self.SOURCE_SYSTEM
+
message = Message(msg_id=timestamp_id, env=self.env, source=source,
msg=Logger.formatter.format(record), ts=now,
level=record.levelname)
+ if self.origin:
+ message.extra['origin_id'] = (
+ str(self.origin.origin_id)
+ if self.origin.origin_id
+ else None
+ )
+ message.extra['origin_type'] = (
+ self.origin.origin_type.value
+ if self.origin.origin_type
+ else None
+ )
+ for extra_field in self.origin.extra:
+ message.extra[extra_field] = getattr(self.origin, extra_field)
+
self.inv.collections['messages'].insert_one(message.get()) \ No newline at end of file
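With an origin attached, every stored message now also carries origin_id, origin_type and any fields the origin declares in its extra list. A sketch of how those values are folded into the message's extra dict, mirroring the emit() logic above (the helper function name is illustrative):

    def origin_extras(origin):
        extras = {
            'origin_id': str(origin.origin_id) if origin.origin_id else None,
            'origin_type': origin.origin_type.value if origin.origin_type else None,
        }
        for field in origin.extra:            # attribute names declared by the Origin object
            extras[field] = getattr(origin, field)
        return extras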