summaryrefslogtreecommitdiffstats
path: root/app/discover
diff options
context:
space:
mode:
Diffstat (limited to 'app/discover')
-rw-r--r--app/discover/clique_finder.py74
-rw-r--r--app/discover/event_manager.py4
-rw-r--r--app/discover/fetchers/api/api_access.py20
-rw-r--r--app/discover/fetchers/db/db_access.py19
-rw-r--r--app/discover/manager.py9
-rwxr-xr-xapp/discover/scan.py13
-rw-r--r--app/discover/scan_manager.py8
7 files changed, 101 insertions, 46 deletions
diff --git a/app/discover/clique_finder.py b/app/discover/clique_finder.py
index 47843e6..57b2e3b 100644
--- a/app/discover/clique_finder.py
+++ b/app/discover/clique_finder.py
@@ -19,6 +19,7 @@ class CliqueFinder(Fetcher):
def __init__(self):
super().__init__()
+ self.env_config = None
self.inv = InventoryMgr()
self.inventory = self.inv.inventory_collection
self.links = self.inv.collections["links"]
@@ -27,6 +28,10 @@ class CliqueFinder(Fetcher):
self.clique_constraints = self.inv.collections["clique_constraints"]
self.cliques = self.inv.collections["cliques"]
+ def set_env(self, env):
+ super().set_env(env)
+ self.env_config = self.configuration.environment
+
def find_cliques_by_link(self, links_list):
return self.links.find({'links': {'$in': links_list}})
@@ -43,23 +48,62 @@ class CliqueFinder(Fetcher):
self.find_cliques_for_type(clique_type)
self.log.info("finished scanning for cliques")
+ # Calculate priority score
+ def _get_priority_score(self, clique_type):
+ if self.env == clique_type['environment']:
+ return 4
+ if (self.env_config['distribution'] == clique_type.get('distribution') and
+ self.env_config['distribution_version'] == clique_type.get('distribution_version')):
+ return 3
+ if clique_type.get('mechanism_drivers') in self.env_config['mechanism_drivers']:
+ return 2
+ if self.env_config['type_drivers'] == clique_type.get('type_drivers'):
+ return 1
+ else:
+ return 0
+
+ # Get clique type with max priority
+ # for given environment configuration and focal point type
+ def _get_clique_type(self, focal_point, clique_types):
+ # If there's no configuration match for the specified environment,
+ # we use the default clique type definition with environment='ANY'
+ fallback_type = next(
+ filter(lambda t: t['environment'] == 'ANY', clique_types),
+ None
+ )
+ if not fallback_type:
+ raise ValueError("No fallback clique type (ANY) "
+ "defined for focal point type '{}'"
+ .format(focal_point))
+
+ clique_types.remove(fallback_type)
+
+ priority_scores = [self._get_priority_score(clique_type)
+ for clique_type
+ in clique_types]
+ max_score = max(priority_scores) if priority_scores else 0
+
+ return (fallback_type
+ if max_score == 0
+ else clique_types[priority_scores.index(max_score)])
+
def get_clique_types(self):
if not self.clique_types_by_type:
- clique_types = self.clique_types \
- .find({"environment": self.get_env()})
- default_clique_types = \
- self.clique_types.find({'environment': 'ANY'})
- for clique_type in clique_types:
- focal_point_type = clique_type['focal_point_type']
- self.clique_types_by_type[focal_point_type] = clique_type
- # if some focal point type does not have an explicit definition in
- # clique_types for this specific environment, use the default
- # clique type definition with environment=ANY
- for clique_type in default_clique_types:
- focal_point_type = clique_type['focal_point_type']
- if focal_point_type not in self.clique_types_by_type:
- self.clique_types_by_type[focal_point_type] = clique_type
- return self.clique_types_by_type
+ clique_types_by_focal_point = self.clique_types.aggregate([{
+ "$group": {
+ "_id": "$focal_point_type",
+ "types": {"$push": "$$ROOT"}
+ }
+ }])
+
+ self.clique_types_by_type = {
+ cliques['_id']: self._get_clique_type(cliques['_id'],
+ cliques['types'])
+ for cliques in
+ clique_types_by_focal_point
+ }
+
+ return self.clique_types_by_type
def find_cliques_for_type(self, clique_type):
focal_point_type = clique_type["focal_point_type"]
diff --git a/app/discover/event_manager.py b/app/discover/event_manager.py
index e2f8282..4855acc 100644
--- a/app/discover/event_manager.py
+++ b/app/discover/event_manager.py
@@ -44,6 +44,7 @@ class EventManager(Manager):
'6.0': DefaultListener,
'7.0': DefaultListener,
'8.0': DefaultListener,
+ '9.0': DefaultListener
},
'RDO': {
'Mitaka': DefaultListener,
@@ -112,7 +113,8 @@ class EventManager(Manager):
def get_listener(self, env: str):
env_config = self.inv.get_env_config(env)
return (self.LISTENERS.get(env_config.get('distribution'), {})
- .get(env_config.get('distribution_version')))
+            .get(env_config.get('distribution_version'),
+                 DefaultListener))
def listen_to_events(self, listener: ListenerBase, env_name: str, process_vars: dict):
listener.listen({
diff --git a/app/discover/fetchers/api/api_access.py b/app/discover/fetchers/api/api_access.py
index 84c4de3..f685faf 100644
--- a/app/discover/fetchers/api/api_access.py
+++ b/app/discover/fetchers/api/api_access.py
@@ -36,24 +36,23 @@ class ApiAccess(Fetcher):
"neutron": ["quantum"]
}
- # identitity API v2 version with admin token
- def __init__(self):
+ # identity API v2 version with admin token
+ def __init__(self, config=None):
super(ApiAccess, self).__init__()
if ApiAccess.initialized:
return
- ApiAccess.config = Configuration()
+ ApiAccess.config = {'OpenStack': config} if config else Configuration()
ApiAccess.api_config = ApiAccess.config.get("OpenStack")
- host = ApiAccess.api_config["host"]
+ host = ApiAccess.api_config.get("host", "")
ApiAccess.host = host
- port = ApiAccess.api_config["port"]
+ port = ApiAccess.api_config.get("port", "")
if not (host and port):
raise ValueError('Missing definition of host or port ' +
'for OpenStack API access')
ApiAccess.base_url = "http://" + host + ":" + port
- ApiAccess.admin_token = ApiAccess.api_config["admin_token"]
- ApiAccess.admin_project = ApiAccess.api_config["admin_project"] \
- if "admin_project" in ApiAccess.api_config \
- else 'admin'
+ ApiAccess.admin_token = ApiAccess.api_config.get("admin_token", "")
+ ApiAccess.admin_project = ApiAccess.api_config.get("admin_project",
+ "admin")
ApiAccess.admin_endpoint = "http://" + host + ":" + "35357"
token = self.v2_auth_pwd(ApiAccess.admin_project)
@@ -97,7 +96,8 @@ class ApiAccess(Fetcher):
if subject_token:
return subject_token
req_url = ApiAccess.base_url + "/v2.0/tokens"
- response = requests.post(req_url, json=post_body, headers=headers)
+ response = requests.post(req_url, json=post_body, headers=headers,
+ timeout=5)
response = response.json()
ApiAccess.auth_response[project_id] = response
if 'error' in response:
diff --git a/app/discover/fetchers/db/db_access.py b/app/discover/fetchers/db/db_access.py
index 64d7372..090ab84 100644
--- a/app/discover/fetchers/db/db_access.py
+++ b/app/discover/fetchers/db/db_access.py
@@ -40,11 +40,12 @@ class DbAccess(Fetcher):
# connection timeout set to 30 seconds,
# due to problems over long connections
- TIMEOUT = 30
+ TIMEOUT = 5
- def __init__(self):
+ def __init__(self, mysql_config=None):
super().__init__()
- self.config = Configuration()
+ self.config = {'mysql': mysql_config} if mysql_config \
+ else Configuration()
self.conf = self.config.get("mysql")
self.connect_to_db()
self.neutron_db = self.get_neutron_db_name()
@@ -61,8 +62,9 @@ class DbAccess(Fetcher):
database=_database,
raise_on_warnings=True)
DbAccess.conn.ping(True) # auto-reconnect if necessary
- except:
- self.log.critical("failed to connect to MySQL DB")
+ except Exception as e:
+ self.log.critical("failed to connect to MySQL DB: {}"
+ .format(str(e)))
return
DbAccess.query_count_per_con = 0
@@ -91,10 +93,9 @@ class DbAccess(Fetcher):
DbAccess.conn = None
self.conf = self.config.get("mysql")
cnf = self.conf
- cnf['schema'] = cnf['schema'] if 'schema' in cnf else 'nova'
- self.db_connect(cnf["host"], cnf["port"],
- cnf["user"], cnf["pwd"],
- cnf["schema"])
+ self.db_connect(cnf.get('host', ''), cnf.get('port', ''),
+ cnf.get('user', ''), cnf.get('pwd', ''),
+ cnf.get('schema', 'nova'))
@with_cursor
def get_objects_list_for_id(self, query, object_type, object_id,
diff --git a/app/discover/manager.py b/app/discover/manager.py
index e37bb31..503c8a8 100644
--- a/app/discover/manager.py
+++ b/app/discover/manager.py
@@ -9,6 +9,7 @@
# http://www.apache.org/licenses/LICENSE-2.0 #
###############################################################################
from abc import ABC, abstractmethod
+import datetime
from utils.logging.file_logger import FileLogger
from utils.logging.full_logger import FullLogger
@@ -19,6 +20,14 @@ class Manager(ABC):
MIN_INTERVAL = 0.1 # To prevent needlessly frequent scans
+ INTERVALS = {
+ 'YEARLY': datetime.timedelta(days=365.25),
+ 'MONTHLY': datetime.timedelta(days=365.25/12),
+ 'WEEKLY': datetime.timedelta(weeks=1),
+ 'DAILY': datetime.timedelta(days=1),
+ 'HOURLY': datetime.timedelta(hours=1)
+ }
+
def __init__(self, log_directory: str = None,
mongo_config_file: str = None):
super().__init__()
diff --git a/app/discover/scan.py b/app/discover/scan.py
index 6c40a7f..49f37ff 100755
--- a/app/discover/scan.py
+++ b/app/discover/scan.py
@@ -319,13 +319,20 @@ class ScanController(Fetcher):
SshConnection.disconnect_all()
status = 'ok' if not scanner.found_errors.get(env_name, False) \
else 'errors detected'
+ if status == 'ok' and scan_plan.object_type == "environment":
+ self.mark_env_scanned(scan_plan.env)
self.log.info('Scan completed, status: {}'.format(status))
return True, status
+ def mark_env_scanned(self, env):
+        environments_collection = self.inv.collections['environments_config']
+ environments_collection \
+ .update_one(filter={'name': env},
+ update={'$set': {'scanned': True}})
if __name__ == '__main__':
- scan_manager = ScanController()
- ret, msg = scan_manager.run()
+ scan_controller = ScanController()
+ ret, msg = scan_controller.run()
if not ret:
- scan_manager.log.error(msg)
+ scan_controller.log.error(msg)
sys.exit(0 if ret else 1)
diff --git a/app/discover/scan_manager.py b/app/discover/scan_manager.py
index 12dbec0..6c46d47 100644
--- a/app/discover/scan_manager.py
+++ b/app/discover/scan_manager.py
@@ -170,14 +170,6 @@ class ScanManager(Manager):
.update_many(filter={'name': {'$in': env_scans}},
update={'$set': {'scanned': False}})
- INTERVALS = {
- 'YEARLY': datetime.timedelta(days=365.25),
- 'MONTHLY': datetime.timedelta(days=365.25/12),
- 'WEEKLY': datetime.timedelta(weeks=1),
- 'DAILY': datetime.timedelta(days=1),
- 'HOURLY': datetime.timedelta(hours=1)
- }
-
def _submit_scan_request_for_schedule(self, scheduled_scan, interval, ts):
scans = self.scans_collection
new_scan = {