summaryrefslogtreecommitdiffstats
path: root/apex/common/utils.py
diff options
context:
space:
mode:
Diffstat (limited to 'apex/common/utils.py')
-rw-r--r--apex/common/utils.py180
1 file changed, 155 insertions, 25 deletions
diff --git a/apex/common/utils.py b/apex/common/utils.py
index 13250a45..72a66d10 100644
--- a/apex/common/utils.py
+++ b/apex/common/utils.py
@@ -8,10 +8,12 @@
##############################################################################
import datetime
+import distro
import json
import logging
import os
import pprint
+import socket
import subprocess
import tarfile
import time
@@ -20,6 +22,8 @@ import urllib.request
import urllib.parse
import yaml
+from apex.common import exceptions as exc
+
def str2bool(var):
if isinstance(var, bool):
@@ -71,12 +75,17 @@ def run_ansible(ansible_vars, playbook, host='localhost', user='root',
Executes ansible playbook and checks for errors
:param ansible_vars: dictionary of variables to inject into ansible run
:param playbook: playbook to execute
+ :param host: inventory file or string of target hosts
+ :param user: remote user to run ansible tasks
:param tmp_dir: temp directory to store ansible command
:param dry_run: Do not actually apply changes
:return: None
"""
logging.info("Executing ansible playbook: {}".format(playbook))
- inv_host = "{},".format(host)
+ if not os.path.isfile(host):
+ inv_host = "{},".format(host)
+ else:
+ inv_host = host
if host == 'localhost':
conn_type = 'local'
else:
@@ -137,45 +146,66 @@ def run_ansible(ansible_vars, playbook, host='localhost', user='root',
raise Exception(e)
-def fetch_upstream_and_unpack(dest, url, targets):
def get_url_modified_date(url):
    """
    Returns the last modified date for a TripleO image artifact
    :param url: URL to examine
    :return: datetime object of when artifact was last modified, or None
             when the server sends no Last-Modified header
    :raises urllib.error.URLError: when the URL cannot be fetched
    """
    try:
        # Use a context manager so the underlying connection is closed
        # instead of being leaked after we read the headers.
        with urllib.request.urlopen(url) as u:
            # Message.get is case-insensitive, unlike a literal tuple
            # comparison against 'Last-Modified'.
            last_modified = u.info().get('Last-Modified')
    except urllib.error.URLError as e:
        logging.error("Failed to fetch target url. Error: {}".format(
            e.reason))
        raise

    if last_modified is not None:
        return datetime.datetime.strptime(last_modified,
                                          "%a, %d %b %Y %X GMT")
+
+
+def fetch_upstream_and_unpack(dest, url, targets, fetch=True):
"""
Fetches targets from a url destination and downloads them if they are
newer. Also unpacks tar files in dest dir.
:param dest: Directory to download and unpack files to
:param url: URL where target files are located
:param targets: List of target files to download
+ :param fetch: Whether or not to fetch latest from internet (boolean)
:return: None
"""
os.makedirs(dest, exist_ok=True)
assert isinstance(targets, list)
for target in targets:
- download_target = True
target_url = urllib.parse.urljoin(url, target)
target_dest = os.path.join(dest, target)
- logging.debug("Fetching and comparing upstream target: \n{}".format(
- target_url))
- try:
- u = urllib.request.urlopen(target_url)
- except urllib.error.URLError as e:
- logging.error("Failed to fetch target url. Error: {}".format(
- e.reason))
- raise
- if os.path.isfile(target_dest):
+ target_exists = os.path.isfile(target_dest)
+ if fetch:
+ download_target = True
+ elif not target_exists:
+ logging.warning("no-fetch requested but target: {} is not "
+ "cached, will download".format(target_dest))
+ download_target = True
+ else:
+ logging.info("no-fetch requested and previous cache exists for "
+ "target: {}. Will skip download".format(target_dest))
+ download_target = False
+
+ if download_target:
+ logging.debug("Fetching and comparing upstream"
+ " target: \n{}".format(target_url))
+ # Check if previous file and fetch we need to compare files to
+ # determine if download is necessary
+ if target_exists and download_target:
logging.debug("Previous file found: {}".format(target_dest))
- metadata = u.info()
- headers = metadata.items()
- target_url_date = None
- for header in headers:
- if isinstance(header, tuple) and len(header) == 2:
- if header[0] == 'Last-Modified':
- target_url_date = header[1]
- break
+ target_url_date = get_url_modified_date(target_url)
if target_url_date is not None:
target_dest_mtime = os.path.getmtime(target_dest)
- target_url_mtime = time.mktime(
- datetime.datetime.strptime(target_url_date,
- "%a, %d %b %Y %X "
- "GMT").timetuple())
+ target_url_mtime = time.mktime(target_url_date.timetuple())
if target_url_mtime > target_dest_mtime:
logging.debug('URL target is newer than disk...will '
'download')
@@ -184,11 +214,111 @@ def fetch_upstream_and_unpack(dest, url, targets):
download_target = False
else:
logging.debug('Unable to find last modified url date')
+
if download_target:
urllib.request.urlretrieve(target_url, filename=target_dest)
logging.info("Target downloaded: {}".format(target))
- if target.endswith('.tar'):
+ if target.endswith(('.tar', 'tar.gz', 'tgz')):
logging.info('Unpacking tar file')
tar = tarfile.open(target_dest)
tar.extractall(path=dest)
tar.close()
+
+
def install_ansible():
    """
    Best-effort installation of Ansible via the distro package manager.
    Only CentOS (yum) and Fedora (dnf) are handled; any other
    distribution is a no-op.
    :return: None
    """
    # we only install for CentOS/Fedora for now
    distribution = distro.id()
    pkg_mgr = None
    if 'centos' in distribution:
        pkg_mgr = 'yum'
    elif 'fedora' in distribution:
        pkg_mgr = 'dnf'
    if pkg_mgr is None:
        return

    # yum python module only exists for 2.x, so use subprocess
    try:
        subprocess.check_call([pkg_mgr, '-y', 'install', 'ansible'])
    except subprocess.CalledProcessError:
        logging.warning('Unable to install Ansible')
+
+
def internet_connectivity():
    """
    Detects internet access by attempting to open http://opnfv.org
    :return: True when the page opens within 3 seconds, False otherwise
    """
    try:
        urllib.request.urlopen('http://opnfv.org', timeout=3)
    except (urllib.request.URLError, socket.timeout):
        logging.debug('No internet connectivity detected')
        return False
    return True
+
+
def open_webpage(url, timeout=5):
    """
    Opens a URL and returns its raw body
    :param url: URL to open
    :param timeout: socket timeout in seconds (default 5)
    :return: response body as bytes
    :raises exc.FetchException: when the URL cannot be opened or times out
    """
    try:
        # Context manager closes the response/connection instead of
        # leaking it after the body is read.
        with urllib.request.urlopen(url, timeout=timeout) as response:
            return response.read()
    except (urllib.request.URLError, socket.timeout) as e:
        logging.error("Unable to open URL: {}".format(url))
        raise exc.FetchException('Unable to open URL') from e
+
+
def edit_tht_env(env_file, section, settings):
    """
    Merges key/value settings into one section of a THT environment file
    :param env_file: path to the YAML environment file (rewritten in place)
    :param section: top-level section to update (created when absent)
    :param settings: dict of keys and values to set inside the section
    :return: None
    """
    assert isinstance(settings, dict)
    with open(env_file) as fh:
        data = yaml.safe_load(fh)

    if section not in data:
        data[section] = {}
    for key, value in settings.items():
        data[section][key] = value
    with open(env_file, 'w') as fh:
        yaml.safe_dump(data, fh, default_flow_style=False)
    logging.debug("Data written to env file {}:\n{}".format(env_file, data))
+
+
def unique(tmp_list):
    """
    Returns a new list with duplicates removed, preserving first-seen order.
    Membership is checked against a list (not a set) so unhashable
    elements are supported.
    :param tmp_list: list to deduplicate
    :return: list of unique elements in original order
    """
    assert isinstance(tmp_list, list)
    seen = []
    for item in tmp_list:
        if item not in seen:
            seen.append(item)
    return seen
+
+
def bash_settings_to_dict(data):
    """
    Parses bash settings x=y and returns dict of key, values
    :param data: bash settings data in x=y format
    :return: dict of keys and values
    """
    # Split on the first '=' only, so values that themselves contain '='
    # (e.g. URLs with query strings) are kept intact, and skip blank
    # lines so empty/trailing lines do not raise a ValueError.
    return dict(line.split('=', 1) for line in data.splitlines() if line)
+
+
def fetch_properties(url):
    """
    Downloads OPNFV properties and returns a dictionary of the key, values
    :param url: URL or local file path of properties file
    :return: dict of k,v for each properties
    :raises exc.FetchException: when url is neither a URL nor a local file
    """
    # A non-empty scheme means this is a real URL, not a local path.
    if bool(urllib.parse.urlparse(url).scheme):
        logging.debug('Fetching properties from internet: {}'.format(url))
        return bash_settings_to_dict(open_webpage(url).decode('utf-8'))

    if os.path.isfile(url):
        logging.debug('Fetching properties from file: {}'.format(url))
        with open(url, 'r') as fh:
            return bash_settings_to_dict(fh.read())

    logging.warning('Unable to fetch properties for: {}'.format(url))
    raise exc.FetchException('Unable determine properties location: '
                             '{}'.format(url))
+
+
def find_container_client(os_version):
    """
    Determines whether to use docker or podman client
    :param os_version: openstack version
    :return: client name as string
    """
    # docker is the client through queens/rocky; later releases use podman
    return 'docker' if os_version in ('rocky', 'queens') else 'podman'