summaryrefslogtreecommitdiffstats
path: root/apex/common
diff options
context:
space:
mode:
Diffstat (limited to 'apex/common')
-rw-r--r--apex/common/constants.py16
-rw-r--r--apex/common/exceptions.py4
-rw-r--r--apex/common/utils.py63
3 files changed, 72 insertions, 11 deletions
diff --git a/apex/common/constants.py b/apex/common/constants.py
index a2b9a634..0aa6a6ca 100644
--- a/apex/common/constants.py
+++ b/apex/common/constants.py
@@ -39,10 +39,14 @@ VIRT_PW = '--root-password'
THT_DIR = '/usr/share/openstack-tripleo-heat-templates'
THT_ENV_DIR = os.path.join(THT_DIR, 'environments')
+THT_DOCKER_ENV_DIR = os.path.join(THT_ENV_DIR, 'services-docker')
DEFAULT_OS_VERSION = 'pike'
DEFAULT_ODL_VERSION = 'nitrogen'
VALID_ODL_VERSIONS = ['carbon', 'nitrogen', 'oxygen', 'master']
+CEPH_VERSION_MAP = {'pike': 'jewel',
+ 'queens': 'luminous',
+ 'master': 'luminous'}
PUPPET_ODL_URL = 'https://git.opendaylight.org/gerrit/integration/packaging' \
'/puppet-opendaylight'
DEBUG_OVERCLOUD_PW = 'opnfvapex'
@@ -50,3 +54,15 @@ NET_ENV_FILE = 'network-environment.yaml'
DEPLOY_TIMEOUT = 90
UPSTREAM_RDO = 'https://images.rdoproject.org/pike/delorean/current-tripleo/'
OPENSTACK_GERRIT = 'https://review.openstack.org'
+
+DOCKER_TAG = 'current-tripleo-rdo'
+# Maps regular service files to docker versions
+# A None value means the docker filename is the same as the key
+VALID_DOCKER_SERVICES = {
+ 'neutron-opendaylight.yaml': None,
+ 'neutron-opendaylight-dpdk.yaml': None,
+ 'neutron-opendaylight-sriov.yaml': None,
+ 'neutron-ml2-ovn.yaml': 'neutron-ovn.yaml'
+}
+DOCKERHUB_OOO = ('https://registry.hub.docker.com/v2/repositories'
+ '/tripleoupstream/?page_size=1024')
diff --git a/apex/common/exceptions.py b/apex/common/exceptions.py
index 54d99834..a4d390a4 100644
--- a/apex/common/exceptions.py
+++ b/apex/common/exceptions.py
@@ -18,3 +18,7 @@ class JumpHostNetworkException(Exception):
class ApexCleanException(Exception):
pass
+
+
+class ApexBuildException(Exception):
+ pass
diff --git a/apex/common/utils.py b/apex/common/utils.py
index b727b11a..cb7cbe13 100644
--- a/apex/common/utils.py
+++ b/apex/common/utils.py
@@ -22,6 +22,8 @@ import urllib.request
import urllib.parse
import yaml
+from apex.common import exceptions as exc
+
def str2bool(var):
if isinstance(var, bool):
@@ -139,30 +141,45 @@ def run_ansible(ansible_vars, playbook, host='localhost', user='root',
raise Exception(e)
-def fetch_upstream_and_unpack(dest, url, targets):
+def fetch_upstream_and_unpack(dest, url, targets, fetch=True):
"""
Fetches targets from a url destination and downloads them if they are
newer. Also unpacks tar files in dest dir.
:param dest: Directory to download and unpack files to
:param url: URL where target files are located
:param targets: List of target files to download
+ :param fetch: Whether or not to fetch latest from internet (boolean)
:return: None
"""
os.makedirs(dest, exist_ok=True)
assert isinstance(targets, list)
for target in targets:
- download_target = True
target_url = urllib.parse.urljoin(url, target)
target_dest = os.path.join(dest, target)
- logging.debug("Fetching and comparing upstream target: \n{}".format(
- target_url))
- try:
- u = urllib.request.urlopen(target_url)
- except urllib.error.URLError as e:
- logging.error("Failed to fetch target url. Error: {}".format(
- e.reason))
- raise
- if os.path.isfile(target_dest):
+ target_exists = os.path.isfile(target_dest)
+ if fetch:
+ download_target = True
+ elif not target_exists:
+ logging.warning("no-fetch requested but target: {} is not "
+ "cached, will download".format(target_dest))
+ download_target = True
+ else:
+ logging.info("no-fetch requested and previous cache exists for "
+ "target: {}. Will skip download".format(target_dest))
+ download_target = False
+
+ if download_target:
+ logging.debug("Fetching and comparing upstream"
+ " target: \n{}".format(target_url))
+ try:
+ u = urllib.request.urlopen(target_url)
+ except urllib.error.URLError as e:
+ logging.error("Failed to fetch target url. Error: {}".format(
+ e.reason))
+ raise
+            # If a previous file exists and fetch was requested, compare the
+            # files to determine whether a download is actually necessary
+ if target_exists and download_target:
logging.debug("Previous file found: {}".format(target_dest))
metadata = u.info()
headers = metadata.items()
@@ -186,6 +203,7 @@ def fetch_upstream_and_unpack(dest, url, targets):
download_target = False
else:
logging.debug('Unable to find last modified url date')
+
if download_target:
urllib.request.urlretrieve(target_url, filename=target_dest)
logging.info("Target downloaded: {}".format(target))
@@ -220,3 +238,26 @@ def internet_connectivity():
except (urllib.request.URLError, socket.timeout):
logging.debug('No internet connectivity detected')
return False
+
+
+def open_webpage(url, timeout=5):
+ try:
+ response = urllib.request.urlopen(url, timeout=timeout)
+ return response.read()
+ except (urllib.request.URLError, socket.timeout):
+ logging.error("Unable to open URL: {}".format(url))
+ raise
+
+
+def edit_tht_env(env_file, section, settings):
+ assert isinstance(settings, dict)
+ with open(env_file) as fh:
+ data = yaml.safe_load(fh)
+
+ if section not in data.keys():
+ data[section] = {}
+ for setting, value in settings.items():
+ data[section][setting] = value
+ with open(env_file, 'w') as fh:
+ yaml.safe_dump(data, fh, default_flow_style=False)
+ logging.debug("Data written to env file {}:\n{}".format(env_file, data))