diff options
author | Tim Rozet <trozet@redhat.com> | 2018-06-04 21:19:22 -0400 |
---|---|---|
committer | Tim Rozet <trozet@redhat.com> | 2018-06-14 16:25:19 -0400 |
commit | f07870e4a3933cc7e78e6dc6457724bb49cad4f8 (patch) | |
tree | 29258209ef54270f70483977410782328b89377d /apex/common | |
parent | bd292a3c44dbe385974e4ee41dcb8149558f0be4 (diff) |
Fixes deployment failure with allNodesConfig
This pulls in an upstream patch to revert a bad commit that causes the
"Unknown Property controller_ips" error. It also includes a fix to
detect whether patches that are merged upstream have also been promoted
into TripleO images or container images. The check compares the time
the patch was submitted with the time the TripleO image or Docker
image was last updated.
JIRA: APEX-610
JIRA: APEX-612
Change-Id: I1c2ab7fb4425b407acd7b6d9ebab914ed3a24478
Signed-off-by: Tim Rozet <trozet@redhat.com>
Diffstat (limited to 'apex/common')
-rw-r--r-- | apex/common/constants.py | 4 | ||||
-rw-r--r-- | apex/common/utils.py | 42 |
2 files changed, 26 insertions, 20 deletions
diff --git a/apex/common/constants.py b/apex/common/constants.py index 7ccfcd81..89c3e6e1 100644 --- a/apex/common/constants.py +++ b/apex/common/constants.py @@ -68,5 +68,5 @@ VALID_DOCKER_SERVICES = { 'neutron-opendaylight-sriov.yaml': None, 'neutron-ml2-ovn.yaml': 'neutron-ovn.yaml' } -DOCKERHUB_OOO = ('https://registry.hub.docker.com/v2/repositories' - '/tripleomaster/?page_size=1024') +DOCKERHUB_OOO = 'https://registry.hub.docker.com/v2/repositories' \ + '/tripleomaster/' diff --git a/apex/common/utils.py b/apex/common/utils.py index cb7cbe13..2ac900a3 100644 --- a/apex/common/utils.py +++ b/apex/common/utils.py @@ -141,6 +141,28 @@ def run_ansible(ansible_vars, playbook, host='localhost', user='root', raise Exception(e) +def get_url_modified_date(url): + """ + Returns the last modified date for an Tripleo image artifact + :param url: URL to examine + :return: datetime object of when artifact was last modified + """ + try: + u = urllib.request.urlopen(url) + except urllib.error.URLError as e: + logging.error("Failed to fetch target url. Error: {}".format( + e.reason)) + raise + + metadata = u.info() + headers = metadata.items() + for header in headers: + if isinstance(header, tuple) and len(header) == 2: + if header[0] == 'Last-Modified': + return datetime.datetime.strptime(header[1], + "%a, %d %b %Y %X GMT") + + def fetch_upstream_and_unpack(dest, url, targets, fetch=True): """ Fetches targets from a url destination and downloads them if they are @@ -171,30 +193,14 @@ def fetch_upstream_and_unpack(dest, url, targets, fetch=True): if download_target: logging.debug("Fetching and comparing upstream" " target: \n{}".format(target_url)) - try: - u = urllib.request.urlopen(target_url) - except urllib.error.URLError as e: - logging.error("Failed to fetch target url. Error: {}".format( - e.reason)) - raise # Check if previous file and fetch we need to compare files to # determine if download is necessary if target_exists and download_target: logging.debug("Previous file found: {}".format(target_dest)) - metadata = u.info() - headers = metadata.items() - target_url_date = None - for header in headers: - if isinstance(header, tuple) and len(header) == 2: - if header[0] == 'Last-Modified': - target_url_date = header[1] - break + target_url_date = get_url_modified_date(target_url) if target_url_date is not None: target_dest_mtime = os.path.getmtime(target_dest) - target_url_mtime = time.mktime( - datetime.datetime.strptime(target_url_date, - "%a, %d %b %Y %X " - "GMT").timetuple()) + target_url_mtime = time.mktime(target_url_date.timetuple()) if target_url_mtime > target_dest_mtime: logging.debug('URL target is newer than disk...will ' 'download') |