From f07870e4a3933cc7e78e6dc6457724bb49cad4f8 Mon Sep 17 00:00:00 2001
From: Tim Rozet
Date: Mon, 4 Jun 2018 21:19:22 -0400
Subject: Fixes deployment failure with allNodesConfig

This pulls in an upstream patch to revert a bad commit that causes the
"Unknown Property controller_ips" error. It also adds the ability to
detect whether patches that are merged upstream have already been
promoted into TripleO images or container images. This is done by
comparing the time the patch was submitted with the time the TripleO
image or Docker image was last updated.

JIRA: APEX-610
JIRA: APEX-612

Change-Id: I1c2ab7fb4425b407acd7b6d9ebab914ed3a24478
Signed-off-by: Tim Rozet
---
 apex/build_utils.py                    |  10 +---
 apex/builders/common_builder.py        |  95 ++++++++++++++++++++++++++-----
 apex/common/constants.py               |   4 +-
 apex/common/utils.py                   |  42 ++++++++------
 apex/tests/test_apex_common_builder.py | 101 ++++++++++++++++++++++++++++++++-
 5 files changed, 208 insertions(+), 44 deletions(-)
(limited to 'apex')

diff --git a/apex/build_utils.py b/apex/build_utils.py
index 1c413dfd..213ae115 100644
--- a/apex/build_utils.py
+++ b/apex/build_utils.py
@@ -27,7 +27,7 @@ def get_change(url, repo, branch, change_id):
     :param repo: name of repo
     :param branch: branch of repo
     :param change_id: SHA change id
-    :return: change if found and not abandoned, closed, or merged
+    :return: change if found and not abandoned or closed
     """
     rest = GerritRestAPI(url=url)
     change_path = "{}~{}~{}".format(quote_plus(repo), quote_plus(branch),
@@ -37,12 +37,8 @@ def get_change(url, repo, branch, change_id):
     try:
         assert change['status'] not in 'ABANDONED' 'CLOSED', \
             'Change {} is in {} state'.format(change_id, change['status'])
-        if change['status'] == 'MERGED':
-            logging.info('Change {} is merged, ignoring...'
-                         .format(change_id))
-            return None
-        else:
-            return change
+        logging.debug('Change found: {}'.format(change))
+        return change
     except KeyError:
         logging.error('Failed to get valid change data structure from url '

diff --git a/apex/builders/common_builder.py b/apex/builders/common_builder.py
index 0cd683c3..10fab9b6 100644
--- a/apex/builders/common_builder.py
+++ b/apex/builders/common_builder.py
@@ -9,11 +9,13 @@
 # Common building utilities for undercloud and overcloud
 
+import datetime
 import git
 import json
 import logging
 import os
 import re
+import urllib.parse
 
 import apex.builders.overcloud_builder as oc_builder
 from apex import build_utils
@@ -52,7 +54,9 @@ def project_to_docker_image(project):
     """
     # Fetch all docker containers in docker hub with tripleo and filter
     # based on project
-    hub_output = utils.open_webpage(con.DOCKERHUB_OOO, timeout=10)
+
+    hub_output = utils.open_webpage(
+        urllib.parse.urljoin(con.DOCKERHUB_OOO, '?page_size=1024'), timeout=10)
     try:
         results = json.loads(hub_output.decode())['results']
     except Exception as e:
@@ -72,6 +76,60 @@
     return docker_images
 
 
+def is_patch_promoted(change, branch, docker_image=None):
+    """
+    Checks to see if a merged patch already exists in either the docker
+    container or the promoted TripleO images
+    :param change: gerrit change json output
+    :param branch: branch to use when polling artifacts (does not include
+    stable prefix)
+    :param docker_image: container this patch applies to (defaults to None)
+    :return: True if the patch exists in a promoted artifact upstream
+    """
+    assert isinstance(change, dict)
+    assert 'status' in change
+
+    # if not merged we already know this is not closed/abandoned, so we know
+    # this is not promoted
+    if change['status'] != 'MERGED':
+        return False
+    assert 'submitted' in change
+    # drop the microseconds; we do not need that precision
+    stime = re.sub(r'\..*$', '', change['submitted'])
+    submitted_date = datetime.datetime.strptime(stime, "%Y-%m-%d %H:%M:%S")
+    # Patch applies to overcloud/undercloud
+    if docker_image is None:
+        oc_url = urllib.parse.urljoin(
+            con.UPSTREAM_RDO.replace('master', branch), 'overcloud-full.tar')
+        oc_mtime = utils.get_url_modified_date(oc_url)
+        if oc_mtime > submitted_date:
+            logging.debug("oc image was last modified at {}, which is"
+                          " newer than merge date: {}".format(oc_mtime,
+                                                              submitted_date))
+            return True
+    else:
+        # must be a docker patch, check docker tag modified time
+        docker_url = con.DOCKERHUB_OOO.replace('tripleomaster',
+                                               "tripleo{}".format(branch))
+        url_path = "{}/tags/{}".format(docker_image, con.DOCKER_TAG)
+        docker_url = urllib.parse.urljoin(docker_url, url_path)
+        logging.debug("docker url is: {}".format(docker_url))
+        docker_output = utils.open_webpage(docker_url, 10)
+        logging.debug('Docker web output: {}'.format(docker_output))
+        hub_mtime = json.loads(docker_output.decode())['last_updated']
+        hub_mtime = re.sub(r'\..*$', '', hub_mtime)
+        # docker modified time is in this format '2018-06-11T15:23:55.135744Z'
+        # and we drop the microseconds
+        hub_dtime = datetime.datetime.strptime(hub_mtime, "%Y-%m-%dT%H:%M:%S")
+        if hub_dtime > submitted_date:
+            logging.debug("docker image: {} was last modified at {}, which is"
+                          " newer than merge date: {}".format(docker_image,
+                                                              hub_dtime,
+                                                              submitted_date))
+            return True
+    return False
+
+
 def add_upstream_patches(patches, image, tmp_dir,
                          default_branch=os.path.join('stable',
                                                      con.DEFAULT_OS_VERSION),
@@ -99,20 +157,29 @@
             branch = default_branch
         patch_diff = build_utils.get_patch(patch['change-id'],
                                            patch['project'], branch)
-        if patch_diff:
+        project_path = project_to_path(patch['project'])
+        # If docker tag and python we know this patch belongs on docker
+        # container for a docker service. Therefore we build the dockerfile
+        # and move the patch into the containers directory. We also assume
+        # this builder call is for overcloud, because we do not support
+        # undercloud containers
+        if docker_tag and 'python' in project_path:
+            # Projects map to multiple THT services, need to check which
+            # are supported
+            ooo_docker_services = project_to_docker_image(patch['project'])
+            docker_img = ooo_docker_services[0]
+        else:
+            ooo_docker_services = []
+            docker_img = None
+        change = build_utils.get_change(con.OPENSTACK_GERRIT,
+                                        patch['project'], branch,
+                                        patch['change-id'])
+        patch_promoted = is_patch_promoted(change,
+                                           branch.replace('/stable', ''),
+                                           docker_img)
+
+        if patch_diff and not patch_promoted:
             patch_file = "{}.patch".format(patch['change-id'])
-            project_path = project_to_path(patch['project'])
-            # If docker tag and python we know this patch belongs on docker
-            # container for a docker service. Therefore we build the dockerfile
-            # and move the patch into the containers directory. We also assume
-            # this builder call is for overcloud, because we do not support
-            # undercloud containers
-            if docker_tag and 'python' in project_path:
-                # Projects map to multiple THT services, need to check which
-                # are supported
-                ooo_docker_services = project_to_docker_image(patch['project'])
-            else:
-                ooo_docker_services = []
             # If we found services, then we treat the patch like it applies to
             # docker only
             if ooo_docker_services:

diff --git a/apex/common/constants.py b/apex/common/constants.py
index 7ccfcd81..89c3e6e1 100644
--- a/apex/common/constants.py
+++ b/apex/common/constants.py
@@ -68,5 +68,5 @@ VALID_DOCKER_SERVICES = {
     'neutron-opendaylight-sriov.yaml': None,
     'neutron-ml2-ovn.yaml': 'neutron-ovn.yaml'
 }
-DOCKERHUB_OOO = ('https://registry.hub.docker.com/v2/repositories'
-                 '/tripleomaster/?page_size=1024')
+DOCKERHUB_OOO = 'https://registry.hub.docker.com/v2/repositories' \
+                '/tripleomaster/'

diff --git a/apex/common/utils.py b/apex/common/utils.py
index cb7cbe13..2ac900a3 100644
--- a/apex/common/utils.py
+++ b/apex/common/utils.py
@@ -141,6 +141,28 @@ def run_ansible(ansible_vars, playbook, host='localhost', user='root',
         raise Exception(e)
 
 
+def get_url_modified_date(url):
+    """
+    Returns the last modified date for a TripleO image artifact
+    :param url: URL to examine
+    :return: datetime object of when the artifact was last modified
+    """
+    try:
+        u = urllib.request.urlopen(url)
+    except urllib.error.URLError as e:
+        logging.error("Failed to fetch target url. Error: {}".format(
+            e.reason))
+        raise
+
+    metadata = u.info()
+    headers = metadata.items()
+    for header in headers:
+        if isinstance(header, tuple) and len(header) == 2:
+            if header[0] == 'Last-Modified':
+                return datetime.datetime.strptime(header[1],
+                                                  "%a, %d %b %Y %X GMT")
+
+
 def fetch_upstream_and_unpack(dest, url, targets, fetch=True):
     """
     Fetches targets from a url destination and downloads them if they are
@@ -171,30 +193,14 @@ def fetch_upstream_and_unpack(dest, url, targets, fetch=True):
         if download_target:
             logging.debug("Fetching and comparing upstream"
                           " target: \n{}".format(target_url))
-            try:
-                u = urllib.request.urlopen(target_url)
-            except urllib.error.URLError as e:
-                logging.error("Failed to fetch target url. Error: {}".format(
Error: {}".format( - e.reason)) - raise # Check if previous file and fetch we need to compare files to # determine if download is necessary if target_exists and download_target: logging.debug("Previous file found: {}".format(target_dest)) - metadata = u.info() - headers = metadata.items() - target_url_date = None - for header in headers: - if isinstance(header, tuple) and len(header) == 2: - if header[0] == 'Last-Modified': - target_url_date = header[1] - break + target_url_date = get_url_modified_date(target_url) if target_url_date is not None: target_dest_mtime = os.path.getmtime(target_dest) - target_url_mtime = time.mktime( - datetime.datetime.strptime(target_url_date, - "%a, %d %b %Y %X " - "GMT").timetuple()) + target_url_mtime = time.mktime(target_url_date.timetuple()) if target_url_mtime > target_dest_mtime: logging.debug('URL target is newer than disk...will ' 'download') diff --git a/apex/tests/test_apex_common_builder.py b/apex/tests/test_apex_common_builder.py index fe69ca2e..09bd2545 100644 --- a/apex/tests/test_apex_common_builder.py +++ b/apex/tests/test_apex_common_builder.py @@ -51,10 +51,47 @@ class TestCommonBuilder(unittest.TestCase): path = '/usr/lib/python2.7/site-packages/' self.assertEquals(c_builder.project_to_path(project), path) + def test_is_patch_promoted(self): + dummy_change = {'submitted': '2017-06-05 20:23:09.000000000', + 'status': 'MERGED'} + self.assertTrue(c_builder.is_patch_promoted(dummy_change, + 'master')) + + def test_is_patch_promoted_docker(self): + dummy_change = {'submitted': '2017-06-05 20:23:09.000000000', + 'status': 'MERGED'} + dummy_image = 'centos-binary-opendaylight' + self.assertTrue(c_builder.is_patch_promoted(dummy_change, + 'master', + docker_image=dummy_image)) + + def test_patch_not_promoted(self): + dummy_change = {'submitted': '2900-06-05 20:23:09.000000000', + 'status': 'MERGED'} + self.assertFalse(c_builder.is_patch_promoted(dummy_change, + 'master')) + + def test_patch_not_promoted_docker(self): + dummy_change = {'submitted': '2900-06-05 20:23:09.000000000', + 'status': 'MERGED'} + dummy_image = 'centos-binary-opendaylight' + self.assertFalse(c_builder.is_patch_promoted(dummy_change, + 'master', + docker_image=dummy_image)) + + def test_patch_not_promoted_and_not_merged(self): + dummy_change = {'submitted': '2900-06-05 20:23:09.000000000', + 'status': 'BLAH'} + self.assertFalse(c_builder.is_patch_promoted(dummy_change, + 'master')) + @patch('builtins.open', mock_open()) + @patch('apex.builders.common_builder.is_patch_promoted') + @patch('apex.build_utils.get_change') @patch('apex.build_utils.get_patch') @patch('apex.virtual.utils.virt_customize') - def test_add_upstream_patches(self, mock_customize, mock_get_patch): + def test_add_upstream_patches(self, mock_customize, mock_get_patch, + mock_get_change, mock_is_patch_promoted): mock_get_patch.return_value = None change_id = 'I301370fbf47a71291614dd60e4c64adc7b5ebb42' patches = [{ @@ -73,14 +110,18 @@ class TestCommonBuilder(unittest.TestCase): {con.VIRT_RUN_CMD: "cd {} && patch -p1 < {}".format( project_path, patch_file)}] mock_get_patch.return_value = 'some random diff' + mock_is_patch_promoted.return_value = False c_builder.add_upstream_patches(patches, 'dummy.qcow2', '/dummytmp/') mock_customize.assert_called_once_with(test_virt_ops, 'dummy.qcow2') @patch('builtins.open', mock_open()) + @patch('apex.builders.common_builder.is_patch_promoted') + @patch('apex.build_utils.get_change') @patch('apex.build_utils.get_patch') @patch('apex.virtual.utils.virt_customize') def 
-            self, mock_customize, mock_get_patch):
+            self, mock_customize, mock_get_patch, mock_get_change,
+            mock_is_patch_promoted):
         change_id = 'I301370fbf47a71291614dd60e4c64adc7b5ebb42'
         patches = [{
             'change-id': change_id,
             'project': 'openstack/puppet-tripleo'
         }]
@@ -96,19 +137,22 @@ class TestCommonBuilder(unittest.TestCase):
             {con.VIRT_RUN_CMD: "cd {} && patch -p1 < {}".format(
                 project_path, patch_file)}]
         mock_get_patch.return_value = 'some random diff'
+        mock_is_patch_promoted.return_value = False
         c_builder.add_upstream_patches(patches, 'dummy.qcow2', '/dummytmp/',
                                        uc_ip='192.0.2.1',
                                        docker_tag='latest')
         mock_customize.assert_called_once_with(test_virt_ops, 'dummy.qcow2')
 
     @patch('builtins.open', mock_open())
+    @patch('apex.builders.common_builder.is_patch_promoted')
+    @patch('apex.build_utils.get_change')
     @patch('apex.builders.common_builder.project_to_docker_image')
     @patch('apex.builders.overcloud_builder.build_dockerfile')
     @patch('apex.build_utils.get_patch')
     @patch('apex.virtual.utils.virt_customize')
     def test_add_upstream_patches_docker_python(
             self, mock_customize, mock_get_patch, mock_build_docker_file,
-            mock_project2docker):
+            mock_project2docker, mock_get_change, mock_is_patch_promoted):
         mock_project2docker.return_value = ['NovaApi']
         change_id = 'I301370fbf47a71291614dd60e4c64adc7b5ebb42'
         patches = [{
@@ -116,6 +160,7 @@ class TestCommonBuilder(unittest.TestCase):
             'project': 'openstack/nova'
         }]
         mock_get_patch.return_value = 'some random diff'
+        mock_is_patch_promoted.return_value = False
         services = c_builder.add_upstream_patches(patches, 'dummy.qcow2',
                                                   '/dummytmp/',
                                                   uc_ip='192.0.2.1',
@@ -124,6 +169,56 @@ class TestCommonBuilder(unittest.TestCase):
         assert mock_build_docker_file.called
         self.assertSetEqual(services, {'NovaApi'})
 
+    @patch('builtins.open', mock_open())
+    @patch('apex.builders.common_builder.is_patch_promoted')
+    @patch('apex.build_utils.get_change')
+    @patch('apex.builders.common_builder.project_to_docker_image')
+    @patch('apex.builders.overcloud_builder.build_dockerfile')
+    @patch('apex.build_utils.get_patch')
+    @patch('apex.virtual.utils.virt_customize')
+    def test_not_add_upstream_patches_docker_python(
+            self, mock_customize, mock_get_patch, mock_build_docker_file,
+            mock_project2docker, mock_get_change, mock_is_patch_promoted):
+        # Test that the calls are not made when the patch is already merged and
+        # promoted
+        mock_project2docker.return_value = ['NovaApi']
+        change_id = 'I301370fbf47a71291614dd60e4c64adc7b5ebb42'
+        patches = [{
+            'change-id': change_id,
+            'project': 'openstack/nova'
+        }]
+        mock_get_patch.return_value = 'some random diff'
+        mock_is_patch_promoted.return_value = True
+        services = c_builder.add_upstream_patches(patches, 'dummy.qcow2',
+                                                  '/dummytmp/',
+                                                  uc_ip='192.0.2.1',
+                                                  docker_tag='latest')
+        assert mock_customize.not_called
+        assert mock_build_docker_file.not_called
+        assert len(services) == 0
+
+    @patch('builtins.open', mock_open())
+    @patch('apex.builders.common_builder.is_patch_promoted')
+    @patch('apex.build_utils.get_change')
+    @patch('apex.build_utils.get_patch')
+    @patch('apex.virtual.utils.virt_customize')
+    def test_not_upstream_patches_docker_puppet(
+            self, mock_customize, mock_get_patch, mock_get_change,
+            mock_is_patch_promoted):
+        # Test that the calls are not made when the patch is already merged and
+        # promoted
+        change_id = 'I301370fbf47a71291614dd60e4c64adc7b5ebb42'
+        patches = [{
+            'change-id': change_id,
+            'project': 'openstack/puppet-tripleo'
+        }]
+        mock_get_patch.return_value = 'some random diff'
+        mock_is_patch_promoted.return_value = True
+        c_builder.add_upstream_patches(patches, 'dummy.qcow2', '/dummytmp/',
+                                       uc_ip='192.0.2.1',
+                                       docker_tag='latest')
+        assert mock_customize.not_called
+
     @patch('builtins.open', mock_open())
     @patch('apex.virtual.utils.virt_customize')
     def test_add_repo(self, mock_customize):
--
cgit 1.2.3-korg
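
For context, the promotion check added in this patch boils down to comparing a
merged Gerrit change's submit time against the Last-Modified time of the
published artifact. A rough standalone sketch of that idea follows; the URL,
function names, and parsing helpers here are illustrative only, not the Apex
API:

    # Sketch: decide whether a merged Gerrit change should already be present
    # in a published artifact by comparing timestamps (illustrative, not the
    # Apex implementation).
    import datetime
    import urllib.request
    from email.utils import parsedate_to_datetime

    # Illustrative artifact URL; Apex builds the real one from its constants
    # and the branch being deployed.
    ARTIFACT_URL = ('https://images.rdoproject.org/master/delorean/'
                    'current-tripleo/overcloud-full.tar')

    def artifact_mtime(url):
        """Return the artifact's Last-Modified header as a naive UTC datetime."""
        with urllib.request.urlopen(url) as resp:
            last_modified = resp.headers['Last-Modified']
        return parsedate_to_datetime(last_modified).replace(tzinfo=None)

    def looks_promoted(change, url=ARTIFACT_URL):
        """True if the artifact was rebuilt after the change was submitted."""
        if change.get('status') != 'MERGED':
            return False
        # Gerrit timestamps look like '2018-06-04 21:19:22.000000000';
        # drop the fractional seconds before parsing.
        submitted = datetime.datetime.strptime(
            change['submitted'].split('.')[0], '%Y-%m-%d %H:%M:%S')
        return artifact_mtime(url) > submitted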