From 96e8ffec3e9534f4cc131303334cb2d93179feed Mon Sep 17 00:00:00 2001
From: Dan Radez <dradez@redhat.com>
Date: Thu, 14 Sep 2017 09:56:36 -0400
Subject: Add unit tests for build

- covers build.py
- covers build_utils.py
- moved build_utils out of the build package; build.py could not be
  imported while the apex/build/ package shadowed the apex/build.py
  module (see the import sketch below)
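
A minimal sketch of the imports this layout allows (module names taken
from this patch; illustrative only):

    # apex/build.py and the apex/build/ package used to collide on the
    # name "apex.build"; the package took precedence, so build.py could
    # not be imported by the tests.
    from apex import build        # apex/build.py
    from apex import build_utils  # apex/build_utils.py (new location)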

Change-Id: I42f08a475d9ca219a62c421d4bdd2d1d3c49691a
Signed-off-by: Dan Radez <dradez@redhat.com>
(cherry picked from commit c876e9d261a7c3189cc8374c995014309c5b9881)
---
 apex/build.py                       |  13 +-
 apex/build/__init__.py              |   0
 apex/build/build_utils.py           | 113 -------------
 apex/build_utils.py                 | 113 +++++++++++++
 apex/tests/test_apex_build.py       | 308 ++++++++++++++++++++++++++++++++++++
 apex/tests/test_apex_build_utils.py | 167 +++++++++++++++++++
 build/variables.sh                  |   2 +-
 test-requirements.txt               |   2 +
 8 files changed, 600 insertions(+), 118 deletions(-)
 delete mode 100644 apex/build/__init__.py
 delete mode 100644 apex/build/build_utils.py
 create mode 100644 apex/build_utils.py
 create mode 100644 apex/tests/test_apex_build.py
 create mode 100644 apex/tests/test_apex_build_utils.py

diff --git a/apex/build.py b/apex/build.py
index 2d0786a8..08f91abe 100644
--- a/apex/build.py
+++ b/apex/build.py
@@ -103,8 +103,8 @@ def unpack_cache(cache_dest, cache_dir=None):
         except subprocess.CalledProcessError:
             logging.warning("Cache unpack failed")
             return
-        logging.info("Cache unpacked, contents are: {}",
-                     os.listdir(cache_dest))
+        logging.info("Cache unpacked, contents are: {}".format(
+                     os.listdir(cache_dest)))
 
 
 def build(build_root, version, iso=False, rpms=False):
@@ -193,7 +193,7 @@ def prune_cache(cache_dir):
                 os.remove(cache_full_path)
                 cache_entries.pop(0)
                 cache_modified_flag = True
-            except os.EX_OSERR:
+            except OSError:
                 logging.warning("Failed to remove cache file: {}".format(
                     cache_full_path))
                 break
@@ -207,7 +207,8 @@ def prune_cache(cache_dir):
         with open(journal_file, 'w') as fh:
             yaml.safe_dump(cache_entries, fh, default_flow_style=False)
 
-if __name__ == '__main__':
+
+def main():
     parser = create_build_parser()
     args = parser.parse_args(sys.argv[1:])
     if args.debug:
@@ -247,3 +248,7 @@ if __name__ == '__main__':
     build(apex_build_root, args.build_version, args.iso, args.rpms)
     build_cache(cache_tmp_dir, args.cache_dir)
     prune_cache(args.cache_dir)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/apex/build/__init__.py b/apex/build/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/apex/build/build_utils.py b/apex/build/build_utils.py
deleted file mode 100644
index 66a63d37..00000000
--- a/apex/build/build_utils.py
+++ /dev/null
@@ -1,113 +0,0 @@
-##############################################################################
-# Copyright (c) 2017 Feng Pan (fpan@redhat.com) and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-import argparse
-import git
-import logging
-import os
-from pygerrit2.rest import GerritRestAPI
-import re
-import shutil
-import sys
-
-from urllib.parse import quote_plus
-
-
-def clone_fork(args):
-    ref = None
-    logging.info("Cloning {}".format(args.repo))
-
-    try:
-        cm = git.Repo(search_parent_directories=True).commit().message
-    except git.exc.InvalidGitRepositoryError:
-        logging.debug('Current Apex directory is not a git repo: {}'
-                      .format(os.getcwd()))
-        cm = ''
-
-    logging.info("Current commit message: {}".format(cm))
-    m = re.search('{}:\s*(\S+)'.format(args.repo), cm)
-
-    if m:
-        change_id = m.group(1)
-        logging.info("Using change ID {} from {}".format(change_id, args.repo))
-        rest = GerritRestAPI(url=args.url)
-        change_path = "{}~{}~{}".format(args.repo, quote_plus(args.branch),
-                                        change_id)
-        change_str = "changes/{}?o=CURRENT_REVISION".format(change_path)
-        change = rest.get(change_str)
-        try:
-            assert change['status'] not in 'ABANDONED' 'CLOSED',\
-                'Change {} is in {} state'.format(change_id, change['status'])
-            if change['status'] == 'MERGED':
-                logging.info('Change {} is merged, ignoring...'
-                             .format(change_id))
-            else:
-                current_revision = change['current_revision']
-                ref = change['revisions'][current_revision]['ref']
-                logging.info('setting ref to {}'.format(ref))
-        except KeyError:
-            logging.error('Failed to get valid change data structure from url '
-                          '{}/{}, data returned: \n{}'
-                          .format(change_id, change_str, change))
-            raise
-
-    # remove existing file or directory named repo
-    if os.path.exists(args.repo):
-        if os.path.isdir(args.repo):
-            shutil.rmtree(args.repo)
-        else:
-            os.remove(args.repo)
-
-    ws = git.Repo.clone_from("{}/{}".format(args.url, args.repo),
-                             args.repo, b=args.branch)
-    if ref:
-        git_cmd = ws.git
-        git_cmd.fetch("{}/{}".format(args.url, args.repo), ref)
-        git_cmd.checkout('FETCH_HEAD')
-        logging.info('Checked out commit:\n{}'.format(ws.head.commit.message))
-
-
-def get_parser():
-    parser = argparse.ArgumentParser()
-    parser.add_argument('--debug', action='store_true', default=False,
-                        help="Turn on debug messages")
-    subparsers = parser.add_subparsers()
-    fork = subparsers.add_parser('clone-fork',
-                                 help='Clone fork of dependent repo')
-    fork.add_argument('-r', '--repo', required=True, help='Name of repository')
-    fork.add_argument('-u', '--url',
-                      default='https://gerrit.opnfv.org/gerrit',
-                      help='Gerrit URL of repository')
-    fork.add_argument('-b', '--branch',
-                      default='master',
-                      help='Branch to checkout')
-    fork.set_defaults(func=clone_fork)
-    return parser
-
-
-def main():
-    parser = get_parser()
-    args = parser.parse_args(sys.argv[1:])
-    if args.debug:
-        logging_level = logging.DEBUG
-    else:
-        logging_level = logging.INFO
-
-    logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',
-                        datefmt='%m/%d/%Y %I:%M:%S %p',
-                        level=logging_level)
-    if hasattr(args, 'func'):
-        args.func(args)
-    else:
-        parser.print_help()
-        exit(1)
-
-
-if __name__ == "__main__":
-    main()
diff --git a/apex/build_utils.py b/apex/build_utils.py
new file mode 100644
index 00000000..66a63d37
--- /dev/null
+++ b/apex/build_utils.py
@@ -0,0 +1,113 @@
+##############################################################################
+# Copyright (c) 2017 Feng Pan (fpan@redhat.com) and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import argparse
+import git
+import logging
+import os
+from pygerrit2.rest import GerritRestAPI
+import re
+import shutil
+import sys
+
+from urllib.parse import quote_plus
+
+
+def clone_fork(args):
+    ref = None
+    logging.info("Cloning {}".format(args.repo))
+
+    try:
+        cm = git.Repo(search_parent_directories=True).commit().message
+    except git.exc.InvalidGitRepositoryError:
+        logging.debug('Current Apex directory is not a git repo: {}'
+                      .format(os.getcwd()))
+        cm = ''
+
+    logging.info("Current commit message: {}".format(cm))
+    m = re.search('{}:\s*(\S+)'.format(args.repo), cm)
+
+    if m:
+        change_id = m.group(1)
+        logging.info("Using change ID {} from {}".format(change_id, args.repo))
+        rest = GerritRestAPI(url=args.url)
+        change_path = "{}~{}~{}".format(args.repo, quote_plus(args.branch),
+                                        change_id)
+        change_str = "changes/{}?o=CURRENT_REVISION".format(change_path)
+        change = rest.get(change_str)
+        try:
+            assert change['status'] not in 'ABANDONED' 'CLOSED',\
+                'Change {} is in {} state'.format(change_id, change['status'])
+            if change['status'] == 'MERGED':
+                logging.info('Change {} is merged, ignoring...'
+                             .format(change_id))
+            else:
+                current_revision = change['current_revision']
+                ref = change['revisions'][current_revision]['ref']
+                logging.info('setting ref to {}'.format(ref))
+        except KeyError:
+            logging.error('Failed to get valid change data structure from url '
+                          '{}/{}, data returned: \n{}'
+                          .format(change_id, change_str, change))
+            raise
+
+    # remove existing file or directory named repo
+    if os.path.exists(args.repo):
+        if os.path.isdir(args.repo):
+            shutil.rmtree(args.repo)
+        else:
+            os.remove(args.repo)
+
+    ws = git.Repo.clone_from("{}/{}".format(args.url, args.repo),
+                             args.repo, b=args.branch)
+    if ref:
+        git_cmd = ws.git
+        git_cmd.fetch("{}/{}".format(args.url, args.repo), ref)
+        git_cmd.checkout('FETCH_HEAD')
+        logging.info('Checked out commit:\n{}'.format(ws.head.commit.message))
+
+
+def get_parser():
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--debug', action='store_true', default=False,
+                        help="Turn on debug messages")
+    subparsers = parser.add_subparsers()
+    fork = subparsers.add_parser('clone-fork',
+                                 help='Clone fork of dependent repo')
+    fork.add_argument('-r', '--repo', required=True, help='Name of repository')
+    fork.add_argument('-u', '--url',
+                      default='https://gerrit.opnfv.org/gerrit',
+                      help='Gerrit URL of repository')
+    fork.add_argument('-b', '--branch',
+                      default='master',
+                      help='Branch to checkout')
+    fork.set_defaults(func=clone_fork)
+    return parser
+
+
+def main():
+    parser = get_parser()
+    args = parser.parse_args(sys.argv[1:])
+    if args.debug:
+        logging_level = logging.DEBUG
+    else:
+        logging_level = logging.INFO
+
+    logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',
+                        datefmt='%m/%d/%Y %I:%M:%S %p',
+                        level=logging_level)
+    if hasattr(args, 'func'):
+        args.func(args)
+    else:
+        parser.print_help()
+        exit(1)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/apex/tests/test_apex_build.py b/apex/tests/test_apex_build.py
new file mode 100644
index 00000000..a55f790e
--- /dev/null
+++ b/apex/tests/test_apex_build.py
@@ -0,0 +1,308 @@
+##############################################################################
+# Copyright (c) 2016 Dan Radez (dradez@redhat.com) (Red Hat)
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import os
+import subprocess
+import unittest
+
+from mock import patch
+from mock import mock_open
+from argparse import ArgumentParser
+
+from apex.build import ApexBuildException
+from apex.build import build
+from apex.build import build_cache
+from apex.build import create_build_parser
+from apex.build import get_cache_file
+from apex.build import get_journal
+from apex.build import main
+from apex.build import prune_cache
+from apex.build import unpack_cache
+
+from nose.tools import (
+    assert_is_none,
+    assert_raises,
+    assert_is_instance)
+
+a_mock_open = mock_open(read_data=None)
+
+
+class TestBuild(unittest.TestCase):
+    @classmethod
+    def setup_class(cls):
+        """This method is run once for each class before any tests are run"""
+        cls.repo_name = 'test_repo'
+        cls.repo_url = 'https://gerrit.opnfv.org/gerrit/' + cls.repo_name
+        cls.change_id = 'I5c1b3ded249c4e3c558be683559e03deb27721b8'
+        cls.commit_id = '8669c687a75a00106b055add49b82fee826b8fe8'
+        cls.sys_argv = ['deploy.py', 'clone-fork', '-r', cls.repo_name]
+        cls.sys_argv_debug = ['deploy.py', '--debug']
+
+    @classmethod
+    def teardown_class(cls):
+        """This method is run once for each class _after_ all tests are run"""
+
+    def setup(self):
+        """This method is run once before _each_ test method is executed"""
+
+    def teardown(self):
+        """This method is run once after _each_ test method is executed"""
+
+    def test_create_build_parser(self):
+        assert_is_instance(create_build_parser(), ArgumentParser)
+
+    @patch('apex.build.yaml')
+    @patch('apex.build.os.path')
+    @patch('builtins.open', a_mock_open, create=True)
+    def test_get_journal_exists(self, mock_os_path, mock_yaml):
+        # setup mock
+        mock_os_path.isfile.return_value = True
+        mock_yaml.safe_load.return_value = ['a', 'list']
+        # execute
+        assert_is_instance(get_journal('test_dir'), list)
+        # assert
+        mock_os_path.isfile.assert_called_with('test_dir/cache_journal.yaml')
+        mock_yaml.safe_load.assert_called_with(a_mock_open.return_value)
+
+    @patch('apex.build.os.path')
+    def test_get_journal_notexist(self, mock_os_path):
+        # setup mock
+        mock_os_path.isfile.return_value = False
+        # execute
+        assert_is_none(get_journal('test_dir'))
+
+    @patch('apex.build.os.path')
+    @patch('apex.build.get_journal')
+    def test_get_cache_file(self, mock_get_journal, mock_os_path):
+        mock_get_journal.return_value = ['journal_contents']
+        mock_os_path.isfile.return_value = True
+        get_cache_file('test_dir')
+
+    def test_unpack_cache_no_cache_dir(self):
+        unpack_cache('dest_dir', cache_dir=None)
+
+    @patch('apex.build.os.path')
+    def test_unpack_cache_not_isdir(self, mock_os_path):
+        mock_os_path.isdir.return_value = False
+        unpack_cache('dest_dir', cache_dir='cache_dir')
+
+    @patch('apex.build.get_cache_file')
+    @patch('apex.build.os.path')
+    def test_unpack_cache_cache_file_none(self, mock_os_path, mock_cache_file):
+        mock_os_path.isdir.return_value = True
+        mock_cache_file.return_value = None
+        unpack_cache('dest_dir', cache_dir='cache_dir')
+
+    @patch('apex.build.subprocess.check_call')
+    @patch('apex.build.get_cache_file')
+    @patch('apex.build.os.path')
+    @patch('apex.build.os')
+    def test_unpack_cache_tar_failure(self, mock_os, mock_os_path,
+                                      mock_cache_file,
+                                      mock_subprocess):
+        mock_os_path.isdir.return_value = True
+        mock_cache_file.return_value = 'cache_file'
+        mock_os_path.exists.return_value = False
+        mock_subprocess.side_effect = subprocess.CalledProcessError(1, 'cmd')
+        unpack_cache('dest_dir', cache_dir='cache_dir')
+
+    @patch('apex.build.subprocess')
+    @patch('apex.build.get_cache_file')
+    @patch('apex.build.os.path')
+    @patch('apex.build.os')
+    def test_unpack_cache_cache_dest_not_exist(self, mock_os, mock_os_path,
+                                               mock_cache_file,
+                                               mock_subprocess):
+        mock_os_path.isdir.return_value = True
+        mock_cache_file.return_value = 'cache_file'
+        mock_os_path.exists.return_value = False
+        mock_os.listdir.return_value = ['listdir', 'is', 'Mocked']
+        unpack_cache('dest_dir', cache_dir='cache_dir')
+
+    @patch('apex.build.subprocess')
+    def test_build(self, mock_subprocess):
+        build('build_root', None)
+
+    @patch('apex.build.subprocess.check_call')
+    def test_build_check_call_raises(self, mock_subprocess):
+        mock_subprocess.side_effect = subprocess.CalledProcessError('cmd', 1)
+        assert_raises(subprocess.CalledProcessError, build, 'build_root', None)
+
+    @patch('apex.build.subprocess.check_output')
+    @patch('apex.build.subprocess.check_call')
+    def test_build_check_output_raises(self, mock_check_call, mock_subprocess):
+        mock_subprocess.side_effect = subprocess.CalledProcessError('cmd', 1)
+        assert_raises(subprocess.CalledProcessError, build, 'build_root', None)
+
+    @patch('apex.build.subprocess')
+    def test_build_rpms(self, mock_subprocess):
+        build('build_root', 'v123', rpms=True)
+
+    @patch('apex.build.subprocess')
+    def test_build_iso(self, mock_subprocess):
+        build('build_root', 'v123', iso=True)
+
+    def test_build_cache_none(self):
+        build_cache('cache_source', None)
+
+    @patch('apex.build.get_journal')
+    @patch('apex.build.yaml')
+    @patch('apex.build.os')
+    @patch('apex.build.subprocess')
+    @patch('builtins.open', a_mock_open, create=True)
+    def test_build_cache(self, mock_subprocess, mock_os,
+                         mock_yaml, mock_get_journal):
+        build_cache('cache_source', 'cache_dir')
+        mock_yaml.safe_dump.assert_called_with(mock_get_journal.return_value,
+                                               a_mock_open.return_value,
+                                               default_flow_style=False)
+
+    @patch('apex.build.get_journal')
+    @patch('apex.build.uuid')
+    @patch('apex.build.yaml')
+    @patch('apex.build.os')
+    @patch('apex.build.subprocess')
+    @patch('builtins.open', a_mock_open, create=True)
+    def test_build_cache_get_journal_none(self, mock_subprocess, mock_os,
+                                          mock_yaml, mock_uuid,
+                                          mock_get_journal):
+        uuid = '73b18d27-8d25-4e02-a937-cb08609b6911'
+        mock_get_journal.return_value = None
+        mock_uuid.uuid4.return_value = uuid
+        build_cache('cache_source', 'cache_dir')
+        mock_yaml.safe_dump.assert_called_with(['apex-cache-{}.tgz'.format(
+                                                uuid)],
+                                               a_mock_open.return_value,
+                                               default_flow_style=False)
+
+    @patch('apex.build.get_journal')
+    @patch('apex.build.yaml')
+    @patch('apex.build.os.path')
+    @patch('apex.build.os')
+    @patch('apex.build.subprocess.check_call')
+    @patch('builtins.open', mock_open(read_data=None), create=True)
+    def test_build_cache_tar_fails(self, mock_subprocess, mock_os,
+                                   mock_os_path, mock_yaml, mock_get_journal):
+        mock_subprocess.side_effect = BaseException()
+        mock_os_path.isfile.return_value = True
+        assert_raises(BaseException, build_cache, 'cache_source', 'cache_dir')
+
+    @patch('apex.build.get_journal')
+    @patch('apex.build.yaml')
+    @patch('apex.build.os.path')
+    @patch('apex.build.os')
+    @patch('apex.build.subprocess.check_call')
+    @patch('builtins.open', mock_open(read_data=None), create=True)
+    def test_build_cache_cache_full_path_false(self, mock_subprocess, mock_os,
+                                               mock_os_path, mock_yaml,
+                                               mock_get_journal):
+        mock_os_path.isfile.return_value = False
+        build_cache('cache_source', 'cache_dir')
+        mock_yaml.safe_dump.assert_not_called()
+
+    def test_prune_cache_none(self):
+        prune_cache(None)
+
+    @patch('apex.build.get_journal')
+    def test_prune_cache_empty_journal(self, mock_get_journal):
+        mock_get_journal.return_value = []
+        prune_cache('not-none')
+
+    @patch('apex.build.get_journal')
+    @patch('apex.build.yaml')
+    @patch('apex.build.os')
+    @patch('builtins.open', mock_open(read_data=None), create=True)
+    def test_prune_cache_os_remove_error(self, mock_os, mock_yaml,
+                                         mock_get_journal):
+        # setup Mock
+        mock_get_journal.return_value = ['more', 'than', 'two']
+        rm = mock_os.remove
+        rm.side_effect = OSError()
+        # execute
+        prune_cache('not-none')
+
+    @patch('apex.build.get_journal')
+    @patch('apex.build.yaml')
+    @patch('apex.build.os')
+    @patch('builtins.open', a_mock_open, create=True)
+    def test_prune_cache(self, mock_os, mock_yaml, mock_get_journal):
+        # setup Mock
+        mock_get_journal.return_value = ['more', 'than', 'two']
+        # execute
+        prune_cache('not-none')
+        # assert
+        mock_yaml.safe_dump.assert_called_with(['than', 'two'],
+                                               a_mock_open.return_value,
+                                               default_flow_style=False)
+
+    @patch('apex.build.create_build_parser')
+    @patch('apex.build.subprocess.check_output')
+    @patch('apex.build.os.path')
+    @patch('apex.build.os')
+    @patch('apex.build.utils')
+    @patch('apex.build.unpack_cache')
+    @patch('apex.build.build_cache')
+    @patch('apex.build.prune_cache')
+    @patch('apex.build.build')
+    def test_main(self, mock_build, mock_prune_cache,
+                  mock_build_cache, mock_unpack_cache,
+                  mock_utils, mock_os, mock_os_path,
+                  mock_subprocess, mock_parser):
+        # setup mock
+        mbc = mock_parser.return_value
+        args = mbc.parse_args.return_value
+        args.debug = False
+        mock_os_path.isdir.return_value = True
+        # execute
+        main()
+        # assert
+        # TODO
+
+    @patch('apex.build.create_build_parser')
+    @patch('apex.build.subprocess.check_output')
+    @patch('apex.build.os.path')
+    @patch('apex.build.os')
+    @patch('apex.build.utils')
+    @patch('apex.build.unpack_cache')
+    @patch('apex.build.build_cache')
+    @patch('apex.build.prune_cache')
+    @patch('apex.build.build')
+    def test_main_throw_build_except(self, mock_build, mock_prune_cache,
+                                     mock_build_cache, mock_unpack_cache,
+                                     mock_utils, mock_os, mock_os_path,
+                                     mock_subprocess, mock_parser):
+        # setup mock
+        mbc = mock_parser.return_value
+        args = mbc.parse_args.return_value
+        args.debug = True
+        mock_os_path.isdir.return_value = False
+        # execute and assert
+        assert_raises(ApexBuildException, main)
+
+    @patch('apex.build.create_build_parser')
+    @patch('apex.build.subprocess.check_output')
+    @patch('apex.build.os.path')
+    @patch('apex.build.os')
+    @patch('apex.build.utils')
+    @patch('apex.build.unpack_cache')
+    @patch('apex.build.build_cache')
+    @patch('apex.build.prune_cache')
+    @patch('apex.build.build')
+    def test_main_throw_subprocess_except(self, mock_build, mock_prune_cache,
+                                          mock_build_cache, mock_unpack_cache,
+                                          mock_utils, mock_os, mock_os_path,
+                                          mock_subprocess, mock_parser):
+        # setup mock
+        mbc = mock_parser.return_value
+        args = mbc.parse_args.return_value
+        args.debug = False
+        mock_os_path.isdir.return_value = True
+        mock_subprocess.side_effect = subprocess.CalledProcessError('cmd', 1)
+        # execute and assert
+        assert_raises(subprocess.CalledProcessError, main)
diff --git a/apex/tests/test_apex_build_utils.py b/apex/tests/test_apex_build_utils.py
new file mode 100644
index 00000000..d9d542d6
--- /dev/null
+++ b/apex/tests/test_apex_build_utils.py
@@ -0,0 +1,167 @@
+##############################################################################
+# Copyright (c) 2016 Dan Radez (dradez@redhat.com) (Red Hat)
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import argparse
+import git
+
+from mock import patch
+
+from apex import build_utils
+
+from nose.tools import (
+    assert_is_instance,
+    assert_raises)
+
+
+class TestBuildUtils(object):
+    @classmethod
+    def setup_class(cls):
+        """This method is run once for each class before any tests are run"""
+        cls.repo_name = 'test_repo'
+        cls.repo_url = 'https://gerrit.opnfv.org/gerrit'
+        cls.change_id = 'I5c1b3ded249c4e3c558be683559e03deb27721b8'
+        cls.commit_id = '8669c687a75a00106b055add49b82fee826b8fe8'
+        cls.sys_argv = ['deploy.py', 'clone-fork', '-r', cls.repo_name]
+        cls.sys_argv_debug = ['deploy.py', '--debug']
+
+    @classmethod
+    def teardown_class(cls):
+        """This method is run once for each class _after_ all tests are run"""
+
+    def setup(self):
+        """This method is run once before _each_ test method is executed"""
+
+    def teardown(self):
+        """This method is run once after _each_ test method is executed"""
+
+    def test_main_wo_func_w_debug(self):
+        with patch.object(build_utils.sys, 'argv', self.sys_argv_debug):
+            # no func argument (clone-fork) throws sys exit
+            assert_raises(SystemExit, build_utils.main)
+
+    @patch('apex.build_utils.get_parser')
+    @patch('apex.build_utils.os.path')
+    @patch('apex.build_utils.os')
+    @patch('apex.build_utils.shutil')
+    @patch('apex.build_utils.GerritRestAPI')
+    @patch('apex.build_utils.git.Repo')
+    def test_clone_fork(self, mock_git_repo, mock_gerrit_api,
+                        mock_shutil, mock_os, mock_path, mock_get_parser):
+        # setup mock
+        args = mock_get_parser.parse_args.return_value
+        args.repo = self.repo_name
+        args.url = self.repo_url
+        args.branch = 'master'
+        x = mock_git_repo.return_value
+        xx = x.commit.return_value
+        xx.message = '{}: {}'.format(self.repo_name, self.change_id)
+        mock_path.exists.return_value = True
+        mock_path.isdir.return_value = True
+        y = mock_gerrit_api.return_value
+        y.get.return_value = {'status': 'TEST',
+                              'current_revision': 'revision',
+                              'revisions':
+                                  {'revision': {'ref': self.commit_id}}}
+        z = mock_git_repo.clone_from.return_value
+        # execute
+        build_utils.clone_fork(args)
+        # check results
+        mock_path.exists.assert_called_with(self.repo_name)
+        mock_path.isdir.assert_called_with(self.repo_name)
+        mock_shutil.rmtree.assert_called_with(self.repo_name)
+        mock_git_repo.clone_from.assert_called_with('{}/{}'.
+                                                    format(self.repo_url,
+                                                           self.repo_name),
+                                                    self.repo_name,
+                                                    b='master')
+        z.git.fetch.assert_called_with('{}/{}'.format(self.repo_url,
+                                                      self.repo_name),
+                                       self.commit_id)
+        z.git.checkout.assert_called_with('FETCH_HEAD')
+
+    @patch('apex.build_utils.get_parser')
+    @patch('apex.build_utils.os.path')
+    @patch('apex.build_utils.os')
+    @patch('apex.build_utils.shutil')
+    @patch('apex.build_utils.GerritRestAPI')
+    @patch('apex.build_utils.git.Repo')
+    def test_clone_fork_MERGED(self, mock_git_repo, mock_gerrit_api,
+                               mock_shutil, mock_os, mock_path,
+                               mock_get_parser):
+        # setup mock
+        args = mock_get_parser.parse_args.return_value
+        args.repo = self.repo_name
+        args.url = self.repo_url
+        args.branch = 'master'
+        x = mock_git_repo.return_value
+        xx = x.commit.return_value
+        xx.message = '{}: {}'.format(self.repo_name, self.change_id)
+        mock_path.exists.return_value = True
+        mock_path.isdir.return_value = False
+        y = mock_gerrit_api.return_value
+        y.get.return_value = {'status': 'MERGED',
+                              'current_revision': 'revision',
+                              'revisions':
+                                  {'revision': {'ref': self.commit_id}}}
+        z = mock_git_repo.clone_from.return_value
+        # execute
+        build_utils.clone_fork(args)
+        # check results
+        mock_path.exists.assert_called_with(self.repo_name)
+        mock_os.remove.assert_called_with(self.repo_name)
+        mock_git_repo.clone_from.assert_called_with('{}/{}'.
+                                                    format(self.repo_url,
+                                                           self.repo_name),
+                                                    self.repo_name, b='master')
+        z.git.fetch.assert_not_called()
+        z.git.checkout.assert_not_called()
+
+    @patch('apex.build_utils.get_parser')
+    @patch('apex.build_utils.GerritRestAPI')
+    @patch('apex.build_utils.git.Repo')
+    def test_clone_fork_invalid_git_repo(self, mock_git_repo,
+                                         mock_gerrit_api, mock_get_parser):
+        # setup mock
+        args = mock_get_parser.parse_args.return_value
+        args.repo = self.repo_name
+        args.url = self.repo_url
+        args.branch = 'master'
+        mock_git_repo.side_effect = git.exc.InvalidGitRepositoryError()
+        build_utils.clone_fork(args)
+
+    @patch('apex.build_utils.get_parser')
+    @patch('apex.build_utils.GerritRestAPI')
+    @patch('apex.build_utils.git.Repo')
+    def test_clone_fork_raises_key_error(self, mock_git_repo,
+                                         mock_gerrit_api, mock_get_parser):
+        # setup mock
+        args = mock_get_parser.parse_args.return_value
+        args.repo = self.repo_name
+        args.url = self.repo_url
+        args.branch = 'master'
+        x = mock_git_repo.return_value
+        xx = x.commit.return_value
+        xx.message = '{}: {}'.format(self.repo_name, self.change_id)
+        y = mock_gerrit_api.return_value
+        y.get.return_value = {}
+        # execute & assert
+        assert_raises(KeyError, build_utils.clone_fork, args)
+
+    def test_get_parser(self):
+        assert_is_instance(build_utils.get_parser(), argparse.ArgumentParser)
+
+    @patch('apex.build_utils.get_parser')
+    def test_main(self, mock_get_parser):
+        with patch.object(build_utils.sys, 'argv', self.sys_argv):
+            build_utils.main()
+
+    @patch('apex.build_utils.get_parser')
+    def test_main_debug(self, mock_get_parser):
+        with patch.object(build_utils.sys, 'argv', self.sys_argv_debug):
+            build_utils.main()
diff --git a/build/variables.sh b/build/variables.sh
index a611073c..8d736d30 100644
--- a/build/variables.sh
+++ b/build/variables.sh
@@ -14,7 +14,7 @@ QUAGGA_RPMS_DIR=${BUILD_DIR}/quagga_build_dir
 CACHE_DIR="$(dirname ${BUILD_ROOT})/.cache"
 CACHE_HISTORY=".cache_history"
 PATCHES_DIR="${BUILD_ROOT}/patches"
-BUILD_UTILS="$(dirname ${BUILD_ROOT})/apex/build/build_utils.py"
+BUILD_UTILS="$(dirname ${BUILD_ROOT})/apex/build_utils.py"
 
 
 rdo_images_uri=${RDO_IMAGES_URI:-http://artifacts.opnfv.org/apex/euphrates/rdo}
diff --git a/test-requirements.txt b/test-requirements.txt
index f22863c7..4d4509ea 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -4,3 +4,5 @@ nose # LGPL
 flake8<2.6.0,>=2.5.4 # MIT
 pylint==1.4.5 # GPLv2
 sphinx!=1.3b1,<1.4,>=1.2.1 # BSD
+gitpython #BSD
+pygerrit2 #MIT
-- 
cgit 1.2.3-korg