# Copyright 2013: Mirantis Inc.
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

import collections
from contextlib import closing
import datetime
import errno
import importlib
import ipaddress
import logging
import math
import os
import pydoc
import random
import re
import signal
import socket
import subprocess
import sys
import threading
import time

import six
from flask import jsonify
from six.moves import configparser
from oslo_serialization import jsonutils
from oslo_utils import encodeutils

import yardstick
from yardstick.common import exceptions


logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)


# Decorator for cli-args
def cliargs(*args, **kwargs):
    def _decorator(func):
        func.__dict__.setdefault('arguments', []).insert(0, (args, kwargs))
        return func
    return _decorator
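
# Illustrative usage of the decorator (the handler name and flag below are
# hypothetical): each call prepends an (args, kwargs) tuple to the function's
# 'arguments' list, which a CLI layer can later feed to argparse.
#   @cliargs("--name", help="task name")
#   def do_start(args):
#       ...
#   # do_start.arguments == [(("--name",), {"help": "task name"})]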


def itersubclasses(cls, _seen=None):
    """Generator over all subclasses of a given class in depth first order."""

    if not isinstance(cls, type):
        raise TypeError("itersubclasses must be called with "
                        "new-style classes, not %.100r" % cls)
    _seen = _seen or set()
    try:
        subs = cls.__subclasses__()
    except TypeError:   # fails only when cls is type
        subs = cls.__subclasses__(cls)
    for sub in subs:
        if sub not in _seen:
            _seen.add(sub)
            yield sub
            for sub in itersubclasses(sub, _seen):
                yield sub
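
# Illustrative example (classes are hypothetical): subclasses are yielded
# depth first, so a grandchild follows its parent immediately.
#   >>> class A(object): pass
#   >>> class B(A): pass
#   >>> class C(B): pass
#   >>> [cls.__name__ for cls in itersubclasses(A)]
#   ['B', 'C']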


def import_modules_from_package(package, raise_exception=False):
    """Import modules given a package name

    :param package: full package name. For example: rally.deploy.engines
    :param raise_exception: when True, re-raise import errors instead of
                            only logging them
    """
    yardstick_root = os.path.dirname(os.path.dirname(yardstick.__file__))
    path = os.path.join(yardstick_root, *package.split('.'))
    for root, _, files in os.walk(path):
        matches = (filename for filename in files if filename.endswith('.py')
                   and not filename.startswith('__'))
        new_package = os.path.relpath(root, yardstick_root).replace(os.sep,
                                                                    '.')
        module_names = set(
            '{}.{}'.format(new_package, filename.rsplit('.py', 1)[0])
            for filename in matches)
        # Find modules which haven't already been imported
        missing_modules = module_names.difference(sys.modules)
        logger.debug('Importing modules: %s', missing_modules)
        for module_name in missing_modules:
            try:
                importlib.import_module(module_name)
            except (ImportError, SyntaxError) as exc:
                if raise_exception:
                    raise exc
                logger.exception('Unable to import module %s', module_name)


NON_NONE_DEFAULT = object()


def get_key_with_default(data, key, default=NON_NONE_DEFAULT):
    value = data.get(key, default)
    if value is NON_NONE_DEFAULT:
        raise KeyError(key)
    return value


def make_dict_from_map(data, key_map):
    return {dest_key: get_key_with_default(data, src_key, default)
            for dest_key, (src_key, default) in key_map.items()}


def makedirs(d):
    try:
        os.makedirs(d)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise


def remove_file(path):
    try:
        os.remove(path)
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise


def execute_command(cmd, **kwargs):
    exec_msg = "Executing command: '%s'" % cmd
    logger.debug(exec_msg)

    output = subprocess.check_output(cmd.split(), **kwargs)
    return encodeutils.safe_decode(output, incoming='utf-8').split(os.linesep)


def source_env(env_file):
    p = subprocess.Popen(". %s; env" % env_file, stdout=subprocess.PIPE,
                         shell=True)
    output = p.communicate()[0]

    # the output may be of binary_type; decode it so it can be split and
    # parsed as text
    if isinstance(output, six.binary_type):
        output = encodeutils.safe_decode(output)
    env = dict(line.split('=', 1) for line in output.splitlines() if '=' in line)
    os.environ.update(env)
    return env


def read_json_from_file(path):
    with open(path, 'r') as f:
        j = f.read()
    # don't use jsonutils.load(); it conflicts with already-decoded input
    return jsonutils.loads(j)


def write_json_to_file(path, data, mode='w'):
    with open(path, mode) as f:
        jsonutils.dump(data, f)


def write_file(path, data, mode='w'):
    with open(path, mode) as f:
        f.write(data)


def parse_ini_file(path):
    parser = configparser.ConfigParser()

    try:
        files = parser.read(path)
    except configparser.MissingSectionHeaderError:
        logger.exception('invalid file type')
        raise
    else:
        if not files:
            raise RuntimeError('file does not exist')

    try:
        default = {k: v for k, v in parser.items('DEFAULT')}
    except configparser.NoSectionError:
        default = {}

    config = dict(DEFAULT=default,
                  **{s: {k: v for k, v in parser.items(
                      s)} for s in parser.sections()})

    return config


def get_port_mac(sshclient, port):
    cmd = "ifconfig |grep HWaddr |grep %s |awk '{print $5}' " % port
    _, stdout, _ = sshclient.execute(cmd, raise_on_error=True)

    return stdout.rstrip()


def get_port_ip(sshclient, port):
    cmd = "ifconfig %s |grep 'inet addr' |awk '{print $2}' " \
        "|cut -d ':' -f2 " % port
    _, stdout, _ = sshclient.execute(cmd, raise_on_error=True)

    return stdout.rstrip()


def flatten_dict_key(data):
    next_data = {}

    # use list, because iterable is too generic
    if not any(isinstance(v, (collections.Mapping, list))
               for v in data.values()):
        return data

    for k, v in data.items():
        if isinstance(v, collections.Mapping):
            for n_k, n_v in v.items():
                next_data["%s.%s" % (k, n_k)] = n_v
        # use list because iterable is too generic
        elif isinstance(v, collections.Iterable) and not isinstance(v, six.string_types):
            for index, item in enumerate(v):
                next_data["%s%d" % (k, index)] = item
        else:
            next_data[k] = v

    return flatten_dict_key(next_data)
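
# Illustrative example: nested mappings become dotted keys and lists become
# indexed keys, applied recursively until the dict is flat.
#   >>> flatten_dict_key({'a': {'b': 1}, 'c': [4, 5]})
#   {'a.b': 1, 'c0': 4, 'c1': 5}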


def translate_to_str(obj):
    if isinstance(obj, collections.Mapping):
        return {str(k): translate_to_str(v) for k, v in obj.items()}
    elif isinstance(obj, list):
        return [translate_to_str(ele) for ele in obj]
    elif isinstance(obj, six.text_type):
        return str(obj)
    return obj


def result_handler(status, data):
    result = {
        'status': status,
        'result': data
    }
    return jsonify(result)


def change_obj_to_dict(obj):
    dic = {}
    for k, v in vars(obj).items():
        try:
            vars(v)
        except TypeError:
            dic.update({k: v})
    return dic


def set_dict_value(dic, keys, value):
    return_dic = dic

    key_list = keys.split('.')
    for index, key in enumerate(key_list):
        return_dic.setdefault(key, {})
        # compare by position rather than by name so that repeated key
        # segments (e.g. "a.b.a") are handled correctly
        if index == len(key_list) - 1:
            return_dic[key] = value
        else:
            return_dic = return_dic[key]
    return dic
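
# Illustrative example: dotted keys create nested dictionaries as needed.
#   >>> set_dict_value({}, 'a.b.c', 1)
#   {'a': {'b': {'c': 1}}}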


def get_free_port(ip):
    with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
        port = random.randint(5000, 10000)
        while s.connect_ex((ip, port)) == 0:
            port = random.randint(5000, 10000)
        return port


def mac_address_to_hex_list(mac):
    try:
        octets = ["0x{:02x}".format(int(elem, 16)) for elem in mac.split(':')]
    except ValueError:
        raise exceptions.InvalidMacAddress(mac_address=mac)
    # every octet must render as exactly one byte ("0xNN"); reject a wrong
    # octet count or any octet that is too long
    if len(octets) != 6 or any(len(octet) != 4 for octet in octets):
        raise exceptions.InvalidMacAddress(mac_address=mac)
    return octets
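
# Illustrative example:
#   >>> mac_address_to_hex_list('00:1e:67:0f:f0:0a')
#   ['0x00', '0x1e', '0x67', '0x0f', '0xf0', '0x0a']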


def make_ipv4_address(ip_addr):
    return ipaddress.IPv4Address(six.text_type(ip_addr))


def safe_ip_address(ip_addr):
    """ get ip address version v6 or v4 """
    try:
        return ipaddress.ip_address(six.text_type(ip_addr))
    except ValueError:
        logging.error("%s is not valid", ip_addr)
        return None


def get_ip_version(ip_addr):
    """ get ip address version v6 or v4 """
    try:
        address = ipaddress.ip_address(six.text_type(ip_addr))
    except ValueError:
        logging.error("%s is not valid", ip_addr)
        return None
    else:
        return address.version


def make_ip_addr(ip, mask):
    """
    :param ip[str]: ip address
    :param mask[str]: netmask, either a prefix length (e.g. "24") or a
                      dotted netmask (e.g. "255.255.255.0")
    :return: IPv4Interface object, or None if the input is invalid
    """
    try:
        return ipaddress.ip_interface(six.text_type('/'.join([ip, mask])))
    except (TypeError, ValueError):
        # None so we can skip later
        return None


def ip_to_hex(ip_addr, separator=''):
    try:
        address = ipaddress.ip_address(six.text_type(ip_addr))
    except ValueError:
        logging.error("%s is not valid", ip_addr)
        return ip_addr

    if address.version != 4:
        return ip_addr

    if not separator:
        return '{:08x}'.format(int(address))

    return separator.join('{:02x}'.format(octet) for octet in address.packed)
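
# Illustrative example:
#   >>> ip_to_hex('192.168.10.1')
#   'c0a80a01'
#   >>> ip_to_hex('192.168.10.1', separator=' ')
#   'c0 a8 0a 01'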


def get_mask_from_ip_range(ip_low, ip_high):
    _ip_low = ipaddress.ip_address(ip_low)
    _ip_high = ipaddress.ip_address(ip_high)
    _ip_low_int = int(_ip_low)
    _ip_high_int = int(_ip_high)
    return _ip_high.max_prefixlen - (_ip_high_int ^ _ip_low_int).bit_length()
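
# Illustrative example: XOR of the range boundaries yields the host bits, so
# the prefix length is max_prefixlen minus their bit length.
#   >>> get_mask_from_ip_range('192.168.1.0', '192.168.1.255')
#   24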


def try_int(s, *args):
    """Convert to integer if possible."""
    try:
        return int(s)
    except (TypeError, ValueError):
        return args[0] if args else s
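
# Illustrative example for try_int():
#   >>> try_int('42')
#   42
#   >>> try_int('forty-two', 0)
#   0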


class SocketTopology(dict):

    @classmethod
    def parse_cpuinfo(cls, cpuinfo):
        socket_map = {}

        lines = cpuinfo.splitlines()

        core_details = []
        core_lines = {}
        for line in lines:
            if line.strip():
                name, value = line.split(":", 1)
                core_lines[name.strip()] = try_int(value.strip())
            else:
                core_details.append(core_lines)
                core_lines = {}

        for core in core_details:
            socket_map.setdefault(core["physical id"], {}).setdefault(
                core["core id"], {})[core["processor"]] = (
                core["processor"], core["core id"], core["physical id"])

        return cls(socket_map)

    def sockets(self):
        return sorted(self.keys())

    def cores(self):
        return sorted(core for cores in self.values() for core in cores)

    def processors(self):
        return sorted(
            proc for cores in self.values() for procs in cores.values() for
            proc in procs)
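
# Illustrative usage (the cpuinfo_text variable is hypothetical and would
# normally hold the contents of /proc/cpuinfo):
#   topo = SocketTopology.parse_cpuinfo(cpuinfo_text)
#   topo.sockets()     # sorted physical socket ids, e.g. [0, 1]
#   topo.cores()       # sorted core ids across all sockets
#   topo.processors()  # sorted logical processor ids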


def config_to_dict(config):
    return {section: dict(config.items(section)) for section in
            config.sections()}


def validate_non_string_sequence(value, default=None, raise_exc=None):
    # NOTE(ralonsoh): refactor this function to check if raise_exc is an
    # Exception. Remove duplicate code, this function is duplicated in this
    # repository.
    if isinstance(value, collections.Sequence) and not isinstance(value, six.string_types):
        return value
    if raise_exc:
        raise raise_exc  # pylint: disable=raising-bad-type
    return default


def join_non_strings(separator, *non_strings):
    try:
        non_strings = validate_non_string_sequence(non_strings[0], raise_exc=RuntimeError)
    except (IndexError, RuntimeError):
        pass
    return str(separator).join(str(non_string) for non_string in non_strings)


def safe_decode_utf8(s):
    """Safe decode a str from UTF"""
    if six.PY3 and isinstance(s, bytes):
        return s.decode('utf-8', 'surrogateescape')
    return s


class ErrorClass(object):

    def __init__(self, *args, **kwargs):
        if 'test' not in kwargs:
            raise RuntimeError

    def __getattr__(self, item):
        raise AttributeError


class Timer(object):
    def __init__(self, timeout=None, raise_exception=True):
        super(Timer, self).__init__()
        self.start = self.delta = None
        self._timeout = int(timeout) if timeout else None
        self._timeout_flag = False
        self._raise_exception = raise_exception

    def _timeout_handler(self, *args):
        self._timeout_flag = True
        if self._raise_exception:
            raise exceptions.TimerTimeout(timeout=self._timeout)
        self.__exit__()

    def __enter__(self):
        self.start = datetime.datetime.now()
        if self._timeout:
            signal.signal(signal.SIGALRM, self._timeout_handler)
            signal.alarm(self._timeout)
        return self

    def __exit__(self, *_):
        if self._timeout:
            signal.alarm(0)
        self.delta = datetime.datetime.now() - self.start

    def __getattr__(self, item):
        return getattr(self.delta, item)

    def __iter__(self):
        self._raise_exception = False
        return self.__enter__()

    def next(self):  # pragma: no cover
        # NOTE(ralonsoh): Python 2 support.
        if not self._timeout_flag:
            return datetime.datetime.now()
        raise StopIteration()

    def __next__(self):  # pragma: no cover
        # NOTE(ralonsoh): Python 3 support.
        return self.next()

    def __del__(self):  # pragma: no cover
        signal.alarm(0)

    def delta_time_sec(self):
        return (datetime.datetime.now() - self.start).total_seconds()
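
# Illustrative usage of Timer (the function names are hypothetical; the
# timeout form relies on SIGALRM and therefore only works in the main thread):
#   with Timer(timeout=10):
#       do_something_slow()      # TimerTimeout is raised after 10 seconds
#
#   with Timer() as t:
#       do_something()
#   elapsed = t.delta            # datetime.timedelta of the elapsed time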


def read_meminfo(ssh_client):
    """Read "/proc/meminfo" file and parse all keys and values"""

    meminfo = six.BytesIO()
    ssh_client.get_file_obj('/proc/meminfo', meminfo)
    lines = meminfo.getvalue().decode('utf-8')
    matches = re.findall(r"([\w\(\)]+):\s+(\d+)( kB)*", lines)
    output = {}
    for match in matches:
        output[match[0]] = match[1]

    return output


def setup_hugepages(ssh_client, size_kb):
    """Setup needed number of hugepages for the size specified"""

    NR_HUGEPAGES_PATH = '/proc/sys/vm/nr_hugepages'
    meminfo = read_meminfo(ssh_client)
    hp_size_kb = int(meminfo['Hugepagesize'])
    hp_number = int(math.ceil(size_kb / float(hp_size_kb)))
    ssh_client.execute(
        'echo %s | sudo tee %s' % (hp_number, NR_HUGEPAGES_PATH))
    hp = six.BytesIO()
    ssh_client.get_file_obj(NR_HUGEPAGES_PATH, hp)
    hp_number_set = int(hp.getvalue().decode('utf-8').splitlines()[0])
    logger.info('Hugepages size (kB): %s, number claimed: %s, number set: %s',
                hp_size_kb, hp_number, hp_number_set)
    return hp_size_kb, hp_number, hp_number_set


def find_relative_file(path, task_path):
    """
    Find a file, trying the absolute form of ``path`` first and then
    ``path`` relative to ``task_path``.

    :param path: file path, absolute or relative
    :param task_path: directory used to resolve a relative path
    :return str: full path to file
    """
    # fixme: create schema to validate all fields have been provided
    for lookup in [os.path.abspath(path), os.path.join(task_path, path)]:
        try:
            with open(lookup):
                return lookup
        except IOError:
            pass
    raise IOError(errno.ENOENT, 'Unable to find {} file'.format(path))


def open_relative_file(path, task_path):
    try:
        return open(path)
    except IOError as e:
        if e.errno == errno.ENOENT:
            return open(os.path.join(task_path, path))
        raise


def wait_until_true(predicate, timeout=60, sleep=1, exception=None):
    """Wait until callable predicate is evaluated as True

    When called from a thread other than the main one, Timer(timeout) cannot
    be used because signal handlers may only be installed in the main thread.
    In that case a simple polling loop with a deadline is used instead.

    :param predicate: (func) callable deciding whether waiting should continue
    :param timeout: (int) how long to wait for the predicate, in seconds
    :param sleep: (int) polling interval, in seconds
    :param exception: exception class to raise on timeout. If None is passed
                      (default) then WaitTimeout is raised.
    """
    if isinstance(threading.current_thread(), threading._MainThread):
        try:
            with Timer(timeout=timeout):
                while not predicate():
                    time.sleep(sleep)
        except exceptions.TimerTimeout:
            if exception and issubclass(exception, Exception):
                raise exception  # pylint: disable=raising-bad-type
            raise exceptions.WaitTimeout
    else:
        with Timer() as timer:
            while timer.delta_time_sec() < timeout:
                if predicate():
                    return
                time.sleep(sleep)
        if exception and issubclass(exception, Exception):
            raise exception  # pylint: disable=raising-bad-type
        raise exceptions.WaitTimeout
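
# Illustrative usage (the readiness file below is hypothetical):
#   wait_until_true(lambda: os.path.exists('/tmp/ready'), timeout=30, sleep=2)
#   # raises exceptions.WaitTimeout if the file does not appear within 30 s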


def send_socket_command(host, port, command):
    """Send a string command to a specific port in a host

    :param host: (str) ip or hostname of the host
    :param port: (int) port number
    :param command: (str) command to send
    :return: 0 if success, error number if error
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    ret = 0
    try:
        err_number = sock.connect_ex((host, int(port)))
        if err_number != 0:
            return err_number
        sock.sendall(six.b(command))
    except Exception:  # pylint: disable=broad-except
        ret = 1
    finally:
        sock.close()
    return ret


def safe_cast(value, type_to_convert, default_value):
    """Convert value to type, in case of error return default_value

    :param value: value to convert
    :param type_to_convert: destination type, given either as a type object
                            or as a string holding the type name
    :param default_value: default value to return on conversion failure
    :return: converted value or default_value
    """
    if isinstance(type_to_convert, type):
        _type = type_to_convert
    else:
        _type = pydoc.locate(type_to_convert)
        if not _type:
            raise exceptions.InvalidType(type_to_convert=type_to_convert)

    try:
        return _type(value)
    except ValueError:
        return default_value
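
# Illustrative example:
#   >>> safe_cast('123', int, 0)
#   123
#   >>> safe_cast('abc', 'int', 0)
#   0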