summaryrefslogtreecommitdiffstats
path: root/dovetail/utils/dovetail_utils.py
blob: 1c4aca9d8963bb2cd9e31b728c9373a6ac656cdd (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
#!/usr/bin/env python
#
# Copyright (c) 2018 grakiss.wanglei@huawei.com and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
#

from __future__ import print_function
import sys
import os
import re
import requests
import subprocess
from collections import Mapping, Set, Sequence
import json
from datetime import datetime
from distutils.version import LooseVersion
import yaml
import python_hosts
import docker
from docker.types import Mount

from dovetail import constants
from dovetail.utils.dovetail_config import DovetailConfig as dt_cfg
from dovetail.utils.openstack_utils import OS_Utils


def exec_log(verbose, logger, msg, level, flush=False):
    """Emit msg via the logger at the given level, or print() without one.

    :param verbose: when False, nothing is emitted at all.
    :param logger: logger object or None; None falls back to print().
    :param msg: text to emit.
    :param level: one of 'info', 'error', 'debug'; anything else is
                  silently ignored when a logger is present.
    :param flush: flush stdout after printing (print() path only).
    """
    if not verbose:
        return

    if not logger:
        print(msg)
        if flush:
            sys.stdout.flush()
        return

    emit = {'info': logger.info,
            'error': logger.error,
            'debug': logger.debug}.get(level)
    if emit:
        emit(msg)


def exec_cmd(cmd, logger=None, exit_on_error=False, info=False,
             exec_msg_on=True, err_msg='', verbose=True,
             progress_bar=False):
    """Run a shell command and stream its combined stdout/stderr.

    :param cmd: command line, executed through the shell (shell=True).
    :param logger: optional logger; messages fall back to print() when None.
    :param exit_on_error: call sys.exit(1) when the command fails.
    :param info: log at 'info' level instead of 'debug'.
    :param exec_msg_on: log an "Executing command" message first.
    :param err_msg: custom failure message (default mentions the command).
    :param verbose: suppress all logging when False.
    :param progress_bar: animate a progress bar per output line unless the
                         DEBUG env var is 'true'.
    :return: tuple (returncode, captured output as stripped text).
    """
    msg_err = ("The command '%s' failed." % cmd) if not err_msg else err_msg
    msg_exec = ("Executing command: '%s'" % cmd)
    level = 'info' if info else 'debug'
    if exec_msg_on:
        exec_log(verbose, logger, msg_exec, level)

    p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT)
    stdout = ''
    # Bind unconditionally so the loop never touches unbound names
    # (previously these were only set when progress_bar was True).
    count = 1
    DEBUG = os.getenv('DEBUG')
    for line in iter(p.stdout.readline, b''):
        exec_log(verbose, logger, line.strip().decode('unicode-escape'),
                 level, True)
        # Bug fix: str(line) on Python 3 yields the repr "b'...'" rather
        # than the text; decode the bytes explicitly instead.
        stdout += line.decode('utf-8', 'replace')
        if progress_bar and (DEBUG is None or DEBUG.lower() != 'true'):
            show_progress_bar(count)
            count += 1
    stdout = stdout.strip()
    returncode = p.wait()
    p.stdout.close()

    if returncode != 0:
        exec_log(verbose, logger, msg_err, 'error')
        if exit_on_error:
            sys.exit(1)

    return returncode, stdout


# walkthrough the object, yield path and value

# dual python 2/3 compatibility, inspired by the "six" library
# Bug fix: the tuple previously contained the *string* 'unicode', which
# makes isinstance() raise TypeError on Python 2 for unicode inputs.
# type(u'') is `unicode` on Python 2 and `str` on Python 3, so neither
# branch names an identifier that is undefined on the other version.
string_types = (str, type(u'')) if str is bytes else (str, bytes)
# items = lambda mapping: getattr(mapping, 'items', mapping.items)()


def items(mapping):
    """Return the (key, value) pairs of a mapping.

    The original getattr(mapping, 'items', mapping.items)() always
    resolves the 'items' attribute (the default is evaluated eagerly),
    so this direct call is behaviorally identical.
    """
    return mapping.items()


def objwalk(obj, path=(), memo=None):
    """Recursively walk a nested object, yielding (path, leaf) pairs.

    Mappings descend by key, sequences/sets (except strings) by index.
    The memo set of object ids prevents infinite recursion on cycles;
    a container already on the current descent path yields nothing.
    """
    if memo is None:
        memo = set()

    if isinstance(obj, Mapping):
        step = items
    elif (isinstance(obj, (Sequence, Set))
            and not isinstance(obj, string_types)):
        step = enumerate
    else:
        step = None

    if step is None:
        # Leaf value: report it together with the path that reached it.
        yield path, obj
        return

    if id(obj) in memo:
        return
    memo.add(id(obj))
    for component, child in step(obj):
        for found in objwalk(child, path + (component,), memo):
            yield found
    memo.remove(id(obj))


def get_obj_by_path(obj, dst_path):
    """Return the leaf value located at dst_path inside obj, or None.

    dst_path is a tuple of keys/indices as produced by objwalk().
    """
    for found_path, value in objwalk(obj):
        if found_path == dst_path:
            return value
    return None


def source_env(env_file):
    """Load `export KEY=VALUE` lines from a shell rc file into os.environ.

    Surrounding double or single quotes around key/value are removed.
    Non-export lines are ignored.
    """
    with open(env_file, 'r') as f:
        content = f.readlines()
    for raw_line in content:
        if not raw_line.lstrip().startswith('export'):
            continue
        for key, value in re.findall(r"export (.*)=(.*)", raw_line):
            key = key.strip('\"').strip('\'')
            value = value.strip('\"').strip('\'')
            os.environ[key] = value


def check_https_enabled(logger=None):
    """Return True when OS_AUTH_URL points at an https endpoint.

    Bug fixes: the signature advertises logger=None, but every logger
    call was unguarded (AttributeError when omitted); likewise an unset
    OS_AUTH_URL returned None and crashed on .startswith().
    """
    if logger:
        logger.debug('Checking if https enabled or not...')
    os_auth_url = os.getenv('OS_AUTH_URL', '')
    if os_auth_url.startswith('https'):
        if logger:
            logger.debug('https is enabled')
        return True
    if logger:
        logger.debug('https is not enabled')
    return False


def get_duration(start_date, stop_date, logger):
    """Return the elapsed time between two timestamps as '<m>m<s>s'.

    :param start_date: start time, formatted '%Y-%m-%d %H:%M:%S'.
    :param stop_date: stop time, same format.
    :param logger: used only to record parse failures.
    :return: duration string such as '1m30s', or None on a parse error.

    Bug fixes: Python 3 true division produced strings like '1.5m30s';
    timedelta.seconds also silently dropped whole days, so
    total_seconds() with integer divmod is used instead.
    """
    fmt = '%Y-%m-%d %H:%M:%S'
    try:
        datetime_start = datetime.strptime(start_date, fmt)
        datetime_stop = datetime.strptime(stop_date, fmt)
        delta = int((datetime_stop - datetime_start).total_seconds())
        minutes, seconds = divmod(delta, 60)
        return '%sm%ss' % (minutes, seconds)
    except ValueError as e:
        logger.exception('ValueError: {}'.format(e))
        return None


def show_progress_bar(length):
    """Animate a single-line 'Running ...' indicator on stdout.

    The dot count wraps every 50 calls; carriage returns keep the
    output on one terminal line.
    """
    max_len = 50
    dots = length % max_len
    sys.stdout.write('Running ' + ' ' * max_len + '\r')
    sys.stdout.flush()
    sys.stdout.write('Running ' + '.' * dots + '\r')
    sys.stdout.flush()


def check_docker_version(logger=None):
    """Log the Docker server version; warn when it predates 1.12.3.

    Queries the local Docker daemon via the SDK; a failed query or a
    too-old server both produce an error log entry.
    """
    client = docker.from_env()
    try:
        server_ver = client.version()['Version']
    except Exception:
        server_ver = None
        logger.error('Failed to get Docker server version')
    supported = (server_ver
                 and LooseVersion(server_ver) >= LooseVersion('1.12.3'))
    if supported:
        logger.debug('Docker server version: {}'.format(server_ver))
    else:
        logger.error("Don't support this Docker server version. "
                     "Docker server should be updated to at least 1.12.3.")


def add_hosts_info(ip, hostnames):
    """Append an ipv4 entry mapping ip -> hostnames to /etc/hosts.

    Empty hostnames are filtered out; the call is a no-op when ip is
    empty or no usable hostname remains.

    Fix: the guard now runs before /etc/hosts is parsed, so a no-op
    call no longer reads (or fails on a missing) hosts file.
    """
    filtered_hostnames = [hostname for hostname in hostnames if hostname]
    if not ip or not filtered_hostnames:
        return
    hosts = python_hosts.Hosts(path='/etc/hosts')
    new_entry = python_hosts.HostsEntry(entry_type='ipv4',
                                        address=ip,
                                        names=filtered_hostnames)
    hosts.add([new_entry])
    hosts.write()


def get_hardware_info(logger=None):
    """Collect hardware facts from all pod nodes with `ansible -m setup`.

    Builds an inventory from the pod file, runs ansible to dump per-node
    facts under result_dir, then merges them into one JSON file.
    :return: path of the merged JSON file, or None on any failure.
    """
    config = dt_cfg.dovetail_config
    pod_file = os.path.join(config['config_dir'], config['pod_file'])
    logger.info('Get hardware info of all nodes list in file {} ...'
                .format(pod_file))
    result_dir = config['result_dir']
    info_file_path = os.path.join(result_dir, 'sut_hardware_info')
    all_info_file = os.path.join(result_dir, 'all_hosts_info.json')
    inventory_file = os.path.join(result_dir, 'inventory.ini')

    if not get_inventory_file(pod_file, inventory_file, logger):
        logger.error('Failed to get SUT hardware info.')
        return None

    ansible_cmd = ('cd {} && ansible all -m setup -i {} --tree {}'
                   .format(constants.USERCONF_PATH, inventory_file,
                           info_file_path))
    ret, msg = exec_cmd(ansible_cmd, verbose=False)
    if ret != 0 or not os.path.exists(info_file_path):
        logger.error('Failed to get SUT hardware info.')
        return None

    if not combine_files(info_file_path, all_info_file, logger):
        logger.error('Failed to get all hardware info.')
        return None

    logger.info('Hardware info of all nodes are stored in file {}.'
                .format(all_info_file))
    return all_info_file


def get_inventory_file(pod_file, inventory_file, logger=None):
    """Translate the pod YAML description into an ansible inventory file.

    Each node line carries name, ip, user plus either an ssh password or
    a private key path. Requires every node to define 'password' or
    'key_filename'.
    :return: True on success, False on any error.
    """
    if not os.path.isfile(pod_file):
        logger.error("File {} doesn't exist.".format(pod_file))
        return False
    try:
        with open(pod_file, 'r') as f, open(inventory_file, 'w') as out_f:
            for node in yaml.safe_load(f)['nodes']:
                entry = ('{} ansible_host={} ansible_user={}'
                         .format(node['name'], node['ip'], node['user']))
                if 'password' in node:
                    entry += (' ansible_ssh_pass={}\n'
                              .format(node['password']))
                elif 'key_filename' in node:
                    # The key is expected to have been copied into the
                    # shared config dir as 'id_rsa'.
                    key = os.path.join(dt_cfg.dovetail_config['config_dir'],
                                       'id_rsa')
                    entry += (' ansible_ssh_private_key_file={}\n'
                              .format(key))
                else:
                    logger.error('No password or key_filename in file {}.'
                                 .format(pod_file))
                    return False
                out_f.write(entry)
        logger.debug('Ansible inventory file is {}.'.format(inventory_file))
        return True
    except KeyError as e:
        logger.exception('KeyError {}.'.format(e))
        return False
    except Exception:
        logger.exception('Failed to read file {}.'.format(pod_file))
        return False


def combine_files(file_path, result_file, logger=None):
    """Merge every JSON file under file_path into a single JSON document.

    The merged document maps each file name to its parsed content and is
    written to result_file.
    :return: result_file on success, None on any read/write failure.
    """
    merged = {}
    for name in os.listdir(file_path):
        source = os.path.join(file_path, name)
        try:
            with open(source, 'r') as f:
                merged[name] = json.load(f)
        except Exception:
            logger.error('Failed to read file {}.'.format(source))
            return None
    try:
        with open(result_file, 'w') as out:
            out.write(json.dumps(merged))
    except Exception:
        logger.exception('Failed to write file {}.'.format(result_file))
        return None
    return result_file


def get_openstack_endpoint(logger=None):
    """Record every keystone endpoint into endpoint_info.json.

    For each endpoint the URL, enabled flag, service type and service
    name are collected. TLS verification is disabled when https is in
    use and OS_INSECURE is 'true'.
    :return: list of endpoint dicts, or None on any failure.
    """
    https_enabled = check_https_enabled(logger)
    insecure_env = os.getenv('OS_INSECURE')
    skip_verify = (https_enabled and insecure_env
                   and insecure_env.lower() == 'true')
    os_utils = OS_Utils(verify=False) if skip_verify else OS_Utils()

    res_endpoints, msg_endpoints = os_utils.search_endpoints()
    if not res_endpoints:
        logger.error('Failed to list endpoints. Exception message, {}'
                     .format(msg_endpoints))
        return None

    endpoints_info = []
    for endpoint_data in msg_endpoints:
        info = {'URL': endpoint_data['url'],
                'Enabled': endpoint_data['enabled']}
        res_services, msg_services = os_utils.search_services(
            service_id=endpoint_data['service_id'])
        if not res_services:
            logger.error('Failed to list services. Exception message, {}'
                         .format(msg_services))
            return None
        info['Service Type'] = msg_services[0]['service_type']
        info['Service Name'] = msg_services[0]['name']
        endpoints_info.append(info)

    result_file = os.path.join(dt_cfg.dovetail_config['result_dir'],
                               'endpoint_info.json')
    try:
        with open(result_file, 'w') as f:
            json.dump(endpoints_info, f)
            logger.debug('Record all endpoint info into file {}.'
                         .format(result_file))
            return endpoints_info
    except Exception:
        logger.exception('Failed to write endpoint info into file.')
        return None


def check_cacert_file(cacert, logger=None):
    """Validate the OS_CACERT file: it must exist inside config_dir.

    The config dir is the only path mounted into the test containers,
    so a credential file elsewhere would be invisible to them.
    :return: True when the file is usable, otherwise False.
    """
    if not os.path.isfile(cacert):
        logger.error('OS_CACERT is {}, but the file does not exist.'
                     .format(cacert))
        return False
    config_dir = dt_cfg.dovetail_config['config_dir']
    if os.path.dirname(cacert) != config_dir:
        logger.error('Credential file must be put under {}, '
                     'which can be mounted into other container.'
                     .format(config_dir))
        return False
    return True


def get_hosts_info(logger=None):
    """Load hosts.yaml and register its ip/hostname entries in /etc/hosts.

    Reads the 'hosts_info' mapping (ip -> list of hostnames) from
    hosts.yaml under the config dir, adds each entry to /etc/hosts via
    add_hosts_info(), and returns a dict mapping the space-joined
    hostnames to their ip. Returns an empty dict when the file is
    missing, empty or lacks the 'hosts_info' key.

    Fix: logger.warn() is a deprecated alias; use logger.warning().
    """
    hosts_config = {}
    hosts_config_file = os.path.join(dt_cfg.dovetail_config['config_dir'],
                                     'hosts.yaml')
    if not os.path.isfile(hosts_config_file):
        logger.warning('There is no hosts file {}. This may cause some '
                       'issues with domain name resolution.'
                       .format(hosts_config_file))
        return hosts_config
    with open(hosts_config_file) as f:
        hosts_yaml = yaml.safe_load(f)
        if not hosts_yaml:
            logger.debug('File {} is empty.'.format(hosts_config_file))
            return hosts_config
        hosts_info = hosts_yaml.get('hosts_info', None)
        if not hosts_info:
            logger.error('There is no key hosts_info in file {}'
                         .format(hosts_config_file))
            return hosts_config
        for ip, hostnames in hosts_info.items():
            if not hostnames:
                continue
            add_hosts_info(ip, hostnames)
            names_str = ' '.join(hostname for hostname in hostnames
                                 if hostname)
            if not names_str:
                continue
            hosts_config[names_str] = ip
            logger.debug('Get hosts info {}:{}.'.format(ip, names_str))
    return hosts_config


def read_yaml_file(file_path, logger=None):
    """Parse a YAML file and return its content, or None on any failure.

    Missing files and parse errors are logged; neither raises.
    """
    if not os.path.isfile(file_path):
        logger.error("File {} doesn't exist.".format(file_path))
        return None
    try:
        with open(file_path, 'r') as f:
            return yaml.safe_load(f)
    except Exception as e:
        logger.exception('Failed to read file {}, exception: {}'
                         .format(file_path, e))
        return None


def read_plain_file(file_path, logger=None):
    """Return the full text content of file_path, or None on any failure.

    Missing files and read errors are logged; neither raises.
    """
    if not os.path.isfile(file_path):
        logger.error("File {} doesn't exist.".format(file_path))
        return None
    try:
        with open(file_path, 'r') as f:
            return f.read()
    except Exception as e:
        logger.exception('Failed to read file {}, exception: {}'
                         .format(file_path, e))
        return None


def get_value_from_dict(key_path, input_dict):
    """Return the value of a (possibly nested) key in input_dict.

    key_path must be given in string format with dots,
    for example 'result.dir'.
    Returns None when key_path or input_dict has the wrong type, when
    any key is missing, or when a non-final path component is not a
    dict.

    Bug fixes: falsy-but-present values (0, '', [], False) are now
    returned instead of being collapsed to None, and a non-dict
    intermediate value no longer raises AttributeError.
    """
    if not isinstance(key_path, str) or not isinstance(input_dict, dict):
        return None
    current = input_dict
    for key in key_path.split('.'):
        if not isinstance(current, dict):
            # A remaining path component cannot be resolved.
            return None
        current = current.get(key)
        if current is None:
            return None
    return current


def get_openstack_info(logger):
    """Collect software and hardware info of an OpenStack SUT.

    Software info is the endpoint list; hardware info covers every
    node's cpu, disk, etc. Sources the openrc credentials first and
    silently returns when the openrc file is absent.
    """
    openrc = os.path.join(dt_cfg.dovetail_config['config_dir'],
                          dt_cfg.dovetail_config['env_file'])
    if not os.path.isfile(openrc):
        logger.error('File {} does not exist.'.format(openrc))
        return
    source_env(openrc)
    for collect in (get_hosts_info, get_openstack_endpoint,
                    get_hardware_info):
        collect(logger)


def push_results_to_db(case_name, details, start_date, stop_date, logger):
    """Push results to OPNFV TestAPI DB when running with OPNFV CI jobs.

    All results can be filtered with TestAPI:
    http://testresults.opnfv.org/test/#/results
    :return: True when the POST succeeded, False otherwise.
    """
    try:
        url = os.getenv('TEST_DB_URL')
        data = {
            'project_name': 'dovetail',
            'case_name': case_name,
            'details': details,
            'start_date': start_date,
            'stop_date': stop_date,
            'criteria': details['criteria'] if details else 'FAIL',
            'installer': os.getenv('INSTALLER_TYPE'),
            'scenario': os.getenv('DEPLOY_SCENARIO'),
            'pod_name': os.getenv('NODE_NAME'),
            'build_tag': os.getenv('BUILD_TAG'),
            'version': os.getenv('VERSION'),
        }
        req = requests.post(url, data=json.dumps(data, sort_keys=True),
                            headers={'Content-Type': 'application/json'})
        req.raise_for_status()
        logger.debug('The results were successfully pushed to DB.')
        return True
    except Exception:
        logger.exception('The results cannot be pushed to DB.')
        return False


def get_mount_list(project_cfg):
    """Build docker Mount objects from the 'mounts' list of project_cfg.

    Each entry looks like 'target=/path/in/container,source=/path/on/host'.
    :return: (mount_list, message) on success, (None, error) on failure.

    Fix: when project_cfg has no 'mounts' key, get_value_from_dict()
    yields None and the original `for mount in mounts` raised TypeError;
    an empty result is now returned instead.
    """
    mount_list = []
    mounts = get_value_from_dict('mounts', project_cfg)
    if not mounts:
        return mount_list, 'Successfully to get mount list.'

    for mount in mounts:
        if not mount:
            continue
        param_dict = {}
        for param in mount.split(','):
            key_word = param.split('=')

            if len(key_word) != 2:
                return None, 'Error mount {}.'.format(mount)

            param_dict[key_word[0]] = key_word[1]
        try:
            mount_list.append(Mount(target=param_dict['target'],
                                    source=param_dict['source'],
                                    type='bind'))
        except Exception as e:
            return None, e

    return mount_list, 'Successfully to get mount list.'