path: root/dashboard/dashboard/elastic2kibana/visualization_assembler.py
import json

import utility
from common import elastic_access


class VisStateBuilder(object):
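    """Build the Kibana visState structure for a single visualization.

    vis_p supplies the template name and the fields to aggregate on.
    """
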
    def __init__(self, vis_p):
        super(VisStateBuilder, self).__init__()
        self.vis_p = vis_p

    def build(self):
        name = self.vis_p.get('name')
        fields = self.vis_p.get('fields')

        # One aggregation entry per configured field, ids numbered from 1.
        aggs = []
        for index, field in enumerate(fields, start=1):
            aggs.append({
                "id": index,
                "field": field.get("field")
            })

        template = utility.env.get_template('{}.json'.format(name))
        vis = template.render(aggs=aggs)
        return json.loads(vis)


class VisualizationAssembler(object):
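    """Assemble the Kibana visualization for one scenario and publish it."""
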
    def __init__(self,
                 project,
                 case,
                 installer,
                 pod,
                 scenario,
                 vis_p,
                 es_url,
                 es_creds):
        super(VisualizationAssembler, self).__init__()
        self.project = project
        self.case = case
        self.installer = installer
        self.pod = pod
        self.scenario = scenario
        self.vis_p = vis_p
        self.es_url = es_url
        self.es_creds = es_creds
        self._assemble()
        self._publish()

    def _assemble(self):
        visState = VisStateBuilder(self.vis_p).build()
        self.vis_state_title = visState['title']

        vis = {
            "visState": json.dumps(visState),
            "filters": {
                "project_name": self.project,
                "case_name": self.case,
                "installer": self.installer,
                "metric": self.vis_state_title,
                "pod_name": self.pod,
                "scenario": self.scenario
            }
        }

        template = utility.env.get_template('visualization.json')

        self.visualization = json.loads(template.render(vis=vis))
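        # Kibana stores nested saved-object fields such as visState and
        # searchSourceJSON as JSON-encoded strings.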
        utility.dumps(self.visualization,
                      ['visState', 'description', 'uiStateJSON'])
        utility.dumps_2depth(self.visualization,
                             'kibanaSavedObjectMeta',
                             'searchSourceJSON')
        title = self.visualization['title']
        self.id = title.replace(' ', '-').replace('/', '-')

    def _publish(self):
        elastic_access.publish_kibana(self.es_url,
                                      self.es_creds,
                                      'visualization',
                                      self.id,
                                      self.visualization)


class VisualizationsAssembler(object):
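    """Create and publish a VisualizationAssembler for every scenario."""
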
    def __init__(self,
                 project,
                 case,
                 installer,
                 pod,
                 scenarios,
                 vis_p,
                 es_url,
                 es_creds):
        super(VisualizationsAssembler, self).__init__()
        self.visAssemblers = []
        for scenario in scenarios:
            self.visAssemblers.append(VisualizationAssembler(project,
                                                             case,
                                                             installer,
                                                             pod,
                                                             scenario,
                                                             vis_p,
                                                             es_url,
                                                             es_creds))
an class="p">:: bash http_proxy (or HTTP_PROXY) https_proxy (or HTTP_PROXY) no_proxy (or NO_PROXY) RSYNC_PROXY RSYNC_CONNECT_PROG As an example, these are the settings that were put in the user's .bashrc when verifying the proxy build functionality: .. code-block:: bash export RSYNC_PROXY=10.0.0.1:8888 export http_proxy=http://10.0.0.1:8888 export https_proxy=http://10.0.0.1:8888 export no_proxy=localhost,127.0.0.1,.consultron.com,.sock Using a ssh proxy for the rsync connection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If the proxy setup is not allowing the rsync protocol, an alternative solution is to use a SSH tunnel to a machine capable of accessing the outbound port 873. Set the RSYNC_CONNECT_PROG according to the rsync manual page (for example to "ssh <username>@<hostname> nc %H 873") to enable this. Also note that netcat needs to be installed on the remote system! Make sure that the ssh command also refers to the user on the remote system, as the command itself will be run from the Docker build container as the root user (but with the invoking user's SSH keys). Disabling the Ubuntu repo cache if rsync is not allowed ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ During the build phase, a local Ubuntu package repository is fetched from upstream in order to be added to the OPNFV Fuel ISO and for parts of this process rsync is used. If neither of the two available methods for proxying rsync are available, the last resort is to turn off the caching of the Ubuntu packages in the build system. This is done by removing the "f_repobuild" from SUBDIRS in the beginning of the <armband/upstream/fuel/build/f_isoroot/Makefile>. Note! Doing this will require the Fuel master node to have Internet access when installing the ISO artifact built as no Ubuntu package cache will be on the ISO! Note! Armband build system uses git submodules to track fuel and other upstream repos, so in order to apply the above change, one should first initialize the submodules and apply armband patches (only needed once): .. code-block:: bash $ make submodules-init $ make patches-import Configure your build environment -------------------------------- ** Configuring the build environment should not be performed if building standard Brahmaputra release ** Select the versions of the components you want to build by editing the <armband/upstream/fuel/build/config.mk> file. Note! The same observation as above, before altering Makefile, run: .. code-block:: bash $ make submodules-init patches-import Non official build: Selecting which plugins to build ---------------------------------------------------- In order to cut the build time for unofficial builds (made by an individual developer locally), the selection if which Fuel plugins to build (if any) can be done by environment variable "BUILD_FUEL_PLUGINS" prior to building. Only the plugin targets from <armband/upstream/fuel/build/f_isoroot/Makefile> that are specified in the environment variable will then be built. In order to completely disable the building of plugins, the environment variable is set to " ". When using this functionality, the resulting iso file will be prepended with the prefix "unofficial-" to clearly indicate that this is not a full build. This method of plugin selection is not meant to be used from within Gerrit! Note! So far, only ODL plugin was ported to AArch64. 
Building
========

There is only one preferred method available for building Fuel for AArch64:

- A low level method using Make

Low level build method using make
---------------------------------

The low level method is based on Make: from the <armband> directory, invoke <make [target]> (an example invocation is given at the end of this document).

The following targets exist:

- release - this will do the same as:

  - make submodules-clean clean-docker clean-build
  - make submodules-init patches-import build

- none/all/build - this will:

  - Initialize the docker build environment
  - Build Fuel from upstream (as defined by fuel-build/config-spec)
  - Build the OPNFV defined plugins/features from upstream
  - Build the defined additions to fuel (as defined by the structure of this framework)
  - Apply changes and patches to fuel (as defined by the structure of this framework)
  - Reconstruct a fuel .iso image

- submodules-init - initialize git submodules (fuel@OPNFV, fuel-library etc.)
- submodules-clean - clean up git submodules (fuel@OPNFV, fuel-library etc.)
- patches-import - this will apply armband@OPNFV patches to the git submodules
- patches-export - this will export git submodule changes as armband patches
- clean-build - this will remove all artifacts from earlier builds
- clean-docker - this will remove all docker caches from earlier builds

If the build is successful, you will find the generated ISO file in the <armband/upstream/fuel/build/release> subdirectory!

Artifacts
=========

The artifacts produced are:

- <OPNFV_XXXX.iso> - the bootable (x86_64) Fuel for AArch64 image, where XXXX is replaced with the build identity provided to the build system
- <OPNFV_XXXX.iso.txt> - which holds version metadata
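As an example of the low level build method described above, a complete build from a freshly cloned <armband> working directory could look as follows (this is a sketch; it assumes Docker and the other build prerequisites are already in place):

.. code-block:: bash

    $ cd <armband>
    # Initialize the git submodules and apply the armband patches
    $ make submodules-init patches-import
    # Build the ISO (or run 'make release' for a full clean rebuild)
    $ make build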