Diffstat (limited to 'tools/lma/logs')
-rw-r--r--   tools/lma/logs/dockerfile/elastalert/Dockerfile          23
-rw-r--r--   tools/lma/logs/dockerfile/fluentd/Dockerfile             23
-rw-r--r--   tools/lma/logs/jupyter-notebooks/Trend-Analysis.ipynb   308
3 files changed, 354 insertions, 0 deletions
diff --git a/tools/lma/logs/dockerfile/elastalert/Dockerfile b/tools/lma/logs/dockerfile/elastalert/Dockerfile
new file mode 100644
index 00000000..3304ad17
--- /dev/null
+++ b/tools/lma/logs/dockerfile/elastalert/Dockerfile
@@ -0,0 +1,23 @@
+# Copyright 2020 Adarsh yadav, Aditya Srivastava
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+FROM python:alpine
+RUN apk --update upgrade && \
+    apk add gcc libffi-dev musl-dev python3-dev openssl-dev tzdata libmagic && \
+    rm -rf /var/cache/apk/*
+RUN pip install elastalert &&\
+    apk del gcc libffi-dev musl-dev python3-dev openssl-dev
+RUN mkdir -p /opt/elastalert && \
+    mkdir -p /opt/elastalert/rules
+WORKDIR /opt/elastalert
\ No newline at end of file
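As a usage sketch (not part of this change), the elastalert image above could be built and started with the Docker SDK for Python. The image tag, the mounted host path, and the elastalert command line are illustrative assumptions; the commit itself only adds the Dockerfile and creates /opt/elastalert/rules inside the image.

# Illustrative only: build and run the elastalert image defined above.
# The tag "lma/elastalert:latest", the host path /srv/elastalert and the
# config.yaml name are assumptions, not part of the commit.
import docker

docker_client = docker.from_env()

# Build from the directory that contains the Dockerfile added above.
image, _ = docker_client.images.build(
    path="tools/lma/logs/dockerfile/elastalert",
    tag="lma/elastalert:latest",
)

# Mount a host directory over /opt/elastalert so elastalert can find its
# config.yaml and rule files, then start the container in the background.
docker_client.containers.run(
    "lma/elastalert:latest",
    command="elastalert --verbose --config /opt/elastalert/config.yaml",
    volumes={"/srv/elastalert": {"bind": "/opt/elastalert", "mode": "rw"}},
    detach=True,
)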
diff --git a/tools/lma/logs/dockerfile/fluentd/Dockerfile b/tools/lma/logs/dockerfile/fluentd/Dockerfile
new file mode 100644
index 00000000..19dea0f8
--- /dev/null
+++ b/tools/lma/logs/dockerfile/fluentd/Dockerfile
@@ -0,0 +1,23 @@
+# Copyright 2020 Adarsh yadav, Aditya Srivastava
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+FROM fluent/fluentd:v1.11.0-debian-1.0
+USER root
+RUN gem sources --add https://rubygems.org/
+RUN apt-get update \
+    && gem install fluent-plugin-elasticsearch \
+    && gem install elasticsearch-xpack\
+    && gem install fluent-plugin-rewrite-tag-filter\
+    && gem install fluent-plugin-dio
+USER fluent
\ No newline at end of file
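A quick sanity check for the fluentd image (again an assumption-laden sketch, not part of the commit) is to build it and list the installed gems to confirm the plugins baked in above are present. The tag and build-context path are assumptions.

# Illustrative only: confirm the gems installed by the fluentd Dockerfile above.
import docker

docker_client = docker.from_env()
docker_client.images.build(
    path="tools/lma/logs/dockerfile/fluentd",
    tag="lma/fluentd:latest",
)

# fluent-gem ships with the fluent/fluentd base image; its output is returned
# as bytes when the container runs in the foreground.
gems = docker_client.containers.run(
    "lma/fluentd:latest",
    command="fluent-gem list",
    remove=True,
)
for line in gems.decode().splitlines():
    if line.startswith(("fluent-plugin", "elasticsearch")):
        print(line)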
diff --git a/tools/lma/logs/jupyter-notebooks/Trend-Analysis.ipynb b/tools/lma/logs/jupyter-notebooks/Trend-Analysis.ipynb
new file mode 100644
index 00000000..1bc770a1
--- /dev/null
+++ b/tools/lma/logs/jupyter-notebooks/Trend-Analysis.ipynb
@@ -0,0 +1,308 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Trend Analysis\n",
+    "##### Contributor:\n",
+    "\n",
+    "- Adarsh Yadav <adiyadav0509@gmail.com> "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import pandas as pd\n",
+    "import matplotlib.pyplot as plt\n",
+    "import seaborn as sns\n",
+    "import matplotlib.dates as mdates\n",
+    "import numpy as np\n",
+    "import io \n",
+    "\n",
+    "from elasticsearch import Elasticsearch\n",
+    "from elasticsearch_dsl import Search\n",
+    "from elasticsearch.connection import create_ssl_context\n",
+    "import csv\n",
+    "import ssl\n",
+    "import urllib3\n",
+    "import os"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Enter foldername and index"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#Give folder name\n",
+    "# foldername = \"results_2020-06-12_06-47-56\"\n",
+    "foldername = \"result-test1\"\n",
+    "#Give index name - \"node1*\" or \"node4*\"\n",
+    "index = \"node4*\""
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "ssl_context = create_ssl_context()\n",
+    "ssl_context.check_hostname = False\n",
+    "ssl_context.verify_mode = ssl.CERT_NONE\n",
+    "urllib3.disable_warnings()\n",
+    "client = Elasticsearch(['https://elasticsearch:password123@10.10.120.211:31111'],verify_certs=False,ssl_context=ssl_context)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Trex"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "filename = \"/tmp/\"+foldername+\"/trex-liveresults-counts.dat\"\n",
+    "s = Search(index=index).using(client).query(\"exists\", field=\"ts\").query(\"match_phrase\", log_path=filename)\n",
+    "\n",
+    "trex = pd.DataFrame()\n",
+    "trex_data = dict()\n",
+    "for hits in s.scan():\n",
+    "    trex_data['ts'] = hits.ts\n",
+    "    trex_data['rx_pkts'] = hits.rx_pkts\n",
+    "    trex_data['rx_port'] = hits.rx_port\n",
+    "    trex_data['tx_port'] = hits.tx_port\n",
+    "    trex = trex.append(trex_data, ignore_index=True)\n",
+    "if not trex.empty:\n",
+    "    #convert 'ts' to datetime\n",
+    "    trex['ts'] = pd.to_datetime(trex['ts'],unit='s')\n",
+    "    trex_grp = trex.groupby('rx_port')\n",
+    "    trex_rx_0 = trex_grp.get_group(0.0) \n",
+    "    trex_rx_1 = trex_grp.get_group(1.0) \n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if not trex.empty:\n",
+    "    fig, ax = plt.subplots(2,figsize=(16, 10))\n",
+    "    ax[0].plot(trex_rx_0['ts'],\n",
+    "               trex_rx_0['rx_pkts'],\n",
+    "               'tab:orange')\n",
+    "    ax[0].title.set_text(\"At rx_port=0 & tx_port=1\")\n",
+    "    ax[0].set(xlabel=\"timestamp\")\n",
+    "    ax[0].set(ylabel=\"rx_pkts\")\n",
+    "\n",
+    "    ax[1].plot(trex_rx_1['ts'],\n",
+    "               trex_rx_1['rx_pkts'],\n",
+    "               'tab:green')\n",
+    "    ax[1].title.set_text(\"At rx_port=1 & tx_port=0\")\n",
+    "    ax[1].set(xlabel=\"timestamp\")\n",
+    "    ax[1].set(ylabel=\"rx_pkts\")\n",
+    "\n",
+    "    #change date format\n",
+    "    myFmt = mdates.DateFormatter('%Y-%m-%d %H:%M:%S')\n",
+    "    for i in range(2):\n",
+    "        ax[i].xaxis.set_major_formatter(myFmt) \n",
+    "    plt.show()\n",
+    "else:\n",
+    "    print(\"No data Found\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Spirent"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "filename = \"/tmp/\"+foldername+\"/stc-liveresults.dat.rx\"\n",
+    "s = Search(index=index).using(client).query(\"exists\", field=\"ts\").query(\"match_phrase\", log_path=filename)\n",
+    "\n",
+    "spirent = pd.DataFrame()\n",
+    "spirent_data = dict()\n",
+    "for hits in s.scan():\n",
+    "    spirent_data['ts'] = hits.ts\n",
+    "    spirent_data['RxPrt'] = hits.RxPrt\n",
+    "    spirent_data['FrCnt'] = hits.FrCnt\n",
+    "    spirent = spirent.append(spirent_data, ignore_index=True)\n",
+    "if not spirent.empty:\n",
+    "    #convert 'ts' to datetime\n",
+    "    spirent['ts'] = pd.to_datetime(spirent['ts'],unit='s')\n",
+    "    spirent_grp = spirent.groupby('RxPrt')\n",
+    "    spirent_rx_1 = spirent_grp.get_group('Port //1/1') \n",
+    "    spirent_rx_2 = spirent_grp.get_group('Port //1/2') "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if not spirent.empty:\n",
+    "    fig, ax = plt.subplots(2,figsize=(16, 10))\n",
+    "    ax[0].plot(spirent_rx_1['ts'],\n",
+    "               spirent_rx_1['FrCnt'],\n",
+    "               'tab:orange')\n",
+    "    ax[0].title.set_text(\"At RxPrt=//1/1\")\n",
+    "    ax[0].set(xlabel=\"timestamp\")\n",
+    "    ax[0].set(ylabel=\"FrCnt\")\n",
+    "\n",
+    "    ax[1].plot(spirent_rx_2['ts'],\n",
+    "               spirent_rx_2['FrCnt'],\n",
+    "               'tab:green')\n",
+    "    ax[1].title.set_text(\"At RxPrt=//1/2\")\n",
+    "    ax[1].set(xlabel=\"timestamp\")\n",
+    "    ax[1].set(ylabel=\"FrCnt\")\n",
+    "\n",
+    "    #change date format\n",
+    "    myFmt = mdates.DateFormatter('%Y-%m-%d %H:%M:%S')\n",
+    "    for i in range(2):\n",
+    "        ax[i].xaxis.set_major_formatter(myFmt) \n",
+    "    plt.show()\n",
+    "else:\n",
+    "    print(\"No data Found\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Ixia"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "filename = \"/tmp/\"+foldername+\"/Traffic Item Statistics.csv\"\n",
+    "s = Search(index=index).using(client).query(\"exists\", field=\"msg\").query(\"match_phrase\", log_path=filename)\n",
+    "\n",
+    "for hits in s.scan():\n",
+    "    with open('./ixia-traffic.csv', 'a+') as f:\n",
+    "        f.write(hits.msg+\"\\n\")\n",
+    "    \n",
+    "ixia = pd.DataFrame()\n",
+    "if os.path.exists('./ixia-traffic.csv'):\n",
+    "    ixia = pd.read_csv('./ixia-traffic.csv')\n",
+    "    os.remove(f.name)\n",
+    "    f.close()\n",
+    "if not ixia.empty:\n",
+    "    ixia = ixia[['~ElapsedTime','Traffic Item 1:Frames Delta','Traffic Item 1:Loss %']].astype(float)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "if not ixia.empty:\n",
+    "    fig, ax = plt.subplots(2,figsize=(16, 10))\n",
+    "    ax[0].plot(ixia['~ElapsedTime'],\n",
+    "               ixia['Traffic Item 1:Frames Delta'],\n",
+    "               'tab:orange')\n",
+    "    ax[0].set(xlabel=\"Elapsed Time\")\n",
+    "    ax[0].set(ylabel=\"Frames Delta\")\n",
+    "\n",
+    "    ax[1].plot(ixia['~ElapsedTime'],\n",
+    "               ixia['Traffic Item 1:Loss %'],\n",
+    "               'tab:green')\n",
+    "    ax[1].set(xlabel=\"Elapsed Time\")\n",
+    "    ax[1].set(ylabel=\"Loss %\")\n",
+    "\n",
+    "    plt.show()\n",
+    "else:\n",
+    "    print(\"No data Found\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Time Analysis"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "filename = \"/tmp/\"+foldername+\"/\"\n",
+    "s = Search(index=index).using(client).query(\"exists\", field=\"setup_duration\").query(\"match_phrase\", log_path=filename)\n",
+    "for hits in s.scan():\n",
+    "    print(\"Setup duration: \", hits.setup_duration,\"s\")\n",
+    "\n",
+    "s = Search(index=index).using(client).query(\"exists\", field=\"iteration_duration\").query(\"match_phrase\", log_path=filename)\n",
+    "for hits in s.scan():\n",
+    "    print(\"Iteration duration: \", hits.iteration_duration,\"s\")\n",
+    "\n",
+    "s = Search(index=index).using(client).query(\"exists\", field=\"traffic_duration\").query(\"match_phrase\", log_path=filename)\n",
+    "for hits in s.scan():\n",
+    "    print(\"Traffic duration: \", hits.traffic_duration,\"s\")\n",
+    "\n",
+    "s = Search(index=index).using(client).query(\"exists\", field=\"test_duration\").query(\"match_phrase\", log_path=filename)\n",
+    "for hits in s.scan():\n",
+    "    print(\"Test duration: \", hits.test_duration,\"s\")\n",
+    "\n",
+    "s = Search(index=index).using(client).query(\"exists\", field=\"report_duration\").query(\"match_phrase\", log_path=filename)\n",
+    "for hits in s.scan():\n",
+    "    print(\"Report duration: \", hits.report_duration,\"s\")\n",
+    "    \n",
+    "s = Search(index=index).using(client).query(\"exists\", field=\"vswitch_duration\").query(\"match_phrase\", log_path=filename)\n",
+    "for hits in s.scan():\n",
+    "    print(\"Vswitch starting duration: \", hits.vswitch_duration,\"s\")"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.6.8"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}