path: root/tools/lma/logs/jupyter-notebooks
author     adi0509 <adiyadav0509@gmail.com>  2020-08-21 23:24:08 +0530
committer  adi0509 <adiyadav0509@gmail.com>  2020-09-04 22:15:50 +0530
commit     e20d8ba804539203664a542521db5b64826ba05c (patch)
tree       5bcf4e84e19274ff66c2dbc5755fa0634dafe1f6 /tools/lma/logs/jupyter-notebooks
parent     e5eef0ffdf2d281fecf12597041fd8af23d65e42 (diff)
LMA: Deployment of LMA solution (ansible)
Ansible playbooks for LMA deployment

Signed-off-by: Adarsh Yadav <adiyadav0509@gmail.com>
Change-Id: I3ab71e139bc1668577506fb996105a834a9c3c65
Diffstat (limited to 'tools/lma/logs/jupyter-notebooks')
-rw-r--r--  tools/lma/logs/jupyter-notebooks/Trend-Analysis.ipynb  308
1 file changed, 308 insertions, 0 deletions
diff --git a/tools/lma/logs/jupyter-notebooks/Trend-Analysis.ipynb b/tools/lma/logs/jupyter-notebooks/Trend-Analysis.ipynb
new file mode 100644
index 00000000..1bc770a1
--- /dev/null
+++ b/tools/lma/logs/jupyter-notebooks/Trend-Analysis.ipynb
@@ -0,0 +1,308 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Trend Analysis\n",
+ "##### Contributor:\n",
+ "\n",
+ "- Adarsh Yadav <adiyadav0509@gmail.com> "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import pandas as pd\n",
+ "import matplotlib.pyplot as plt\n",
+ "import seaborn as sns\n",
+ "import matplotlib.dates as mdates\n",
+ "import numpy as np\n",
+ "import io \n",
+ "\n",
+ "from elasticsearch import Elasticsearch\n",
+ "from elasticsearch_dsl import Search\n",
+ "from elasticsearch.connection import create_ssl_context\n",
+ "import csv\n",
+ "import ssl\n",
+ "import urllib3\n",
+ "import os"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Enter foldername and index"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#Give folder name\n",
+ "# foldername = \"results_2020-06-12_06-47-56\"\n",
+ "foldername = \"result-test1\"\n",
+ "#Give index name - \"node1*\" or \"node4*\"\n",
+ "index = \"node4*\""
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "ssl_context = create_ssl_context()\n",
+ "ssl_context.check_hostname = False\n",
+ "ssl_context.verify_mode = ssl.CERT_NONE\n",
+ "urllib3.disable_warnings()\n",
+ "client = Elasticsearch(['https://elasticsearch:password123@10.10.120.211:31111'],verify_certs=False,ssl_context=ssl_context)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Trex"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "filename = \"/tmp/\"+foldername+\"/trex-liveresults-counts.dat\"\n",
+ "s = Search(index=index).using(client).query(\"exists\", field=\"ts\").query(\"match_phrase\", log_path=filename)\n",
+ "\n",
+ "trex = pd.DataFrame()\n",
+ "trex_data = dict()\n",
+ "for hits in s.scan():\n",
+ " trex_data['ts'] = hits.ts\n",
+ " trex_data['rx_pkts'] = hits.rx_pkts\n",
+ " trex_data['rx_port'] = hits.rx_port\n",
+ " trex_data['tx_port'] = hits.tx_port\n",
+ " trex = trex.append(trex_data, ignore_index=True)\n",
+ "if not trex.empty:\n",
+ " #convert 'ts' to datetime\n",
+ " trex['ts'] = pd.to_datetime(trex['ts'],unit='s')\n",
+ " trex_grp = trex.groupby('rx_port')\n",
+ " trex_rx_0 = trex_grp.get_group(0.0) \n",
+ " trex_rx_1 = trex_grp.get_group(1.0) \n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "if not trex.empty:\n",
+ " fig, ax = plt.subplots(2,figsize=(16, 10))\n",
+ " ax[0].plot(trex_rx_0['ts'],\n",
+ " trex_rx_0['rx_pkts'],\n",
+ " 'tab:orange')\n",
+ " ax[0].title.set_text(\"At rx_port=0 & tx_port=1\")\n",
+ " ax[0].set(xlabel=\"timestamp\")\n",
+ " ax[0].set(ylabel=\"rx_pkts\")\n",
+ "\n",
+ " ax[1].plot(trex_rx_1['ts'],\n",
+ " trex_rx_1['rx_pkts'],\n",
+ " 'tab:green')\n",
+ " ax[1].title.set_text(\"At rx_port=1 & tx_port=0\")\n",
+ " ax[1].set(xlabel=\"timestamp\")\n",
+ " ax[1].set(ylabel=\"rx_pkts\")\n",
+ "\n",
+ " #change date format\n",
+ " myFmt = mdates.DateFormatter('%Y-%m-%d %H:%M:%S')\n",
+ " for i in range(2):\n",
+ " ax[i].xaxis.set_major_formatter(myFmt) \n",
+ " plt.show()\n",
+ "else:\n",
+ " print(\"No data Found\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Spirent"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "filename = \"/tmp/\"+foldername+\"/stc-liveresults.dat.rx\"\n",
+ "s = Search(index=index).using(client).query(\"exists\", field=\"ts\").query(\"match_phrase\", log_path=filename)\n",
+ "\n",
+ "spirent = pd.DataFrame()\n",
+ "spirent_data = dict()\n",
+ "for hits in s.scan():\n",
+ " spirent_data['ts'] = hits.ts\n",
+ " spirent_data['RxPrt'] = hits.RxPrt\n",
+ " spirent_data['FrCnt'] = hits.FrCnt\n",
+ " spirent = spirent.append(spirent_data, ignore_index=True)\n",
+ "if not spirent.empty:\n",
+ " #convert 'ts' to datetime\n",
+ " spirent['ts'] = pd.to_datetime(spirent['ts'],unit='s')\n",
+ " spirent_grp = spirent.groupby('RxPrt')\n",
+ " spirent_rx_1 = spirent_grp.get_group('Port //1/1') \n",
+ " spirent_rx_2 = spirent_grp.get_group('Port //1/2') "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "if not spirent.empty:\n",
+ " fig, ax = plt.subplots(2,figsize=(16, 10))\n",
+ " ax[0].plot(spirent_rx_1['ts'],\n",
+ " spirent_rx_1['FrCnt'],\n",
+ " 'tab:orange')\n",
+ " ax[0].title.set_text(\"At RxPrt=//1/1\")\n",
+ " ax[0].set(xlabel=\"timestamp\")\n",
+ " ax[0].set(ylabel=\"FrCnt\")\n",
+ "\n",
+ " ax[1].plot(spirent_rx_2['ts'],\n",
+ " spirent_rx_2['FrCnt'],\n",
+ " 'tab:green')\n",
+ " ax[1].title.set_text(\"At RxPrt=//1/2\")\n",
+ " ax[1].set(xlabel=\"timestamp\")\n",
+ " ax[1].set(ylabel=\"FrCnt\")\n",
+ "\n",
+ " #change date format\n",
+ " myFmt = mdates.DateFormatter('%Y-%m-%d %H:%M:%S')\n",
+ " for i in range(2):\n",
+ " ax[i].xaxis.set_major_formatter(myFmt) \n",
+ " plt.show()\n",
+ "else:\n",
+ " print(\"No data Found\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Ixia"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "filename = \"/tmp/\"+foldername+\"/Traffic Item Statistics.csv\"\n",
+ "s = Search(index=index).using(client).query(\"exists\", field=\"msg\").query(\"match_phrase\", log_path=filename)\n",
+ "\n",
+ "for hits in s.scan():\n",
+ " with open('./ixia-traffic.csv', 'a+') as f:\n",
+ " f.write(hits.msg+\"\\n\")\n",
+ " \n",
+ "ixia = pd.DataFrame()\n",
+ "if os.path.exists('./ixia-traffic.csv'):\n",
+ " ixia = pd.read_csv('./ixia-traffic.csv')\n",
+ " os.remove(f.name)\n",
+ " f.close()\n",
+ "if not ixia.empty:\n",
+ " ixia = ixia[['~ElapsedTime','Traffic Item 1:Frames Delta','Traffic Item 1:Loss %']].astype(float)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "if not ixia.empty:\n",
+ " fig, ax = plt.subplots(2,figsize=(16, 10))\n",
+ " ax[0].plot(ixia['~ElapsedTime'],\n",
+ " ixia['Traffic Item 1:Frames Delta'],\n",
+ " 'tab:orange')\n",
+ " ax[0].set(xlabel=\"Elapsed Time\")\n",
+ " ax[0].set(ylabel=\"Frames Delta\")\n",
+ "\n",
+ " ax[1].plot(ixia['~ElapsedTime'],\n",
+ " ixia['Traffic Item 1:Loss %'],\n",
+ " 'tab:green')\n",
+ " ax[1].set(xlabel=\"Elapsed Time\")\n",
+ " ax[1].set(ylabel=\"Loss %\")\n",
+ "\n",
+ " plt.show()\n",
+ "else:\n",
+ " print(\"No data Found\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Time Analysis"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "filename = \"/tmp/\"+foldername+\"/\"\n",
+ "s = Search(index=index).using(client).query(\"exists\", field=\"setup_duration\").query(\"match_phrase\", log_path=filename)\n",
+ "for hits in s.scan():\n",
+ " print(\"Setup duration: \", hits.setup_duration,\"s\")\n",
+ "\n",
+ "s = Search(index=index).using(client).query(\"exists\", field=\"iteration_duration\").query(\"match_phrase\", log_path=filename)\n",
+ "for hits in s.scan():\n",
+ " print(\"Iteration duration: \", hits.iteration_duration,\"s\")\n",
+ "\n",
+ "s = Search(index=index).using(client).query(\"exists\", field=\"traffic_duration\").query(\"match_phrase\", log_path=filename)\n",
+ "for hits in s.scan():\n",
+ " print(\"Traffic duration: \", hits.traffic_duration,\"s\")\n",
+ "\n",
+ "s = Search(index=index).using(client).query(\"exists\", field=\"test_duration\").query(\"match_phrase\", log_path=filename)\n",
+ "for hits in s.scan():\n",
+ " print(\"Test duration: \", hits.test_duration,\"s\")\n",
+ "\n",
+ "s = Search(index=index).using(client).query(\"exists\", field=\"report_duration\").query(\"match_phrase\", log_path=filename)\n",
+ "for hits in s.scan():\n",
+ " print(\"Report duration: \", hits.report_duration,\"s\")\n",
+ " \n",
+ "s = Search(index=index).using(client).query(\"exists\", field=\"vswitch_duration\").query(\"match_phrase\", log_path=filename)\n",
+ "for hits in s.scan():\n",
+ " print(\"Vswitch starting duration: \", hits.vswitch_duration,\"s\")"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.6.8"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}