{
"cells": [
{
"cell_type": "code",
"execution_count": 21,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Collecting regex\n",
" Downloading regex-2020.2.20-cp37-cp37m-win_amd64.whl (271 kB)\n",
"Installing collected packages: regex\n",
"Successfully installed regex-2020.2.20\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"WARNING: Retrying (Retry(total=4, connect=None, read=None, redirect=None, status=None)) after connection broken by 'ReadTimeoutError(\"HTTPSConnectionPool(host='pypi.org', port=443): Read timed out. (read timeout=15)\")': /simple/regex/\n",
"WARNING: Retrying (Retry(total=3, connect=None, read=None, redirect=None, status=None)) after connection broken by 'ReadTimeoutError(\"HTTPSConnectionPool(host='pypi.org', port=443): Read timed out. (read timeout=15)\")': /simple/regex/\n",
" WARNING: Retrying (Retry(total=4, connect=None, read=None, redirect=None, status=None)) after connection broken by 'ReadTimeoutError(\"HTTPSConnectionPool(host='files.pythonhosted.org', port=443): Read timed out. (read timeout=15)\")': /packages/5e/f2/5270ea5c857c1f42d4815687e1df3ab1a0432bfe240ac3ab03c029fad9d5/regex-2020.2.20-cp37-cp37m-win_amd64.whl\n",
" WARNING: Retrying (Retry(total=3, connect=None, read=None, redirect=None, status=None)) after connection broken by 'ReadTimeoutError(\"HTTPSConnectionPool(host='files.pythonhosted.org', port=443): Read timed out. (read timeout=15)\")': /packages/5e/f2/5270ea5c857c1f42d4815687e1df3ab1a0432bfe240ac3ab03c029fad9d5/regex-2020.2.20-cp37-cp37m-win_amd64.whl\n",
" WARNING: Retrying (Retry(total=2, connect=None, read=None, redirect=None, status=None)) after connection broken by 'ReadTimeoutError(\"HTTPSConnectionPool(host='files.pythonhosted.org', port=443): Read timed out. (read timeout=15)\")': /packages/5e/f2/5270ea5c857c1f42d4815687e1df3ab1a0432bfe240ac3ab03c029fad9d5/regex-2020.2.20-cp37-cp37m-win_amd64.whl\n",
" WARNING: Retrying (Retry(total=1, connect=None, read=None, redirect=None, status=None)) after connection broken by 'ReadTimeoutError(\"HTTPSConnectionPool(host='files.pythonhosted.org', port=443): Read timed out. (read timeout=15)\")': /packages/5e/f2/5270ea5c857c1f42d4815687e1df3ab1a0432bfe240ac3ab03c029fad9d5/regex-2020.2.20-cp37-cp37m-win_amd64.whl\n"
]
}
],
"source": [
"!pip install regex"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## AEL PARSER [TEMPLATE MINING]"
]
},
{
"cell_type": "code",
"execution_count": 44,
"metadata": {},
"outputs": [],
"source": [
"import sys\n",
"import regex as re\n",
"import os\n",
"import hashlib\n",
"import pandas as pd\n",
"from datetime import datetime\n",
"from collections import defaultdict\n",
"from functools import reduce\n",
"\n",
"class Event():\n",
" def __init__(self, logidx, Eventstr=\"\"):\n",
" self.id = hashlib.md5(Eventstr.encode('utf-8')).hexdigest()[0:8]\n",
" self.logs = [logidx]\n",
" self.Eventstr = Eventstr\n",
" self.EventToken = Eventstr.split()\n",
" self.merged = False\n",
"\n",
" def refresh_id(self):\n",
" self.id = hashlib.md5(self.Eventstr.encode('utf-8')).hexdigest()[0:8]\n",
"\n",
"\n",
"class LogParser():\n",
" def __init__(self, indir, outdir, log_format, minEventCount=2, merge_percent=1, \n",
" rex=[], keep_para=True):\n",
" self.logformat = log_format\n",
" self.path = indir\n",
" self.savePath = outdir\n",
" self.rex = rex\n",
" self.minEventCount = minEventCount\n",
" self.merge_percent = merge_percent\n",
" self.df_log = None\n",
" self.logname = None\n",
" self.merged_events = []\n",
" self.bins = defaultdict(dict)\n",
" self.keep_para = keep_para\n",
"\n",
" def parse(self, logname):\n",
" start_time = datetime.now()\n",
" print('Parsing file: ' + os.path.join(self.path, logname))\n",
" self.logname = logname\n",
" self.load_data()\n",
" self.tokenize()\n",
" self.categorize()\n",
" self.reconcile()\n",
" self.dump()\n",
" print('Parsing done. [Time taken: {!s}]'.format(datetime.now() - start_time))\n",
"\n",
" def tokenize(self):\n",
" '''\n",
" Put logs into bins according to (# of '<*>', # of token)\n",
" '''\n",
" for idx, log in self.df_log['Content_'].iteritems():\n",
" para_count = 0\n",
"\n",
" tokens = log.split()\n",
" for token in tokens:\n",
" if token == \"<*>\":\n",
" para_count += 1\n",
"\n",
" if 'Logs' not in self.bins[(len(tokens), para_count)]:\n",
" self.bins[(len(tokens), para_count)]['Logs'] = [idx]\n",
" else:\n",
" self.bins[(len(tokens), para_count)]['Logs'].append(idx)\n",
"\n",
" def categorize(self):\n",
" '''\n",
" Abstract templates bin by bin\n",
" '''\n",
" for key in self.bins:\n",
" abin = self.bins[key]\n",
" abin['Events'] = []\n",
"\n",
" for logidx in abin['Logs']:\n",
" log = self.df_log['Content_'].loc[logidx]\n",
" matched = False\n",
" for event in abin['Events']:\n",
" if log == event.Eventstr:\n",
" matched = True\n",
" event.logs.append(logidx)\n",
" break\n",
" if not matched:\n",
" abin['Events'].append(Event(logidx, log))\n",
"\n",
"\n",
" def reconcile(self):\n",
" '''\n",
" Merge events if a bin has too many events\n",
" '''\n",
" for key in self.bins:\n",
" abin = self.bins[key]\n",
" if len(abin['Events']) > self.minEventCount:\n",
" tobeMerged = []\n",
" for e1 in abin['Events']: \n",
" if e1.merged:\n",
" continue\n",
" e1.merged = True\n",
" tobeMerged.append([e1])\n",
" \n",
" for e2 in abin['Events']:\n",
" if e2.merged:\n",
" continue\n",
" if self.has_diff(e1.EventToken, e2.EventToken):\n",
" tobeMerged[-1].append(e2)\n",
" e2.merged = True\n",
" for Es in tobeMerged:\n",
" merged_event = reduce(self.merge_event, Es)\n",
" merged_event.refresh_id()\n",
" self.merged_events.append(merged_event)\n",
" else:\n",
" for e in abin['Events']:\n",
" self.merged_events.append(e)\n",
"\n",
" def dump(self):\n",
" if not os.path.isdir(self.savePath):\n",
" os.makedirs(self.savePath)\n",
"\n",
" templateL = [0] * self.df_log.shape[0]\n",
" idL = [0] * self.df_log.shape[0]\n",
" df_events = []\n",
"\n",
" for event in self.merged_events:\n",
" for logidx in event.logs:\n",
" templateL[logidx] = event.Eventstr\n",
" idL[logidx] = event.id\n",
" df_events.append([event.id, event.Eventstr, len(event.logs)])\n",
"\n",
" df_event = pd.DataFrame(df_events, columns=['EventId', 'EventTemplate', 'Occurrences'])\n",
"\n",
"\n",
" self.df_log['EventId'] = idL\n",
" self.df_log['EventTemplate'] = templateL\n",
" self.df_log.drop(\"Content_\", axis=1, inplace=True)\n",
" if self.keep_para:\n",
" self.df_log[\"ParameterList\"] = self.df_log.apply(self.get_parameter_list, axis=1) \n",
" self.df_log.to_csv(os.path.join(self.savePath, self.logname + '_structured.csv'), index=False)\n",
"\n",
" occ_dict = dict(self.df_log['EventTemplate'].value_counts())\n",
" df_event = pd.DataFrame()\n",
" df_event['EventTemplate'] = self.df_log['EventTemplate'].unique()\n",
" df_event['EventId'] = df_event['EventTemplate'].map(lambda x: hashlib.md5(x.encode('utf-8')).hexdigest()[0:8])\n",
" df_event['Occurrences'] = df_event['EventTemplate'].map(occ_dict)\n",
" df_event.to_csv(os.path.join(self.savePath, self.logname + '_templates.csv'), index=False, columns=[\"EventId\", \"EventTemplate\", \"Occurrences\"])\n",
"\n",
" def merge_event(self, e1, e2):\n",
" for pos in range(len(e1.EventToken)):\n",
" if e1.EventToken[pos] != e2.EventToken[pos]:\n",
" e1.EventToken[pos] = \"<*>\"\n",
"\n",
" e1.logs.extend(e2.logs)\n",
" e1.Eventstr = ' '.join(e1.EventToken)\n",
"\n",
" return e1\n",
"\n",
" def has_diff(self, tokens1, tokens2):\n",
" # print(tokens1)\n",
" # print(tokens2)\n",
" # print(\"-----\")\n",
"\n",
" diff = 0\n",
" for idx in range(len(tokens1)):\n",
" #print(\"tokens1 : \",tokens1)\n",
" #print(\"tokens2 :\",tokens2)\n",
" if tokens1[idx] != tokens2[idx]:\n",
" diff += 1\n",
" return True if 0 < diff * 1.0 / len(tokens1) <= self.merge_percent else False\n",
"\n",
" def load_data(self):\n",
" def preprocess(log):\n",
" for currentRex in self.rex:\n",
" log = re.sub(currentRex, '<*>', log)\n",
" return log\n",
"\n",
" headers, regex = self.generate_logformat_regex(self.logformat)\n",
" self.df_log = self.log_to_dataframe(os.path.join(self.path, self.logname), regex, headers, self.logformat)\n",
" self.df_log['Content_'] = self.df_log['Content'].map(preprocess)\n",
"\n",
"\n",
" def log_to_dataframe(self, log_file, regex, headers, logformat):\n",
" ''' Function to transform log file to dataframe '''\n",
" log_messages = []\n",
" linecount = 0\n",
" with open(log_file, 'r', errors='ignore') as fin:\n",
" for line in fin.readlines():\n",
" try:\n",
" match = regex.search(line.strip())\n",
" message = [match.group(header) for header in headers]\n",
" log_messages.append(message)\n",
" linecount += 1\n",
" except Exception as e:\n",
" pass\n",
" #print(linecount)\n",
" logdf = pd.DataFrame(log_messages, columns=headers)\n",
" logdf.insert(0, 'LineId', None)\n",
" logdf['LineId'] = [i + 1 for i in range(linecount)]\n",
" return logdf\n",
"\n",
" def generate_logformat_regex(self, logformat):\n",
" ''' \n",
" Function to generate regular expression to split log messages\n",
" '''\n",
" headers = []\n",
" splitters = re.split(r'(<[^<>]+>)', logformat)\n",
" regex = ''\n",
" for k in range(len(splitters)):\n",
" if k % 2 == 0:\n",
" splitter = re.sub(' +', '\\s+', splitters[k])\n",
" regex += splitter\n",
" else:\n",
" header = splitters[k].strip('<').strip('>')\n",
" regex += '(?P<%s>.*?)' % header\n",
" headers.append(header)\n",
" regex = re.compile('^' + regex + '$')\n",
" return headers, regex\n",
"\n",
" def get_parameter_list(self, row):\n",
" template_regex = re.sub(r\"<.{1,5}>\", \"<*>\", row[\"EventTemplate\"])\n",
" if \"<*>\" not in template_regex: return []\n",
" template_regex = re.sub(r'([^A-Za-z0-9])', r'\\\\\\1', template_regex)\n",
" template_regex = re.sub(r'\\\\ +', r'\\s+', template_regex)\n",
" template_regex = \"^\" + template_regex.replace(\"\\<\\*\\>\", \"(.*?)\") + \"$\"\n",
" parameter_list = re.findall(template_regex, row[\"Content\"])\n",
" parameter_list = parameter_list[0] if parameter_list else ()\n",
" parameter_list = list(parameter_list) if isinstance(parameter_list, tuple) else [parameter_list]\n",
" return parameter_list"
]
},
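{
"cell_type": "markdown",
"metadata": {},
"source": [
"A minimal sketch (hypothetical log lines, dummy paths) of how `has_diff` and `merge_event` collapse two similar log lines into one template with `<*>` wildcards. It only exercises the in-memory merging logic; no files are read."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Hypothetical demo: indir/outdir are placeholders, nothing is read from disk.\n",
"demo_parser = LogParser(indir='.', outdir='.', log_format='<Content>', merge_percent=0.5)\n",
"e1 = Event(0, 'generating core.123')\n",
"e2 = Event(1, 'generating core.456')\n",
"if demo_parser.has_diff(e1.EventToken, e2.EventToken):\n",
"    merged = demo_parser.merge_event(e1, e2)\n",
"    merged.refresh_id()\n",
"    print(merged.Eventstr)  # expected: generating <*>"
]
},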
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Reading the small BGL file as a dataframe and exploring"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"df = pd.read_csv('AEL_Result/bgl_2k.csv')"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>LineId</th>\n",
" <th>Label</th>\n",
" <th>Timestamp</th>\n",
" <th>Date</th>\n",
" <th>Node</th>\n",
" <th>Time</th>\n",
" <th>NodeRepeat</th>\n",
" <th>Type</th>\n",
" <th>Component</th>\n",
" <th>Level</th>\n",
" <th>Content</th>\n",
" <th>EventId</th>\n",
" <th>EventTemplate</th>\n",
" <th>ParameterList</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>1</td>\n",
" <td>-</td>\n",
" <td>1117838570</td>\n",
" <td>2005.06.03</td>\n",
" <td>R02-M1-N0-C:J12-U11</td>\n",
" <td>2005-06-03-15.42.50.675872</td>\n",
" <td>R02-M1-N0-C:J12-U11</td>\n",
" <td>RAS</td>\n",
" <td>KERNEL</td>\n",
" <td>INFO</td>\n",
" <td>instruction cache parity error corrected</td>\n",
" <td>3aa50e45</td>\n",
" <td>instruction cache parity error corrected</td>\n",
" <td>[]</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>2</td>\n",
" <td>-</td>\n",
" <td>1117838573</td>\n",
" <td>2005.06.03</td>\n",
" <td>R02-M1-N0-C:J12-U11</td>\n",
" <td>2005-06-03-15.42.53.276129</td>\n",
" <td>R02-M1-N0-C:J12-U11</td>\n",
" <td>RAS</td>\n",
" <td>KERNEL</td>\n",
" <td>INFO</td>\n",
" <td>instruction cache parity error corrected</td>\n",
" <td>3aa50e45</td>\n",
" <td>instruction cache parity error corrected</td>\n",
" <td>[]</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>3</td>\n",
" <td>-</td>\n",
" <td>1117838976</td>\n",
" <td>2005.06.03</td>\n",
" <td>R02-M1-N0-C:J12-U11</td>\n",
" <td>2005-06-03-15.49.36.156884</td>\n",
" <td>R02-M1-N0-C:J12-U11</td>\n",
" <td>RAS</td>\n",
" <td>KERNEL</td>\n",
" <td>INFO</td>\n",
" <td>instruction cache parity error corrected</td>\n",
" <td>3aa50e45</td>\n",
" <td>instruction cache parity error corrected</td>\n",
" <td>[]</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>4</td>\n",
" <td>-</td>\n",
" <td>1117838978</td>\n",
" <td>2005.06.03</td>\n",
" <td>R02-M1-N0-C:J12-U11</td>\n",
" <td>2005-06-03-15.49.38.026704</td>\n",
" <td>R02-M1-N0-C:J12-U11</td>\n",
" <td>RAS</td>\n",
" <td>KERNEL</td>\n",
" <td>INFO</td>\n",
" <td>instruction cache parity error corrected</td>\n",
" <td>3aa50e45</td>\n",
" <td>instruction cache parity error corrected</td>\n",
" <td>[]</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>5</td>\n",
" <td>-</td>\n",
" <td>1117842440</td>\n",
" <td>2005.06.03</td>\n",
" <td>R23-M0-NE-C:J05-U01</td>\n",
" <td>2005-06-03-16.47.20.730545</td>\n",
" <td>R23-M0-NE-C:J05-U01</td>\n",
" <td>RAS</td>\n",
" <td>K</td>\n",
" <td>ERNEL</td>\n",
" <td>INFO 63543 double-hummer alignment exceptions</td>\n",
" <td>f1604253</td>\n",
" <td>INFO 63543 double-hummer alignment exceptions</td>\n",
" <td>[]</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" LineId Label Timestamp Date Node \\\n",
"0 1 - 1117838570 2005.06.03 R02-M1-N0-C:J12-U11 \n",
"1 2 - 1117838573 2005.06.03 R02-M1-N0-C:J12-U11 \n",
"2 3 - 1117838976 2005.06.03 R02-M1-N0-C:J12-U11 \n",
"3 4 - 1117838978 2005.06.03 R02-M1-N0-C:J12-U11 \n",
"4 5 - 1117842440 2005.06.03 R23-M0-NE-C:J05-U01 \n",
"\n",
" Time NodeRepeat Type Component Level \\\n",
"0 2005-06-03-15.42.50.675872 R02-M1-N0-C:J12-U11 RAS KERNEL INFO \n",
"1 2005-06-03-15.42.53.276129 R02-M1-N0-C:J12-U11 RAS KERNEL INFO \n",
"2 2005-06-03-15.49.36.156884 R02-M1-N0-C:J12-U11 RAS KERNEL INFO \n",
"3 2005-06-03-15.49.38.026704 R02-M1-N0-C:J12-U11 RAS KERNEL INFO \n",
"4 2005-06-03-16.47.20.730545 R23-M0-NE-C:J05-U01 RAS K ERNEL \n",
"\n",
" Content EventId \\\n",
"0 instruction cache parity error corrected 3aa50e45 \n",
"1 instruction cache parity error corrected 3aa50e45 \n",
"2 instruction cache parity error corrected 3aa50e45 \n",
"3 instruction cache parity error corrected 3aa50e45 \n",
"4 INFO 63543 double-hummer alignment exceptions f1604253 \n",
"\n",
" EventTemplate ParameterList \n",
"0 instruction cache parity error corrected [] \n",
"1 instruction cache parity error corrected [] \n",
"2 instruction cache parity error corrected [] \n",
"3 instruction cache parity error corrected [] \n",
"4 INFO 63543 double-hummer alignment exceptions [] "
]
},
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"df.head()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Get All event Templates / Modules"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"0 instruction cache parity error corrected\n",
"1 instruction cache parity error corrected\n",
"2 instruction cache parity error corrected\n",
"3 instruction cache parity error corrected\n",
"4 INFO 63543 double-hummer alignment exceptions\n",
" ... \n",
"1995 instruction cache parity error corrected\n",
"1996 instruction cache parity error corrected\n",
"1997 instruction cache parity error corrected\n",
"1998 instruction cache parity error corrected\n",
"1999 ciod: generated <*> core files for program <*>\n",
"Name: EventTemplate, Length: 2000, dtype: object"
]
},
"execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"df[\"EventTemplate\"]"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Print all Unique Templates"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"['NFS Mount failed on <*> slept 15 seconds, retrying (1)',\n",
" 'ciod: Error loading <*> invalid or missing program image, No such file or directory',\n",
" 'fpr29=0xffffffff ffffffff ffffffff ffffffff',\n",
" 'ciod: Error reading message prefix on CioStream socket to <*> Connection timed out',\n",
" '<*> double-hummer alignment exceptions',\n",
" 'instruction address: <*>',\n",
" '<*> torus sender z- retransmission error(s) (dcr 0x02f9) detected and corrected over <*> seconds',\n",
" 'ciod: Z coordinate 32 exceeds physical dimension 32 at line 33 of node map file /p/gb2/pakin1/contention-32768cpes-torus/xyzt-1x1x32768x1.map',\n",
" 'generating <*>',\n",
" 'instruction cache parity error corrected',\n",
" 'ciod: Message code 0 is not 51 or 4294967295',\n",
" 'ciod: Error loading <*> invalid or missing program image, Exec format error',\n",
" 'ciod: Error reading message prefix after LOAD_MESSAGE on CioStream socket to <*> Link has been severed',\n",
" 'Kernel detected <*> integer alignment exceptions <*> iar 0x00544ea8, dear <*> <*> iar 0x00544eb8, dear <*> <*> iar 0x00544ea8, dear <*> <*> iar 0x00544eb8, dear <*> <*> iar 0x00544ee0, dear <*> <*> iar 0x00544ef0, dear <*> <*> iar 0x00544ee0, dear <*> <*> iar 0x00544ef0, dear <*>',\n",
" 'ciod: Received signal 15, code=0, errno=0, <*>',\n",
" 'Node card VPD check: <*> node in processor card slot <*> do not match. VPD ecid <*> found <*>',\n",
" 'critical input interrupt (unit=0x0b <*> warning for <*> <*> wire, suppressing further interrupts of same type',\n",
" 'PrepareForService shutting down Node card(mLctn(R10-M1-N2), mCardSernum(203231503833343000000000594c31304b34323934303257), mLp(FF:F2:9F:16:E0:DA:00:0D:60:E9:1F:25), mIp(10.1.1.164), mType(4)) as part of Service Action 648',\n",
" 'ciod: Missing or invalid fields on line 1 of node map file <*>',\n",
" 'rts: bad message header: expecting type 57 instead of type 3 (softheader=00131db8 81aa0003 00000002 00000000) PSR0=00001f01 PSR1=00000000 PRXF=00000002 PIXF=00000007',\n",
" 'ciod: Error reading message prefix on CioStream socket to <*> <*> <*> <*> <*>',\n",
" 'icache prefetch <*>',\n",
" 'rts tree/torus link training failed: wanted: B C X+ X- Y+ Y- Z+ Z- got: B C X- Y- Z+ Z-',\n",
" '0 microseconds spent in the rbs signal handler during 0 calls. 0 microseconds was the maximum time for a single instance of a correctable ddr.',\n",
" 'Error receiving packet on tree network, expecting type 57 instead of type 3 <*> <*> 00000002 00000000) <*> PSR1=00000000 PRXF=00000002 PIXF=00000007',\n",
" 'Kernel detected 35591540 integer alignment exceptions (35591533) iar 0x0023f108, dear 0x1feaa260 (35591534) iar 0x00265564, dear 0x1feaa1c0 (35591535) iar 0x00265574, dear 0x1feaa1e0 (35591536) iar 0x00265578, dear 0x1feaa200 (35591537) iar 0x00265588, dear 0x1feaa220 (35591538) iar 0x0026558c, dear 0x1feaa240 (35591539) iar 0x00265594, dear 0x1feaa260 (35591540) iar 0x00265598, dear 0x1feaa280',\n",
" 'data storage interrupt',\n",
" 'data TLB error interrupt',\n",
" '2 L3 EDRAM error(s) (dcr 0x0157) detected and corrected over 282 seconds',\n",
" 'Node card status: no ALERTs are active. Clock Mode is Low. Clock Select is Midplane. Phy JTAG Reset is asserted. ASIC JTAG Reset is asserted. Temperature Mask is not active. No temperature error. Temperature Limit Error Latch is clear. PGOOD IS NOT ASSERTED. PGOOD ERROR LATCH IS ACTIVE. MPGOOD IS NOT OK. MPGOOD ERROR LATCH IS ACTIVE. The 2.5 volt rail is OK. The 1.5 volt rail is OK.',\n",
" 'program interrupt: fp cr field .............0',\n",
" 'data store interrupt caused by <*>',\n",
" 'MACHINE CHECK DCR read timeout (mc=e08x iar 0x00000000 lr 0xc00045a4)',\n",
" 'NodeCard is not fully functional',\n",
" 'lr:00004ed0 cr:28244842 xer:20000002 ctr:00086000',\n",
" '<*> total interrupts. 0 critical input interrupts. 0 microseconds total spent on critical input interrupts, 0 microseconds max time in a critical input interrupt.',\n",
" 'disable store gathering..................0',\n",
" '<*> <*> register: 0x00002000',\n",
" 'ciod: cpu 0 at treeaddr 438 sent unrecognized message 0xffffffff',\n",
" 'store operation.............................1',\n",
" 'ciod: In packet from node 91.0 (R62-M1-Nf-C:J03-U11), message code 2 is not 3 or 4294967295 (softheader=003b005b 00030000 00000001 00000000)',\n",
" 'PrepareForService shutting down NodeCard(mLctn(R25-M1-N2), mCardSernum(203231503833343000000000594c31304b34333431303158), mLp(FF:F2:9F:16:CF:0F:00:0D:60:E9:30:F0), mIp(10.2.2.80), mType(4)) as part of Service Action 310',\n",
" 'floating point instr. enabled.....1',\n",
" 'machine check: i-fetch......................0',\n",
" 'debug wait enable.................0',\n",
" 'iar <*> dear <*>',\n",
" 'Ido chip status changed: <*> <*> <*> t=4 status=M <*> <*> <*> <*> PDT 2005',\n",
" 'rts internal error',\n",
" 'rts panic! - stopping execution',\n",
" 'ciod: failed to read message prefix on control stream (CioStream socket to <*>',\n",
" 'Node card status: ALERT 0, ALERT 1, ALERT 2, ALERT 3 is (are) active. Clock Mode is Low. Clock Select is Midplane. Phy JTAG Reset is asserted. ASIC JTAG Reset is not asserted. Temperature Mask is not active. No temperature error. Temperature Limit Error Latch is clear. PGOOD is asserted. PGOOD error latch is clear. MPGOOD is OK. MPGOOD error latch is clear. The 2.5 volt rail is OK. The 1.5 volt rail is OK.',\n",
" '<*> ddr error(s) detected and corrected on rank 0, symbol <*> over <*> seconds',\n",
" 'dbcr0=0x00000000 dbsr=0x00000000 ccr0=0x40002000',\n",
" 'rts: kernel terminated for reason <*>',\n",
" 'byte ordering exception.....................0',\n",
" 'special purpose registers:',\n",
" 'Node card is not fully functional',\n",
" 'machine check enable..............0',\n",
" 'data cache search parity error detected. attempting to correct',\n",
" 'Can not get assembly information for node card',\n",
" 'auxiliary processor.........................0',\n",
" 'minus normalized number..................0',\n",
" 'critical input interrupt (unit=0x0b <*> warning for torus <*> wire',\n",
" 'wait state enable.................0',\n",
" 'ciod: LOGIN <*> failed: No such file or directory',\n",
" 'Machine State Register: 0x0002f900',\n",
" 'total of <*> ddr error(s) detected and corrected over <*> seconds',\n",
" 'program interrupt: fp cr update.............0',\n",
" 'guaranteed <*> cache block <*>',\n",
" 'Lustre mount FAILED : <*> : block_id : location',\n",
" 'ciod: pollControlDescriptors: Detected the debugger died.',\n",
" 'CE sym <*> at <*> mask <*>',\n",
" '3 L3 EDRAM error(s) (dcr 0x0157) detected and corrected',\n",
" 'ciod: generated <*> core files for program <*>',\n",
" 'size of scratchpad portion of L3.........0 (0M)',\n",
" 'program interrupt',\n",
" 'Lustre mount FAILED : <*> : point /p/gb1',\n",
" 'ciod: LOGIN chdir(/p/gb1/stella/RAPTOR/2183) failed: Input/output error',\n",
" 'ciod: Error creating node map from file /home/pakin1/sweep3d-2.2b/results/random1-8x32x32x2.map: Permission denied',\n",
" '<*> tree receiver 1 in re-synch state event(s) (dcr 0x0185) detected over <*> seconds',\n",
" 'Ido chip status changed: <*> <*> v=13 t=1 status=M <*> <*> <*> <*> PDT 2005',\n",
" 'fraction rounded.........................0',\n",
" 'rts: kernel terminated for reason 1001rts: bad message header: invalid cpu, type=42315, cpu=105, index=1207960804, total=2691015',\n",
" 'total of <*> ddr error(s) detected and corrected',\n",
" '<*> tree receiver 2 in re-synch state event(s) (dcr 0x019a) detected',\n",
" 'exception syndrome register: <*>',\n",
" 'debug interrupt enable............0',\n",
" 'New ido chip inserted into the database: <*> <*> <*> <*>',\n",
" 'critical input interrupt enable...0',\n",
" 'data address space................0',\n",
" 'ciod: Error loading /g/g24/buber/Yunsic/BlueGene/partad.develf/taddriver.32.exe: program image too big, 361544528 > 266076160',\n",
" 'data address: 0x00000002',\n",
" '<*> torus receiver <*> input pipe error(s) (dcr <*> detected and corrected',\n",
" 'shutdown complete',\n",
" '<*> ddr errors(s) detected and corrected on rank 0, symbol <*> bit <*>',\n",
" 'idoproxydb hit ASSERT condition: ASSERT expression=0 Source file=idotransportmgr.cpp Source line=1043 Function=int IdoTransportMgr::SendPacket(IdoUdpMgr*, BglCtlPavTrace*)',\n",
" '<*> floating point alignment exceptions',\n",
" 'floating pt ex mode <*> enable......0',\n",
" 'r24=0x0ffea4c8 r25=0x00000003 r26=0x0000000f r27=0xffffd000',\n",
" 'program interrupt: <*> <*>',\n",
" 'ciod: Error loading <*> invalid or missing program image, Permission denied',\n",
" 'INFO 63543 double-hummer alignment exceptions',\n",
" 'instruction address space.........0',\n",
" 'force load/store alignment...............0',\n",
" 'problem state (0=sup,1=usr).......0',\n",
" 'ciod: Error creating node map from file <*> <*> <*> <*>',\n",
" 'ciod: duplicate canonical-rank 31 to logical-rank 0 mapping at line 3 of node map file /p/gb2/pakin1/sweep3d-5x5x400-10mk-3mmi-1024pes-sweep/sweep.map',\n",
" 'suppressing further interrupts of same type']"
]
},
"execution_count": 10,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"list(set(df[\"EventTemplate\"]))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Number of Unique Modules"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"108"
]
},
"execution_count": 11,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"len(list(set(df[\"EventTemplate\"])))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"---------------------------------"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Unique System IDs"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"1778"
]
},
"execution_count": 12,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"len(list(set(df[\"Node\"])))"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"0 R02-M1-N0-C:J12-U11\n",
"1 R02-M1-N0-C:J12-U11\n",
"2 R02-M1-N0-C:J12-U11\n",
"3 R02-M1-N0-C:J12-U11\n",
"4 R23-M0-NE-C:J05-U01\n",
"Name: Node, dtype: object"
]
},
"execution_count": 13,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"df[\"Node\"].head()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Create Feature Matrix"
]
},
{
"cell_type": "code",
"execution_count": 21,
"metadata": {},
"outputs": [],
"source": [
"def create_fm(df, start, end):\n",
" \n",
" count_dict = defaultdict(lambda: defaultdict(int)) \n",
" v= df.iloc[start:end].groupby([\"EventTemplate\", \"Node\"]).size()\n",
" p = v.keys()\n",
" event_temp = p[0][0]\n",
" node = p[0][1]\n",
" count_dict[event_temp][node] = v[0]\n",
" return count_dict"
]
},
{
"cell_type": "code",
"execution_count": 24,
"metadata": {},
"outputs": [],
"source": [
"c = create_fm(df, start= 1, end =4) # create feature matrix from line 1 to 4."
]
},
{
"cell_type": "code",
"execution_count": 25,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"defaultdict(<function __main__.create_fm.<locals>.<lambda>()>,\n",
" {'instruction cache parity error corrected': defaultdict(int,\n",
" {'R02-M1-N0-C:J12-U11': 3})})"
]
},
"execution_count": 25,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"c"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### The structure of creation of feature matrix is changed later"
]
},
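{
"cell_type": "markdown",
"metadata": {},
"source": [
"For comparison, a hedged sketch of a fuller feature matrix: count every (EventTemplate, Node) pair in the line range rather than only the first group as `create_fm` above does. The name `create_fm_full` is introduced here purely for illustration."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Illustrative sketch only: count all (EventTemplate, Node) pairs in the range.\n",
"def create_fm_full(df, start, end):\n",
"    counts = df.iloc[start:end].groupby([\"EventTemplate\", \"Node\"]).size()\n",
"    return counts.unstack(fill_value=0)  # rows: templates, columns: nodes\n",
"\n",
"create_fm_full(df, start=0, end=2000).head()"
]
},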
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Working on large BGL file"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Split files"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"**split -l 2000 BGL.log**\n",
"\n",
"splits the entire log file into small files each of number of lines = 200\n",
"\n",
" run this command where BGL.log is located in command line"
]
},
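{
"cell_type": "markdown",
"metadata": {},
"source": [
"If running the shell `split` command is inconvenient, a hypothetical Python equivalent writing 2,000-line chunks into `BGL_folder_data/` (the directory used by the cells below) could look like the sketch that follows. Note that `split` names its output files `xaa`, `xab`, ... whereas this sketch uses a different naming scheme; the cells below only iterate over whatever files the directory contains."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Hypothetical Python equivalent of `split -l 2000 BGL.log`.\n",
"# File names differ from split's xaa, xab, ... convention.\n",
"import os\n",
"\n",
"os.makedirs('BGL_folder_data', exist_ok=True)\n",
"\n",
"def flush(chunk, idx):\n",
"    with open(os.path.join('BGL_folder_data', 'chunk_%04d.log' % idx), 'w') as fout:\n",
"        fout.writelines(chunk)\n",
"\n",
"chunk, idx = [], 0\n",
"with open('BGL.log', 'r', errors='ignore') as fin:\n",
"    for line in fin:\n",
"        chunk.append(line)\n",
"        if len(chunk) == 2000:\n",
"            flush(chunk, idx)\n",
"            chunk, idx = [], idx + 1\n",
"if chunk:\n",
"    flush(chunk, idx)"
]
},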
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"logfiles = []\n",
"for filename in os.listdir('BGL_folder_data'):\n",
" logfiles.append(filename)\n",
" "
]
},
{
"cell_type": "code",
"execution_count": 34,
"metadata": {},
"outputs": [],
"source": [
"import pandas as pd\n",
"df_example = pd.read_csv(\"AEL_result/xac_structured.csv\")"
]
},
{
"cell_type": "code",
"execution_count": 35,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>LineId</th>\n",
" <th>Label</th>\n",
" <th>Timestamp</th>\n",
" <th>Date</th>\n",
" <th>Node</th>\n",
" <th>Time</th>\n",
" <th>NodeRepeat</th>\n",
" <th>Type</th>\n",
" <th>Component</th>\n",
" <th>Level</th>\n",
" <th>Content</th>\n",
" <th>EventId</th>\n",
" <th>EventTemplate</th>\n",
" <th>ParameterList</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>1</td>\n",
" <td>-</td>\n",
" <td>1117839457</td>\n",
" <td>2005.06.03</td>\n",
" <td>R02-M1-N0-C:J12-U11</td>\n",
" <td>2005-06-03-15.57.37.709638</td>\n",
" <td>R02-M1-N0-C:J12-U11</td>\n",
" <td>RAS</td>\n",
" <td>KERNEL</td>\n",
" <td>INFO</td>\n",
" <td>instruction cache parity error corrected</td>\n",
" <td>3aa50e45</td>\n",
" <td>instruction cache parity error corrected</td>\n",
" <td>[]</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>2</td>\n",
" <td>-</td>\n",
" <td>1117839458</td>\n",
" <td>2005.06.03</td>\n",
" <td>R02-M1-N0-C:J12-U11</td>\n",
" <td>2005-06-03-15.57.38.319517</td>\n",
" <td>R02-M1-N0-C:J12-U11</td>\n",
" <td>RAS</td>\n",
" <td>KERNEL</td>\n",
" <td>INFO</td>\n",
" <td>instruction cache parity error corrected</td>\n",
" <td>3aa50e45</td>\n",
" <td>instruction cache parity error corrected</td>\n",
" <td>[]</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" LineId Label Timestamp Date Node \\\n",
"0 1 - 1117839457 2005.06.03 R02-M1-N0-C:J12-U11 \n",
"1 2 - 1117839458 2005.06.03 R02-M1-N0-C:J12-U11 \n",
"\n",
" Time NodeRepeat Type Component Level \\\n",
"0 2005-06-03-15.57.37.709638 R02-M1-N0-C:J12-U11 RAS KERNEL INFO \n",
"1 2005-06-03-15.57.38.319517 R02-M1-N0-C:J12-U11 RAS KERNEL INFO \n",
"\n",
" Content EventId \\\n",
"0 instruction cache parity error corrected 3aa50e45 \n",
"1 instruction cache parity error corrected 3aa50e45 \n",
"\n",
" EventTemplate ParameterList \n",
"0 instruction cache parity error corrected [] \n",
"1 instruction cache parity error corrected [] "
]
},
"execution_count": 35,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"df_example.head(2)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Call the AEL parser with the given template mining patterns."
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"import sys\n",
"sys.path.append('../')\n",
"import os\n",
"import pandas as pd\n",
"\n",
"input_dir = 'BGL_folder_data/' # The input directory of log file\n",
"output_dir = 'AEL_result/' # The output directory of parsing results\n",
"\n",
"log_format= '<Label> <Timestamp> <Date> <Node> <Time> <NodeRepeat> <Type> <Component> <Level> <Content>'\n",
"minEventCount = 2\n",
"merge_percent = 0.5\n",
"rege = [r'core\\.\\d+']\n",
"def process_table(ite):\n",
" \n",
" for i in range(ite, ite + 50):\n",
" parser = LogParser(input_dir, output_dir, log_format, rex=rege, \n",
" minEventCount=minEventCount, merge_percent=merge_percent)\n",
"\n",
" parser.parse(files[i])"
]
},
{
"cell_type": "code",
"execution_count": 47,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
"\n",
" 0%| | 0/2374 [00:00<?, ?it/s]\u001b[A\u001b[A\n",
"\n",
" 1%|▋ | 20/2374 [00:00<00:25, 92.17it/s]\u001b[A\u001b[A\n",
"\n",
" 2%|█▎ | 40/2374 [00:06<03:48, 10.23it/s]\u001b[A\u001b[A\n",
"\n",
" 3%|██ | 60/2374 [00:12<06:02, 6.38it/s]\u001b[A\u001b[A\n",
"\n",
" 3%|██▋ | 80/2374 [00:17<07:07, 5.36it/s]\u001b[A\u001b[A\n",
"\n",
" 4%|███▎ | 100/2374 [00:23<08:16, 4.58it/s]\u001b[A\u001b[A\n",
"\n",
" 5%|███▉ | 120/2374 [00:25<07:16, 5.17it/s]\u001b[A\u001b[A\n",
"\n",
" 6%|████▋ | 140/2374 [00:28<06:40, 5.58it/s]\u001b[A\u001b[A\n",
"\n",
" 7%|█████▎ | 160/2374 [00:32<06:56, 5.32it/s]\u001b[A\u001b[A\n",
"\n",
" 8%|█████▉ | 180/2374 [00:35<06:03, 6.04it/s]\u001b[A\u001b[A\n",
"\n",
" 8%|██████▋ | 200/2374 [00:37<05:37, 6.45it/s]\u001b[A\u001b[A\n",
"\n",
" 9%|███████▎ | 220/2374 [00:41<05:48, 6.18it/s]\u001b[A\u001b[A\n",
"\n",
" 10%|███████▉ | 240/2374 [00:45<06:09, 5.77it/s]\u001b[A\u001b[A\n",
"\n",
" 11%|████████▋ | 260/2374 [00:47<05:21, 6.57it/s]\u001b[A\u001b[A\n",
"\n",
" 12%|█████████▎ | 280/2374 [00:51<05:43, 6.10it/s]\u001b[A\u001b[A\n",
"\n",
" 13%|█████████▉ | 300/2374 [00:52<04:38, 7.46it/s]\u001b[A\u001b[A\n",
"\n",
" 13%|██████████▋ | 320/2374 [00:56<05:15, 6.51it/s]\u001b[A\u001b[A\n",
"\n",
" 14%|███████████▎ | 340/2374 [00:58<04:45, 7.12it/s]\u001b[A\u001b[A\n",
"\n",
" 15%|███████████▉ | 360/2374 [01:02<05:10, 6.49it/s]\u001b[A\u001b[A\n",
"\n",
" 16%|████████████▋ | 380/2374 [01:07<06:22, 5.21it/s]\u001b[A\u001b[A\n",
"\n",
" 17%|█████████████▎ | 400/2374 [01:12<06:38, 4.96it/s]\u001b[A\u001b[A\n",
"\n",
" 18%|█████████████▉ | 420/2374 [01:16<06:40, 4.88it/s]\u001b[A\u001b[A\n",
"\n",
" 19%|██████████████▋ | 440/2374 [01:20<06:25, 5.01it/s]\u001b[A\u001b[A\n",
"\n",
" 19%|███████████████▎ | 460/2374 [01:27<07:58, 4.00it/s]\u001b[A\u001b[A\n",
"\n",
" 20%|███████████████▉ | 480/2374 [01:34<08:27, 3.73it/s]\u001b[A\u001b[A\n",
"\n",
" 21%|████████████████▋ | 500/2374 [01:37<07:27, 4.18it/s]\u001b[A\u001b[A\n",
"\n",
" 22%|█████████████████▎ | 520/2374 [01:39<06:03, 5.10it/s]\u001b[A\u001b[A\n",
"\n",
" 23%|█████████████████▉ | 540/2374 [01:43<06:00, 5.08it/s]\u001b[A\u001b[A\n",
"\n",
" 24%|██████████████████▋ | 560/2374 [01:45<05:18, 5.70it/s]\u001b[A\u001b[A\n",
"\n",
" 24%|███████████████████▎ | 580/2374 [01:48<04:50, 6.18it/s]\u001b[A\u001b[A\n",
"\n",
" 25%|███████████████████▉ | 600/2374 [01:52<05:08, 5.74it/s]\u001b[A\u001b[A\n",
"\n",
" 26%|████████████████████▋ | 620/2374 [01:56<05:07, 5.70it/s]\u001b[A\u001b[A\n",
"\n",
" 27%|█████████████████████▎ | 640/2374 [01:59<05:00, 5.77it/s]\u001b[A\u001b[A\n",
"\n",
" 28%|█████████████████████▉ | 660/2374 [02:03<05:02, 5.67it/s]\u001b[A\u001b[A\n",
"\n",
" 29%|██████████████████████▋ | 680/2374 [02:06<04:47, 5.90it/s]\u001b[A\u001b[A\n",
"\n",
" 29%|███████████████████████▎ | 700/2374 [02:10<05:06, 5.47it/s]\u001b[A\u001b[A\n",
"\n",
" 30%|███████████████████████▉ | 720/2374 [02:14<05:04, 5.43it/s]\u001b[A\u001b[A\n",
"\n",
" 31%|████████████████████████▋ | 740/2374 [02:17<05:02, 5.41it/s]\u001b[A\u001b[A\n",
"\n",
" 32%|█████████████████████████▎ | 760/2374 [02:21<04:52, 5.51it/s]\u001b[A\u001b[A\n",
"\n",
" 33%|█████████████████████████▉ | 780/2374 [02:24<04:37, 5.74it/s]\u001b[A\u001b[A\n",
"\n",
" 33%|█████████████████████████▉ | 781/2374 [02:24<04:03, 6.54it/s]\u001b[A\u001b[A\n",
"\n",
" 34%|██████████████████████████▌ | 800/2374 [02:28<04:21, 6.01it/s]\u001b[A\u001b[A\n",
"\n",
" 35%|███████████████████████████▎ | 820/2374 [02:33<04:55, 5.26it/s]\u001b[A\u001b[A\n",
"\n",
" 35%|███████████████████████████▉ | 840/2374 [02:37<04:55, 5.20it/s]\u001b[A\u001b[A\n",
"\n",
" 36%|████████████████████████████▌ | 860/2374 [02:40<04:43, 5.34it/s]\u001b[A\u001b[A\n",
"\n",
" 37%|█████████████████████████████▎ | 880/2374 [02:45<04:52, 5.11it/s]\u001b[A\u001b[A\n",
"\n",
" 38%|█████████████████████████████▉ | 900/2374 [02:48<04:37, 5.31it/s]\u001b[A\u001b[A\n",
"\n",
" 39%|██████████████████████████████▌ | 920/2374 [02:51<04:27, 5.44it/s]\u001b[A\u001b[A\n",
"\n",
" 40%|███████████████████████████████▎ | 940/2374 [02:55<04:22, 5.47it/s]\u001b[A\u001b[A\n",
"\n",
" 40%|███████████████████████████████▉ | 960/2374 [02:58<03:57, 5.96it/s]\u001b[A\u001b[A\n",
"\n",
" 41%|████████████████████████████████▌ | 980/2374 [03:02<04:20, 5.36it/s]\u001b[A\u001b[A\n",
"\n",
" 42%|████████████████████████████████▊ | 1000/2374 [03:05<03:57, 5.78it/s]\u001b[A\u001b[A\n",
"\n",
" 43%|█████████████████████████████████▌ | 1020/2374 [03:10<04:20, 5.20it/s]\u001b[A\u001b[A\n",
"\n",
" 44%|██████████████████████████████████▏ | 1040/2374 [03:13<04:03, 5.48it/s]\u001b[A\u001b[A\n",
"\n",
" 45%|██████████████████████████████████▊ | 1060/2374 [03:18<04:28, 4.89it/s]\u001b[A\u001b[A\n",
"\n",
" 45%|███████████████████████████████████▍ | 1080/2374 [03:22<04:24, 4.90it/s]\u001b[A\u001b[A\n",
"\n",
" 46%|████████████████████████████████████▏ | 1100/2374 [03:27<04:32, 4.67it/s]\u001b[A\u001b[A\n",
"\n",
" 47%|████████████████████████████████████▊ | 1120/2374 [03:31<04:14, 4.93it/s]\u001b[A\u001b[A\n",
"\n",
" 48%|█████████████████████████████████████▍ | 1140/2374 [03:34<04:02, 5.08it/s]\u001b[A\u001b[A\n",
"\n",
" 49%|██████████████████████████████████████ | 1160/2374 [03:36<03:26, 5.88it/s]\u001b[A\u001b[A\n",
"\n",
" 50%|██████████████████████████████████████▊ | 1180/2374 [03:40<03:23, 5.87it/s]\u001b[A\u001b[A\n",
"\n",
" 51%|███████████████████████████████████████▍ | 1200/2374 [03:45<03:45, 5.19it/s]\u001b[A\u001b[A\n",
"\n",
" 51%|████████████████████████████████████████ | 1220/2374 [03:48<03:33, 5.39it/s]\u001b[A\u001b[A\n",
"\n",
" 52%|████████████████████████████████████████▋ | 1240/2374 [03:51<03:15, 5.81it/s]\u001b[A\u001b[A\n",
"\n",
" 53%|█████████████████████████████████████████▍ | 1260/2374 [03:55<03:18, 5.60it/s]\u001b[A\u001b[A\n",
"\n",
" 54%|██████████████████████████████████████████ | 1280/2374 [03:59<03:34, 5.11it/s]\u001b[A\u001b[A\n",
"\n",
" 55%|██████████████████████████████████████████▋ | 1300/2374 [04:03<03:28, 5.16it/s]\u001b[A\u001b[A\n",
"\n",
" 56%|███████████████████████████████████████████▎ | 1320/2374 [04:06<03:09, 5.56it/s]\u001b[A\u001b[A\n",
"\n",
" 56%|████████████████████████████████████████████ | 1340/2374 [04:09<02:57, 5.82it/s]\u001b[A\u001b[A\n",
"\n",
" 57%|████████████████████████████████████████████▋ | 1360/2374 [04:11<02:29, 6.77it/s]\u001b[A\u001b[A\n",
"\n",
" 58%|█████████████████████████████████████████████▎ | 1380/2374 [04:16<02:49, 5.86it/s]\u001b[A\u001b[A\n",
"\n",
" 59%|█████████████████████████████████████████████▉ | 1400/2374 [04:18<02:31, 6.43it/s]\u001b[A\u001b[A\n",
"\n",
" 60%|██████████████████████████████████████████████▋ | 1420/2374 [04:24<03:02, 5.22it/s]\u001b[A\u001b[A\n",
"\n",
" 61%|███████████████████████████████████████████████▎ | 1440/2374 [04:27<02:55, 5.31it/s]\u001b[A\u001b[A\n",
"\n",
" 61%|███████████████████████████████████████████████▉ | 1460/2374 [04:31<02:49, 5.41it/s]\u001b[A\u001b[A\n",
"\n",
" 62%|████████████████████████████████████████████████▋ | 1480/2374 [04:34<02:42, 5.49it/s]\u001b[A\u001b[A\n",
"\n",
" 63%|█████████████████████████████████████████████████▎ | 1500/2374 [04:37<02:34, 5.67it/s]\u001b[A\u001b[A\n",
"\n",
" 64%|█████████████████████████████████████████████████▉ | 1520/2374 [04:42<02:43, 5.21it/s]\u001b[A\u001b[A\n",
"\n",
" 65%|██████████████████████████████████████████████████▌ | 1540/2374 [04:46<02:44, 5.06it/s]\u001b[A\u001b[A\n",
"\n",
" 66%|███████████████████████████████████████████████████▎ | 1560/2374 [04:50<02:33, 5.29it/s]\u001b[A\u001b[A\n",
"\n",
" 67%|███████████████████████████████████████████████████▉ | 1580/2374 [04:53<02:23, 5.55it/s]\u001b[A\u001b[A\n",
"\n",
" 67%|████████████████████████████████████████████████████▌ | 1600/2374 [04:59<02:44, 4.70it/s]\u001b[A\u001b[A\n",
"\n",
" 68%|█████████████████████████████████████████████████████▏ | 1620/2374 [05:02<02:31, 4.97it/s]\u001b[A\u001b[A\n",
"\n",
" 69%|█████████████████████████████████████████████████████▉ | 1640/2374 [05:08<02:50, 4.32it/s]\u001b[A\u001b[A\n",
"\n",
" 70%|██████████████████████████████████████████████████████▌ | 1660/2374 [05:13<02:49, 4.21it/s]\u001b[A\u001b[A\n",
"\n",
" 71%|███████████████████████████████████████████████████████▏ | 1680/2374 [05:17<02:39, 4.36it/s]\u001b[A\u001b[A\n",
"\n",
" 72%|███████████████████████████████████████████████████████▊ | 1700/2374 [05:23<02:47, 4.03it/s]\u001b[A\u001b[A\n",
"\n",
" 72%|████████████████████████████████████████████████████████▌ | 1720/2374 [05:27<02:35, 4.21it/s]\u001b[A\u001b[A\n",
"\n",
" 73%|█████████████████████████████████████████████████████████▏ | 1740/2374 [05:30<02:13, 4.73it/s]\u001b[A\u001b[A\n",
"\n",
" 74%|█████████████████████████████████████████████████████████▊ | 1760/2374 [05:34<02:03, 4.99it/s]\u001b[A\u001b[A\n",
"\n",
" 75%|██████████████████████████████████████████████████████████▍ | 1780/2374 [05:37<01:48, 5.49it/s]\u001b[A\u001b[A\n",
"\n",
" 76%|███████████████████████████████████████████████████████████▏ | 1800/2374 [05:43<02:03, 4.66it/s]\u001b[A\u001b[A\n",
"\n",
" 77%|███████████████████████████████████████████████████████████▊ | 1820/2374 [05:47<01:56, 4.75it/s]\u001b[A\u001b[A\n",
"\n",
" 78%|████████████████████████████████████████████████████████████▍ | 1840/2374 [05:49<01:41, 5.26it/s]\u001b[A\u001b[A\n",
"\n",
" 78%|█████████████████████████████████████████████████████████████ | 1860/2374 [05:54<01:45, 4.87it/s]\u001b[A\u001b[A\n",
"\n",
" 79%|█████████████████████████████████████████████████████████████▊ | 1880/2374 [06:00<01:50, 4.46it/s]\u001b[A\u001b[A\n",
"\n",
" 80%|██████████████████████████████████████████████████████████████▍ | 1900/2374 [06:04<01:48, 4.36it/s]\u001b[A\u001b[A\n",
"\n",
" 81%|███████████████████████████████████████████████████████████████ | 1920/2374 [06:10<01:53, 4.01it/s]\u001b[A\u001b[A\n",
"\n",
" 82%|███████████████████████████████████████████████████████████████▋ | 1940/2374 [06:16<01:49, 3.97it/s]\u001b[A\u001b[A\n",
"\n",
" 83%|████████████████████████████████████████████████████████████████▍ | 1960/2374 [06:20<01:41, 4.09it/s]\u001b[A\u001b[A\n",
"\n",
" 83%|█████████████████████████████████████████████████████████████████ | 1980/2374 [06:24<01:31, 4.32it/s]\u001b[A\u001b[A\n",
"\n",
" 84%|█████████████████████████████████████████████████████████████████▋ | 2000/2374 [06:28<01:22, 4.55it/s]\u001b[A\u001b[A\n",
"\n",
" 85%|██████████████████████████████████████████████████████████████████▎ | 2020/2374 [06:33<01:20, 4.40it/s]\u001b[A\u001b[A\n",
"\n",
" 86%|███████████████████████████████████████████████████████████████████ | 2040/2374 [06:39<01:23, 4.01it/s]\u001b[A\u001b[A\n",
"\n",
" 87%|███████████████████████████████████████████████████████████████████▋ | 2060/2374 [06:44<01:18, 4.01it/s]\u001b[A\u001b[A\n",
"\n",
" 88%|████████████████████████████████████████████████████████████████████▎ | 2080/2374 [06:50<01:18, 3.76it/s]\u001b[A\u001b[A\n",
"\n",
" 88%|████████████████████████████████████████████████████████████████████▉ | 2100/2374 [06:55<01:11, 3.81it/s]\u001b[A\u001b[A\n",
"\n",
" 89%|█████████████████████████████████████████████████████████████████████▋ | 2120/2374 [07:00<01:03, 3.97it/s]\u001b[A\u001b[A\n",
"\n",
" 90%|██████████████████████████████████████████████████████████████████████▎ | 2140/2374 [07:05<01:01, 3.82it/s]\u001b[A\u001b[A\n",
"\n",
" 91%|██████████████████████████████████████████████████████████████████████▉ | 2160/2374 [07:09<00:51, 4.12it/s]\u001b[A\u001b[A\n",
"\n",
" 92%|███████████████████████████████████████████████████████████████████████▋ | 2180/2374 [07:16<00:53, 3.64it/s]\u001b[A\u001b[A\n",
"\n",
" 93%|████████████████████████████████████████████████████████████████████████▎ | 2200/2374 [07:21<00:45, 3.80it/s]\u001b[A\u001b[A\n",
"\n",
" 94%|████████████████████████████████████████████████████████████████████████▉ | 2220/2374 [07:24<00:35, 4.33it/s]\u001b[A\u001b[A\n",
"\n",
" 94%|█████████████████████████████████████████████████████████████████████████▌ | 2240/2374 [07:31<00:35, 3.82it/s]\u001b[A\u001b[A\n",
"\n",
" 95%|██████████████████████████████████████████████████████████████████████████▎ | 2260/2374 [07:33<00:25, 4.46it/s]\u001b[A\u001b[A\n",
"\n",
" 96%|██████████████████████████████████████████████████████████████████████████▉ | 2280/2374 [07:38<00:21, 4.29it/s]\u001b[A\u001b[A\n",
"\n",
" 97%|███████████████████████████████████████████████████████████████████████████▌ | 2300/2374 [07:45<00:19, 3.83it/s]\u001b[A\u001b[A\n",
"\n",
" 98%|████████████████████████████████████████████████████████████████████████████▏ | 2320/2374 [07:49<00:13, 4.07it/s]\u001b[A\u001b[A\n",
"\n",
" 99%|████████████████████████████████████████████████████████████████████████████▉ | 2340/2374 [07:54<00:08, 4.16it/s]\u001b[A\u001b[A\n",
"\n",
"100%|██████████████████████████████████████████████████████████████████████████████| 2374/2374 [08:02<00:00, 4.92it/s]\u001b[A\u001b[A\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Overall Parsing done. [Time taken: 0:08:12.610360]\n"
]
}
],
"source": [
"import multiprocessing\n",
"from joblib import Parallel, delayed\n",
"from tqdm import tqdm\n",
"\n",
"num_cores = 20\n",
"inputs = tqdm(logfiles)\n",
"\n",
"start_time = datetime.now()\n",
"processed_list = Parallel(n_jobs=num_cores)(delayed(ael)(i) for i in inputs)\n",
"\n",
"print('Overall Parsing done. [Time taken: {!s}]'.format(datetime.now() - start_time))\n"
]
},
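{
"cell_type": "markdown",
"metadata": {},
"source": [
"The parallel call above refers to `ael(i)`, which is not defined anywhere in this notebook. A hypothetical per-file worker, consistent with the configuration cell earlier, might look like the following; it would need to be defined before the cell above is executed."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Hypothetical definition of the `ael` worker used by the Parallel call above.\n",
"# It reuses input_dir, output_dir, log_format, rege, minEventCount and\n",
"# merge_percent from the earlier configuration cell.\n",
"def ael(logfile):\n",
"    parser = LogParser(input_dir, output_dir, log_format, rex=rege,\n",
"                       minEventCount=minEventCount, merge_percent=merge_percent)\n",
"    parser.parse(logfile)"
]
},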
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Get All CSV files made till now"
]
},
{
"cell_type": "code",
"execution_count": 357,
"metadata": {},
"outputs": [],
"source": [
"csv_files = []\n",
"for root,dirs,files in os.walk(\"AEL_Result\"):\n",
" for file in files:\n",
" if file.endswith(\"_structured.csv\"):\n",
" csv_files.append(file)"
]
},
{
"cell_type": "code",
"execution_count": 358,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"['xaa_structured.csv',\n",
" 'xab_structured.csv',\n",
" 'xac_structured.csv',\n",
" 'xad_structured.csv',\n",
" 'xae_structured.csv',\n",
" 'xaf_structured.csv',\n",
" 'xag_structured.csv',\n",
" 'xah_structured.csv',\n",
" 'xai_structured.csv',\n",
" 'xaj_structured.csv',\n",
" 'xak_structured.csv',\n",
" 'xal_structured.csv',\n",
" 'xam_structured.csv',\n",
" 'xan_structured.csv',\n",
" 'xao_structured.csv',\n",
" 'xap_structured.csv',\n",
" 'xaq_structured.csv',\n",
" 'xar_structured.csv',\n",
" 'xas_structured.csv',\n",
" 'xat_structured.csv',\n",
" 'xau_structured.csv',\n",
" 'xav_structured.csv',\n",
" 'xaw_structured.csv',\n",
" 'xax_structured.csv',\n",
" 'xay_structured.csv',\n",
" 'xaz_structured.csv',\n",
" 'xba_structured.csv',\n",
" 'xbb_structured.csv',\n",
" 'xbc_structured.csv',\n",
" 'xbd_structured.csv',\n",
" 'xbe_structured.csv',\n",
" 'xbf_structured.csv',\n",
" 'xbg_structured.csv',\n",
" 'xbh_structured.csv',\n",
" 'xbi_structured.csv',\n",
" 'xbj_structured.csv',\n",
" 'xbk_structured.csv',\n",
" 'xbl_structured.csv',\n",
" 'xbm_structured.csv',\n",
" 'xbn_structured.csv',\n",
" 'xbo_structured.csv',\n",
" 'xbp_structured.csv',\n",
" 'xbq_structured.csv',\n",
" 'xbr_structured.csv',\n",
" 'xbs_structured.csv',\n",
" 'xbt_structured.csv',\n",
" 'xbu_structured.csv',\n",
" 'xbv_structured.csv',\n",
" 'xbw_structured.csv',\n",
" 'xbx_structured.csv',\n",
" 'xby_structured.csv',\n",
" 'xbz_structured.csv',\n",
" 'xca_structured.csv',\n",
" 'xcb_structured.csv',\n",
" 'xcc_structured.csv',\n",
" 'xcd_structured.csv',\n",
" 'xce_structured.csv',\n",
" 'xcf_structured.csv',\n",
" 'xcg_structured.csv',\n",
" 'xch_structured.csv',\n",
" 'xci_structured.csv',\n",
" 'xcj_structured.csv',\n",
" 'xck_structured.csv',\n",
" 'xcl_structured.csv',\n",
" 'xcm_structured.csv',\n",
" 'xcn_structured.csv',\n",
" 'xco_structured.csv',\n",
" 'xcp_structured.csv',\n",
" 'xcq_structured.csv',\n",
" 'xcr_structured.csv',\n",
" 'xcs_structured.csv',\n",
" 'xct_structured.csv',\n",
" 'xcu_structured.csv',\n",
" 'xcv_structured.csv',\n",
" 'xcw_structured.csv',\n",
" 'xcx_structured.csv',\n",
" 'xcy_structured.csv',\n",
" 'xcz_structured.csv',\n",
" 'xda_structured.csv',\n",
" 'xdb_structured.csv',\n",
" 'xdc_structured.csv',\n",
" 'xdd_structured.csv',\n",
" 'xde_structured.csv',\n",
" 'xdf_structured.csv',\n",
" 'xdg_structured.csv',\n",
" 'xdh_structured.csv',\n",
" 'xdi_structured.csv',\n",
" 'xdj_structured.csv',\n",
" 'xdk_structured.csv',\n",
" 'xdl_structured.csv',\n",
" 'xdm_structured.csv',\n",
" 'xdn_structured.csv',\n",
" 'xdo_structured.csv',\n",
" 'xdp_structured.csv',\n",
" 'xdq_structured.csv',\n",
" 'xdr_structured.csv',\n",
" 'xds_structured.csv',\n",
" 'xdt_structured.csv',\n",
" 'xdu_structured.csv',\n",
" 'xdv_structured.csv',\n",
" 'xdw_structured.csv',\n",
" 'xdx_structured.csv',\n",
" 'xdy_structured.csv',\n",
" 'xdz_structured.csv',\n",
" 'xea_structured.csv',\n",
" 'xeb_structured.csv',\n",
" 'xec_structured.csv',\n",
" 'xed_structured.csv',\n",
" 'xee_structured.csv',\n",
" 'xef_structured.csv',\n",
" 'xeg_structured.csv',\n",
" 'xeh_structured.csv',\n",
" 'xei_structured.csv',\n",
" 'xej_structured.csv',\n",
" 'xek_structured.csv',\n",
" 'xel_structured.csv',\n",
" 'xem_structured.csv',\n",
" 'xen_structured.csv',\n",
" 'xeo_structured.csv',\n",
" 'xep_structured.csv',\n",
" 'xeq_structured.csv',\n",
" 'xer_structured.csv',\n",
" 'xes_structured.csv',\n",
" 'xet_structured.csv',\n",
" 'xeu_structured.csv',\n",
" 'xev_structured.csv',\n",
" 'xew_structured.csv',\n",
" 'xex_structured.csv',\n",
" 'xey_structured.csv',\n",
" 'xez_structured.csv',\n",
" 'xfa_structured.csv',\n",
" 'xfb_structured.csv',\n",
" 'xfc_structured.csv',\n",
" 'xfd_structured.csv',\n",
" 'xfe_structured.csv',\n",
" 'xff_structured.csv',\n",
" 'xfg_structured.csv',\n",
" 'xfh_structured.csv',\n",
" 'xfi_structured.csv',\n",
" 'xfj_structured.csv',\n",
" 'xfk_structured.csv',\n",
" 'xfl_structured.csv',\n",
" 'xfm_structured.csv',\n",
" 'xfn_structured.csv',\n",
" 'xfo_structured.csv',\n",
" 'xfp_structured.csv',\n",
" 'xfq_structured.csv',\n",
" 'xfr_structured.csv',\n",
" 'xfs_structured.csv',\n",
" 'xft_structured.csv',\n",
" 'xfu_structured.csv',\n",
" 'xfv_structured.csv',\n",
" 'xfw_structured.csv',\n",
" 'xfx_structured.csv',\n",
" 'xfy_structured.csv',\n",
" 'xfz_structured.csv',\n",
" 'xga_structured.csv',\n",
" 'xgb_structured.csv',\n",
" 'xgc_structured.csv',\n",
" 'xgd_structured.csv',\n",
" 'xge_structured.csv',\n",
" 'xgf_structured.csv',\n",
" 'xgg_structured.csv',\n",
" 'xgh_structured.csv',\n",
" 'xgi_structured.csv',\n",
" 'xgj_structured.csv',\n",
" 'xgk_structured.csv',\n",
" 'xgl_structured.csv',\n",
" 'xgm_structured.csv',\n",
" 'xgn_structured.csv',\n",
" 'xgo_structured.csv',\n",
" 'xgp_structured.csv',\n",
" 'xgq_structured.csv',\n",
" 'xgr_structured.csv',\n",
" 'xgs_structured.csv',\n",
" 'xgt_structured.csv',\n",
" 'xgu_structured.csv',\n",
" 'xgv_structured.csv',\n",
" 'xgw_structured.csv',\n",
" 'xgx_structured.csv',\n",
" 'xgy_structured.csv',\n",
" 'xgz_structured.csv',\n",
" 'xha_structured.csv',\n",
" 'xhb_structured.csv',\n",
" 'xhc_structured.csv',\n",
" 'xhd_structured.csv',\n",
" 'xhe_structured.csv',\n",
" 'xhf_structured.csv',\n",
" 'xhg_structured.csv',\n",
" 'xhh_structured.csv',\n",
" 'xhi_structured.csv',\n",
" 'xhj_structured.csv',\n",
" 'xhk_structured.csv',\n",
" 'xhl_structured.csv',\n",
" 'xhm_structured.csv',\n",
" 'xhn_structured.csv',\n",
" 'xho_structured.csv',\n",
" 'xhp_structured.csv',\n",
" 'xhq_structured.csv',\n",
" 'xhr_structured.csv',\n",
" 'xhs_structured.csv',\n",
" 'xht_structured.csv',\n",
" 'xhu_structured.csv',\n",
" 'xhv_structured.csv',\n",
" 'xhw_structured.csv',\n",
" 'xhx_structured.csv',\n",
" 'xhy_structured.csv',\n",
" 'xhz_structured.csv',\n",
" 'xia_structured.csv',\n",
" 'xib_structured.csv',\n",
" 'xic_structured.csv',\n",
" 'xid_structured.csv',\n",
" 'xie_structured.csv',\n",
" 'xif_structured.csv',\n",
" 'xig_structured.csv',\n",
" 'xih_structured.csv',\n",
" 'xii_structured.csv',\n",
" 'xij_structured.csv',\n",
" 'xik_structured.csv',\n",
" 'xil_structured.csv',\n",
" 'xim_structured.csv',\n",
" 'xin_structured.csv',\n",
" 'xio_structured.csv',\n",
" 'xip_structured.csv',\n",
" 'xiq_structured.csv',\n",
" 'xir_structured.csv',\n",
" 'xis_structured.csv',\n",
" 'xit_structured.csv',\n",
" 'xiu_structured.csv',\n",
" 'xiv_structured.csv',\n",
" 'xiw_structured.csv',\n",
" 'xix_structured.csv',\n",
" 'xiy_structured.csv',\n",
" 'xiz_structured.csv',\n",
" 'xja_structured.csv',\n",
" 'xjb_structured.csv',\n",
" 'xjc_structured.csv',\n",
" 'xjd_structured.csv',\n",
" 'xje_structured.csv',\n",
" 'xjf_structured.csv',\n",
" 'xjg_structured.csv',\n",
" 'xjh_structured.csv',\n",
" 'xji_structured.csv',\n",
" 'xjj_structured.csv',\n",
" 'xjk_structured.csv',\n",
" 'xjl_structured.csv',\n",
" 'xjm_structured.csv',\n",
" 'xjn_structured.csv',\n",
" 'xjo_structured.csv',\n",
" 'xjp_structured.csv',\n",
" 'xjq_structured.csv',\n",
" 'xjr_structured.csv',\n",
" 'xjs_structured.csv',\n",
" 'xjt_structured.csv',\n",
" 'xju_structured.csv',\n",
" 'xjv_structured.csv',\n",
" 'xjw_structured.csv',\n",
" 'xjx_structured.csv',\n",
" 'xjy_structured.csv',\n",
" 'xjz_structured.csv',\n",
" 'xka_structured.csv',\n",
" 'xkb_structured.csv',\n",
" 'xkc_structured.csv',\n",
" 'xkd_structured.csv',\n",
" 'xke_structured.csv',\n",
" 'xkf_structured.csv',\n",
" 'xkg_structured.csv',\n",
" 'xkh_structured.csv',\n",
" 'xki_structured.csv',\n",
" 'xkj_structured.csv',\n",
" 'xkk_structured.csv',\n",
" 'xkl_structured.csv',\n",
" 'xkm_structured.csv',\n",
" 'xkn_structured.csv',\n",
" 'xko_structured.csv',\n",
" 'xkp_structured.csv',\n",
" 'xkq_structured.csv',\n",
" 'xkr_structured.csv',\n",
" 'xks_structured.csv',\n",
" 'xkt_structured.csv',\n",
" 'xku_structured.csv',\n",
" 'xkv_structured.csv',\n",
" 'xkw_structured.csv',\n",
" 'xkx_structured.csv',\n",
" 'xky_structured.csv',\n",
" 'xkz_structured.csv',\n",
" 'xla_structured.csv',\n",
" 'xlb_structured.csv',\n",
" 'xlc_structured.csv',\n",
" 'xld_structured.csv',\n",
" 'xle_structured.csv',\n",
" 'xlf_structured.csv',\n",
" 'xlg_structured.csv',\n",
" 'xlh_structured.csv',\n",
" 'xli_structured.csv',\n",
" 'xlj_structured.csv',\n",
" 'xlk_structured.csv',\n",
" 'xll_structured.csv',\n",
" 'xlm_structured.csv',\n",
" 'xln_structured.csv',\n",
" 'xlo_structured.csv',\n",
" 'xlp_structured.csv',\n",
" 'xlq_structured.csv',\n",
" 'xlr_structured.csv',\n",
" 'xls_structured.csv',\n",
" 'xlt_structured.csv',\n",
" 'xlu_structured.csv',\n",
" 'xlv_structured.csv',\n",
" 'xlw_structured.csv',\n",
" 'xlx_structured.csv',\n",
" 'xly_structured.csv',\n",
" 'xlz_structured.csv',\n",
" 'xma_structured.csv',\n",
" 'xmb_structured.csv',\n",
" 'xmc_structured.csv',\n",
" 'xmd_structured.csv',\n",
" 'xme_structured.csv',\n",
" 'xmf_structured.csv',\n",
" 'xmg_structured.csv',\n",
" 'xmh_structured.csv',\n",
" 'xmi_structured.csv',\n",
" 'xmj_structured.csv',\n",
" 'xmk_structured.csv',\n",
" 'xml_structured.csv',\n",
" 'xmm_structured.csv',\n",
" 'xmn_structured.csv',\n",
" 'xmo_structured.csv',\n",
" 'xmp_structured.csv',\n",
" 'xmq_structured.csv',\n",
" 'xmr_structured.csv',\n",
" 'xms_structured.csv',\n",
" 'xmt_structured.csv',\n",
" 'xmu_structured.csv',\n",
" 'xmv_structured.csv',\n",
" 'xmw_structured.csv',\n",
" 'xmx_structured.csv',\n",
" 'xmy_structured.csv',\n",
" 'xmz_structured.csv',\n",
" 'xna_structured.csv',\n",
" 'xnb_structured.csv',\n",
" 'xnc_structured.csv',\n",
" 'xnd_structured.csv',\n",
" 'xne_structured.csv',\n",
" 'xnf_structured.csv',\n",
" 'xng_structured.csv',\n",
" 'xnh_structured.csv',\n",
" 'xni_structured.csv',\n",
" 'xnj_structured.csv',\n",
" 'xnk_structured.csv',\n",
" 'xnl_structured.csv',\n",
" 'xnm_structured.csv',\n",
" 'xnn_structured.csv',\n",
" 'xno_structured.csv',\n",
" 'xnp_structured.csv',\n",
" 'xnq_structured.csv',\n",
" 'xnr_structured.csv',\n",
" 'xns_structured.csv',\n",
" 'xnt_structured.csv',\n",
" 'xnu_structured.csv',\n",
" 'xnv_structured.csv',\n",
" 'xnw_structured.csv',\n",
" 'xnx_structured.csv',\n",
" 'xny_structured.csv',\n",
" 'xnz_structured.csv',\n",
" 'xoa_structured.csv',\n",
" 'xob_structured.csv',\n",
" 'xoc_structured.csv',\n",
" 'xod_structured.csv',\n",
" 'xoe_structured.csv',\n",
" 'xof_structured.csv',\n",
" 'xog_structured.csv',\n",
" 'xoh_structured.csv',\n",
" 'xoi_structured.csv',\n",
" 'xoj_structured.csv',\n",
" 'xok_structured.csv',\n",
" 'xol_structured.csv',\n",
" 'xom_structured.csv',\n",
" 'xon_structured.csv',\n",
" 'xoo_structured.csv',\n",
" 'xop_structured.csv',\n",
" 'xoq_structured.csv',\n",
" 'xor_structured.csv',\n",
" 'xos_structured.csv',\n",
" 'xot_structured.csv',\n",
" 'xou_structured.csv',\n",
" 'xov_structured.csv',\n",
" 'xow_structured.csv',\n",
" 'xox_structured.csv',\n",
" 'xoy_structured.csv',\n",
" 'xoz_structured.csv',\n",
" 'xpa_structured.csv',\n",
" 'xpb_structured.csv',\n",
" 'xpc_structured.csv',\n",
" 'xpd_structured.csv',\n",
" 'xpe_structured.csv',\n",
" 'xpf_structured.csv',\n",
" 'xpg_structured.csv',\n",
" 'xph_structured.csv',\n",
" 'xpi_structured.csv',\n",
" 'xpj_structured.csv',\n",
" 'xpk_structured.csv',\n",
" 'xpl_structured.csv',\n",
" 'xpm_structured.csv',\n",
" 'xpn_structured.csv',\n",
" 'xpo_structured.csv',\n",
" 'xpp_structured.csv',\n",
" 'xpq_structured.csv',\n",
" 'xpr_structured.csv',\n",
" 'xps_structured.csv',\n",
" 'xpt_structured.csv',\n",
" 'xpu_structured.csv',\n",
" 'xpv_structured.csv',\n",
" 'xpw_structured.csv',\n",
" 'xpx_structured.csv',\n",
" 'xpy_structured.csv',\n",
" 'xpz_structured.csv',\n",
" 'xqa_structured.csv',\n",
" 'xqb_structured.csv',\n",
" 'xqc_structured.csv',\n",
" 'xqd_structured.csv',\n",
" 'xqe_structured.csv',\n",
" 'xqf_structured.csv',\n",
" 'xqg_structured.csv',\n",
" 'xqh_structured.csv',\n",
" 'xqi_structured.csv',\n",
" 'xqj_structured.csv',\n",
" 'xqk_structured.csv',\n",
" 'xql_structured.csv',\n",
" 'xqm_structured.csv',\n",
" 'xqn_structured.csv',\n",
" 'xqo_structured.csv',\n",
" 'xqp_structured.csv',\n",
" 'xqq_structured.csv',\n",
" 'xqr_structured.csv',\n",
" 'xqs_structured.csv',\n",
" 'xqt_structured.csv',\n",
" 'xqu_structured.csv',\n",
" 'xqv_structured.csv',\n",
" 'xqw_structured.csv',\n",
" 'xqx_structured.csv',\n",
" 'xqy_structured.csv',\n",
" 'xqz_structured.csv',\n",
" 'xra_structured.csv',\n",
" 'xrb_structured.csv',\n",
" 'xrc_structured.csv',\n",
" 'xrd_structured.csv',\n",
" 'xre_structured.csv',\n",
" 'xrf_structured.csv',\n",
" 'xrg_structured.csv',\n",
" 'xrh_structured.csv',\n",
" 'xri_structured.csv',\n",
" 'xrj_structured.csv',\n",
" 'xrk_structured.csv',\n",
" 'xrl_structured.csv',\n",
" 'xrm_structured.csv',\n",
" 'xrn_structured.csv',\n",
" 'xro_structured.csv',\n",
" 'xrp_structured.csv',\n",
" 'xrq_structured.csv',\n",
" 'xrr_structured.csv',\n",
" 'xrs_structured.csv',\n",
" 'xrt_structured.csv',\n",
" 'xru_structured.csv',\n",
" 'xrv_structured.csv',\n",
" 'xrw_structured.csv',\n",
" 'xrx_structured.csv',\n",
" 'xry_structured.csv',\n",
" 'xrz_structured.csv',\n",
" 'xsa_structured.csv',\n",
" 'xsb_structured.csv',\n",
" 'xsc_structured.csv',\n",
" 'xsd_structured.csv',\n",
" 'xse_structured.csv',\n",
" 'xsf_structured.csv',\n",
" 'xsg_structured.csv',\n",
" 'xsh_structured.csv',\n",
" 'xsi_structured.csv',\n",
" 'xsj_structured.csv',\n",
" 'xsk_structured.csv',\n",
" 'xsl_structured.csv',\n",
" 'xsm_structured.csv',\n",
" 'xsn_structured.csv',\n",
" 'xso_structured.csv',\n",
" 'xsp_structured.csv',\n",
" 'xsq_structured.csv',\n",
" 'xsr_structured.csv',\n",
" 'xss_structured.csv',\n",
" 'xst_structured.csv',\n",
" 'xsu_structured.csv',\n",
" 'xsv_structured.csv',\n",
" 'xsw_structured.csv',\n",
" 'xsx_structured.csv',\n",
" 'xsy_structured.csv',\n",
" 'xsz_structured.csv',\n",
" 'xta_structured.csv',\n",
" 'xtb_structured.csv',\n",
" 'xtc_structured.csv',\n",
" 'xtd_structured.csv',\n",
" 'xte_structured.csv',\n",
" 'xtf_structured.csv',\n",
" 'xtg_structured.csv',\n",
" 'xth_structured.csv',\n",
" 'xti_structured.csv',\n",
" 'xtj_structured.csv',\n",
" 'xtk_structured.csv',\n",
" 'xtl_structured.csv',\n",
" 'xtm_structured.csv',\n",
" 'xtn_structured.csv',\n",
" 'xto_structured.csv',\n",
" 'xtp_structured.csv',\n",
" 'xtq_structured.csv',\n",
" 'xtr_structured.csv',\n",
" 'xts_structured.csv',\n",
" 'xtt_structured.csv',\n",
" 'xtu_structured.csv',\n",
" 'xtv_structured.csv',\n",
" 'xtw_structured.csv',\n",
" 'xtx_structured.csv',\n",
" 'xty_structured.csv',\n",
" 'xtz_structured.csv',\n",
" 'xua_structured.csv',\n",
" 'xub_structured.csv',\n",
" 'xuc_structured.csv',\n",
" 'xud_structured.csv',\n",
" 'xue_structured.csv',\n",
" 'xuf_structured.csv',\n",
" 'xug_structured.csv',\n",
" 'xuh_structured.csv',\n",
" 'xui_structured.csv',\n",
" 'xuj_structured.csv',\n",
" 'xuk_structured.csv',\n",
" 'xul_structured.csv',\n",
" 'xum_structured.csv',\n",
" 'xun_structured.csv',\n",
" 'xuo_structured.csv',\n",
" 'xup_structured.csv',\n",
" 'xuq_structured.csv',\n",
" 'xur_structured.csv',\n",
" 'xus_structured.csv',\n",
" 'xut_structured.csv',\n",
" 'xuu_structured.csv',\n",
" 'xuv_structured.csv',\n",
" 'xuw_structured.csv',\n",
" 'xux_structured.csv',\n",
" 'xuy_structured.csv',\n",
" 'xuz_structured.csv',\n",
" 'xva_structured.csv',\n",
" 'xvb_structured.csv',\n",
" 'xvc_structured.csv',\n",
" 'xvd_structured.csv',\n",
" 'xve_structured.csv',\n",
" 'xvf_structured.csv',\n",
" 'xvg_structured.csv',\n",
" 'xvh_structured.csv',\n",
" 'xvi_structured.csv',\n",
" 'xvj_structured.csv',\n",
" 'xvk_structured.csv',\n",
" 'xvl_structured.csv',\n",
" 'xvm_structured.csv',\n",
" 'xvn_structured.csv',\n",
" 'xvo_structured.csv',\n",
" 'xvp_structured.csv',\n",
" 'xvq_structured.csv',\n",
" 'xvr_structured.csv',\n",
" 'xvs_structured.csv',\n",
" 'xvt_structured.csv',\n",
" 'xvu_structured.csv',\n",
" 'xvv_structured.csv',\n",
" 'xvw_structured.csv',\n",
" 'xvx_structured.csv',\n",
" 'xvy_structured.csv',\n",
" 'xvz_structured.csv',\n",
" 'xwa_structured.csv',\n",
" 'xwb_structured.csv',\n",
" 'xwc_structured.csv',\n",
" 'xwd_structured.csv',\n",
" 'xwe_structured.csv',\n",
" 'xwf_structured.csv',\n",
" 'xwg_structured.csv',\n",
" 'xwh_structured.csv',\n",
" 'xwi_structured.csv',\n",
" 'xwj_structured.csv',\n",
" 'xwk_structured.csv',\n",
" 'xwl_structured.csv',\n",
" 'xwm_structured.csv',\n",
" 'xwn_structured.csv',\n",
" 'xwo_structured.csv',\n",
" 'xwp_structured.csv',\n",
" 'xwq_structured.csv',\n",
" 'xwr_structured.csv',\n",
" 'xws_structured.csv',\n",
" 'xwt_structured.csv',\n",
" 'xwu_structured.csv',\n",
" 'xwv_structured.csv',\n",
" 'xww_structured.csv',\n",
" 'xwx_structured.csv',\n",
" 'xwy_structured.csv',\n",
" 'xwz_structured.csv',\n",
" 'xxa_structured.csv',\n",
" 'xxb_structured.csv',\n",
" 'xxc_structured.csv',\n",
" 'xxd_structured.csv',\n",
" 'xxe_structured.csv',\n",
" 'xxf_structured.csv',\n",
" 'xxg_structured.csv',\n",
" 'xxh_structured.csv',\n",
" 'xxi_structured.csv',\n",
" 'xxj_structured.csv',\n",
" 'xxk_structured.csv',\n",
" 'xxl_structured.csv',\n",
" 'xxm_structured.csv',\n",
" 'xxn_structured.csv',\n",
" 'xxo_structured.csv',\n",
" 'xxp_structured.csv',\n",
" 'xxq_structured.csv',\n",
" 'xxr_structured.csv',\n",
" 'xxs_structured.csv',\n",
" 'xxt_structured.csv',\n",
" 'xxu_structured.csv',\n",
" 'xxv_structured.csv',\n",
" 'xxw_structured.csv',\n",
" 'xxx_structured.csv',\n",
" 'xxy_structured.csv',\n",
" 'xxz_structured.csv',\n",
" 'xya_structured.csv',\n",
" 'xyb_structured.csv',\n",
" 'xyc_structured.csv',\n",
" 'xyd_structured.csv',\n",
" 'xye_structured.csv',\n",
" 'xyf_structured.csv',\n",
" 'xyg_structured.csv',\n",
" 'xyh_structured.csv',\n",
" 'xyi_structured.csv',\n",
" 'xyj_structured.csv',\n",
" 'xyk_structured.csv',\n",
" 'xyl_structured.csv',\n",
" 'xym_structured.csv',\n",
" 'xyn_structured.csv',\n",
" 'xyo_structured.csv',\n",
" 'xyp_structured.csv',\n",
" 'xyq_structured.csv',\n",
" 'xyr_structured.csv',\n",
" 'xys_structured.csv',\n",
" 'xyt_structured.csv',\n",
" 'xyu_structured.csv',\n",
" 'xyv_structured.csv',\n",
" 'xyw_structured.csv',\n",
" 'xyx_structured.csv',\n",
" 'xyy_structured.csv',\n",
" 'xyz_structured.csv',\n",
" 'xzaaa_structured.csv',\n",
" 'xzaab_structured.csv',\n",
" 'xzaac_structured.csv',\n",
" 'xzaad_structured.csv',\n",
" 'xzaae_structured.csv',\n",
" 'xzaaf_structured.csv',\n",
" 'xzaag_structured.csv',\n",
" 'xzaah_structured.csv',\n",
" 'xzaai_structured.csv',\n",
" 'xzaaj_structured.csv',\n",
" 'xzaak_structured.csv',\n",
" 'xzaal_structured.csv',\n",
" 'xzaam_structured.csv',\n",
" 'xzaan_structured.csv',\n",
" 'xzaao_structured.csv',\n",
" 'xzaap_structured.csv',\n",
" 'xzaaq_structured.csv',\n",
" 'xzaar_structured.csv',\n",
" 'xzaas_structured.csv',\n",
" 'xzaat_structured.csv',\n",
" 'xzaau_structured.csv',\n",
" 'xzaav_structured.csv',\n",
" 'xzaaw_structured.csv',\n",
" 'xzaax_structured.csv',\n",
" 'xzaay_structured.csv',\n",
" 'xzaaz_structured.csv',\n",
" 'xzaba_structured.csv',\n",
" 'xzabb_structured.csv',\n",
" 'xzabc_structured.csv',\n",
" 'xzabd_structured.csv',\n",
" 'xzabe_structured.csv',\n",
" 'xzabf_structured.csv',\n",
" 'xzabg_structured.csv',\n",
" 'xzabh_structured.csv',\n",
" 'xzabi_structured.csv',\n",
" 'xzabj_structured.csv',\n",
" 'xzabk_structured.csv',\n",
" 'xzabl_structured.csv',\n",
" 'xzabm_structured.csv',\n",
" 'xzabn_structured.csv',\n",
" 'xzabo_structured.csv',\n",
" 'xzabp_structured.csv',\n",
" 'xzabq_structured.csv',\n",
" 'xzabr_structured.csv',\n",
" 'xzabs_structured.csv',\n",
" 'xzabt_structured.csv',\n",
" 'xzabu_structured.csv',\n",
" 'xzabv_structured.csv',\n",
" 'xzabw_structured.csv',\n",
" 'xzabx_structured.csv',\n",
" 'xzaby_structured.csv',\n",
" 'xzabz_structured.csv',\n",
" 'xzaca_structured.csv',\n",
" 'xzacb_structured.csv',\n",
" 'xzacc_structured.csv',\n",
" 'xzacd_structured.csv',\n",
" 'xzace_structured.csv',\n",
" 'xzacf_structured.csv',\n",
" 'xzacg_structured.csv',\n",
" 'xzach_structured.csv',\n",
" 'xzaci_structured.csv',\n",
" 'xzacj_structured.csv',\n",
" 'xzack_structured.csv',\n",
" 'xzacl_structured.csv',\n",
" 'xzacm_structured.csv',\n",
" 'xzacn_structured.csv',\n",
" 'xzaco_structured.csv',\n",
" 'xzacp_structured.csv',\n",
" 'xzacq_structured.csv',\n",
" 'xzacr_structured.csv',\n",
" 'xzacs_structured.csv',\n",
" 'xzact_structured.csv',\n",
" 'xzacu_structured.csv',\n",
" 'xzacv_structured.csv',\n",
" 'xzacw_structured.csv',\n",
" 'xzacx_structured.csv',\n",
" 'xzacy_structured.csv',\n",
" 'xzacz_structured.csv',\n",
" 'xzada_structured.csv',\n",
" 'xzadb_structured.csv',\n",
" 'xzadc_structured.csv',\n",
" 'xzadd_structured.csv',\n",
" 'xzade_structured.csv',\n",
" 'xzadf_structured.csv',\n",
" 'xzadg_structured.csv',\n",
" 'xzadh_structured.csv',\n",
" 'xzadi_structured.csv',\n",
" 'xzadj_structured.csv',\n",
" 'xzadk_structured.csv',\n",
" 'xzadl_structured.csv',\n",
" 'xzadm_structured.csv',\n",
" 'xzadn_structured.csv',\n",
" 'xzado_structured.csv',\n",
" 'xzadp_structured.csv',\n",
" 'xzadq_structured.csv',\n",
" 'xzadr_structured.csv',\n",
" 'xzads_structured.csv',\n",
" 'xzadt_structured.csv',\n",
" 'xzadu_structured.csv',\n",
" 'xzadv_structured.csv',\n",
" 'xzadw_structured.csv',\n",
" 'xzadx_structured.csv',\n",
" 'xzady_structured.csv',\n",
" 'xzadz_structured.csv',\n",
" 'xzaea_structured.csv',\n",
" 'xzaeb_structured.csv',\n",
" 'xzaec_structured.csv',\n",
" 'xzaed_structured.csv',\n",
" 'xzaee_structured.csv',\n",
" 'xzaef_structured.csv',\n",
" 'xzaeg_structured.csv',\n",
" 'xzaeh_structured.csv',\n",
" 'xzaei_structured.csv',\n",
" 'xzaej_structured.csv',\n",
" 'xzaek_structured.csv',\n",
" 'xzael_structured.csv',\n",
" 'xzaem_structured.csv',\n",
" 'xzaen_structured.csv',\n",
" 'xzaeo_structured.csv',\n",
" 'xzaep_structured.csv',\n",
" 'xzaeq_structured.csv',\n",
" 'xzaer_structured.csv',\n",
" 'xzaes_structured.csv',\n",
" 'xzaet_structured.csv',\n",
" 'xzaeu_structured.csv',\n",
" 'xzaev_structured.csv',\n",
" 'xzaew_structured.csv',\n",
" 'xzaex_structured.csv',\n",
" 'xzaey_structured.csv',\n",
" 'xzaez_structured.csv',\n",
" 'xzafa_structured.csv',\n",
" 'xzafb_structured.csv',\n",
" 'xzafc_structured.csv',\n",
" 'xzafd_structured.csv',\n",
" 'xzafe_structured.csv',\n",
" 'xzaff_structured.csv',\n",
" 'xzafg_structured.csv',\n",
" 'xzafh_structured.csv',\n",
" 'xzafi_structured.csv',\n",
" 'xzafj_structured.csv',\n",
" 'xzafk_structured.csv',\n",
" 'xzafl_structured.csv',\n",
" 'xzafm_structured.csv',\n",
" 'xzafn_structured.csv',\n",
" 'xzafo_structured.csv',\n",
" 'xzafp_structured.csv',\n",
" 'xzafq_structured.csv',\n",
" 'xzafr_structured.csv',\n",
" 'xzafs_structured.csv',\n",
" 'xzaft_structured.csv',\n",
" 'xzafu_structured.csv',\n",
" 'xzafv_structured.csv',\n",
" 'xzafw_structured.csv',\n",
" 'xzafx_structured.csv',\n",
" 'xzafy_structured.csv',\n",
" 'xzafz_structured.csv',\n",
" 'xzaga_structured.csv',\n",
" 'xzagb_structured.csv',\n",
" 'xzagc_structured.csv',\n",
" 'xzagd_structured.csv',\n",
" 'xzage_structured.csv',\n",
" 'xzagf_structured.csv',\n",
" 'xzagg_structured.csv',\n",
" 'xzagh_structured.csv',\n",
" 'xzagi_structured.csv',\n",
" 'xzagj_structured.csv',\n",
" 'xzagk_structured.csv',\n",
" 'xzagl_structured.csv',\n",
" 'xzagm_structured.csv',\n",
" 'xzagn_structured.csv',\n",
" 'xzago_structured.csv',\n",
" 'xzagp_structured.csv',\n",
" 'xzagq_structured.csv',\n",
" 'xzagr_structured.csv',\n",
" 'xzags_structured.csv',\n",
" 'xzagt_structured.csv',\n",
" 'xzagu_structured.csv',\n",
" 'xzagv_structured.csv',\n",
" 'xzagw_structured.csv',\n",
" 'xzagx_structured.csv',\n",
" 'xzagy_structured.csv',\n",
" 'xzagz_structured.csv',\n",
" 'xzaha_structured.csv',\n",
" 'xzahb_structured.csv',\n",
" 'xzahc_structured.csv',\n",
" 'xzahd_structured.csv',\n",
" 'xzahe_structured.csv',\n",
" 'xzahf_structured.csv',\n",
" 'xzahg_structured.csv',\n",
" 'xzahh_structured.csv',\n",
" 'xzahi_structured.csv',\n",
" 'xzahj_structured.csv',\n",
" 'xzahk_structured.csv',\n",
" 'xzahl_structured.csv',\n",
" 'xzahm_structured.csv',\n",
" 'xzahn_structured.csv',\n",
" 'xzaho_structured.csv',\n",
" 'xzahp_structured.csv',\n",
" 'xzahq_structured.csv',\n",
" 'xzahr_structured.csv',\n",
" 'xzahs_structured.csv',\n",
" 'xzaht_structured.csv',\n",
" 'xzahu_structured.csv',\n",
" 'xzahv_structured.csv',\n",
" 'xzahw_structured.csv',\n",
" 'xzahx_structured.csv',\n",
" 'xzahy_structured.csv',\n",
" 'xzahz_structured.csv',\n",
" 'xzaia_structured.csv',\n",
" 'xzaib_structured.csv',\n",
" 'xzaic_structured.csv',\n",
" 'xzaid_structured.csv',\n",
" 'xzaie_structured.csv',\n",
" 'xzaif_structured.csv',\n",
" 'xzaig_structured.csv',\n",
" 'xzaih_structured.csv',\n",
" 'xzaii_structured.csv',\n",
" 'xzaij_structured.csv',\n",
" 'xzaik_structured.csv',\n",
" 'xzail_structured.csv',\n",
" 'xzaim_structured.csv',\n",
" 'xzain_structured.csv',\n",
" 'xzaio_structured.csv',\n",
" 'xzaip_structured.csv',\n",
" 'xzaiq_structured.csv',\n",
" 'xzair_structured.csv',\n",
" 'xzais_structured.csv',\n",
" 'xzait_structured.csv',\n",
" 'xzaiu_structured.csv',\n",
" 'xzaiv_structured.csv',\n",
" 'xzaiw_structured.csv',\n",
" 'xzaix_structured.csv',\n",
" 'xzaiy_structured.csv',\n",
" 'xzaiz_structured.csv',\n",
" 'xzaja_structured.csv',\n",
" 'xzajb_structured.csv',\n",
" 'xzajc_structured.csv',\n",
" 'xzajd_structured.csv',\n",
" 'xzaje_structured.csv',\n",
" 'xzajf_structured.csv',\n",
" 'xzajg_structured.csv',\n",
" 'xzajh_structured.csv',\n",
" 'xzaji_structured.csv',\n",
" 'xzajj_structured.csv',\n",
" 'xzajk_structured.csv',\n",
" 'xzajl_structured.csv',\n",
" 'xzajm_structured.csv',\n",
" 'xzajn_structured.csv',\n",
" 'xzajo_structured.csv',\n",
" 'xzajp_structured.csv',\n",
" 'xzajq_structured.csv',\n",
" 'xzajr_structured.csv',\n",
" 'xzajs_structured.csv',\n",
" 'xzajt_structured.csv',\n",
" 'xzaju_structured.csv',\n",
" 'xzajv_structured.csv',\n",
" 'xzajw_structured.csv',\n",
" 'xzajx_structured.csv',\n",
" 'xzajy_structured.csv',\n",
" 'xzajz_structured.csv',\n",
" 'xzaka_structured.csv',\n",
" 'xzakb_structured.csv',\n",
" 'xzakc_structured.csv',\n",
" 'xzakd_structured.csv',\n",
" 'xzake_structured.csv',\n",
" 'xzakf_structured.csv',\n",
" 'xzakg_structured.csv',\n",
" 'xzakh_structured.csv',\n",
" 'xzaki_structured.csv',\n",
" 'xzakj_structured.csv',\n",
" 'xzakk_structured.csv',\n",
" 'xzakl_structured.csv',\n",
" 'xzakm_structured.csv',\n",
" 'xzakn_structured.csv',\n",
" 'xzako_structured.csv',\n",
" 'xzakp_structured.csv',\n",
" 'xzakq_structured.csv',\n",
" 'xzakr_structured.csv',\n",
" 'xzaks_structured.csv',\n",
" 'xzakt_structured.csv',\n",
" 'xzaku_structured.csv',\n",
" 'xzakv_structured.csv',\n",
" 'xzakw_structured.csv',\n",
" 'xzakx_structured.csv',\n",
" 'xzaky_structured.csv',\n",
" 'xzakz_structured.csv',\n",
" 'xzala_structured.csv',\n",
" 'xzalb_structured.csv',\n",
" 'xzalc_structured.csv',\n",
" 'xzald_structured.csv',\n",
" 'xzale_structured.csv',\n",
" 'xzalf_structured.csv',\n",
" 'xzalg_structured.csv',\n",
" 'xzalh_structured.csv',\n",
" 'xzali_structured.csv',\n",
" 'xzalj_structured.csv',\n",
" 'xzalk_structured.csv',\n",
" 'xzall_structured.csv',\n",
" 'xzalm_structured.csv',\n",
" 'xzaln_structured.csv',\n",
" 'xzalo_structured.csv',\n",
" 'xzalp_structured.csv',\n",
" 'xzalq_structured.csv',\n",
" 'xzalr_structured.csv',\n",
" 'xzals_structured.csv',\n",
" 'xzalt_structured.csv',\n",
" 'xzalu_structured.csv',\n",
" 'xzalv_structured.csv',\n",
" 'xzalw_structured.csv',\n",
" 'xzalx_structured.csv',\n",
" 'xzaly_structured.csv',\n",
" 'xzalz_structured.csv',\n",
" 'xzama_structured.csv',\n",
" 'xzamb_structured.csv',\n",
" 'xzamc_structured.csv',\n",
" 'xzamd_structured.csv',\n",
" 'xzame_structured.csv',\n",
" 'xzamf_structured.csv',\n",
" 'xzamg_structured.csv',\n",
" 'xzamh_structured.csv',\n",
" 'xzami_structured.csv',\n",
" 'xzamj_structured.csv',\n",
" 'xzamk_structured.csv',\n",
" 'xzaml_structured.csv',\n",
" 'xzamm_structured.csv',\n",
" 'xzamn_structured.csv',\n",
" 'xzamo_structured.csv',\n",
" 'xzamp_structured.csv',\n",
" 'xzamq_structured.csv',\n",
" 'xzamr_structured.csv',\n",
" 'xzams_structured.csv',\n",
" 'xzamt_structured.csv',\n",
" 'xzamu_structured.csv',\n",
" 'xzamv_structured.csv',\n",
" 'xzamw_structured.csv',\n",
" 'xzamx_structured.csv',\n",
" 'xzamy_structured.csv',\n",
" 'xzamz_structured.csv',\n",
" 'xzana_structured.csv',\n",
" 'xzanb_structured.csv',\n",
" 'xzanc_structured.csv',\n",
" 'xzand_structured.csv',\n",
" 'xzane_structured.csv',\n",
" 'xzanf_structured.csv',\n",
" 'xzang_structured.csv',\n",
" 'xzanh_structured.csv',\n",
" 'xzani_structured.csv',\n",
" 'xzanj_structured.csv',\n",
" 'xzank_structured.csv',\n",
" 'xzanl_structured.csv',\n",
" ...]"
]
},
"execution_count": 358,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"csv_files"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Creating the Feature matrix for each logging period (small file chunk) and writing into a separate csv file Logging_Period.csv"
]
},
{
"cell_type": "code",
"execution_count": 371,
"metadata": {},
"outputs": [],
"source": [
"\n",
"def merge_first(t):\n",
" if len(t) == 4:\n",
" t[0] = t[0] + t[1]\n",
" t = t.remove(t[1])\n",
" return t\n",
"\n",
"def create_fm(filename, ite):\n",
" df = pd.read_csv(\"AEL_Result/\"+filename)\n",
"\n",
" ex = df.groupby([\"EventTemplate\", \"Node\"]).size()\n",
" l =ex.to_csv(index=True)\n",
" l = l.split(\"\\r\\n\")\n",
" o = [list_item.split(\",\") for list_item in l]\n",
" for list_item in range(len(o)):\n",
" merge_first(o[list_item])\n",
" \n",
" o[0][2] = 'Count'\n",
" path = 'BGL_prateek_results'\n",
" if not os.path.isdir(path):\n",
" os.mkdir(path) \n",
" with open(path + '/' + 'Logging_Period ' +str(ite) +'.csv', 'w', newline='') as file:\n",
" writer = csv.writer(file)\n",
" writer.writerows(o)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Running the AEL parser"
]
},
{
"cell_type": "code",
"execution_count": 45,
"metadata": {},
"outputs": [],
"source": [
"def ael(filename):\n",
"\n",
" parser = LogParser(input_dir, output_dir, log_format, rex=rege, \n",
" minEventCount=minEventCount, merge_percent=merge_percent)\n",
"\n",
" parser.parse(filename)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Creating BGL results"
]
},
{
"cell_type": "code",
"execution_count": 372,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
"\n",
"\n",
"\n",
"\n",
"\n",
" 0%| | 0/2374 [00:00<?, ?it/s]\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Overall Parsing done. [Time taken: 0:01:12.420979]\n"
]
}
],
"source": [
"import multiprocessing\n",
"from joblib import Parallel, delayed\n",
"from tqdm import tqdm\n",
"\n",
"num_cores = 20\n",
"inputs = tqdm(csv_files)\n",
"\n",
"start_time = datetime.now()\n",
"processed_list = Parallel(n_jobs=num_cores)(delayed(create_fm)(csv_files[i], i) for i in range(len(inputs)))\n",
"\n",
"print('Overall Parsing done. [Time taken: {!s}]'.format(datetime.now() - start_time))\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.6"
}
},
"nbformat": 4,
"nbformat_minor": 4
}
@prateekiiest (Author):

Folder architecture:

.
└── BGL
    ├── soumik_coded_files
    ├── AEL_Results
    │   └── *_structured.csv      (small files, one per logging interval of 2000 lines)
    └── BGL_prateek_results
        └── Logging_Period i.csv  (feature matrix: attribute counts)

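For downstream analysis, the per-period feature matrices can be stitched back together. Below is a minimal sketch (not part of the original notebook) that assumes the Logging_Period i.csv files written by create_fm above, with the EventTemplate, Node and Count columns produced there; names and paths are assumptions, adjust as needed.

```python
import os
import pandas as pd

result_dir = 'BGL_prateek_results'  # output folder created by create_fm

# Read every per-period feature matrix and tag it with its logging-period index.
frames = []
for name in sorted(os.listdir(result_dir)):
    if name.startswith('Logging_Period') and name.endswith('.csv'):
        period = int(name.replace('Logging_Period', '').replace('.csv', '').strip())
        df = pd.read_csv(os.path.join(result_dir, name))
        df['LoggingPeriod'] = period
        frames.append(df)

all_periods = pd.concat(frames, ignore_index=True)

# Pivot into one row per logging period, one column per (EventTemplate, Node) pair.
feature_matrix = all_periods.pivot_table(index='LoggingPeriod',
                                         columns=['EventTemplate', 'Node'],
                                         values='Count',
                                         fill_value=0)
print(feature_matrix.shape)
```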