Log analyser for Dawn of the Dragons raids. Overengineered as fuck.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @file dotd.py
# @author Michael Foukarakis
# @version 0.6
# @date Created: Sun Aug 25, 2013 09:57 BST
# Last Update: Fri Dec 30, 2016 10:21 EET
#------------------------------------------------------------------------
# Description: Log analyser for Dawn of the Dragons raids.
#------------------------------------------------------------------------
# History: None yet
# TODO: Handle logs from streams or pipes?
# Integrate with clipboard
# WARNING: Requires Python 3
#------------------------------------------------------------------------
import itertools, collections, re, argparse
from functools import total_ordering
__all__ = ['Analyser']
class LogReader():
    def __init__(self, iterable, pc=None):
        self.iterable = iterable
        self.pc = pc
        self.peek = itertools.groupby(self.iterable, self._keyfunc())

    def __iter__(self):
        """x.__iter__() <==> iter(x)
        """
        return self

    def _keyfunc(self):
        """A key function to be used in grouping all procs and the hit in one group.
        Remember all procs precede the actual hit line. This function yields the same key
        value for all those lines. It does so by incrementing the key value one line after
        the PC name is found - this signifies a hit line has been encountered.
        """
        def _f(elem):
            if _f.flag:
                _f.key += 1
                _f.flag = False
            if -1 != elem.find(self.pc):
                _f.flag = True
            return _f.key
        _f.key, _f.flag = 0, False
        return _f

    def __next__(self):
        """x.__next__() <==> next(x)
        """
        return self.next()

    def next(self):
        for k, g in self.peek:
            return list(g)
        raise StopIteration
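# Hypothetical sketch of how LogReader groups a raid log (the lines and numbers below
# are illustrative, not taken from a real log): every proc line that precedes the PC's
# hit line receives the same group key, so each item yielded by the reader is one
# complete hit, e.g.
#
#   lines = ['SomeProc contributed 1,000 damage.',
#            'Rhea dealt 5,000 damage! Lost 10 health. Earned 5 gold and 7 experience!']
#   for hit in LogReader(lines, pc='Rhea'):
#       print(hit)    # both lines come back together as a single list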
@total_ordering
class Proc():
    """The Proc class models a proc (Programmed Random OCcurrence). PROCs are totally
    ordered on total damage over all logs parsed.
    """
    def __init__(self, name=None):
        self.name = name if name else ''
        self.hits = 0
        self.damage = 0

    def __lt__(self, other):
        return self.damage < other.damage

    def __str__(self):
        """x.__str__() <==> str(x)
        """
        return '%s - total damage [%u]' % (self.name, self.damage)

    def __eq__(self, other):
        """x.__eq__(y) <==> x==y
        PROCs compare equal if they have the same name.
        """
        return hasattr(other, 'name') and self.name == other.name

    def __hash__(self):
        """A proc can be used as a dictionary key, and dict[proc.name] == dict[proc].
        """
        return hash(self.name)

    def update(self, damage):
        self.hits += 1
        self.damage += damage
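# Illustration of the ordering (made-up numbers): Procs sort by accumulated damage,
# which is what lets Analyser.__str__ list the heaviest hitters first.
#
#   a, b = Proc('A'), Proc('B')
#   a.update(100)
#   b.update(250)
#   sorted([a, b], reverse=True)    # b first, since 250 > 100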
class Poff(collections.defaultdict):
    """A defaultdict in which the key is also the value. This has the following good
    properties:
      * You can index its values using one of their properties as the key. To index
        objects based on their .name, you implement the __hash__ method to hash .name
        members.
      * If the key is not present, the default factory will be called with the key as
        the only argument.
    """
    def __missing__(self, key):
        self[key] = self.default_factory(key)
        return self[key]
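# Minimal sketch of why Poff is handy here (the proc name is hypothetical): the first
# access by name builds a Proc via the default factory, and later accesses reuse it.
#
#   procs = Poff(Proc)
#   procs['Hypothetical Proc'].update(1000)
#   procs['Hypothetical Proc'].update(2500)
#   procs['Hypothetical Proc'].damage    # 3500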
class Analyser():
    """The analyser parses hits and stores statistics. It can be printed to show them.
    """
    def __init__(self, show_items=True):
        self.show_items = show_items
        self.hits, self.crits, self.damage = 0, 0, 0
        self.gold, self.exp = 0, 0
        self.obtains = collections.defaultdict()
        self.procs = Poff(Proc)
        self.finds = collections.defaultdict(int)
        self.handlers = {
            r'^\w+ dealt (.*) damage! Lost .* health. Earned (.*) gold and (.*) experience!$' : self.record_hit,
            r'^\w+ crit (.*) damage! Lost .* health. Earned (.*) gold and (.*) experience!$' : self.record_crit,
            r'^(.*) contributed (.*) damage.$' : self.record_proc,
            r'^.* has restored some of your Health.$' : lambda: None,
            r'^You have obtained: (.*)\.$' : self.record_find,
            r'^You Found (.*)!$' : self.record_find
        }
    def match(self, line):
        """Match LINE against all patterns this analyser can handle, and return a tuple
        containing the handler for the line and the arguments to pass to it as a tuple.
        If no pattern matches, it returns a nop handler and an empty argument tuple.
        Each LINE only matches one handler.
        """
        for k, handler in self.handlers.items():
            m = re.match(k, line)
            if m:
                return handler, m.groups()
        else:
            return lambda: None, ()
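    # Sketch of the dispatch with a made-up hit line: match() returns the bound handler
    # plus the captured groups, so feed() can simply call handler(*params).
    #
    #   a = Analyser()
    #   handler, params = a.match('Rhea dealt 1,234 damage! Lost 5 health. '
    #                             'Earned 10 gold and 20 experience!')
    #   handler(*params)    # equivalent to a.record_hit('1,234', '10', '20')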
    def record_hit(self, damage, gold, exp):
        self.hits += 1
        self.damage += int(damage.replace(',', ''))
        self.gold += int(gold.replace(',', ''))
        self.exp += int(exp.replace(',', ''))

    def record_crit(self, damage, gold, exp):
        self.hits += 1
        self.crits += 1
        self.damage += int(damage.replace(',', ''))
        self.gold += int(gold.replace(',', ''))
        self.exp += int(exp.replace(',', ''))

    def record_proc(self, name, damage):
        self.procs[name].update(int(damage.replace(',', '')))

    def record_find(self, item):
        self.finds[item] += 1

    def feed(self, hit):
        """Process a hit and record all statistics from procs, hits, items found, and life
        gained.
        """
        # TODO: Pre-hit hooks
        for handler, params in map(self.match, hit):
            handler(*params)
        # TODO: Post-hit hooks
    def __str__(self):
        """x.__str__() <==> str(x)
        """
        if not self.hits:
            return '[-] No hits recorded!'
        xs = 'Gold / hit : [{:15.3f}]\nXP / hit : [{:15.3f}]\n'.format(self.gold / self.hits, self.exp / self.hits)
        hs = 'Hits : [{:15}]\nCrit chance : [{:13.2f} %]\nDamage avg : [{:15,.3f}]\n'.\
            format(self.hits, 100 * self.crits / self.hits, self.damage / self.hits)
        ps, fs = '', 'Found:\n'
        # TODO: Print a histogram of procs
        ps += '{:^47} {:^5} {:^6} {:^12} {:^15} {:^15}\n'.format('Name', 'Count', 'Rate', 'Damage/proc', 'Damage/hit', 'Total damage')
        for p in sorted(self.procs.values(), reverse=True):
            ps += '--------------------\n'
            # Name | Count | Rate | Damage/proc | Damage/hit | Total damage
            ps += '[{:^44}] {:>5} {:>3.2f}% {:>12,d} {:>13,} {:>15,}\n'\
                .format(p.name, p.hits, 100 * p.hits / self.hits, int(p.damage / p.hits),
                        int(p.damage / self.hits), p.damage)
        for item, finding in self.finds.items():
            fs += '{:<36} : {}\n'.format(item, finding)
        return hs + xs + ps + (fs if self.show_items else '')
def process(iterable, pc, show_items=True):
    reader = LogReader(iterable, pc)
    analyser = Analyser(show_items)
    for hit in reader:
        analyser.feed(hit)
    print('Statistics for {}'.format(pc))
    print('--------------------')
    print(analyser)
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Process Dawn of the Dragons raid logs.')
    parser.add_argument('logs', metavar='LOGFILE', type=str, nargs='+', help='a log for the analyser to process.')
    parser.add_argument('--name', type=str, default='Rhea', help='the PC name.')
    parser.add_argument('--no-items', dest='items', action='store_false', help='do not list info about items found.')
    parser.set_defaults(items=True)
    args = parser.parse_args()
    for fname in args.logs:
        with open(fname, 'r') as fh:
            process(fh, args.name, args.items)
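# Example invocation (the log file names are hypothetical):
#   python3 dotd.py --name Rhea --no-items raid-2016-12-30.log raid-2016-12-31.log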