Skip to content

Instantly share code, notes, and snippets.

@luser
Last active September 20, 2018 18:01
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save luser/8a99b1accd2b96a37f7e to your computer and use it in GitHub Desktop.
Makefile stats for mozilla-central
// Fetch the etherpad pad that lists makefile-conversion bugs. The pad's
// plain-text export is JSON, so the XHR layer parses it for us.
function get_etherpad_bug_data() {
  return new Promise((resolve, reject) => {
    const xhr = new XMLHttpRequest();
    xhr.responseType = 'json';
    xhr.onload = () => {
      console.log('Got etherpad data');
      resolve(xhr.response);
    };
    xhr.onerror = (err) => {
      reject(err);
    };
    xhr.open('GET', 'https://public.etherpad-mozilla.org/p/makefile-conversion-bugs/export/txt', true);
    xhr.send(null);
  });
}
// Return an array of an object's own enumerable property values.
// Object.values does exactly what the previous hand-rolled
// for...in + hasOwnProperty loop did (own enumerable string-keyed props,
// same ordering).
function object_values(o) {
  return Object.values(o);
}
// Given the etherpad mapping of {makefile path: [bug ids]}, fetch
// status/summary for every referenced bug from the Bugzilla REST API.
// Resolves with { files: the input mapping, bugs: Map(bug id -> HTML link) }.
function load_bug_details(bug_data) {
  var bugs = new Set(object_values(bug_data).reduce((a, b) => a.concat(b), []));
  return new Promise((resolve, reject) => {
    var req = new XMLHttpRequest();
    req.responseType = 'json';
    req.onload = () => {
      console.log('Got bugzilla data');
      // Build <a> links: resolved bugs are struck through, and the tooltip
      // shows status/resolution plus the bug summary.
      function* make_bug_links(data) {
        for (var bug of data.bugs) {
          yield [bug.id, `<a title="${bug.status}${bug.resolution ? ' ' + bug.resolution : ''} - ${bug.summary}" style="${bug.resolution ? 'text-decoration: line-through;' : ''}" href="https://bugzilla.mozilla.org/show_bug.cgi?id=${bug.id}">${bug.id}</a>`];
        }
      }
      resolve({
        files: bug_data,
        bugs: new Map(make_bug_links(req.response))
      });
    };
    req.onerror = (e) => reject(e);
    req.open('GET', `https://bugzilla.mozilla.org/rest/bug?id=${Array.from(bugs).join(',')}&include_fields=id,summary,status,resolution`, true);
    // 'application/json' is the registered JSON media type; the previous
    // 'text/json' is nonstandard (Bugzilla returns JSON either way).
    req.setRequestHeader('Accept', 'application/json');
    req.send(null);
  });
}
// Resolve once the document's DOMContentLoaded event fires.
function dom_loaded() {
  return new Promise((resolve, reject) => {
    const onReady = () => {
      console.log('DOMContentLoaded');
      // One-shot: detach ourselves before resolving.
      document.removeEventListener('DOMContentLoaded', onReady);
      resolve();
    };
    document.addEventListener('DOMContentLoaded', onReady, false);
  });
}
// Fill the Bugs column of the stats table from the fetched bug data, and
// tag rows with no filed bug. Also updates the no-bug count/percentage.
function fill_table_with_bugs(data) {
  console.log('filling table');
  const tbody = document.getElementsByTagName('table')[0].tBodies[0];
  let missing = 0;
  for (const row of tbody.rows) {
    const file = row.cells[1].textContent;
    if (file in data.files) {
      // data.bugs maps bug id -> prebuilt <a> markup.
      row.cells[2].innerHTML = data.files[file].map((b) => data.bugs.get(b)).join(' ');
    } else {
      row.className = 'nobug';
      missing++;
    }
  }
  document.getElementById('no_bug_files').textContent = missing;
  document.getElementById('no_bug_percent').textContent = (100 * missing / tbody.rows.length).toFixed(2);
}
// Kick everything off: fetch bug data and wait for the DOM in parallel,
// then populate the table.
Promise.all([
  get_etherpad_bug_data().then(load_bug_details),
  dom_loaded(),
])
  .then(([bugData]) => fill_table_with_bugs(bugData))
  .catch((e) => console.error(e));
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
import sys
def gen_html(json_path, html_path):
    """Render per-user Makefile-removal stats as a sortable HTML table.

    json_path: path to a JSON object mapping user -> dict with at least
        'removals', 'lines' and 'changesets' keys.
    html_path: output HTML file path.
    """
    # Context managers + text mode: the previous code leaked the JSON file
    # handle and wrote str to a binary-mode file (which breaks on Python 3).
    # dict.items() works on both Python 2 and 3, unlike iteritems().
    with open(json_path) as j:
        data = json.load(j)
    with open(html_path, 'w') as h:
        h.write('''<!DOCTYPE html>
<html>
<head><meta charset="utf-8">
<script src="tablesort.min.js"></script>
<script>
// https://raw.githubusercontent.com/tristen/tablesort/gh-pages/src/sorts/tablesort.number.js
(function(){
var cleanNumber = function(i) {
return i.replace(/[^\-?0-9.]/g, '');
},
compareNumber = function(a, b) {
a = parseFloat(a);
b = parseFloat(b);
a = isNaN(a) ? 0 : a;
b = isNaN(b) ? 0 : b;
return a - b;
};
Tablesort.extend('number', function(item) {
return item.match(/^-?[£\x24Û¢´€]?\d+\s*([,\.]\d{0,2})/) || // Prefixed currency
item.match(/^-?\d+\s*([,\.]\d{0,2})?[£\x24Û¢´€]/) || // Suffixed currency
item.match(/^-?(\d)*-?([,\.]){0,1}-?(\d)+([E,e][\-+][\d]+)?%?$/); // Number
}, function(a, b) {
a = cleanNumber(a);
b = cleanNumber(b);
return compareNumber(b, a);
});
}());
</script>
</head>
<body>
<h1>Makefile haters club</h1>
<table id="table">
<thead>
<tr><th class='no-sort'>User</th><th title="Net Makefile.in files removed from the tree" class="sort-default">Makefiles</th><th title="Net lines of Makefile.in removed from the tree">Lines</th><th title="Number of changesets touching Makefile.in files">Changesets</th></tr>
</thead>
<tbody>
''')
        # One row per user; extra keys in d (e.g. 'changesets') are ignored
        # by str.format.
        for user, d in data.items():
            h.write('<tr><td>{user}</td><td>{removals}</td><td>{lines}</td><td>{csets}</td>\n'.format(user=user, csets=len(d['changesets']), **d))
        h.write('''
</tbody>
</table>
<script>
new Tablesort(document.getElementById('table'), {descending: true});
</script>
</body>
</html>
''')
# CLI usage: makefilehatersreport.py <data.json> <output.html>
if __name__ == '__main__':
    gen_html(sys.argv[1], sys.argv[2])
#!/usr/bin/env python
import json
import sys
from mercurial import (
ui,
hg,
util,
mdiff,
)
from mercurial.node import short
from collections import defaultdict
from makefilestats import add_paths, ignore_makefile_in
def get_haters(topsrcdir):
    """Tally net Makefile.in removals per author on mozilla-central.

    Walks every non-backout, non-merge changeset since the start of the
    moz.build conversion (bug 784841) that touches a Makefile.in, and
    accumulates per author-email: changeset hashes, net files removed and
    net lines removed. Dumps the raw data to /tmp/makefilehaters.json and
    prints a leaderboard sorted by files removed.
    """
    add_paths(topsrcdir)
    # Per-author accumulator; negative values mean the author added makefiles.
    data = defaultdict(lambda: dict(changesets=[], removals=0, lines=0))
    u = ui.ui()
    u.setconfig('ui', 'forcecwd', topsrcdir)
    repo = hg.repository(u, topsrcdir)
    # a9021c50ccf9 is the first changeset from bug 784841.
    # The backed_out and backout revsets are from a local hg ext I have.
    first_rev = 'a9021c50ccf9'
    csets = repo.set('%s::central & ! backed_out() & ! backout() & ! merge() & file("**/Makefile.in")' % first_rev)
    for ctx in csets:
        who = util.email(ctx.user()).lower()
        data[who]['changesets'].append(short(ctx.node()))
        parent = ctx.p1()
        status = parent.status(ctx)
        # Removed Makefile.in: credit one file plus its full pre-removal
        # line count (read from the parent revision).
        for f in status.removed:
            if f.endswith('Makefile.in') and not ignore_makefile_in(f):
                data[who]['removals'] += 1
                fctx = parent.filectx(f)
                data[who]['lines'] += len(fctx.data().splitlines())
        # Added Makefile.in: counts against the author.
        for f in status.added:
            if f.endswith('Makefile.in') and not ignore_makefile_in(f):
                data[who]['removals'] -= 1
                fctx = ctx.filectx(f)
                data[who]['lines'] -= len(fctx.data().splitlines())
        # Modified Makefile.in: credit the net shrinkage (negative if grown).
        for f in status.modified:
            if f.endswith('Makefile.in') and not ignore_makefile_in(f):
                old_lines = len(parent.filectx(f).data().splitlines())
                new_lines = len(ctx.filectx(f).data().splitlines())
                data[who]['lines'] += old_lines - new_lines
    with open('/tmp/makefilehaters.json', 'wb') as j:
        json.dump(data, j)
    # NOTE(review): this `u` shadows the mercurial ui object above; it is the
    # author-email string here. iterkeys() is Python-2-only.
    maxuser = max(len(u) for u in data.iterkeys())
    print('{:{width}} Makefiles Lines Changesets'.format('User', width=maxuser))
    print('\n'.join('{user:{width}} {removals:9} {lines:5} {csets:10}'.format(user=user, csets=len(d['changesets']), width=maxuser, **d) for (user, d) in sorted(data.items(), key=lambda x: x[1]['removals'], reverse=True)))
# CLI usage: makefilehaters.py <topsrcdir>
if __name__ == '__main__':
    get_haters(sys.argv[1])
#!/usr/bin/env python
from __future__ import print_function
import bisect
import calendar
import csv
import datetime
import itertools
import os
import sys
import time
import plotly
from makefilestats import add_paths, summarize_rev_data, gen_data_for_rev
from pushes import get_pushes
import plotly.graph_objs as go
def read_pushes_data(topsrcdir, csv_file, plot_dir):
    """Incrementally compute Makefile.in counts per push and plot them.

    Resumes from csv_file if it exists (one row per push: ISO date, push id,
    Makefile.in count, non-empty line count), appends rows for any pushes
    newer than the last recorded one, then regenerates
    plot_dir/makefiles_count.html with two stacked plotly subplots.
    """
    add_paths(topsrcdir)
    pushes = list(get_pushes(topsrcdir, 'central'))
    push_ids = [p[0] for p in pushes]
    if os.path.isfile(csv_file):
        # Resume: parse existing rows, skipping the header via islice.
        with open(csv_file, 'rb') as csv_fd:
            data = [[datetime.datetime.strptime(d, '%Y-%m-%dT%H:%M:%S'), int(p), int(f), int(l)]
                    for (d, p, f, l) in itertools.islice(csv.reader(csv_fd), 1, None)]
        last_known_push = data[-1][1]
        blank_slate = False
    else:
        # Push 24373 added moz.build support
        last_known_push = 24372
        data = []
        blank_slate = True
    # Index of the first push not yet recorded (push_ids is ascending).
    index = bisect.bisect(push_ids, last_known_push)
    if index < len(push_ids):
        with open(csv_file, 'ab') as f:
            w = csv.writer(f)
            if blank_slate:
                w.writerow(['Date', 'Push ID', 'Makefiles', 'Total Lines'])
            # Compute stats for each new push and record them both in-memory
            # (for plotting) and in the CSV (for next time).
            for push_id, node, timestamp in pushes[index:]:
                date = datetime.datetime.utcfromtimestamp(timestamp)
                rd = summarize_rev_data(gen_data_for_rev(topsrcdir, node))
                data.append([date, push_id, rd.files, rd.nonempty_lines])
                w.writerow([date.isoformat(), push_id, rd.files, rd.nonempty_lines])
    fig = plotly.tools.make_subplots(rows=2,
                                     shared_xaxes=True,
                                     print_grid=False,
                                     subplot_titles=(
                                         'Number of Makefile.in files',
                                         'Total lines of non-blank-non-comment Makefile data'
                                     ))
    fig['layout']['xaxis1'].update({
        'type': 'date',
        # Default to showing just this year.
        'range': [
            calendar.timegm(datetime.date(2016, 1, 1).timetuple()) * 1000,
            time.time() * 1000
        ],
    })
    # Adjust the y axes to be sensible for this time slice.
    fig['layout']['yaxis1'].update({
        'range': [0, 200],
    })
    fig['layout']['yaxis2'].update({
        'range': [0, 5000],
    })
    fig['data'] = [
        go.Scatter(
            x=[d[0] for d in data],
            y=[d[2] for d in data],
            name='Makefiles',
        ),
        go.Scatter(
            x=[d[0] for d in data],
            y=[d[3] for d in data],
            yaxis='y2',
            name='Lines',
        ),
    ]
    # Render the figure as an embeddable <div> rather than a standalone page.
    div = plotly.offline.plot(
        fig,
        output_type='div',
        auto_open=False,
        show_link=False,
    )
    # {{ }} are literal braces for str.format; the lone {} receives the div.
    with open(os.path.join(plot_dir, 'makefiles_count.html'), 'wb') as f:
        f.write('''<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>mozilla-central Makefile.in count</title>
<style>
html, body {{
width: 100%;
height: 100%;
margin: 0px;
padding: 0px;
}}
body > div {{
width: 100%;
height: 100%;
}}
</style>
</head>
<body>
{}
<script>function scale() {{ document.querySelector('[data-title="Autoscale"]').click(); }}</script>
<button style="position: absolute; top: 10px; left: 10px;" onclick="scale()">Show entire range
</button>
</body>
</html>
'''.format(div))
# CLI usage: makefilelinescsv.py <topsrcdir> <csv_file> <plot_dir>
if __name__ == '__main__':
    read_pushes_data(sys.argv[1], sys.argv[2], sys.argv[3])
#!/usr/bin/env python
from __future__ import print_function
import argparse
import datetime
import itertools
import json
import operator
import os
import requests
import sys
from collections import defaultdict, namedtuple
from urlparse import urljoin
# Directories of vendored/third-party code whose Makefile.in files are not
# candidates for moz.build conversion.
IGNORE_DIRS = (
    'nsprpub',
    'security/nss',
    'intl/icu',
    'memory/jemalloc/src',
    'js/src/ctypes/libffi',
    'ipc/chromium/src/third_party/libevent',
    'python/mozbuild/mozbuild/test',
    'extensions/spellcheck/hunspell/tests',
    'third_party/',
)
# Individual vendored Makefile.in files outside the directories above.
IGNORE_MAKEFILES = (
    'toolkit/crashreporter/google-breakpad/Makefile.in',
    'toolkit/crashreporter/google-breakpad/src/third_party/glog/Makefile.in',
)
def ignore_makefile_in(path):
    """Return True if `path` is a Makefile.in we deliberately don't track."""
    # str.startswith accepts a tuple of prefixes, replacing the previous
    # any(path.startswith(d) for d in ...) generator.
    return path.startswith(IGNORE_DIRS) or path in IGNORE_MAKEFILES
def makefile_ins(topsrcdir, rev):
    """Yield (path, contents) for each tracked Makefile.in at revision `rev`.

    Skips the vendored files/directories covered by ignore_makefile_in().
    """
    # Imported lazily: mozpack lives in-tree and is only importable after
    # add_paths() has run.
    import mozpack.hg as mozpackhg
    finder = mozpackhg.MercurialNativeRevisionFinder(topsrcdir, rev)
    for path, contents in finder.find('**/Makefile.in'):
        if not ignore_makefile_in(path):
            yield path, contents
def iterstatements(statements):
    """Flatten a pymake statement tree into (condition-stack, statement) pairs.

    Yields tuples of (list of enclosing conditions, statement). Rules and
    static pattern rules are yielded as a (rule, [commands]) pair, with the
    commands that immediately follow them consumed from the stream.
    Note: uses the Python 2 iterator protocol (.next()).
    """
    import pymake.parserdata
    cond_stack = []
    # Minimal peekable iterator wrapper so we can look ahead for the
    # commands that belong to a rule without consuming other statements.
    class peekiter:
        def __init__(self, i):
            self.peeked = []
            self.i = i
        def __iter__(self):
            return self
        def next(self):
            # Serve a previously peeked item first, if any.
            if self.peeked:
                return self.peeked.pop()
            return self.i.next()
        def peek(self, default=None):
            # Look at the next item without consuming it; `default` when
            # the underlying iterator is exhausted.
            if not self.peeked:
                try:
                    self.peeked.append(self.i.next())
                except StopIteration:
                    return default
            return self.peeked[0]
        def takewhile(self, predicate):
            # Like itertools.takewhile, but leaves the first non-matching
            # item in the stream instead of discarding it.
            while predicate(self.peek(None)):
                yield self.next()
    def flatten(stmts):
        i = peekiter(iter(stmts))
        for s in i:
            if isinstance(s, pymake.parserdata.ConditionBlock):
                # Yield each branch condition, then recurse into its body
                # with the condition pushed onto the stack.
                for cond, sl in s:
                    yield list(cond_stack), cond
                    cond_stack.append(cond)
                    for t in flatten(sl):
                        yield t
                    cond_stack.pop()
            elif isinstance(s, pymake.parserdata.Rule) or isinstance(s, pymake.parserdata.StaticPatternRule):
                # Pair the rule with the run of Command statements following it.
                yield list(cond_stack), (s, list(i.takewhile(lambda t: isinstance(t, pymake.parserdata.Command))))
            else:
                # Need to make a copy else we have a reference.
                yield list(cond_stack), s
    return flatten(statements)
def getexp(exp):
    """Return a source-string form of a pymake expansion object."""
    # Simple expansions carry their literal text in .s; anything else must
    # be re-serialized.
    return exp.s if exp.simple else exp.to_source()
def getline(s):
    """Best-effort source line number for a pymake statement (0 if unknown).

    Each pymake statement type stores its source location on a different
    expansion attribute, hence the type switch. Raises for statement types
    not handled here so new pymake types surface loudly.
    """
    import pymake.parserdata
    if isinstance(s, pymake.parserdata.Rule) or isinstance(s, pymake.parserdata.StaticPatternRule):
        return getattr(s.targetexp.loc, 'line', 0)
    if isinstance(s, pymake.parserdata.SetVariable):
        return getattr(s.vnameexp.loc, 'line', 0)
    if isinstance(s, pymake.parserdata.EqCondition):
        return getattr(s.exp1.loc, 'line', 0)
    if isinstance(s, pymake.parserdata.IfdefCondition):
        return getattr(s.exp.loc, 'line', 0)
    if isinstance(s, pymake.parserdata.ConditionBlock):
        return getattr(s.loc, 'line', 0)
    if isinstance(s, pymake.parserdata.Include) or isinstance(s, pymake.parserdata.VPathDirective) or isinstance(s, pymake.parserdata.ExportDirective) or isinstance(s, pymake.parserdata.UnexportDirective) or isinstance(s, pymake.parserdata.EmptyDirective) or isinstance(s, pymake.parserdata.Command):
        return getattr(s.exp.loc, 'line', 0)
    if isinstance(s, pymake.parserdata.ElseCondition):
        # else clauses carry no expansion with a location.
        return 0
    raise Exception('Unhandled statement type: %s' % s.__class__.__name__)
def gen_data_for_rev(topsrcdir, rev):
    """Parse every tracked Makefile.in at `rev` and collect per-file stats.

    Returns a list of dicts with keys: path, lines, nonempty_lines,
    variables (name -> [line numbers]), rules (target -> [line numbers]),
    commands (count), other (statement class name -> [line numbers]),
    sorted by nonempty_lines descending.
    """
    import pymake.parser
    import pymake.parserdata
    data = []
    for path, obj in makefile_ins(topsrcdir, rev):
        lines = obj.data.splitlines()
        thisdata = {
            'path': path,
            'lines': len(lines),
            # Count non-empty, non-comment lines.
            # NOTE(review): len(filter(...)) requires Python 2 — on Python 3
            # filter() returns an iterator with no len().
            'nonempty_lines': len(filter(lambda l: l and not l.startswith('#'), lines)),
            'variables': defaultdict(list),
            'rules': defaultdict(list),
            'commands': 0,
            'other': defaultdict(list),
        }
        data.append(thisdata)
        try:
            statements = \
                list(iterstatements(pymake.parser.parsestring(obj.data, path)))
        except Exception as e:
            # Parse failures keep the line counts gathered above but skip
            # the statement-level breakdown.
            print(e, file=sys.stderr)
            continue
        for conds, s in statements:
            # iterstatements yields rules as (rule, [commands]) tuples.
            if isinstance(s, tuple) and (isinstance(s[0], pymake.parserdata.Rule) or isinstance(s[0], pymake.parserdata.StaticPatternRule)):
                # Count commands
                thisdata['commands'] += len(s[1])
                thisdata['rules'][getexp(s[0].targetexp)].append(getline(s[0]))
            elif isinstance(s, pymake.parserdata.SetVariable):
                thisdata['variables'][getexp(s.vnameexp)].append(getline(s))
            else:
                thisdata['other'][s.__class__.__name__].append(getline(s))
    data.sort(key=operator.itemgetter('nonempty_lines'), reverse=True)
    return data
# Aggregate statistics for one revision's worth of Makefile.in data;
# produced by summarize_rev_data().
revdata = namedtuple('revdata', [
    'files',           # number of Makefile.in files
    'lines',           # total line count across all files
    'nonempty_lines',  # non-empty, non-comment lines
    'variables',       # unique variable names assigned
    'rules',           # total number of rules
    'targets',         # unique rule targets
    'commands',        # total commands across all rules
    'other',           # count of other Makefile statements
])
def summarize_rev_data(data):
    """Aggregate the per-file stats dicts from gen_data_for_rev into a revdata.

    data: list of dicts with lines/nonempty_lines/variables/rules/commands/
    other keys; returns one revdata namedtuple of totals.
    """
    return revdata(
        len(data),
        sum(v['lines'] for v in data),
        sum(v.get('nonempty_lines', 0) for v in data),
        # Unique variable names across files (iterating a dict yields keys).
        len(set(itertools.chain.from_iterable(v['variables'] for v in data))),
        sum(len(v['rules']) for v in data),
        # .keys()/.values() work on both Python 2 and 3, unlike the
        # Python-2-only iterkeys()/itervalues() used previously.
        len(set(itertools.chain.from_iterable(v['rules'].keys() for v in data))),
        sum(v['commands'] for v in data),
        sum(sum(len(o) for o in v['other'].values()) for v in data),
    )
def print_rev_data(data):
    """Print a human-readable summary of per-file Makefile stats to stdout."""
    rd = summarize_rev_data(data)
    # One line per file: right-aligned line count, then path.
    print('\n'.join('%4d %s' % (v.get('nonempty_lines', 0), v['path']) for v in data))
    print()
    print('Total: %d files, %d non-empty lines' % (rd.files, rd.nonempty_lines))
    print('%d unique Makefile variables assigned to' % rd.variables)
    print('%d rules, %d unique targets, %d total commands' % (rd.rules, rd.targets, rd.commands))
    print('%d other Makefile statements:' % rd.other)
    # Per-statement-type counts, sorted by type name. .keys() instead of the
    # Python-2-only iterkeys() so this also runs on Python 3.
    for o in sorted(set(itertools.chain.from_iterable(v['other'].keys() for v in data))):
        print('\t%4d %s' % (sum(len(v['other'].get(o, [])) for v in data), o))
def write_html_summary(data, file, rev):
    """Write the per-file Makefile.in stats as an HTML table to `file`.

    The Bugs column is left empty here; bugs.js (loaded by the page) fills
    it in client-side from etherpad/Bugzilla data.
    `rev` is the mozilla-central revision the data was generated from.
    NOTE(review): writes str to a 'wb' file — Python-2 specific.
    """
    rd = summarize_rev_data(data)
    base_url = 'https://hg.mozilla.org/mozilla-central/file/' + rev + '/'
    date = datetime.datetime.utcnow().isoformat(' ')
    with open(file, 'wb') as f:
        # {{ }} are literal braces for str.format (the CSS rules below).
        f.write('''<!DOCTYPE html>
<head>
<meta charset="utf-8">
<title>mozilla-central Makefile data for {rev}</title>
<style>
.nobug {{
background-color: lightblue;
}}
</style>
<script src="bugs.js">
</script>
</head>
<body>
<h1>mozilla-central Makefile.in stats</h1>
<h2>Generated on {date}</h2>
<table>
<thead>
<tr><th title="Non-empty, non-comment lines in each Makefile.in">Lines</th><th>File</th><th title="Bugs on file to remove parts of this makefile">Bugs</th><th>Tags</th></tr>
</thead>
<tbody>
'''.format(rev=rev, date=date))
        # One row per Makefile.in, linking to the file on hg.mozilla.org.
        for m in data:
            url = urljoin(base_url, m['path'])
            f.write('<tr><td>{lines}</td><td><a href="{url}">{path}</a></td><td></td><td></td></tr>\n'.format(
                lines=m['nonempty_lines'],
                path=m['path'],
                url=url,
            ))
        f.write('''</tbody>
</table>
<p>Number of Makefile.in files: {files}
<p>Makefile.in files without bugs filed: <span id="no_bug_files"></span> (<span id="no_bug_percent"></span>%)
<p>Total non-empty lines in Makefile.in files: {lines}
<p>Data from mozilla-central revision <a href="https://hg.mozilla.org/mozilla-central/rev/{rev}">{rev}</a> generated on {date}.
<p><a href="makefiles_count.html">Makefile count+lines graph</a>
<p><a href="https://gist.github.com/luser/8a99b1accd2b96a37f7e">Generated from these scripts.</a>
<p>Bug data can be updated in <a href="https://github.com/luser/mozilla-makefile-stats/blob/master/bugs.json">this file on GitHub</a>.
</body>
</html>'''.format(files=rd.files, lines=rd.nonempty_lines, rev=rev, date=date))
def add_paths(topsrcdir):
    """Make the in-tree copies of pymake, mozbuild, jsmin and requests importable."""
    for subdir in ('build/pymake',
                   'python/mozbuild',
                   'python/jsmin',
                   'python/requests'):
        sys.path.append(os.path.join(topsrcdir, subdir))
def main():
    """CLI entry point: compute stats for a revision, optionally writing HTML.

    Dumps the raw data to /tmp/mozilla-central-<rev>.json, prints a text
    summary, and writes an HTML summary if --html was given.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--html', type=str,
                        help='Generate html summary in FILE')
    parser.add_argument('rev', help='Generate data for REV')
    parser.add_argument('srcdir', help='Source directory')
    args = parser.parse_args()
    topsrcdir = os.path.abspath(args.srcdir)
    add_paths(topsrcdir)
    if args.rev:
        data = gen_data_for_rev(topsrcdir, args.rev)
        # Text mode + context manager: the previous open(..., 'wb') leaked
        # the handle, and json.dump writes str (not bytes) on Python 3.
        with open('/tmp/mozilla-central-%s.json' % args.rev, 'w') as f:
            json.dump({'rev': args.rev, 'makefiles': data}, f)
        print_rev_data(data)
        if args.html:
            write_html_summary(data, args.html, args.rev)
# CLI entry point; see main() for arguments.
if __name__ == '__main__':
    main()
#!/usr/bin/env python
from __future__ import unicode_literals, print_function
import binascii
import os
import sqlite3
import sys
def get_tree_id(db, tree):
    """Return the numeric id of `tree` from the trees table, or None if absent."""
    row = db.execute('SELECT id FROM trees WHERE name=? LIMIT 1',
                     [tree]).fetchone()
    # fetchone() yields None when no row matched.
    return row[0] if row else None
def get_pushes(repo, tree):
    """Yield (push_id, head_changeset_hex, push_time) for each push to `tree`.

    Reads the changetracker.db sqlite database maintained inside the
    repository's .hg directory (by a local hg extension — not stock hg).
    Raises if `tree` is not present in the database.
    """
    db = sqlite3.connect(os.path.join(repo, '.hg', 'changetracker.db'))
    # Read-mostly usage: trade durability for speed.
    db.execute('PRAGMA SYNCHRONOUS=OFF')
    db.execute('PRAGMA JOURNAL_MODE=WAL')
    tree_id = get_tree_id(db, tree)
    if tree_id is None:
        raise Exception('Failed to find tree id!')
    # One row per distinct head changeset, in push order.
    for push_id, head, push_time in db.execute(
            'SELECT pushes.push_id, changeset_pushes.head_changeset, pushes.time '
            'FROM pushes, changeset_pushes '
            'WHERE pushes.tree_id = ? AND '
            'changeset_pushes.tree_id = ? AND '
            'pushes.push_id = changeset_pushes.push_id '
            'GROUP BY changeset_pushes.head_changeset '
            'ORDER BY pushes.push_id ASC', [tree_id, tree_id]):
        # NOTE(review): hexlify(str(head)) assumes Python 2 byte strings;
        # on Python 3 `head` would need explicit bytes handling — confirm.
        yield push_id, binascii.hexlify(str(head)), push_time
def main():
    """Dump every push for the given repo/tree as comma-separated lines."""
    # Usage: pushes.py <repo-path> <tree-name>
    for entry in get_pushes(sys.argv[1], sys.argv[2]):
        print('{},{},{}'.format(*entry))
# CLI entry point; see main() for arguments.
if __name__ == '__main__':
    main()
#!/bin/bash
# Cron driver: regenerate the Makefile stats for mozilla-central and publish
# the results to the mozilla-makefile-stats git repository.
set -e

# Locate the keyring ssh-agent socket so git push over ssh works from a
# non-interactive session. (Glob must stay unquoted to expand.)
export SSH_AUTH_SOCK=$(find /run/user/$(id -u)/keyring*/ -perm 0775 -type s -user "$(id -un)" -group "$(id -un)" -name '*ssh' | head -n 1)

# Quote all path expansions so the script survives paths with spaces.
d=$(dirname "$0")
out=/build/mozilla-makefile-stats
export HG=/home/luser/.pyenv/shims/hg
#export HGRCPATH=

# Capture all output of the run in a dated log file.
{
  (cd "${out}"; git pull --quiet origin master)
  "${HG}" -R /build/mozilla-central pull --quiet central
  "${HG}" -R /build/mozilla-central pushlogsync --quiet
  rev=$("${HG}" -R /build/mozilla-central log -r central --template='{node}')
  "${d}/venv/bin/python" "${d}/makefilestats.py" --html "${out}/index.html" "${rev}" /build/mozilla-central/ >/dev/null
  "${d}/venv/bin/python" "${d}/makefilelinescsv.py" /build/mozilla-central/ "${out}/makefiles-lines.csv" "${out}" >/dev/null
  cd "${out}"
  git commit -am "Data for mozilla-central revision ${rev}"
  git push origin master
} > "/tmp/makefilestats.$(date -I).log" 2>&1
#!/usr/bin/env python
from __future__ import print_function
import itertools
import json
import operator
import sys
from collections import defaultdict
from makefilestats import print_rev_data
# Names of report functions in this module that may be invoked from the
# command line; main() dispatches to them via globals().
commands = (
    'summary',
    'variables',
    'rules',
)
def summary(data):
    # Thin wrapper so 'summary' can be dispatched by name from main().
    print_rev_data(data)
def variables(data):
    """Print each variable name with how many makefiles assign it, most common first."""
    counts = defaultdict(int)
    # Iterating each per-file 'variables' dict yields its variable names.
    for name in itertools.chain.from_iterable(m['variables'] for m in data):
        counts[name] += 1
    ordered = sorted(counts.items(), reverse=True, key=operator.itemgetter(1))
    print('\n'.join('{: 3d} {}'.format(n, name) for (name, n) in ordered))
def rules(data):
    """Print each rule target with how many makefiles define it, most common first."""
    counts = defaultdict(int)
    # Iterating each per-file 'rules' dict yields its target names.
    for target in itertools.chain.from_iterable(m['rules'] for m in data):
        counts[target] += 1
    ordered = sorted(counts.items(), reverse=True, key=operator.itemgetter(1))
    print('\n'.join('{: 3d} {}'.format(n, target) for (target, n) in ordered))
def main():
    """Load a makefile-stats JSON dump (argv[1]) and run the report named by argv[2]."""
    # Context manager: the previous bare open() leaked the file handle.
    with open(sys.argv[1]) as f:
        data = json.load(f)
    if sys.argv[2] not in commands:
        print('Use one of: %s' % ', '.join(commands))
        # Bug fix: previously this fell through and raised a KeyError in
        # globals() below instead of stopping after the usage message.
        return
    m = data['makefiles']
    globals()[sys.argv[2]](m)
# CLI usage: report.py <data.json> <summary|variables|rules>
if __name__ == '__main__':
    main()
/*!
 * tablesort v4.0.0 (2015-12-17)
 * http://tristen.ca/tablesort/demo/
 * Copyright (c) 2015 ; Licensed MIT
 *
 * Vendored, minified third-party library — do not hand-edit. To upgrade,
 * replace this file with a fresh minified build from upstream.
 */!function(){function a(b,c){if(!(this instanceof a))return new a(b,c);if(!b||"TABLE"!==b.tagName)throw new Error("Element must be a table");this.init(b,c||{})}var b=[],c=function(a){var b;return window.CustomEvent&&"function"==typeof window.CustomEvent?b=new CustomEvent(a):(b=document.createEvent("CustomEvent"),b.initCustomEvent(a,!1,!1,void 0)),b},d=function(a){return a.getAttribute("data-sort")||a.textContent||a.innerText||""},e=function(a,b){return a=a.toLowerCase(),b=b.toLowerCase(),a===b?0:b>a?1:-1},f=function(a,b){return function(c,d){var e=a(c.td,d.td);return 0===e?b?d.index-c.index:c.index-d.index:e}};a.extend=function(a,c,d){if("function"!=typeof c||"function"!=typeof d)throw new Error("Pattern and sort must be a function");b.push({name:a,pattern:c,sort:d})},a.prototype={init:function(a,b){var c,d,e,f,g=this;if(g.table=a,g.thead=!1,g.options=b,a.rows&&a.rows.length>0&&(a.tHead&&a.tHead.rows.length>0?(c=a.tHead.rows[a.tHead.rows.length-1],g.thead=!0):c=a.rows[0]),c){var h=function(){g.current&&g.current!==this&&(g.current.classList.remove("sort-up"),g.current.classList.remove("sort-down")),g.current=this,g.sortTable(this)};for(e=0;e<c.cells.length;e++)f=c.cells[e],f.classList.contains("no-sort")||(f.classList.add("sort-header"),f.tabindex=0,f.addEventListener("click",h,!1),f.classList.contains("sort-default")&&(d=f));d&&(g.current=d,g.sortTable(d))}},sortTable:function(a,g){var 
h,i=this,j=a.cellIndex,k=e,l="",m=[],n=i.thead?0:1,o=a.getAttribute("data-sort-method"),p=a.getAttribute("data-sort-order");if(i.table.dispatchEvent(c("beforeSort")),g?h=a.classList.contains("sort-up")?"sort-up":"sort-down":(h=a.classList.contains("sort-up")?"sort-down":a.classList.contains("sort-down")?"sort-up":"asc"===p?"sort-down":"desc"===p?"sort-up":i.options.descending?"sort-up":"sort-down",a.classList.remove("sort-down"===h?"sort-up":"sort-down"),a.classList.add(h)),!(i.table.rows.length<2)){if(!o){for(;m.length<3&&n<i.table.tBodies[0].rows.length;)l=d(i.table.tBodies[0].rows[n].cells[j]),l=l.trim(),l.length>0&&m.push(l),n++;if(!m)return}for(n=0;n<b.length;n++)if(l=b[n],o){if(l.name===o){k=l.sort;break}}else if(m.every(l.pattern)){k=l.sort;break}i.col=j;var q,r=[],s={},t=0,u=0;for(n=0;n<i.table.tBodies.length;n++)for(q=0;q<i.table.tBodies[n].rows.length;q++)l=i.table.tBodies[n].rows[q],l.classList.contains("no-sort")?s[t]=l:r.push({tr:l,td:d(l.cells[i.col]),index:t}),t++;for("sort-down"===h?(r.sort(f(k,!0)),r.reverse()):r.sort(f(k,!1)),n=0;t>n;n++)s[n]?(l=s[n],u++):l=r[n-u].tr,i.table.tBodies[0].appendChild(l);i.table.dispatchEvent(c("afterSort"))}},refresh:function(){void 0!==this.current&&this.sortTable(this.current,!0)}},"undefined"!=typeof module&&module.exports?module.exports=a:window.Tablesort=a}();
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment