Skip to content

Instantly share code, notes, and snippets.

@mightyCelu
Last active May 24, 2018 12:13
Show Gist options
  • Save mightyCelu/ca1c7f4ddba028404b5b8f01a16492ac to your computer and use it in GitHub Desktop.
Save mightyCelu/ca1c7f4ddba028404b5b8f01a16492ac to your computer and use it in GitHub Desktop.
Script for merging OMNeT++ SQLite result files
#!/usr/bin/python3
import logging
import os
import os.path as osp
import sys
import sqlite3
# Combined-database schema: mirrors the OMNeT++ SQLite result schema, but every
# table gains a leading dbId column (FK into `db`) so rows from several result
# files can coexist; composite keys are (originalId, dbId).
# NOTE: this is runtime SQL executed via executescript(); keep content as-is.
schema = '''
CREATE TABLE db (
dbId INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
dbName TEXT UNIQUE NOT NULL);
CREATE TABLE run (
dbId INTEGER NOT NULL,
runId INTEGER NOT NULL,
runName TEXT NOT NULL,
simtimeExp INTEGER NOT NULL,
PRIMARY KEY (runId, dbId) );
CREATE TABLE runattr (
dbId INTEGER NOT NULL,
runId INTEGER NOT NULL,
attrName TEXT NOT NULL,
attrValue TEXT NOT NULL,
FOREIGN KEY (runId, dbId) REFERENCES run(runId, dbId) ON DELETE CASCADE ON UPDATE CASCADE );
CREATE TABLE runparam (
dbId INTEGER NOT NULL,
runId INTEGER NOT NULL,
parName TEXT NOT NULL,
parValue TEXT NOT NULL,
FOREIGN KEY (runId, dbId) REFERENCES run(runId, dbId) ON DELETE CASCADE ON UPDATE CASCADE );
CREATE TABLE scalar (
dbId INTEGER NOT NULL,
scalarId INTEGER NOT NULL,
runId INTEGER NOT NULL,
moduleName TEXT NOT NULL,
scalarName TEXT NOT NULL,
scalarValue REAL,
PRIMARY KEY (scalarId, dbId),
FOREIGN KEY (runId, dbId) REFERENCES run(runId, dbId) ON DELETE CASCADE ON UPDATE CASCADE );
CREATE TABLE scalarattr (
dbId INTEGER NOT NULL,
scalarId INTEGER NOT NULL,
attrName TEXT NOT NULL,
attrValue TEXT NOT NULL,
FOREIGN KEY (scalarId, dbId) REFERENCES scalar(scalarId, dbId) ON DELETE CASCADE ON UPDATE CASCADE );
CREATE TABLE statistic (
dbId INTEGER NOT NULL,
statId INTEGER NOT NULL,
runId INTEGER NOT NULL,
moduleName TEXT NOT NULL,
statName TEXT NOT NULL,
statCount INTEGER NOT NULL,
statMean REAL,
statStddev REAL,
statSum REAL,
statSqrsum REAL,
statMin REAL,
statMax REAL,
statWeights REAL,
statWeightedSum REAL,
statSqrSumWeights REAL,
statWeightedSqrSum REAL,
PRIMARY KEY (statId, dbId),
FOREIGN KEY (runId, dbId) REFERENCES run(runId, dbId) ON DELETE CASCADE ON UPDATE CASCADE );
CREATE TABLE statisticattr (
dbId INTEGER NOT NULL,
statId INTEGER NOT NULL,
attrName TEXT NOT NULL,
attrValue TEXT NOT NULL,
FOREIGN KEY (statId, dbId) REFERENCES statistic(statId, dbId) ON DELETE CASCADE ON UPDATE CASCADE );
CREATE TABLE histbin (
dbId INTEGER NOT NULL,
statId INTEGER NOT NULL,
baseValue NUMERIC NOT NULL,
cellValue INTEGER NOT NULL,
FOREIGN KEY (statId, dbId) REFERENCES statistic(statId, dbId) ON DELETE CASCADE ON UPDATE CASCADE );
CREATE TABLE vector (
dbId INTEGER NOT NULL,
vectorId INTEGER NOT NULL,
runId INTEGER NOT NULL,
moduleName TEXT NOT NULL,
vectorName TEXT NOT NULL,
vectorCount INTEGER,
vectorMin REAL,
vectorMax REAL,
vectorSum REAL,
vectorSumSqr REAL,
startEventNum INTEGER,
endEventNum INTEGER,
startSimtimeRaw INTEGER,
endSimtimeRaw INTEGER,
PRIMARY KEY (vectorId, dbId),
FOREIGN KEY (runId, dbId) REFERENCES run(runId, dbId) ON DELETE CASCADE ON UPDATE CASCADE );
CREATE TABLE vectorattr (
dbId INTEGER NOT NULL,
vectorId INTEGER NOT NULL,
attrName TEXT NOT NULL,
attrValue TEXT NOT NULL,
FOREIGN KEY (vectorId, dbId) REFERENCES vector(vectorId, dbId) ON DELETE CASCADE ON UPDATE CASCADE );
CREATE TABLE vectordata (
dbId INTEGER NOT NULL,
vectorId INTEGER NOT NULL,
eventNumber INTEGER NOT NULL,
simtimeRaw INTEGER NOT NULL,
value NUMERIC NOT NULL,
FOREIGN KEY (vectorId, dbId) REFERENCES vector(vectorId, dbId) ON DELETE CASCADE ON UPDATE CASCADE );
'''
def merge_dbs(c, result_dir):
    """Merge all OMNeT++ SQLite result files in *result_dir* into *c*.

    Creates the combined schema in *c*, registers each distinct input
    database name in the ``db`` table, then copies every table of every
    input file, tagging each copied row with its originating ``dbId``.

    :param c: sqlite3 connection to the (fresh) output database
    :param result_dir: directory containing ``.sca``/``.vec`` result files
    """
    exts = ('.sca', '.vec')
    dbs = {osp.join(result_dir, db) for db in os.listdir(result_dir)
           if osp.splitext(db)[1] in exts}

    def db_id(path):
        # The .sca and .vec files of one run share a basename and therefore
        # one logical database name.
        return osp.splitext(osp.basename(path))[0]

    c.executescript(schema)
    c.executemany('INSERT INTO db(dbName) VALUES (?)',
                  set((db_id(db),) for db in dbs))
    c.commit()

    ignored_tables = ('sqlite_sequence', 'db')
    table_stmt = ("SELECT name FROM sqlite_master WHERE type == 'table' AND name NOT IN ({})"
                  .format(', '.join(('?',) * len(ignored_tables))))
    tables = [t[0] for t in c.execute(table_stmt, ignored_tables).fetchall()]

    for db in dbs:
        logging.info('Adding {}'.format(db))
        # Bind the file name as a parameter: the previous
        # "ATTACH '{}'".format(db) broke on paths containing a quote.
        c.execute('ATTACH ? AS new', (db,))
        dbId = c.execute('SELECT dbId FROM db WHERE dbName = ?',
                         (db_id(db),)).fetchone()[0]
        shared_tables = ('run', 'runattr', 'runparam')
        for table in tables:
            if table in shared_tables:
                # The .sca and .vec files of the same run duplicate the run
                # metadata; skip tables whose rows were already copied.
                new_entries_stmt = 'SELECT ? as dbId, * FROM new.{table} EXCEPT SELECT * FROM {table}'
                already_inserted = not c.execute(new_entries_stmt.format(table=table), (dbId,)).fetchone()
                if already_inserted:
                    continue
            # Table names come from sqlite_master of our own schema, so
            # formatting them into the statement is safe here.
            c.execute('INSERT INTO {table} SELECT ? AS dbId, * FROM new.{table}'.format(table=table), (dbId,))
        # Commit before DETACH: detaching fails inside an open transaction.
        c.commit()
        c.execute('DETACH new')
def post_process(c):
    """Strip double quotation marks from run attribute and parameter values.

    OMNeT++ stores some values wrapped in quotes; remove them so downstream
    tooling sees the bare value.

    :param c: sqlite3 connection to the merged database
    """
    # Same cleanup applied to both metadata tables.
    for table, column in (('runattr', 'attrValue'), ('runparam', 'parValue')):
        c.execute("""UPDATE {0} SET {1} = REPLACE({1}, '"', '')""".format(table, column))
def main():
    """Command-line entry point: merge result files into one database.

    Parses arguments, resolves the output path (deriving a name from the
    longest common prefix of the input files when a directory is given),
    then runs the merge and optional post-processing.
    """
    import argparse
    import itertools

    def longest_common_prefix(strs):
        # Take characters while every string agrees on them.
        return ''.join([t[0] for t in itertools.takewhile(lambda xs: len(set(xs)) == 1, zip(*strs))])

    def strip_special_suffix(s):
        # Cut trailing non-alphanumeric characters (e.g. '-', '_', '.').
        # default=-1 keeps max() from raising on a string with no
        # alphanumeric character at all (result collapses to '').
        return s[0:max((i for i, c in enumerate(s) if c.isalnum()), default=-1) + 1] if s else ''

    # Pass the text as `description`: the original passed it positionally,
    # which set `prog` and produced a garbled usage line.
    parser = argparse.ArgumentParser(description='Merge OMNeT++ SQLite result files')
    parser.add_argument('-f', '--force', action='store_true', help="Overwrite existing output file")
    parser.add_argument('-p', '--post-process', action='store_true',
                        help="Apply postprocessing to the database. This removes quotation marks (\") from run "
                             "attributes and parameters")
    parser.add_argument('-o', '--output',
                        help="Path to the combined database. If a directory is given, the longest prefix of "
                             "the input databases will be used instead. By default the output is placed into "
                             "the result directory")
    parser.add_argument('result_dir', help="Directory, containing OMNeT++ SQLite result files")
    args = parser.parse_args()

    if not args.output:
        args.output = args.result_dir

    if osp.isdir(args.output):
        output = osp.join(args.output, '{}.db'.format(
            strip_special_suffix(longest_common_prefix(os.listdir(args.result_dir))) or 'out'))
    elif osp.isfile(args.output) or (not osp.exists(args.output) and osp.isdir(osp.dirname(args.output))):
        output = args.output
    else:
        # assert(False) would be stripped under `python -O`; report the
        # invalid path through argparse instead.
        parser.error('Invalid output path: {}'.format(args.output))

    if osp.exists(output):
        if args.force:
            logging.info('Overwriting existing file')
            os.remove(output)
        else:
            # parser.error() raises SystemExit itself; no `raise` needed.
            parser.error('Output file exists')

    # The connection context manager commits/rolls back but does not close.
    with sqlite3.connect(output) as c:
        merge_dbs(c, args.result_dir)
        if args.post_process:
            post_process(c)
    c.close()
# Script entry point: exit status is main()'s return value (None maps to 0).
if __name__ == '__main__':
    sys.exit(main())
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment