Skip to content

Instantly share code, notes, and snippets.

@wookiefriseur
Last active August 17, 2023 15:31
Show Gist options
  • Save wookiefriseur/d8661bf7015289a982f408612895bdda to your computer and use it in GitHub Desktop.
Convert ESOProfiler.lua to PerfettoTracelog using streams
// SPDX-FileCopyrightText: 2022 sirinsidiator <insidiator@cmos.at>
//
// SPDX-License-Identifier: GPL-3.0-or-later
import * as fs from 'fs';
import * as path from 'path';
class LuaToJsonConverter {
  /**
   * Converts the body of a Lua SavedVariables file into a JSON string.
   *
   * Works line-by-line: root-level assignments (`Name =` / `Name = {`)
   * become JSON keys, `["key"] = ...` and `[1] = ...` entries become quoted
   * keys, commas are inserted between root entries and stripped from the
   * last entry of each table. The whole result is wrapped in one object.
   *
   * @param content raw text of the Lua SavedVariables file
   * @returns a JSON document string
   */
  convertLuaToJSONString(content: string): string {
    const lines = content.split(/[\r\n]+/g);
    for (let i = 0; i < lines.length; ++i) {
      let line = lines[i];
      // Add a comma after the previous root level entry.
      // FIX: use the same pattern as the conversion below so a root entry
      // written as `Name = {` (brace on the same line) is also detected.
      if (i > 0 && /^(\w+)\s*=\s*(\{\s*)?$/.test(line)) {
        lines[i - 1] = lines[i - 1] + ',';
      }
      // Convert root level entries to proper JSON format.
      line = line.replace(/^(\w+)\s*=\s*(\{\s*)?$/, '"$1":$2');
      // Convert string keys to proper JSON format.
      // FIX: `(.*)` instead of `(.+)` also converts a key whose value
      // (usually the opening brace) starts on the following line.
      line = line.replace(/^(\s+)\["(.+)"\]\s*=(.*)$/, '$1"$2":$3');
      // Convert numeric keys to proper JSON format (JSON keys are strings).
      line = line.replace(/^(\s+)\[(\d+)\]\s*=(.*)$/, '$1"$2":$3');
      // Remove the comma after the last entry of a table.
      // FIX: only strip when the previous line actually ends with a comma,
      // so no unrelated character can be chopped off.
      if (i > 0 && /^\s*\}/.test(line) && lines[i - 1].endsWith(',')) {
        lines[i - 1] = lines[i - 1].slice(0, -1);
      }
      lines[i] = line;
    }
    return '{' + lines.join('\r\n') + '}';
  }
}
// Single process / single thread trace: every emitted event shares this pair.
const PID = 0;
const TID = 0;
// Shape of the Chrome Trace Event Format document that gets written out
// (openable in viewers such as https://ui.perfetto.dev).
interface ProfilerData {
traceEvents: any[]; // duration ('X'), counter ('C') and metadata ('M') events
stackFrames: any; // stack frame id -> { name, parent?, id, location }
otherData: any; // free-form key/value metadata copied from the profiler dump
}
/**
 * Converts the JSON form of an ESOProfiler export into a Chrome Trace Event
 * Format file and streams it to disk.
 */
class PerfettoConverter {
  // Events whose time span may still enclose upcoming events.
  eventStack: any[] = [];
  data: ProfilerData = {
    traceEvents: [],
    stackFrames: {},
    otherData: {},
  };
  names: any; // stack frame id -> display name
  categories: any; // stack frame id -> addon / category name
  closures: any; // record index -> [name, file, line]
  uptime?: number; // process uptime in seconds (derived from otherData.upTime µs)

  /**
   * Appends a counter ('C') event, e.g. an FPS, latency or memory sample.
   * @param name counter name shown in the viewer
   * @param unit unit label, rendered as the argument key "[unit]"
   * @param start sample timestamp
   * @param value sampled value
   */
  addCounter(name: string, unit: string, start: number, value: number) {
    let args: any = {};
    args[`[${unit}]`] = value;
    let event = {
      name: name,
      ph: 'C',
      cat: 'stats',
      ts: start,
      pid: PID,
      tid: TID,
      args: args,
    };
    this.data.traceEvents.push(event);
  }

  /**
   * Resolves names/locations/categories for stack frames from the closure
   * table, then back-fills name, category and args on every trace event.
   */
  fillInNames() {
    let data = this.data;
    Object.keys(data.stackFrames).forEach((stackId) => {
      let stackFrame = data.stackFrames[stackId];
      // FIX: guard against a missing closure record instead of crashing on
      // destructuring `undefined` when the export is incomplete.
      const closure = this.closures[stackFrame.name];
      if (!closure) {
        return;
      }
      let [name, file, line] = closure;
      this.names[stackId] = name;
      stackFrame['id'] = stackId;
      stackFrame['location'] = file + ':' + line;
      // Addon code paths look like "@user:/AddOns/<AddonName>/...".
      let matches = file.match(/@user:\/AddOns\/(.+?)\//);
      if (matches && matches.length > 1) {
        this.categories[stackId] = matches[1];
      } else {
        // renamed from a shadowing `matches` for clarity
        let svMatches = file.match(/@user:\/SavedVariables\/(.+?)/);
        if (svMatches && svMatches.length > 1) {
          this.categories[stackId] = 'SavedVariables';
        }
      }
    });
    data.traceEvents.forEach((event) => {
      if (!event.name) {
        event.name = this.names[event.sf];
        if (this.categories[event.sf]) {
          event.cat = this.categories[event.sf];
        }
      }
      if (!event.args) {
        event.args = this.data.stackFrames[event.sf];
      }
    });
  }

  /** Prepends a metadata ('M') event (process/thread names etc.). */
  addMetaData(name: string, args: any) {
    this.data.traceEvents.unshift({
      name: name,
      cat: '__metadata',
      ts: 0,
      ph: 'M',
      args: args,
      pid: PID,
      tid: TID,
    });
  }

  /** Returns the innermost still-open event, or null when the stack is empty. */
  getParentEvent(): any {
    if (this.eventStack.length > 0) {
      return this.eventStack[this.eventStack.length - 1];
    }
    return null;
  }

  /**
   * Drops every stacked event whose span has already ended at `currentTime`.
   * FIX: remove the inspected element with splice(i, 1) instead of pop() —
   * pop() always discards the top, which is the wrong element whenever an
   * earlier iteration kept one (the Lua twin already uses table.remove(i)).
   */
  cleanUpEventStack(currentTime: number) {
    for (let i = this.eventStack.length - 1; i >= 0; --i) {
      let event = this.eventStack[i];
      if (event.ts + event.dur < currentTime) {
        this.eventStack.splice(i, 1);
      }
    }
  }

  /**
   * Parses the JSON produced by LuaToJsonConverter into this.data.
   * @param jsonContent JSON text containing an ESOProfiler_Export object
   */
  parseFileContent(jsonContent: string) {
    this.names = {};
    this.closures = {};
    this.categories = {};
    // FIX: also reset the event stack so a converter instance can be reused.
    this.eventStack = [];
    this.data = {
      traceEvents: [],
      stackFrames: {},
      otherData: {},
    };
    let parsed = JSON.parse(jsonContent)['ESOProfiler_Export'];
    for (const eventIndex in parsed['traceEvents']) {
      const raw = parsed['traceEvents'][eventIndex];
      // raw record format: "<start>,<duration>,<stackFrameId>"
      let data = raw.split(',');
      let event = {
        name: '',
        cat: 'EsoUI',
        ph: 'X',
        ts: parseFloat(data[0]),
        dur: parseFloat(data[1]),
        tts: parseFloat(data[0]),
        tdur: parseFloat(data[1]),
        pid: PID,
        tid: TID,
        sf: data[2],
      };
      this.data.traceEvents.push(event);
      this.cleanUpEventStack(event.ts);
      let parentEvent = this.getParentEvent();
      if (parentEvent) {
        // Subtract child time so tdur approximates the parent's self time.
        parentEvent.tdur -= event.dur;
      }
      this.eventStack.push(event);
    }
    for (const stackId in parsed['stackFrames']) {
      // raw record format: "<recordDataIndex>[,<parentStackId>]"
      const raw = parsed['stackFrames'][stackId];
      let [recordDataIndex, parent] = raw.split(',');
      let stackFrame: any = {};
      stackFrame['name'] = recordDataIndex;
      if (parent) {
        stackFrame['parent'] = parent;
      }
      this.data.stackFrames[stackId] = stackFrame;
    }
    for (const recordDataIndex in parsed['closures']) {
      // raw record format: "<name>,<file>,<line>"
      const raw = parsed['closures'][recordDataIndex];
      let data = raw.split(',');
      this.closures[recordDataIndex] = data;
    }
    for (const frameIndex in parsed['frameStats']) {
      // raw record format: "<start>,<fps*100>,<latency>,<memoryBytes>"
      const raw = parsed['frameStats'][frameIndex];
      let [start, fps, latency, memory] = raw.split(',');
      let startTime = parseFloat(start);
      this.addCounter('FPS', 'Hz', startTime, parseInt(fps) / 100);
      this.addCounter('Latency', 'ms', startTime, parseInt(latency));
      this.addCounter('Memory', 'MB', startTime, parseInt(memory) / (1024 * 1024));
    }
    for (const key in parsed['otherData']) {
      const value = parsed['otherData'][key];
      this.data.otherData[key] = '' + value;
      if (key === 'upTime') {
        // upTime is exported in microseconds; keep whole seconds.
        this.uptime = Math.floor(parseFloat(value) / 1e6);
      }
    }
    this.fillInNames();
    this.addMetaData('process_name', { name: 'eso64.exe' });
    this.addMetaData('process_uptime_seconds', { uptime: this.uptime });
    this.addMetaData('thread_name', { name: 'User Interface' });
  }

  /**
   * Serializes `data` to the stream one trace event at a time so very large
   * traces do not have to go through a single JSON.stringify call.
   */
  async writeToStream(stream: fs.WriteStream, data: ProfilerData) {
    const keys = Object.keys(data);
    const numOfKeys = keys.length;
    // START of root object
    stream.write('{');
    for (let i = 0; i < numOfKeys; i++) {
      const key = keys[i];
      const fieldData = data[key as keyof ProfilerData];
      // START of category object
      stream.write(`"${key}":`);
      if (Array.isArray(fieldData)) {
        // START of values (array)
        stream.write('[');
        const numOfEvents = fieldData.length;
        for (let j = 0; j < numOfEvents; j++) {
          let eventJson = JSON.stringify(fieldData[j]);
          // Delimit values, avoid trailing comma
          if (j < numOfEvents - 1) {
            eventJson += ',';
          }
          // Wait for buffer drain, so we don't spam listeners
          if (!stream.write(eventJson)) {
            await new Promise((resolve) => stream.once('drain', resolve));
          }
        }
        // END of values (array)
        stream.write(']');
      } else {
        // non-array value objects can be directly stringified
        const fieldJson = JSON.stringify(fieldData);
        stream.write(fieldJson);
      }
      // Delimit parents, avoid trailing comma
      if (i < numOfKeys - 1) {
        stream.write(',');
      }
    }
    // END of root object
    stream.write('}\n');
  }

  /**
   * Reads the converted JSON file, parses it and streams the Perfetto trace
   * to `outputFilePath`.
   */
  async generatePerfettoTrace(inputFilePath: string, outputFilePath: string) {
    const content = fs.readFileSync(inputFilePath, 'utf8');
    this.parseFileContent(content);
    // Handle large strings without breaking JSON.stringify
    const stream = fs.createWriteStream(outputFilePath);
    await this.writeToStream(stream, this.data);
    // FIX: wait until the stream is flushed and closed before resolving so
    // callers can safely read or delete files afterwards.
    await new Promise<void>((resolve, reject) => {
      stream.once('error', reject);
      stream.end(() => resolve());
    });
  }
}
/**
 * Reads the Lua input file as UTF-8 text.
 * FIX: use the built-in promise-based fs API instead of hand-wrapping the
 * callback form in a new Promise.
 * @param filePath path to the Lua SavedVariables file
 * @returns the file contents; rejects with the underlying fs error
 */
const readLuaContent = (filePath: string): Promise<string> => {
  return fs.promises.readFile(filePath, 'utf8');
};
/**
 * Writes the JSON string to `filePath` as UTF-8.
 * FIX: use the built-in promise-based fs API instead of hand-wrapping the
 * callback form in a new Promise.
 * @param filePath destination file path
 * @param jsonString serialized JSON document
 * @returns resolves on success; rejects with the underlying fs error
 */
const writeJsonOutput = (filePath: string, jsonString: string): Promise<void> => {
  return fs.promises.writeFile(filePath, jsonString, 'utf8');
};
const convertAndSaveToFile = async (inputFilePath: string, outputFilePath: string) => {
try {
const luaContent = await readLuaContent(inputFilePath);
const converter = new LuaToJsonConverter();
const jsonString = converter.convertLuaToJSONString(luaContent);
await writeJsonOutput(outputFilePath, jsonString);
} catch (error) {
console.error(error);
}
};
/** Parses the converted JSON dump and writes a Perfetto-compatible trace. */
const generatePerfettoTrace = async (inputFilePath: string, outputFilePath: string) => {
  await new PerfettoConverter().generatePerfettoTrace(inputFilePath, outputFilePath);
};
// let's go
if (process.argv.length != 3) {
  console.error('Usage: npx ts-node converter.ts <inputfile.lua>');
  process.exit(1);
}
const inputFilePath = process.argv[2];
if (path.extname(inputFilePath) != '.lua') {
  console.error('only *.lua files supported');
  process.exit(1);
}
// Derive <dir>/tmp_<name>.json (intermediate) and <dir>/perfetto_<name>.json
const baseName = path.basename(inputFilePath, '.lua');
const basePath = path.dirname(inputFilePath);
const tmpJsonPath = `${path.join(basePath, `tmp_${baseName}`)}.json`;
const outputFilePath = `${path.join(basePath, `perfetto_${baseName}`)}.json`;
(async () => {
  console.log(`⌛ Converting trace Lua2JSON:\t${inputFilePath} -> ${tmpJsonPath}`);
  await convertAndSaveToFile(inputFilePath, tmpJsonPath);
  console.log(`⌛ Generating Perfetto Trace:\t${tmpJsonPath} -> ${outputFilePath}`);
  await generatePerfettoTrace(tmpJsonPath, outputFilePath);
  // FIX: await the cleanup instead of leaving a floating promise; a failed
  // unlink would otherwise surface as an unhandled rejection after the
  // success banner was already printed.
  await fs.promises.unlink(tmpJsonPath);
  console.log(`🗑️ Cleanup: Deleted tmp file`);
  console.log('-'.repeat(20));
  console.log(`📂 Output: ${path.resolve(outputFilePath)}`);
  console.log(`\t\tyou can open it in a viewer like https://ui.perfetto.dev`);
  console.log('-'.repeat(20));
})().catch((err) => {
  // FIX: the async IIFE previously had no rejection handler.
  console.error(err);
  process.exit(1);
});
-- NOT WORKING YET, JUST SCAFFOLDING
-- JSON encoder (dkjson); must be resolvable via package.path as lib/dkjson.lua
local json = require("lib.dkjson")
-- Single process / single thread trace: every emitted event shares this pair.
local PID = 0
local TID = 0
-- Name of the global table created by the profiler SavedVariables file;
-- can be overridden by the second CLI argument (see main).
local tableToExport = "ESOProfiler_Export"
-- Utils
--- Splits `input` on `delimiter` and returns the pieces as an array.
-- Empty fields are skipped (the pattern only matches non-empty runs).
-- Note: the delimiter is interpolated into a pattern character class, so it
-- should be a single, non-magic character such as ",".
local function split(input, delimiter)
    local pieces = {}
    local matcher = string.format("([^%s]+)", delimiter)
    for piece in string.gmatch(input, matcher) do
        pieces[#pieces + 1] = piece
    end
    return pieces
end
-- Main
--- Appends a counter ('C') trace event, e.g. an FPS or memory sample.
-- @param name counter name shown in the viewer
-- @param unit unit label, rendered as the argument key "[unit]"
-- @param start sample timestamp
-- @param value sampled value
local function addCounter(self, name, unit, start, value)
    local counterEvent = {
        name = name,
        ph = 'C',
        cat = 'stats',
        ts = start,
        pid = PID,
        tid = TID,
        args = { ["[" .. unit .. "]"] = value },
    }
    local events = self.data.traceEvents
    events[#events + 1] = counterEvent
end
--- Resolves names/locations/categories for stack frames from the closure
-- table, then back-fills name, category and args on every trace event.
-- (Debug io.write scaffolding removed.)
local function fillInNames(self)
    for stackId, stackFrame in pairs(self.data.stackFrames) do
        -- FIX: look the closure up by the frame's record index
        -- (stackFrame.name); stackFrame[tostring(stackId)] indexed a key
        -- that never exists and crashed with "attempt to index nil".
        local closureData = self.closures[stackFrame.name]
        if closureData then
            local name, file, line = closureData[1], closureData[2], closureData[3]
            self.names[stackId] = name
            stackFrame.id = stackId
            stackFrame.location = file .. ':' .. line
            -- FIX: "(.+?)" is not a lazy quantifier in Lua patterns ('?' was
            -- matched as a literal, so this never matched a real path); use
            -- a character class to capture the addon directory name.
            local addon = string.match(file, "@user:/AddOns/([^/]+)/")
            if addon then
                self.categories[stackId] = addon
            elseif string.find(file, "@user:/SavedVariables/", 1, true) then
                self.categories[stackId] = "SavedVariables"
            end
        end
    end
    for _, event in ipairs(self.data.traceEvents) do
        -- FIX: '' is truthy in Lua, so also treat the empty-string
        -- placeholder set by parseFileContent as "no name yet".
        if not event.name or event.name == '' then
            event.name = self.names[event.sf]
            if self.categories[event.sf] then
                event.cat = self.categories[event.sf]
            end
        end
        if not event.args then
            event.args = self.data.stackFrames[event.sf]
        end
    end
end
--- Prepends a metadata ('M') trace event (process/thread names etc.).
local function addMetaData(self, name, args)
    local metaEvent = {
        name = name,
        cat = '__metadata',
        ts = 0,
        ph = 'M',
        args = args,
        pid = PID,
        tid = TID,
    }
    table.insert(self.data.traceEvents, 1, metaEvent)
end
--- Returns the innermost still-open event, or nil when the stack is empty.
local function getParentEvent(self)
    local depth = #self.eventStack
    if depth == 0 then
        return nil
    end
    return self.eventStack[depth]
end
--- Drops every stacked event whose time span has already ended at
-- `currentTime`. Iterates backwards so table.remove never skips elements.
local function cleanUpEventStack(self, currentTime)
    local stack = self.eventStack
    for index = #stack, 1, -1 do
        local candidate = stack[index]
        if candidate.ts + candidate.dur < currentTime then
            table.remove(stack, index)
        end
    end
end
--- Parses the exported profiler table (already loaded into _G[tableName])
-- into Chrome Trace Event Format structures on self.data.
-- @param tableName name of the global table created by the SavedVariables file
local function parseFileContent(self, tableName)
    self.names = {}
    self.closures = {}
    self.categories = {}
    -- FIX: also reset the event stack so the converter table can be reused.
    self.eventStack = {}
    self.data = {
        traceEvents = {},
        stackFrames = {},
        otherData = {}
    }
    local content = _G[tableName]
    -- FIX: fail with a clear message instead of indexing nil below.
    assert(content, "global table '" .. tostring(tableName) .. "' not found")
    -- raw record format: "<start>,<duration>,<stackFrameId>"
    for _, raw in ipairs(content['traceEvents']) do
        local data = split(raw, ',')
        local event = {
            -- FIX: leave name nil (was '') so fillInNames recognizes the
            -- event as unnamed; '' is truthy in Lua.
            cat = 'EsoUI',
            ph = 'X',
            ts = tonumber(data[1]),
            dur = tonumber(data[2]),
            tts = tonumber(data[1]),
            tdur = tonumber(data[2]),
            pid = PID,
            tid = TID,
            -- FIX: keep sf as a string so it matches the stringified
            -- stackFrames/names keys (was tonumber, which never matched).
            sf = data[3]
        }
        table.insert(self.data.traceEvents, event)
        self:cleanUpEventStack(event.ts)
        local parentEvent = self:getParentEvent()
        if parentEvent then
            -- Subtract child time so tdur approximates the parent's self time.
            parentEvent.tdur = parentEvent.tdur - event.dur
        end
        table.insert(self.eventStack, event)
    end
    -- raw record format: "<recordDataIndex>[,<parentStackId>]"
    for stackId, raw in pairs(content['stackFrames']) do
        local data = split(raw, ',')
        local recordDataIndex, parent = data[1], data[2]
        local stackFrame = {
            name = tostring(recordDataIndex),
            id = tostring(stackId)
        }
        if parent and #parent > 0 then
            stackFrame.parent = parent
        end
        self.data.stackFrames[stackFrame.id] = stackFrame
    end
    -- raw record format: "<name>,<file>,<line>"
    for recordDataIndex, raw in pairs(content['closures']) do
        -- FIX: stringify the key so lookups by stackFrame.name (a string)
        -- succeed; the raw table uses numeric indices.
        self.closures[tostring(recordDataIndex)] = split(raw, ',')
    end
    -- raw record format: "<start>,<fps*100>,<latency>,<memoryBytes>"
    for _, raw in pairs(content['frameStats']) do
        local start, fps, latency, memory = raw:match("([^,]+),([^,]+),([^,]+),([^,]+)")
        local startTime = tonumber(start)
        self:addCounter('FPS', 'Hz', startTime, tonumber(fps) / 100)
        self:addCounter('Latency', 'ms', startTime, tonumber(latency))
        self:addCounter('Memory', 'MB', startTime, tonumber(memory) / (1024 * 1024))
    end
    for key, value in pairs(content['otherData']) do
        self.data.otherData[key] = tostring(value)
        if key == 'upTime' then
            -- upTime is exported in microseconds; keep whole seconds.
            -- FIX: guard against a non-numeric value (math.floor(nil) raises).
            local micros = tonumber(value)
            if micros then
                self.uptime = math.floor(micros / 1e6)
            end
        end
    end
    self:fillInNames()
    self:addMetaData('process_name', { name = 'eso64.exe' })
    self:addMetaData('process_uptime_seconds', { uptime = self.uptime })
    self:addMetaData('thread_name', { name = 'User Interface' })
end
-- Converter instance: a plain table bundling mutable state with the
-- functions defined above, called with method syntax
-- (e.g. perfettoConverter:parseFileContent(...)).
local perfettoConverter = {
-- resulting Chrome Trace Event Format document (encoded by dkjson in main)
data = {
traceEvents = {},
stackFrames = {},
otherData = {}
},
-- events whose time span may still enclose upcoming events
eventStack = {},
-- stack frame id -> display name
names = {},
-- stack frame id -> addon / category name
categories = {},
-- record index -> { name, file, line }
closures = {},
-- process uptime in seconds (filled from otherData.upTime)
uptime = 0,
addCounter = addCounter,
fillInNames = fillInNames,
addMetaData = addMetaData,
getParentEvent = getParentEvent,
cleanUpEventStack = cleanUpEventStack,
parseFileContent = parseFileContent,
}
--- CLI entry point: loads the profiler SavedVariables file, converts it and
-- writes "<InputFile.lua>.json" next to it.
-- @param ... CLI args: <InputFile.lua> [TableToExport]
local function main(...)
    local args = { ... }
    if #args < 1 then
        print("Usage: lua lua2json.lua <InputFile.lua> [TableToExport]")
        print("Defaults to ESOProfiler_Export as table name")
        os.exit(1)
    end
    local inputFile = args[1]
    tableToExport = args[2] or tableToExport
    -- Executing the SavedVariables file defines the export table in _G.
    -- FIX: drop the unused second result; assert already raises on failure.
    local chunk = assert(loadfile(inputFile))
    chunk()
    perfettoConverter:parseFileContent(tableToExport)
    local jsonOutput = json.encode(perfettoConverter.data)
    local resultFile = inputFile .. ".json"
    -- FIX: idiomatic assert-wrapped io.open (same message, same behavior).
    local file = assert(io.open(resultFile, "w"), "cannot access result file")
    io.write(string.format("writing to %s\n", resultFile))
    file:write(jsonOutput)
    file:close()
    print("Written to " .. resultFile)
end
main(...)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment