@Nuclearfossil
Created January 13, 2016 01:30
FlatBuffers vs Protocol Buffers: equivalent schema definitions and Python scripts that profile loading and reading the same serialized metrics payload with each library.
// FlatBuffers schema definition file
namespace metrics;

enum Type : byte {
  INT = 0,
  DOUBLE,
  STRING,
}

table Metric {
  timestamp:ulong;
  type:int;           // holds a Type value: 0 = INT, 1 = DOUBLE, 2 = STRING
  key:string;
  doubleValue:double;
  intValue:ulong;
  stringValue:string;
}

table Metrics {
  value_entry:[Metric];
}

root_type Metrics;
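The profiling script further down only reads ../../results/flatbuf_metrics.dat; the code that writes that file is not part of this gist. As a rough sketch of how such a buffer could be produced from this schema, assuming the metrics.Metric and metrics.Metrics modules were generated with flatc --python and expose the usual flatc-generated MetricStart/MetricAdd* builder helpers, a single STRING metric might be serialized like this:

# Hypothetical writer sketch, not part of the original gist.
# Assumes flatc-generated modules metrics/Metric.py and metrics/Metrics.py.
import flatbuffers
from metrics import Metric, Metrics   # the generated modules, not the classes

builder = flatbuffers.Builder(1024)

# Strings must be created before the table that references them.
key = builder.CreateString('cpu.load')
value = builder.CreateString('0.75')

Metric.MetricStart(builder)
Metric.MetricAddTimestamp(builder, 1452646200)
Metric.MetricAddType(builder, 2)   # 2 == STRING in the Type enum
Metric.MetricAddKey(builder, key)
Metric.MetricAddStringValue(builder, value)
metric = Metric.MetricEnd(builder)

# Wrap the metric in the value_entry vector, then finish the root table.
Metrics.MetricsStartValueEntryVector(builder, 1)
builder.PrependUOffsetTRelative(metric)
entries = builder.EndVector(1)   # element count argument matches the 2016-era Python API

Metrics.MetricsStart(builder)
Metrics.MetricsAddValueEntry(builder, entries)
builder.Finish(Metrics.MetricsEnd(builder))

open('../../results/flatbuf_metrics.dat', 'wb').write(builder.Output())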
// Protobuf definition file
syntax = "proto3";

package metrics;

message Metric {
  enum Type {
    INT = 0;
    DOUBLE = 1;
    STRING = 2;
  }

  fixed64 timestamp = 1;
  Type type = 2;
  string key = 3;
  double doubleValue = 4;
  fixed64 intValue = 5;
  string stringValue = 6;
}

message Metrics {
  repeated Metric value_entry = 1;
}
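Likewise, the protobuf reader below expects ../../results/protobuf_metrics.dat to already exist. A minimal, hypothetical writer using the protoc-generated metrics_pb2 module (generated with protoc --python_out=.) might look like this:

# Hypothetical writer sketch, not part of the original gist.
import metrics_pb2

metrics = metrics_pb2.Metrics()

# Append one STRING metric to the repeated value_entry field.
metric = metrics.value_entry.add()
metric.timestamp = 1452646200
metric.type = metrics_pb2.Metric.STRING
metric.key = 'cpu.load'
metric.stringValue = '0.75'

with open('../../results/protobuf_metrics.dat', 'wb') as f:
    f.write(metrics.SerializeToString())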
#!/usr/bin/env python
import cProfile
import pstats
import StringIO

from metrics.Metrics import Metrics
from metrics.Metric import Metric

import flatbuffers

__author__ = 'amatheson'


def displayMetrics(metricsDS):
    # Walk the value_entry vector and print each metric's fields.
    for index in xrange(metricsDS.ValueEntryLength()):
        valueEntry = metricsDS.ValueEntry(index)
        print "timestamp", valueEntry.Timestamp()
        print "key", valueEntry.Key()
        print "type", valueEntry.Type()
        if valueEntry.Type() == 0:    # INT
            print "value: ", valueEntry.IntValue()
        elif valueEntry.Type() == 1:  # DOUBLE
            print "value: ", valueEntry.DoubleValue()
        elif valueEntry.Type() == 2:  # STRING
            print "value: ", valueEntry.StringValue()


def logMetrics(metricsDS, logfile):
    # Dump every metric to a text file so the contents can be diffed
    # against the protobuf run.
    for index in xrange(metricsDS.ValueEntryLength()):
        metric = metricsDS.ValueEntry(index)
        line = '{} {} {} {}\n'.format(metric.Timestamp(), metric.Key(), metric.Type(), metric.StringValue())
        logfile.write(line)


def main():
    # Profile reading the file and getting the root Metrics table.
    pr = cProfile.Profile()
    pr.enable()
    buf = open('../../results/flatbuf_metrics.dat', 'rb').read()
    buf = bytearray(buf)
    myMetrics = Metrics.GetRootAsMetrics(buf, 0)
    pr.disable()

    loadResults = StringIO.StringIO()
    sortby = 'cumulative'
    ps = pstats.Stats(pr, stream=loadResults).sort_stats(sortby)
    ps.print_stats()

    # Profile walking and printing every metric.
    pr.enable()
    displayMetrics(myMetrics)
    pr.disable()

    printResults = StringIO.StringIO()
    ps = pstats.Stats(pr, stream=printResults).sort_stats(sortby)
    ps.print_stats()

    print "Load Data: {}".format(loadResults.getvalue())
    print "Print Data: {}".format(printResults.getvalue())

    # Ensure that the data we're reading is correct.
    logfile = open('../../results/flatbuf_contents.txt', 'w')
    logMetrics(myMetrics, logfile)
    logfile.close()


if __name__ == "__main__":
    main()
#!/usr/bin/env python
import cProfile
import pstats
import StringIO

import metrics_pb2

pr = cProfile.Profile()


def displayMetrics(metricsDS):
    # Walk the repeated value_entry field and print each metric's fields.
    for metric in metricsDS.value_entry:
        print "timestamp", metric.timestamp
        print "key", metric.key
        print "type", metric.type
        if metric.type == 0:    # INT
            print "value: ", metric.intValue
        elif metric.type == 1:  # DOUBLE
            print "value: ", metric.doubleValue
        elif metric.type == 2:  # STRING
            print "value: ", metric.stringValue


def logMetrics(metricsDS, logfile):
    # Dump every metric to a text file so the contents can be diffed
    # against the FlatBuffers run.
    for metric in metricsDS.value_entry:
        line = '{} {} {} {}\n'.format(metric.timestamp, metric.key, metric.type, metric.stringValue)
        logfile.write(line)


# Profile reading the file and parsing the serialized message.
pr.enable()
metricsDS = metrics_pb2.Metrics()
f = open('../../results/protobuf_metrics.dat', 'rb')
metricsDS.ParseFromString(f.read())
f.close()
pr.disable()

loadResults = StringIO.StringIO()
sortby = 'cumulative'
ps = pstats.Stats(pr, stream=loadResults).sort_stats(sortby)
ps.print_stats()

# Profile walking and printing every metric.
pr.enable()
displayMetrics(metricsDS)
pr.disable()

printResults = StringIO.StringIO()
ps = pstats.Stats(pr, stream=printResults).sort_stats(sortby)
ps.print_stats()

print "Load Data: {}".format(loadResults.getvalue())
print "Process Data: {}".format(printResults.getvalue())

# Ensure that the data we're reading is correct.
logfile = open('../../results/protobuf_contents.txt', 'w')
logMetrics(metricsDS, logfile)
logfile.close()