Skip to content

Instantly share code, notes, and snippets.

Embed
What would you like to do?
Benchmarking json loading across json and ujson (precision floats and normal)
import json
import ujson
from timeit import repeat
def decode_json(data):
    """Decode the JSON string *data* with the standard-library parser.

    The parsed object is intentionally discarded — only the decode
    cost matters for the benchmark.
    """
    json.loads(data)
def decode_ujson(data, precise=False):
    """Decode the JSON string *data* with ujson.

    When *precise* is True, ujson's slower ``precise_float`` mode is
    enabled for higher float fidelity. The result is discarded — only
    the decode cost matters for the benchmark.
    """
    ujson.loads(data, precise_float=precise)
def raw():
    """Read and return the benchmark payload file as a single string."""
    with open("data/some_data.json") as handle:
        return handle.read()
# Benchmark configuration: best-of-REPEAT runs of LOOPS decodes each.
LOOPS = 100_000
REPEAT = 3

if __name__ == "__main__":
    # Read the payload once in setup so the timings measure JSON
    # decoding only, not repeated file I/O on every iteration.
    setup = (
        "from __main__ import decode_json, decode_ujson, raw\n"
        "data = raw()"
    )
    print("Best of {0} of {1} loads each\n".format(REPEAT, LOOPS))
    # BUG FIX: number/repeat were hard-coded to 10000/3, so the header
    # overstated the loop count 10x; drive them from LOOPS/REPEAT.
    print("std json: %.4fs" % min(repeat("decode_json(data)", setup=setup, number=LOOPS, repeat=REPEAT)))
    print("ujson w/o precision load: %.4fs" % min(repeat("decode_ujson(data)", setup=setup, number=LOOPS, repeat=REPEAT)))
    print("ujson w/ precision load: %.4fs" % min(repeat("decode_ujson(data, True)", setup=setup, number=LOOPS, repeat=REPEAT)))
$ python benchmark_load.py
Best of 3 of 100000 loads each
std json: 6.1493s
ujson w/o precision load: 5.0161s
ujson w/ precision load: 5.0040s
$ python benchmark_load.py
Best of 3 of 100000 loads each
std json: 5.1441s
ujson w/o precision load: 4.9517s
ujson w/ precision load: 5.0072s
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
You can’t perform that action at this time.