JSON serializer/deserializer adapted for use with Google App Engine's NDB Datastore API. This script can handle Model, Expando, PolyModel, Query, QueryIterator, Key, datetime, struct_time, and complex types.

ndb_json.py
#!/usr/bin/env python
"""
JSON encoder/decoder adapted for use with Google App Engine NDB.
Usage:
import ndb_json
# Serialize an ndb.Query into an array of JSON objects.
query = models.MyModel.query()
query_json = ndb_json.dumps(query)
# Convert into a list of Python dictionaries.
query_dicts = ndb_json.loads(query_json)
# Serialize an ndb.Model instance into a JSON object.
entity = query.get()
entity_json = ndb_json.dumps(entity)
# Convert into a Python dictionary.
entity_dict = ndb_json.loads(entity_json)
Dependencies:
- dateutil: https://pypi.python.org/pypi/python-dateutil
"""
__author__ = 'Eric Higgins'
__copyright__ = 'Copyright 2013, Eric Higgins'
__version__ = '0.0.5'
__email__ = 'erichiggins@gmail.com'
__status__ = 'Development'
 
 
import base64
import datetime
import json
import re
import time
import types
 
import dateutil.parser
from google.appengine.ext import ndb
 
 
def encode_model(obj):
  """Encode objects like ndb.Model which have a `.to_dict()` method."""
  obj_dict = obj.to_dict()
  for key, val in obj_dict.iteritems():
    if isinstance(val, types.StringType):
      try:
        unicode(val)
      except UnicodeDecodeError:
        # Encode binary strings (blobs) to base64.
        obj_dict[key] = base64.b64encode(val)
  return obj_dict
 
 
def encode_generator(obj):
  """Encode generator-like objects, such as ndb.Query."""
  return list(obj)
 
 
def encode_key(obj):
  """Get the Entity from the ndb.Key for further encoding."""
  # Note(eric): Potentially poor performance for Models w/ many KeyProperty properties.
  return obj.get_async()
  # Alternative 1: Convert into pairs.
  # return obj.pairs()
  # Alternative 2: Convert into URL-safe base64-encoded string.
  # return obj.urlsafe()
 
 
def encode_future(obj):
  """Encode an ndb.Future instance."""
  return obj.get_result()
 
 
def encode_datetime(obj):
  """Encode a datetime.datetime or datetime.date object as an ISO 8601 format string."""
  # Reformat the date slightly for better JS compatibility.
  # Offset-naive datetimes need 'Z' appended for JS.
  # datetime.date objects don't have or need tzinfo, so don't append 'Z'.
  zone = '' if getattr(obj, 'tzinfo', True) else 'Z'
  return obj.isoformat() + zone
 
 
def encode_complex(obj):
  """Convert a complex number object into a list containing the real and imaginary values."""
  return [obj.real, obj.imag]
 
 
def encode_basevalue(obj):
  """Retrieve the actual value from a ndb.model._BaseValue.

  This is a convenience function to assist with the following issue:
  https://code.google.com/p/appengine-ndb-experiment/issues/detail?id=208
  """
  return obj.b_val
 
 
NDB_TYPE_ENCODING = {
    ndb.MetaModel: encode_model,
    ndb.Query: encode_generator,
    ndb.QueryIterator: encode_generator,
    ndb.Key: encode_key,
    ndb.Future: encode_future,
    datetime.date: encode_datetime,
    datetime.datetime: encode_datetime,
    time.struct_time: encode_generator,
    types.ComplexType: encode_complex,
    ndb.model._BaseValue: encode_basevalue,
}
 
 
class NdbEncoder(json.JSONEncoder):
  """Extend the JSON encoder to add support for NDB Models."""

  def default(self, obj):
    """Overriding the default JSONEncoder.default for NDB support."""

    obj_type = type(obj)
    # Model instances are of their own generated class, which won't be in the
    # encoding map; fall back to the metaclass (ndb.MetaModel) instead.
    if obj_type not in NDB_TYPE_ENCODING and hasattr(obj, '__metaclass__'):
      obj_type = obj.__metaclass__
    fn = NDB_TYPE_ENCODING.get(obj_type)
    if fn:
      return fn(obj)

    return json.JSONEncoder.default(self, obj)
 
 
def dumps(ndb_model, **kwargs):
  """Custom json dumps using the custom encoder above."""
  return NdbEncoder(**kwargs).encode(ndb_model)
 
 
def dump(ndb_model, fp, **kwargs):
  """Custom json dump using the custom encoder above."""
  for chunk in NdbEncoder(**kwargs).iterencode(ndb_model):
    fp.write(chunk)
 
 
def loads(json_str, **kwargs):
  """Custom json loads function that converts datetime strings."""
  json_dict = json.loads(json_str, **kwargs)
  if isinstance(json_dict, list):
    # Guard against non-dict items (e.g. a top-level array of strings).
    return [iteritems(val) if isinstance(val, dict) else val for val in json_dict]
  return iteritems(json_dict)
 
 
def iteritems(json_dict):
  """Loop over a json dict and try to convert strings to datetime."""
  for key, val in json_dict.iteritems():
    if isinstance(val, dict):
      iteritems(val)
    # It's a little hacky to check for specific chars, but avoids integers.
    elif isinstance(val, basestring) and 'T' in val:
      try:
        json_dict[key] = dateutil.parser.parse(val)
        # Check for UTC.
        if val.endswith(('+00:00', '-00:00', 'Z')):
          # Then remove tzinfo for gae, which is offset-naive.
          json_dict[key] = json_dict[key].replace(tzinfo=None)
      except (TypeError, ValueError):
        pass
  return json_dict
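
For reference, a small usage sketch exercising the model and datetime encoders above. The Event model here is hypothetical, purely for illustration:

  import datetime

  from google.appengine.ext import ndb

  import ndb_json


  class Event(ndb.Model):
    # Hypothetical model, just to demonstrate serialization.
    name = ndb.StringProperty()
    when = ndb.DateTimeProperty()


  entity = Event(name='launch', when=datetime.datetime(2013, 5, 1, 12, 0))
  print ndb_json.dumps(entity)
  # {"name": "launch", "when": "2013-05-01T12:00:00Z"}

  print ndb_json.dumps(complex(1, 2))
  # [1.0, 2.0]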

Is it possible to use the model schema as a reference in loads, so you don't have to guess whether something is a string or a date(time) in iteritems?
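
(A minimal sketch of that idea, assuming the payload is a single JSON object: inspect the model class's _properties mapping and only parse fields declared as date/datetime properties. loads_for_model is a hypothetical helper, not part of this gist.)

  import json

  import dateutil.parser
  from google.appengine.ext import ndb


  def loads_for_model(json_str, model_class):
    """Parse JSON, converting only fields the schema declares as dates."""
    obj = json.loads(json_str)
    for name, prop in model_class._properties.iteritems():
      if (isinstance(prop, (ndb.DateProperty, ndb.DateTimeProperty))
          and isinstance(obj.get(name), basestring)):
        obj[name] = dateutil.parser.parse(obj[name])
    return obj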

Thanks a lot! Time saver! :D

Are you planning on including this in erichiggins/gaek, which I assume is going to be the place where you package these gists more formally? :-)

Either way, can you clarify the terms under which this gist is licensed? (i.e., if I wanted to include it verbatim or modified, versus re-implementing based on the ideas here)

Thanks!

How can I modify this to support JsonProperty fields? Currently my responses return the whole JsonProperty value as one big string instead of actual JSON.
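
(A hedged workaround sketch, not part of the gist: if a JsonProperty value round-trips as a string, one option is to re-parse those fields in a variant of encode_model by checking the model's declared property types. encode_model_with_json_props is a hypothetical name.)

  import json

  from google.appengine.ext import ndb


  def encode_model_with_json_props(obj):
    """Like encode_model, but re-parses stringified JsonProperty values."""
    obj_dict = obj.to_dict()
    for key, val in obj_dict.iteritems():
      prop = obj._properties.get(key)
      if isinstance(prop, ndb.JsonProperty) and isinstance(val, basestring):
        try:
          obj_dict[key] = json.loads(val)
        except ValueError:
          # Leave the raw string alone if it isn't valid JSON.
          pass
    return obj_dict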
