Skip to content

Instantly share code, notes, and snippets.

@nenriquez
Last active September 6, 2017 14:53
Show Gist options
  • Save nenriquez/49eeafc56e282577d03c9fcd67afd1da to your computer and use it in GitHub Desktop.
Save nenriquez/49eeafc56e282577d03c9fcd67afd1da to your computer and use it in GitHub Desktop.
App Engine's Datastore Backup to localhost importer
"""
# App Engine's Datastore Backup to localhost importer.
## Getting backup files
HOW TO USE
1) Download your backup files from your bucket to this file's directory:
```
gsutil -m cp -r gs://<your_bucket>/datastore_backup_datastore_backup_* .
```
2) Start your server.
3) Call import_backup function from a test endpoint
"""
import logging
import os
from google.appengine.api.files import records
from google.appengine.datastore import entity_pb
from google.appengine.ext import ndb
# Absolute directory containing this module; backup files downloaded with
# gsutil are expected to live under this directory (see module docstring).
__location__ = os.path.realpath(
    os.path.join(os.getcwd(), os.path.dirname(__file__)))
def get_entity(r):
    """Deserialize one backup record into an ndb entity owned by this app.

    Args:
        r: raw bytes of a single record from a Datastore backup file,
           containing a serialized ``EntityProto``.

    Returns:
        The decoded ndb entity, re-keyed so the key belongs to the
        current (local) application instead of the backed-up app id.
    """
    entity_proto = entity_pb.EntityProto(contents=r)
    entity = ndb.ModelAdapter().pb_to_entity(entity_proto)
    # Rebuild the key without an explicit app id so it defaults to this app.
    # Using the full flat path (kind1, id1, kind2, id2, ...) preserves any
    # ancestor keys; the previous (kind, id) form silently dropped parents.
    backup_key = ndb.Key(reference=entity_proto.key())
    entity.key = ndb.Key(flat=backup_key.flat())
    return entity
def resolve_files(path, result_list=None):
    """Recursively collect backup "output" files under *path*.

    Args:
        path: directory to scan recursively.
        result_list: optional list that matches are appended to. A fresh
            list is created when omitted — the original mutable default
            (``result_list=[]``) was shared across calls and accumulated
            results between invocations.

    Returns:
        The list of absolute paths whose path string contains "output".
    """
    if result_list is None:
        result_list = []
    for entry in os.listdir(path):
        absolute_path = os.path.join(path, entry)
        if os.path.isdir(absolute_path):
            resolve_files(absolute_path, result_list)
        # ``in`` replaces ``find(...) > 0``, which missed a match at
        # index 0 (only possible for relative paths, but still wrong).
        elif "output" in absolute_path:
            result_list.append(absolute_path)
    return result_list
def import_backup():
    """Import all locally downloaded Datastore backup files into ndb.

    Scans this module's directory tree for backup "output" files, decodes
    every record into an ndb entity re-keyed for this app, and stores them
    all with a single ``put_multi`` call. Intended to be invoked from a
    test endpoint on the local dev server (see module docstring).
    """
    backup_files = []
    resolve_files(__location__, backup_files)
    logging.info(backup_files)
    to_store = []
    for filename in backup_files:
        # Backup files are binary record logs: open in binary mode and
        # close the handle deterministically (the original leaked open
        # text-mode file handles).
        with open(filename, "rb") as raw:
            reader = records.RecordsReader(raw)
            # extend() instead of repeated list concatenation, which was
            # quadratic in the total number of entities.
            to_store.extend(get_entity(record) for record in reader)
    ndb.put_multi(to_store)
    logging.info("stored {} entities".format(len(to_store)))
    # logging.info('\n'.join(map(str, to_store)))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment