Skip to content

Instantly share code, notes, and snippets.

@JoshLmao
Created June 13, 2019 10:34
Show Gist options
  • Star 2 You must be signed in to star a gist
  • Fork 1 You must be signed in to fork a gist
  • Save JoshLmao/71b7300318e0f49e5b8daca5b6dfda3e to your computer and use it in GitHub Desktop.
Save JoshLmao/71b7300318e0f49e5b8daca5b6dfda3e to your computer and use it in GitHub Desktop.
Firebase Backup.py - Python script for automatically visiting endpoints of a Realtime Firebase database and combining the JSON into one for saving to a file, every X amount of seconds
# Firebase Database Backup
# Created by @JoshLmao
#
# Description: On Firebase, you're allowed to Import and Export JSON into a real time database. If for some reason that database is breached
# without being a paid user of Firebase, you are at risk of losing a lot of data. Firebase doesn't allow users to access the root of their database through
# the WebAPI, however you can access each child object at the url "https://[PROJECT-ID].firebaseio.com/[PATH-TO-CHILD].json"
#
# How to Use: Simply add each child from the master and its URL to the 'endpointsDict' in main(). For example, If your database had...
# -> MyDatabase
# -> Users
# -> Posts
# ...then your dictionary would be... endpointsUrl = { "users": "https://[PROJECT-ID].firebaseio.com/users.json", "posts": "https://[PROJECT-ID].firebaseio.com/posts.json" }
# You also need to change the 'saveFolder', 'fileNamePrefix' and 'sleepSeconds' properties inside main()
# NOTE: Finally, wherever you execute the command from the cmd is where the local path will start. For example, if my cmd was like this "C:\Users\Josh> python firebase_backup.py" then
# the save folder would be at "C:\Users\Josh\Backup"
import datetime
import urllib.request, json
import time
import pathlib
import os.path
import os
import sys
def log(message):
    """Print *message* to stdout, prefixed with the current local timestamp."""
    timestamp = datetime.datetime.now().strftime("%d/%m/%Y %H:%M:%S")
    print(f"{timestamp} | {message}")
def backupLoop(endpointsDict, sleepSeconds, folderFullPath, fileNamePrefix):
    """Run one backup pass: fetch every endpoint, write the merged JSON to a
    timestamped file, then sleep for *sleepSeconds*.

    Args:
        endpointsDict: mapping of root child name -> Firebase REST ".json" URL.
        sleepSeconds: seconds to pause after the backup completes.
        folderFullPath: directory backup files are written to (created if missing).
        fileNamePrefix: file name prefix, producing "<prefix>_<datetime>.json".
    """
    # exist_ok avoids the check-then-create race of os.path.exists + makedirs
    os.makedirs(folderFullPath, exist_ok=True)
    log("Starting backup...")
    # Download each endpoint and store it under its root-child name so the
    # merged JSON can be imported straight back into Firebase.
    mergedJson = {}
    for name, endpointUrl in endpointsDict.items():
        with urllib.request.urlopen(endpointUrl) as url:
            mergedJson[name] = json.loads(url.read().decode())
    # Timestamped file name; os.path.join is robust to a missing trailing
    # separator on folderFullPath, unlike plain string concatenation.
    dt = datetime.datetime.now()
    fileName = fileNamePrefix + "_" + dt.strftime("%d-%m-%Y_%H-%M-%S") + ".json"
    # Context manager guarantees the file is closed even if the write raises
    with open(os.path.join(folderFullPath, fileName), "w") as file:
        file.write(json.dumps(mergedJson))
    # Log and sleep for duration before repeating
    log("Successfully completed database backup")
    log("Sleeping for '" + str(sleepSeconds) + "' seconds")
    time.sleep(sleepSeconds)
def main():
    """Entry point: configure the backup job and run it indefinitely."""
    # Maps each root child name to its Firebase REST ".json" endpoint.
    # Key must be the first name inside the root; value is the URL endpoint.
    endpoints = {
        "users": "https://[PROJECT-ID].firebaseio.com/users.json",
        "data": "https://[PROJECT-ID].firebaseio.com/data.json"
    }
    # Folder (relative to the script location) that receives backup files.
    save_folder = "/Backups/"
    # Backup files are named '<prefix>_<datetime>.json'.
    file_prefix = "MyDatabase"
    # Seconds to wait between backups; defaults to one hour.
    interval_seconds = 60 * 60
    target_dir = os.path.dirname(sys.argv[0]) + save_folder
    log("STARTED - Saving backups to " + target_dir)
    while True:
        backupLoop(endpoints, interval_seconds, target_dir, file_prefix)


if __name__ == "__main__":
    main()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment