@Tugzrida
Last active April 7, 2024 10:55
Backup Home Assistant to Dropbox
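Triggers a new Home Assistant backup when the newest local one is older than BACKUP_FREQUENCY, uploads any local backups missing from a Dropbox app folder, and prunes old copies locally and remotely according to LOCAL_RETENTION and REMOTE_RETENTION.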
#!/usr/bin/python3
from urllib.request import urlopen, Request
from functools import cached_property
from datetime import timedelta, datetime, UTC
import dataclasses as dc
import glob, os, time, dropbox, tarfile, json, sys
######
HA_TOKEN = "XXX"
HA_BACKUP_DIR = "/home/pi/docker/homeassistant/config/backups/"
# Create a scoped app, with files.metadata.read and files.content.write
# Enter the key and secret here, then run with the "auth" argument to get a token for your account
DBX_KEY = "XXX"
DBX_SECRET = "XXX"
DBX_TOKEN = "XXX"
BACKUP_FREQUENCY = timedelta(days=3)
LOCAL_RETENTION = 5
REMOTE_RETENTION = 30
######
if (len(sys.argv) > 1 and sys.argv[1] == "auth"):
    auth = dropbox.DropboxOAuth2FlowNoRedirect(DBX_KEY, DBX_SECRET, token_access_type="offline")
    print("Authorize access to your Dropbox account here:")
    print(f" {auth.start()}\n")
    auth = auth.finish(input("Then enter the provided code here: ").strip())
    print("\nSet DBX_TOKEN to:")
    print(f" {auth.refresh_token}\n")
    raise SystemExit
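
# Ask Home Assistant to create a new backup via its REST API; the sleep gives
# the backup a moment to finish being written to disk.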
def do_ha_backup():
    urlopen(Request(
        "http://127.0.0.1:8123/api/services/backup/create",
        headers={
            "Authorization": f"Bearer {HA_TOKEN}"
        },
        method="POST"
    ))
    time.sleep(5) # Make sure backup really is done
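
# Return the backups currently in the Dropbox app folder, newest first,
# with timezone-aware modification times.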
def list_dropbox_backups(dbx):
    files = dbx.files_list_folder("").entries
    for f in files:
        f.client_modified = f.client_modified.replace(tzinfo=UTC)
    # Newest to oldest
    return sorted(files, key=lambda f: f.client_modified, reverse=True)
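
# Upload a local backup to the Dropbox app folder in 8 MiB chunks using an
# upload session, preserving the local mtime as the remote client_modified time.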
def dropbox_upload(dbx, file):
    CHUNK = 8 * 2**20 # 8MiB
    file_size = os.path.getsize(file.path)
    with open(file.path, 'rb') as f:
        session = dbx.files_upload_session_start(f.read(CHUNK))
        cursor = dropbox.files.UploadSessionCursor(session.session_id, f.tell())
        while (file_size - f.tell()) > CHUNK:
            dbx.files_upload_session_append_v2(f.read(CHUNK), cursor)
            cursor.offset = f.tell()
        dbx.files_upload_session_finish(
            f.read(),
            cursor,
            dropbox.files.CommitInfo(
                path=f"/{file.remote_name}",
                client_modified=file.mtime
            )
        )
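
# A local backup archive: its modification time, and the name it should have in
# Dropbox, built from the slug and Home Assistant version stored in the
# backup.json file inside the tar.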
@dc.dataclass
class BkupFile:
    path: str

    @cached_property
    def mtime(self):
        return datetime.fromtimestamp(os.path.getmtime(self.path), UTC)

    @cached_property
    def remote_name(self):
        with tarfile.open(self.path) as tar:
            metadata = json.load(tar.extractfile("./backup.json"))
        return f'{metadata["slug"]}_v{metadata["homeassistant"]["version"].replace(".", "_")}.tar'
# All locally-stored backups from newest to oldest
local_backups = sorted((BkupFile(p) for p in glob.iglob(f"{HA_BACKUP_DIR}/*.tar")), key=lambda f: f.mtime, reverse=True)
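
# Take a fresh backup if the newest local one has gone stale. Note that
# local_backups is not re-read afterwards, so a backup created here is
# uploaded on the script's next run.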
if datetime.now(UTC) - local_backups[0].mtime > BACKUP_FREQUENCY:
    print("Running HA backup")
    do_ha_backup()
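
# Connect to Dropbox; with a refresh token plus app key and secret, the SDK
# refreshes short-lived access tokens automatically.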
dbx = dropbox.Dropbox(oauth2_refresh_token=DBX_TOKEN, app_key=DBX_KEY, app_secret=DBX_SECRET)
# upload files not in dropbox
remote_backup_names = {f.name for f in list_dropbox_backups(dbx)}
for f in local_backups:
    if f.remote_name in remote_backup_names:
        continue # with next file
    print(f"Uploading {f.remote_name}")
    dropbox_upload(dbx, f)
# delete the oldest remote backups beyond REMOTE_RETENTION
for f in list_dropbox_backups(dbx)[REMOTE_RETENTION:]:
    print(f"Deleting remote backup {f.name}")
    dbx.files_delete_v2(f.path_lower)
# only keep the newest LOCAL_RETENTION backups locally
for f in local_backups[LOCAL_RETENTION:]:
    print(f"Deleting local backup at {f.path}")
    os.remove(f.path)
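
# To run this on a schedule, one option (not part of the script itself) is a cron
# entry along these lines -- the interpreter and script paths are hypothetical:
#   0 */6 * * * /usr/bin/python3 /home/pi/ha_dropbox_backup.py
# The script only triggers a new backup when the newest local one is older than
# BACKUP_FREQUENCY, so running it more often than that just re-syncs and prunes.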