Skip to content

Instantly share code, notes, and snippets.

@devrishik
Created May 7, 2021 22:19
Show Gist options
  • Save devrishik/ee188a4a2cfb80f65e73f721bebad49a to your computer and use it in GitHub Desktop.
Django docker dbbackup connector
#!/usr/bin/env python3
"""
ABSTRACT:
---------
Backup utility for all databases
In a microservice world we have postgres running in one docker container and the services on their own containers.
Such a scenario calls for backing up multiple databases other than those managed by Django.
This file presents an extended connector to django-dbbackup's PgDumpBinaryConnector to use
`pg_dump/pg_restore` tools from the postgres container only
This connector should be paired with a management command
Usage: PgDockerDump(database_name).dump(outfile_path)
Usage: PgDockerDump(database_name).restore(restore_file_path)
"""
import logging
import tarfile
import time
from io import BytesIO
from tempfile import SpooledTemporaryFile

from dbbackup.db.postgresql import PgDumpBinaryConnector
from dbbackup.management.commands._base import BaseDbBackupCommand
from django.conf import settings
from docker.errors import APIError

# fix the line, use py docker api: https://docker-py.readthedocs.io/en/4.2.0/containers.html
from <project_root>.docker import PostgresContainer
# Module-level logger. The original stubbed this out with ``logger = None``,
# which made every ``logger.error(...)`` call raise AttributeError at the
# exact moment an error needed reporting.
logger = logging.getLogger(__name__)
def read_bin_to_tar(path, filename):
    """Pack the binary file at *path* into an in-memory tar archive.

    Used to ship a dump file into the postgres container via docker's
    ``put_archive`` API, which only accepts tar streams.

    Args:
        path: host filesystem path of the file to read.
        filename: member name to give the file inside the archive.

    Returns:
        BytesIO positioned at offset 0 containing an uncompressed tar
        archive with a single member named *filename*.
    """
    with open(path, 'rb') as dumpbin:
        data = dumpbin.read()
    stream = BytesIO()
    # Context manager guarantees the archive trailer is written even if
    # addfile raises.  NOTE: the original called time.time() without
    # importing ``time`` — a guaranteed NameError at runtime.
    with tarfile.TarFile(fileobj=stream, mode='w') as tar:
        tarinfo = tarfile.TarInfo(name=filename)
        tarinfo.size = len(data)
        tarinfo.mtime = time.time()
        tar.addfile(tarinfo, BytesIO(data))
    stream.seek(0)
    return stream
class PgDockerDump(PgDumpBinaryConnector):
    """Run ``pg_dump``/``pg_restore`` for *database_name* inside the
    postgres docker container (rather than on the host), reusing the
    connection credentials of Django's ``default`` database.

    Usage::

        PgDockerDump(db_name).dump(outfile_path)
        PgDockerDump(db_name).restore(dump_file_path)
    """

    def __init__(self, database_name, **kwargs):
        """Build connector settings for *database_name* from the
        ``default`` database's host/port/user/password."""
        super().__init__('default', **kwargs)
        default_db = settings.DATABASES['default']
        self.con_settings = {
            'NAME': database_name,
            'HOST': default_db['HOST'],
            'PORT': default_db['PORT'],
            'USER': default_db['USER'],
            'PASSWORD': default_db['PASSWORD'],
        }
        # Borrow dbbackup's local-file writer so dump() can persist output.
        self.bcdm = BaseDbBackupCommand()
        self.bcdm.logger = logger
        self.container = PostgresContainer()
        self.restore_suffix = ''
        self.dump_filename = 'pg.dump'
        # Path inside the container where restore() stages the dump file.
        self.dump_dest = f'/tmp/{self.dump_filename}'

    @property
    def settings(self):
        """Connection settings dict consumed by PgDumpBinaryConnector."""
        return self.con_settings

    def run_command(self, *args, **kwargs):
        """Run *args[0]* (a pg_dump/pg_restore command line) inside the
        postgres container.

        Returns:
            (SpooledTemporaryFile holding stdout, stderr bytes).

        Raises:
            ValueError: when no database password is configured.
        """
        passwd = self.settings.get('PASSWORD')
        if not passwd:
            raise ValueError('No db password provided')
        cmd = args[0]
        tempfile = SpooledTemporaryFile(
            max_size=settings.DBBACKUP_TMP_FILE_MAX_SIZE,
            dir=settings.DBBACKUP_TMP_DIR)
        # BUGFIX: the original used ''.join(...), which glued the password
        # environment assignment onto the command name and produced e.g.
        # ``PGPASSWORD=secretpg_dump ...`` — an unrunnable shell line.
        out, err = self.container.exec_sh(
            'PGPASSWORD=%s %s' % (passwd, cmd), stdin=True)
        tempfile.write(out)
        tempfile.seek(0)
        return tempfile, err

    def write_file(self, fileobj, path):
        """Persist *fileobj* to the local *path* via dbbackup's writer."""
        return self.bcdm.write_local_file(fileobj, path)

    def dump(self, dest):
        """Dump the database and write the result to host path *dest*."""
        tempfile = self.create_dump()
        self.write_file(tempfile, dest)

    def restore(self, dest):
        """Restore the dump file at host path *dest* into the database.

        The dump is tarred in memory, copied into the container with
        ``put_archive``, fed to ``pg_restore`` via stdin redirection,
        and cleaned up afterwards.

        Raises:
            APIError: when the archive upload or pg_restore fails.
        """
        # Remove any stale dump left from a previous (failed) restore.
        self.container.exec_sh(f'rm -f {self.dump_dest}')
        tardata = read_bin_to_tar(dest, self.dump_filename)
        try:
            success = self.container.container.put_archive(
                '/tmp/', data=tardata)
            if not success:
                raise APIError('unable to put tar')
        finally:
            # BUGFIX: the original leaked the tar stream when put_archive
            # failed; always close it.
            tardata.close()
        # Redirect pg_restore's stdin from the staged dump file.
        self.restore_suffix = f'< {self.dump_dest}'
        ret, err = self.restore_dump(None)
        if err:
            data = ret.read()
            # Guard: the module logger may be unconfigured (None).
            if logger is not None:
                logger.error(data)
            raise APIError(data)
        self.container.exec_sh(f'rm -f {self.dump_dest}')
        ret.seek(0)
        return ret
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment