@treejamie
Created October 24, 2012 12:00
upgraded to encrypt backups and designed to be run hourly, daily, weekly or monthly
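For example, from cron (the script path, database names and password below are illustrative):

    0 * * * *  /usr/local/bin/backup.py -t hourly -e mysql mydb
    30 3 * * * /usr/local/bin/backup.py -t daily -e postgres -p secret --s3 my-bucket mydb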
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
import optparse
import subprocess as sp
import gzip
from datetime import datetime
try:
    from boto.s3.connection import S3Connection, Location
    from boto.s3.key import Key
    from boto.exception import S3ResponseError
except ImportError:
    # boto is only needed for --s3; flag its absence so we can fail cleanly later
    S3Connection = None
#
#
# how to use the program
usage = 'usage: %prog '\
    '[-t hourly|daily|weekly|monthly] '\
    '[-e mysql|postgres] '\
    '[-p password] '\
    '[--s3 bucketname] '\
    'dbname'
#
#
# make the options
_cmd_parser = optparse.OptionParser(usage=usage)
_opt = _cmd_parser.add_option
_opt(
    '--s3',
    action='store',
    dest='s3',
    default=False,
    help='upload the backup to this s3 bucket [default: %default] - requires boto library')
_opt(
    '-p',
    '--password',
    action='store',
    dest='password',
    default=False,
    help='password to encrypt backups [default: %default]')
_opt(
    '-t',
    '--type',
    action='store',
    dest='type',
    default='daily',
    help='backup type: hourly|daily|weekly|monthly [default: %default]')
_opt(
    '-e',
    '--engine',
    action='store',
    dest='engine',
    default='mysql',
    help='database engine: mysql|postgres [default: %default]')
_cmd_options, _cmd_args = _cmd_parser.parse_args()
opt, args, parser = _cmd_options, _cmd_args, _cmd_parser
#
#
# go no further if no dbname was supplied
try:
    dbname = args[0]
except IndexError:
    parser.print_help()
    sys.exit()
#
#
# mysql or postgres
if opt.engine == 'mysql':
    dump = sp.check_output(['which', 'mysqldump']).strip()
    dump_cmd = [dump, dbname]
    ext = 'sql'
elif opt.engine == 'postgres':
    dump = sp.check_output(['which', 'pg_dump']).strip()
    dump_cmd = [dump, '-O', '--format=t', dbname]
    ext = 'dump'
else:
    parser.print_help()
    sys.exit()
#
#
# now get the dump
dump_process = sp.Popen(
    dump_cmd,
    stdout=sp.PIPE,
    stderr=sp.STDOUT)
#
#
# are we encrypting your backups, Madame?
if opt.password:
    openssl = sp.check_output(['which', 'openssl']).strip()
    encrypt_cmd = [
        openssl,
        'enc',
        '-e',
        '-aes-256-cbc',
        '-salt',
        '-pass',
        'pass:%s' % opt.password]
    encrypt_process = sp.Popen(
        encrypt_cmd,
        stdin=dump_process.stdout,
        stdout=sp.PIPE,
        stderr=sp.STDOUT)
    # close our copy of the pipe so the dump process sees SIGPIPE if openssl exits early
    dump_process.stdout.close()
    dump_output = encrypt_process.communicate()[0]
else:
    dump_output = dump_process.communicate()[0]
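#
# an encrypted dump can be decrypted again with the matching openssl
# command after gunzipping, e.g. (filename and password illustrative):
#   openssl enc -d -aes-256-cbc -pass pass:secret -in mydb.sql -out mydb-plain.sql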
#
#
# set the date format
if opt.type == 'hourly':
    dateformat = '%H'
elif opt.type == 'daily':
    dateformat = '%A'
elif opt.type == 'weekly':
    dateformat = '%W'
elif opt.type == 'monthly':
    dateformat = '%B'
else:
    parser.print_help()
    sys.exit()
#
#
# now name it
name = '%s-%s.%s.gz' % (
    dbname,
    datetime.now().strftime(dateformat).lower(),
    ext)
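#
# the date token makes the backups self-rotating: %H (00-23) keeps at most
# 24 hourly files, %A (weekday name) keeps 7 daily files, %W (week number)
# keeps up to 53 weekly files and %B (month name) keeps 12 monthly files -
# each new run overwrites the file from the previous cycle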
#
#
# make sure we have somewhere to write it
local_root = os.path.join(
    os.environ['HOME'],
    'backups',
    dbname,
    opt.type)
if not os.path.exists(local_root):
    os.makedirs(local_root)
#
#
# write to a gzip file
local_path = os.path.join(local_root, name)
with gzip.open(local_path, 'wb') as f_out:
    f_out.write(dump_output)
#
#
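# S3Connection() without arguments relies on boto finding credentials on
# its own - typically the AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY
# environment variables or a boto config file such as ~/.boto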
if opt.s3:
    if S3Connection is None:
        sys.exit('The boto library is required for S3 uploads. '
                 'DB has been dumped locally, but not uploaded.')
    conn = S3Connection()
    #
    #
    # if the bucket doesn't exist make it
    try:
        bucket = conn.get_bucket(opt.s3, validate=True)
    except S3ResponseError as e:
        if e.code == "NoSuchBucket":
            # create_bucket takes the location as a keyword argument
            bucket = conn.create_bucket(opt.s3, location=Location.EU)
        else:
            sys.exit('Error getting/creating bucket. DB has been dumped '
                     'locally, but it has not been uploaded to S3. '
                     'Try creating the bucket via the AWS control panel.')
    #
    #
    # make the s3 path
    s3_root = os.path.join(
        'backups',
        dbname,
        opt.type)
    #
    #
    # make the bucket private
    bucket.set_acl('private')
    #
    #
    # make an s3 key
    k = Key(bucket)
    k.key = '%s/%s' % (s3_root, name)
    #
    #
    # upload it and mark the key private too
    k.set_contents_from_filename(local_path)
    bucket.set_acl('private', k.key)
    #
    #
    # delete the local file - after all this is a remote backup!
    os.remove(local_path)
# exit normally
sys.exit()
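#
# restoring is the reverse of the pipeline above (filenames illustrative):
#   gunzip ~/backups/mydb/daily/mydb-monday.sql.gz
#   mysql mydb < mydb-monday.sql            # plain mysql dump
#   pg_restore -d mydb mydb-monday.dump     # postgres tar-format dump
# encrypted dumps need the openssl decryption step shown earlier first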