backup to S3 as a shell command (uses s3cmd)
#!/bin/bash
##
# Backup Server to S3 script
#
# Creates a local cached backup of the source folder using rsync and then
# synchronises that with Amazon S3.
#
#
# It is assumed you have rsync installed on the server.
# This code uses s3cmd by Michal Ludvig. The version included in this package
# has been tested with this script and is known to work.
#
# You are free to download the latest version of s3cmd from http://s3tools.org
# if you wish, but there is no guarantee this script will work with it (although it should).
#
# WARNING: You are responsible for the charges Amazon makes to you. You should
# check your statement every day for the first week of running this script so
# you know what sort of charge to expect per month. Remember the first run
# of the backup will be more costly as there is no data on Amazon S3 yet.
#
# Author: Matt Harris, Easy-Designs LLC
# Copyright: Copyright (c) 2009 Easy-Designs LLC
# Since: Version 0.1
# License: MIT
#
# Change Log
# 0.2 | 17.Aug.2009 | Automated s3cfg creation from the bash script
# 0.1 | 15.Aug.2009 | Internal Development Version
##
# Setup procedure
# 1. Fill in the variables in the configuration section below
# 2. Run the sync script using ./syncfiles.sh (see the example crontab entry
#    below if you want to schedule it)
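#
# For example, to run the backup automatically every night at 2am, a crontab
# entry along the following lines could be used (a sketch only -- the path
# matches the placeholder used below; adjust it and the schedule to suit, and
# make sure the script is executable):
#
#   0 2 * * * /path/to/this/script/syncfiles.sh >> /path/to/this/script/backup.log 2>&1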
## CONFIGURATION
THIS_FOLDER="/path/to/this/script"
# path to store the local 'cached' backup copy
LOCAL_BACKUP_PATH="${THIS_FOLDER}/cached/"
# path where the files we want to backup are stored
SOURCE_PATH="/path/to/source"
# S3 bucketname to use
S3_BUCKET_NAME="YOUR_BUCKET_NAME"
# S3 path to backup folder
S3_BACKUP_PATH="S3_BUCKET_SUBFOLDER"
# S3 Access Key
S3_ACCESS_KEY="YOUR_ACCESS_KEY"
# S3 Secret Key
S3_SECRET_KEY="YOUR_SECRET_KEY"
# Should s3cmd use HTTPS? The default is "ON". Note that this isn't possible
# if your server is behind a proxy
S3_USE_HTTPS="ON"
# Path to the excludes file which tells us what not to backup
EXCLUDES_FILE="${THIS_FOLDER}/excludes.txt"
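# The excludes file is passed to rsync via --exclude-from and contains one
# exclude pattern per line. A minimal illustrative excludes.txt (the patterns
# below are examples only, not requirements) might look like:
#
#   *.log
#   tmp/
#   cache/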
# The script tells rsync to delete any files which have been removed from the
# source folder. Changing this to "OFF" will mean rsync won't delete files
# removed from the source folder. The default is "ON"
RSYNC_DELETE_REMOVED="ON"
# The script tells rsync to delete any files which have been 'excluded' by the
# excludes file since the last sync. Changing this to "OFF" will mean rsync
# won't delete those excluded files. You may want to do this if you want to
# back up some files when they are first created, but not keep the current
# version backed up from then on. The default is "ON"
RSYNC_DELETE_EXCLUDED="ON"
# The script tells S3 to delete any files which have been removed from the
# local cache. This helps keep the cost of backup down but means a deleted file
# cannot be recovered. Changing this to "OFF" will mean S3 won't delete files
# removed from the local cache folder. The default is "ON"
S3_DELETE_REMOVED="ON"
## STOP EDITING
# Internal variables
s3cfg="${THIS_FOLDER}/${S3_BUCKET_NAME}.s3cfg"
# does the s3 configuration file exist?
if [ ! -f "$s3cfg" ]; then
    echo "S3 configuration doesn't exist. Creating it now."
    # the s3 configuration doesn't exist, so create it by replacing the
    # variables in the template
    if [[ $S3_USE_HTTPS == "ON" ]]; then
        S3_USE_HTTPS="TRUE"
    else
        S3_USE_HTTPS="FALSE"
    fi
    ARGS="-e 's:%S3_ACCESS_KEY%:${S3_ACCESS_KEY}:g' -e 's:%S3_SECRET_KEY%:${S3_SECRET_KEY}:g' -e 's:%S3_USE_HTTPS%:${S3_USE_HTTPS}:g'"
    CMD="sed ${ARGS} ${THIS_FOLDER}/../s3cfg.template > ${s3cfg}"
    eval $CMD
fi
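# The s3cfg.template used above is assumed to be a standard s3cmd configuration
# file with the credential values replaced by the %...% placeholders that the
# sed command fills in. A minimal sketch of such a template (an assumption --
# check the template shipped with this package) would be:
#
#   [default]
#   access_key = %S3_ACCESS_KEY%
#   secret_key = %S3_SECRET_KEY%
#   use_https = %S3_USE_HTTPS%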
# build the rsync command
rsync_cmd="rsync -a ${SOURCE_PATH} ${LOCAL_BACKUP_PATH} --exclude-from=${EXCLUDES_FILE}"
rsync_msg="Rsyncing ${SOURCE_PATH} to ${LOCAL_BACKUP_PATH}."
if [[ "$RSYNC_DELETE_REMOVED" == "ON" ]]; then
rsync_cmd="${rsync_cmd} --delete"
rsync_msg="${rsync_msg} Removed files will be deleted."
fi
if [[ "$RSYNC_DELETE_EXCLUDED" == "ON" ]]; then
rsync_cmd="${rsync_cmd} --delete-excluded"
rsync_msg="${rsync_msg} Excluded files will be deleted."
fi
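# With the default settings above, the assembled command is equivalent to:
#   rsync -a ${SOURCE_PATH} ${LOCAL_BACKUP_PATH} --exclude-from=${EXCLUDES_FILE} --delete --delete-excluded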
echo "$(date -u): ${rsync_msg}"
# run rsync
eval $rsync_cmd
echo "$(date -u): RSync Complete"
# build the s3 command
s3_cmd="${THIS_FOLDER}/../s3cmd-0.9.9/s3cmd"
# now try and create the bucket
CMD="${s3_cmd} --config $s3cfg mb s3://${S3_BUCKET_NAME}"
echo "Attempting to create bucket ${S3_BUCKET_NAME}"
eval $CMD
if [[ "$S3_DELETE_REMOVED" == "ON" ]]; then
s3_cmd="${s3_cmd} --delete-removed"
fi
s3_cmd="${s3_cmd} sync -r --config $s3cfg --no-progress ${LOCAL_BACKUP_PATH} s3://${S3_BUCKET_NAME}/${S3_BACKUP_PATH}"
echo "$(date -u): Sending to S3"
# run s3cmd
eval $s3_cmd
echo "$(date -u): Done"