@danielsan
Forked from BenBish/backup-to-s3.sh
Last active August 29, 2015 14:01

You must first install s3cmd, either via apt on Ubuntu or from the download page at http://s3tools.org/download.
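
If you go the apt route, the install on Ubuntu is typically just:

sudo apt-get install s3cmd

Alternatively, you can clone the GitHub project: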

git clone https://github.com/s3tools/s3cmd.git

If you choose to clone the GitHub project, remember to create a symlink in a directory that is on your $PATH, such as /usr/local/bin:

cd s3cmd
sudo ln -s $(pwd)/s3cmd /usr/local/bin/
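
Either way, you can check that s3cmd ends up on your PATH before continuing:

which s3cmd
s3cmd --version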

Then configure s3cmd with your AWS credentials:

s3cmd --configure
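
This writes your credentials to ~/.s3cfg by default. A quick sanity check afterwards is to list your buckets:

s3cmd ls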

Now go back to the project directory and copy the sample variables file to create the real one:

cp -a s3-backup-restore-variables.sample.sh s3-backup-restore-variables.sh

Edit the file and fill it in with your information.
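
Before the first run, make sure the local staging directory from the variables file exists (the script only checks that it is writable), then run the backup by hand to test it; a sample crontab entry for nightly runs is included further down. For example, with the sample values:

mkdir -p /tmp/backups/s3
bash ./backup-to-s3.sh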

#!/bin/bash
## Loading variables
. $(dirname $0)/s3-backup-restore-variables.sh
# Start backing up things.
## Check we can write to the backups directory
if [ -w "$TmpBackupDir" ]
then
    # Do nothing and move along.
    echo 'Found and is writable: '$TmpBackupDir
else
    echo "Can't write to: "$TmpBackupDir
    exit 1
fi
## Make the backup directory (Also make it writable)
echo ''
echo 'Making Directory: '$Today_TmpBackupDir
mkdir $Today_TmpBackupDir
chmod 0777 $Today_TmpBackupDir
## GZip the directories and put them into the backups folder
echo ''
for i in "${DirsToBackup[@]}"
do
    # Split the absolute path on "/" and keep the last component as the directory name
    array_filename=($(echo $i | tr "/" "\n"))
    directory_name=${array_filename[@]:(-1)}
    filename='dir-'$directory_name'.tar.gz'
    echo 'Backing up '$i' to '$Today_TmpBackupDir'/'$filename
    cd $(dirname $i)
    # The --exclude patterns only skip log/ and cache/ inside master_directory
    tar -czpf $Today_TmpBackupDir'/'$filename --exclude=master_directory/{log,cache}/* $directory_name
done
## Backup the MySQL databases
echo ''
for i in "${DBsToBackup[@]}"
do
    filename='mysql-'$i'.sql'
    echo 'Dumping DB '$i' to '$Today_TmpBackupDir'/'$filename
    mysqldump -h "${MySQLDetails[0]}" -u "${MySQLDetails[1]}" -p"${MySQLDetails[2]}" $i > $Today_TmpBackupDir'/'$filename
    tar -czpPf $Today_TmpBackupDir'/'$filename'.tar.gz' $Today_TmpBackupDir'/'$filename
    rm -R $Today_TmpBackupDir'/'$filename
done
## Alert admin that backup complete, starting sync
SUBJECT="["$SERVERNAME"] Backup Complete, Starting Sync! - "$EMAILDATE
EMAILMESSAGE="/tmp/emailmessage2.txt"
echo "Just to let you know that the backup script has finished and we're starting sync to s3 now."> $EMAILMESSAGE
mail -s "$SUBJECT" "$EMAIL" < $EMAILMESSAGE
## Sending new files to S3
echo ''
echo 'Syncing '$Today_TmpBackupDir' to '$S3URI$TodayDate'/'
s3cmd put --recursive $Today_TmpBackupDir $S3URI
if [ $? -ne 0 ]; then
    SUBJECT="s3cmd put failed on ["$SERVERNAME"]"
    EMAILMESSAGE="/tmp/emailmessage3.txt"
    echo "Just to let you know that the s3cmd put of '$Today_TmpBackupDir' failed."> $EMAILMESSAGE
    echo "You should check things out immediately." >>$EMAILMESSAGE
    mail -s "$SUBJECT" "$EMAIL" < $EMAILMESSAGE
fi
# Cleanup.
echo ''
echo 'Removing local expired backup: '$TmpBackupDir'/'${Expiry[0]}
rm -R $TmpBackupDir'/'${Expiry[0]}
if [ "$ExpiryDayOfMonth" != '01' ]; then
echo 'Removing remote expired backup: '$S3URI${Expiry[1]}'/'
s3cmd del $S3URI${Expiry[1]}'/' --recursive
else
echo 'No need to remove backup on the 1st'
fi
#echo 'Making '$Today_TmpBackupDir' permissions 0755'
#chmod 0755 $Today_TmpBackupDir
echo 'All Done! Yay! (",)'
## Notify admin that the script has finished
SUBJECT="["$SERVERNAME"] S3 Sync Complete! - "$EMAILDATE
EMAILMESSAGE="/tmp/emailmessage4.txt"
echo "Just to let you know that the s3 sync has now completed."> $EMAILMESSAGE
mail -s "$SUBJECT" "$EMAIL" < $EMAILMESSAGE
## Email Report of What Exists on S3 in Today's Folder
exec 1>'/tmp/s3report.txt'
s3cmd ls $S3URI$TodayDate/
SUBJECT="S3 Backup Report of ["$SERVERNAME"] : "$TodayDate
EMAILMESSAGE="/tmp/s3report.txt"
mail -s "$SUBJECT" "$EMAIL" < $EMAILMESSAGE
CREATE USER 'backup_user'@'localhost' IDENTIFIED BY 'SomeSecurePassword!';
GRANT SELECT, LOCK TABLES ON *.* TO 'backup_user'@'localhost';
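
These grants give the backup user just enough access for mysqldump (SELECT plus LOCK TABLES). One way to apply the file, assuming you saved it as createBackupUser.sql and have a root MySQL account available:

mysql -u root -p < createBackupUser.sql
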
0 3 * * * bash /home/backups/backup-to-s3.sh >/dev/null 2>&1
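
The crontab entry above runs the backup every night at 03:00 and discards stdout/stderr (the script emails its own status reports). To install it, add the line with crontab -e, or append it non-interactively, adjusting the path to wherever you placed the script:

(crontab -l 2>/dev/null; echo '0 3 * * * bash /home/backups/backup-to-s3.sh >/dev/null 2>&1') | crontab -

The restore script below pulls the most recent set of directory archives back down from S3 and extracts them into place:
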
#!/bin/bash
echo $0
## Loading variables
. $(dirname $0)/s3-backup-restore-variables.sh
## Check we can write to the backups directory
if [ -w "$TmpBackupDir" ]
then
    # Do nothing and move along.
    echo 'Found and is writable: '$TmpBackupDir
else
    echo "Can't write to: "$TmpBackupDir
    exit 1
fi
## Make the restore directory (also make it writable)
echo ''
echo 'Making Directory: '$RestoreDir
mkdir $RestoreDir
chmod 0777 $RestoreDir
cd $RestoreDir
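# Find the most recent dated backup prefix on S3: `s3cmd ls` on the bucket prints
# lines like 'DIR  s3://bucket/YY-MM-DD/', and the dates sort lexicographically,
# so the last entry is the newest; strip the 'DIR' label and surrounding spaces.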
last_dir=$(s3cmd ls $S3URI | tail -1 | sed 's,DIR ,,; s,^ *,,; s, *$,,')
echo "last_dir=${last_dir}"
# Copy the gzipped directory archives down from S3
for S3FILE in $(s3cmd ls $last_dir | grep dir- | cut -c 30-1000); do
    # cut keeps only the s3:// URI column of the listing (by character position)
    s3cmd get $S3FILE --force
done
#for F in $(ls *.tar.gz); do
# tar -xzf $F
#done
## Extract the gzipped directories back to their original locations
echo ''
for i in "${DirsToBackup[@]}"
do
    # Split the absolute path on "/" to get the folder name
    array_filename=($(echo $i | tr "/" "\n"))
    folder_name=${array_filename[@]:(-1)}
    # Compose the tar gzip filename
    tar_gz_file='dir-'$folder_name'.tar.gz'
    # Only extract the archive if it was downloaded
    if [ -f $RestoreDir/$tar_gz_file ]; then
        echo 'Extracting '$tar_gz_file' to '$i
        cd $(dirname $i)
        rm -rf $folder_name
        if [ -d $folder_name ]; then
            echo "PROBLEMS ON DELETION of $folder_name"
        fi
        tar -xzf $RestoreDir/$tar_gz_file
        if [ ! -d $folder_name ]; then
            echo "PROBLEMS after $RestoreDir/$tar_gz_file extraction. Folder $folder_name not found"
        fi
        if [ -d $folder_name/cache ]; then
            echo " ++++ CHANGING PERMISSION OF CACHE DIRECTORY ++++ "
            chmod -R 777 $folder_name/cache
        fi
    fi
done
echo 'All Done! Yay! (",)'
## Notify admin that the script has finished
SUBJECT="["$SERVERNAME"] S3 Sync Complete! - "$EMAILDATE
EMAILMESSAGE="/tmp/emailmessage4.txt"
echo "Just to let you know that the s3 restor is now completed."> $EMAILMESSAGE
mail -s "$SUBJECT" "$EMAIL" < $EMAILMESSAGE
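
Note that the restore script only fetches and extracts the dir-*.tar.gz archives; the mysql-*.sql.tar.gz dumps are left on S3. If you also need a database back, a rough manual restore could look like the sketch below (bucket name, date and database name are the sample values; because the backup used tar -P, the dump is stored with its absolute path and extracts back under /tmp/backups/s3/<date>/):

s3cmd get s3://your-s3-bucket-name/15-05-14/mysql-database1_name.sql.tar.gz --force
tar -xzPf mysql-database1_name.sql.tar.gz
mysql -u root -p database1_name < /tmp/backups/s3/15-05-14/mysql-database1_name.sql

Finally, the sample variables file (s3-backup-restore-variables.sample.sh) that both scripts source:
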
#!/dev/null
## Server Variables
SERVERNAME=`hostname`
## Email Variables
EMAILDATE=`date --date="today" +%y-%m-%d`
EMAIL="youremail@yourmaildomain"
SUBJECT="["$SERVERNAME"] Backup Script Started! - "$EMAILDATE
EMAILMESSAGE="./emailmessage1.txt"
# Set up the variables
### The URI of the S3 bucket.
S3URI='s3://your-s3-bucket-name/'
### An array of directories you want to back up (I included a few configuration directories too).
DirsToBackup=(
'/var/www/master_directory'
'/var/www/lib'
'/var/www/html'
)
### The databases you want to backup
DBsToBackup=(
'database1_name'
'database2_name'
)
### The directory we're going to store our backups in on this server.
TmpBackupDir='/tmp/backups/s3'
## The MySQL details
MySQLDetails[0]='localhost' # MySQL Host
MySQLDetails[1]='backup_user' # User
MySQLDetails[2]='DBPASS' # Password
## The expiry dates of the backups
### Only store 0 days of backups on the server.
### Changed to 0 days so the server doesn't fill up with unnecessary backups
Expiry[0]=`date --date="today" +%y-%m-%d`
### Only store 2 weeks worth of backups on S3
Expiry[1]=`date --date="2 weeks ago" +%y-%m-%d`
### ExpiryDayOfMonth is used to skip deletion when the expiring backup falls on the 1st of the month, so monthly backups are kept on S3
ExpiryDayOfMonth=`date --date="2 weeks ago" +%d`
### Today's date.
TodayDate=`date --date="today" +%y-%m-%d`
## Finally, setup the today specific variables.
Today_TmpBackupDir=$TmpBackupDir'/'$TodayDate
RestoreDir=$TmpBackupDir'/Restore_'$TodayDate
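
As a concrete illustration (dates are examples), if the scripts ran on 14 May 2015 with the sample values above, the derived variables would resolve to:

TodayDate='15-05-14'
Today_TmpBackupDir='/tmp/backups/s3/15-05-14'
Expiry[0]='15-05-14'     # today's local copy is removed after the sync
Expiry[1]='15-04-30'     # the S3 folder from two weeks ago is deleted
ExpiryDayOfMonth='30'
RestoreDir='/tmp/backups/s3/Restore_15-05-14'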