Skip to content

Instantly share code, notes, and snippets.

@erincerys
Last active January 13, 2019 04:23
Show Gist options
  • Star 1 You must be signed in to star a gist
  • Fork 1 You must be signed in to fork a gist
  • Save erincerys/3f51119f3b3229168744af0525eaaff8 to your computer and use it in GitHub Desktop.
Backup directories and directory listings to a LUKS container and sync everything to S3
#!/bin/bash
## Description:
# Sync files to and create lists of directory contents in a LUKS container volume and upload it to S3
# Good for periodic backup jobs
# Supports rate limiting, encryption in transit and at rest and file path exclusions
## Usage:
# bash $0
## Dependencies
# Packages:
# - awscli
# - cryptsetup
# - gawk
# - sha256sum
# - grep
# - rsync
# Setup:
# - luks file container
# - configuration parameters below set properly
# - if this will be used in cron, you'll need to set environment variables for awscli in the crontab
# WARNING:
# If you experience bitrot or otherwise encounter corruption, there is no detection of it.
# You will overwrite the good data with the bad.
# This can be mitigated by using versioning, either implemented through this script or enabled in the S3 bucket.
## ---
## Configuration
Debug=0
# encrypted container
ScriptWorkingPath='/media/data/backups/syncops'
ContainerFileName='cloudsync.img'
ContainerFile="${ScriptWorkingPath}/${ContainerFileName}"
ContainerMapperName='cloudsync'
ContainerMountPath='/media/tmpcrypt'
ContainerKeyFile='/media/data/personal/keyfiles/cloudsync.pem'
# Marker file left behind when a run fails; its presence forces a full re-sync next run
RunFailedFile="${ScriptWorkingPath}/last_run_failed"
## Escape slashes and use globs in these file path arrays
# Path exclusions from objects within SourcePaths
ExcludedPaths=( \
'tech/virtualmachines' \
'tech/caches' \
)
# Do not leave trailing slash on the end of directory sources
SourcePaths=( \
'/media/raid/backups/configuration' \
'/media/raid/backups/drive-order-in-case.txt' \
'/media/raid/misc/papers' \
'/media/raid/personal/text' \
'/media/raid/personal/resumes' \
'/media/raid/personal/keyfiles' \
'/media/raid/personal/finances' \
'/media/raid/personal/health' \
'/media/raid/games/saves' \
'/media/raid/tech' \
'/media/raid/backups/Dropbox/Text/Markdown' \
)
DestinationPath="${ContainerMountPath}/"
# One log file per calendar day ($(...) preferred over legacy backticks)
LogFile="${ScriptWorkingPath}/$(date '+%Y%m%d').log"
# Paths to create file lists for; the listings are included in the backup
FileListSources=( \
'/media/raid/backups' \
'/media/raid/misc/applications' \
'/media/raid/misc/ebooks' \
'/media/raid/music' \
'/media/raid/video/anime' \
'/media/raid/video/porn' \
'/media/raid/games' \
)
# Files to exclude when syncing to S3 - eg if anything else was uploaded there, we want to exclude it so it's not deleted
S3ExcludedPaths=( \
'education.tar.gz' \
)
# SNS ARN for notifications
SnsTopic='arn:aws:sns:us-west-1:183912708525:Ian'
# awscli / s3cmd
AwsProfile='default'
#S3CmdConfigFile='/home/ian/.s3cfg'
AwsConfigFile='/home/ian/.aws/config'
AwsCredentialFile='/home/ian/.aws/credentials'
AwsRegion='us-west-1'
#ThrottleLimit=500 # how fast to upload the file to s3 in kilobytes
# s3 paths
S3BucketName='ians-backups'
S3Prefix='personal-workstation'
## awscli supports rate limiting
## set this in ~/.aws/config if desired
## s3 =
## max_bandwidth = 500KB/s
AwsCliOptions="--storage-class=STANDARD_IA --profile=${AwsProfile} --region=${AwsRegion}"
#S3CmdOptions="--storage-class=STANDARD_IA --region=${AwsRegion} --config=${S3CmdConfigFile} --limit-rate=${ThrottleLimit}k"
## END CONFIGURATION
# Turn on verbose CLI output when debugging is enabled
case "$Debug" in
  1)
    AwsCliOptions="${AwsCliOptions} --debug"
    #S3CmdOptions="${S3CmdOptions} --debug"
    ;;
  #*) AwsCliOptions="${AwsCliOptions} --only-show-errors" ;;
esac
## Stuff begins
# Point the AWS CLI at the configured config/credential files
#export AWS_DEFAULT_PROFILE=$AwsProfile
export AWS_CONFIG_FILE="$AwsConfigFile"
export AWS_SHARED_CREDENTIALS_FILE="$AwsCredentialFile"
# Publish a success/failure notification to the configured SNS topic.
# Arguments: $1 - error code: 0 = success, 1 = rsync failure, 2 = S3 copy failure
# Globals:   SnsTopic, AwsRegion, LogFile (read)
function SnsPublication () {
  local ErrorCode=$1
  local ScriptResultCode ScriptPostMessage
  if [ "$ErrorCode" -gt 0 ] ; then
    ScriptResultCode='Failure'
  else
    ScriptResultCode='Success'
    ScriptPostMessage="Successfully synchronized file changes to S3. See $LogFile for incremental changes"
  fi
  if [ "$ErrorCode" -eq 1 ] ; then
    ScriptPostMessage="Failed to complete rsync operation."
  elif [ "$ErrorCode" -eq 2 ] ; then
    ScriptPostMessage="Failed to copy to S3."
  fi
  # Publish to SNS topic to notify admin.
  # NOTE: redirection order fixed — the original '2>&1 >> $LogFile' duplicated
  # stderr onto the terminal BEFORE stdout was redirected, so errors never
  # reached the log. '>> file 2>&1' captures both streams.
  aws sns publish \
    --region "$AwsRegion" \
    --topic-arn "$SnsTopic" \
    --subject "Workstation backup job notification ($ScriptResultCode)" \
    --message "$ScriptPostMessage" \
    >> "$LogFile" 2>&1
}
# Flush, unmount, and lock the LUKS container.
# Globals: ContainerMountPath, ContainerMapperName (read)
function CloseContainer () {
  # Immediately flush pending cache writes for the container filesystem
  sync -f "${ContainerMountPath}"
  # Detach the filesystem, then close the dm-crypt mapping
  umount "$ContainerMountPath"
  cryptsetup luksClose "$ContainerMapperName"
}
# Unlock the encrypted container with its key file, then mount the mapped device
cryptsetup --key-file "$ContainerKeyFile" luksOpen "$ContainerFile" "$ContainerMapperName"
mount "/dev/mapper/${ContainerMapperName}" "$ContainerMountPath"
# Assemble the rsync one-liner from the configured sources and exclusions
CommandPrefix="rsync --archive --delete --checksum --verbose --log-file=${LogFile}"
if [[ $Debug -eq 1 ]] ; then
  CommandPrefix="${CommandPrefix} --msgs2stderr --debug=ALL"
fi
CommandExclusions=''
CommandSources=''
for ExcludedPath in "${ExcludedPaths[@]}" ; do
  CommandExclusions+=" --exclude=${ExcludedPath}"
done
for SourcePath in "${SourcePaths[@]}" ; do
  CommandSources+=" ${SourcePath}"
done
Command="$CommandPrefix $CommandExclusions $CommandSources $DestinationPath"
# Run the rsync, capturing stdout AND stderr to a temp log.
# NOTE: redirection order fixed — the original '2>&1 > $RsyncTempLog' left
# stderr on the terminal; '> file 2>&1' captures both streams.
RsyncTempLog=$(mktemp)
$Command > "$RsyncTempLog" 2>&1
RsyncReturnCode=$?
# Append the run output to the daily log on every path (the original skipped
# this on failure, losing the evidence)
cat "$RsyncTempLog" >> "$LogFile"
# Treat a non-zero rsync exit (previously ignored) or any 'error' line as failure
if [ "$RsyncReturnCode" -ne 0 ] || [ "$(grep -ic error "$RsyncTempLog")" -gt 0 ] ; then
  echo "There was an error. Check $LogFile for more info. Response is below."
  cat "$RsyncTempLog"   # the original echoed $RsyncResponse, which was never set
  SnsPublication 1      # code 1 = rsync failure (2 wrongly reported an S3 copy failure)
  CloseContainer
  exit 1
fi
# Create a recursive directory listing for each file-list source.
# NOTE: redirection order fixed — the original '2>&1 > file' left stderr on
# the terminal instead of in the .filelist output.
for fl in "${FileListSources[@]}" ; do
  # List name = trailing alpha/underscore run of the path (e.g. .../misc/ebooks -> ebooks)
  FlName=$(printf '%s\n' "$fl" | grep -Po '[a-zA-Z_]+$')
  ls -Rla "$fl" > "${ScriptWorkingPath}/${FlName}.filelist" 2>&1
done
# Hash the container's recursive directory listing to detect content changes
ContainerFileList=$(ls -Rla "$ContainerMountPath")
# Quoted expansion: the original unquoted 'echo $ContainerFileList' collapsed
# whitespace and could glob-expand filenames, destabilizing the checksum.
# NOTE: this changes the computed checksum once relative to values stored by
# the old script, triggering one extra container upload.
ContainerChecksum=$(printf '%s\n' "$ContainerFileList" | sha256sum | awk '{ print $1 }')
ContainerChecksumFile="${ScriptWorkingPath}/${ContainerMapperName}.sha256sum"
# Rotate the previous checksum aside. The original '[ -f ] ; mv ...' ran mv
# unconditionally ('[ -f ]' tests a non-empty string, and ';' ignores the
# result), which errored on the first run.
[ -f "$ContainerChecksumFile" ] && mv "$ContainerChecksumFile" "${ContainerChecksumFile}.old"
echo "$ContainerChecksum" > "$ContainerChecksumFile"
# If the container hash hasn't changed since last time, don't sync anything.
# Compare against the rotated '.old' copy — the freshly written checksum file
# always exists, so the original '-e ${ContainerChecksumFile}' test was dead.
if [[ \
! -e $RunFailedFile \
&& -e ${ContainerChecksumFile}.old \
&& "$(cat "${ContainerChecksumFile}.old")" == "$ContainerChecksum" \
]] ; then
echo "No changes since last sync!"
CloseContainer   # the original exited with the container still mounted and unlocked
exit 0
fi
# Sync working directory to S3
# Effectively copies file lists but NOT container or logs
for el in "${S3ExcludedPaths[@]}" ; do
  S3ExclusionArgs="${S3ExclusionArgs} --exclude ${el}"
done
# $S3ExclusionArgs and $AwsCliOptions are deliberately unquoted: they must
# word-split into separate CLI arguments. Paths are quoted.
AwsResponse=$(aws s3 sync --exclude "*.log" --exclude "$ContainerFileName" ${S3ExclusionArgs} --size-only $AwsCliOptions "$ScriptWorkingPath" "s3://${S3BucketName}/${S3Prefix}/" 2>&1)
AwsReturnCode=$?
# printf preserves the response verbatim; the original unquoted 'echo $AwsResponse'
# word-split and glob-expanded the CLI output before logging it
printf '%s\n' "$AwsResponse" >> "$LogFile"
[ "$AwsReturnCode" -ne 0 ] && { echo 'Error copying file lists to S3!' >> "$LogFile" ; Errors=1 ; }
# Sync logfile
AwsResponse=$(aws s3 cp $AwsCliOptions "$LogFile" "s3://${S3BucketName}/${S3Prefix}/" 2>&1)
AwsReturnCode=$?
printf '%s\n' "$AwsResponse" >> "$LogFile"
[ "$AwsReturnCode" -ne 0 ] && { echo 'Error copying log file to S3!' >> "$LogFile" ; Errors=1 ; }
# Upload the container itself only when needed: after a failed run, on the
# first run (no previous checksum), or when the contents changed.
# The original tested '! -e ${ContainerChecksumFile}', which is the file just
# written above and therefore always exists — the '.old' copy is the one that
# signals a first run.
if [[ \
-e ${RunFailedFile} \
|| ! -e ${ContainerChecksumFile}.old \
|| "$(cat "${ContainerChecksumFile}.old")" != "$ContainerChecksum" \
]] ; then
AwsTempLog=$(mktemp)
# s3cmd $S3CmdOptions put ${ScriptWorkingPath}/${ContainerFileName} s3://${S3BucketName}/${S3Prefix}/ 2>&1 > $AwsTempLog
# Subcommand options must follow 's3 cp': the original 'aws $AwsCliOptions s3 cp'
# passed --storage-class as a GLOBAL option, which the CLI rejects.
# Redirection order also fixed ('> file 2>&1' captures stderr too).
aws s3 cp $AwsCliOptions "${ScriptWorkingPath}/${ContainerFileName}" "s3://${S3BucketName}/${S3Prefix}/" > "$AwsTempLog" 2>&1
AwsReturnCode=$?
# Log this command's actual output (the original echoed the stale $AwsResponse
# left over from the previous sync step)
cat "$AwsTempLog" >> "$LogFile"
# Catch S3 errors by exit code or known failure strings in the output.
# The original -P pattern had an unbalanced '(' which makes grep abort.
if [[ \
$AwsReturnCode -ne 0 \
|| $(grep -Pic "(ERROR|XAmzContentSHA256Mismatch|Error copying|BadDigest|Client error|stacktrace|unable|does not match MD5 checksum|exit status is '1')" "$AwsTempLog") -gt 0 \
]] ; then
echo 'Error copying container to S3!' >> "$LogFile"
echo "There were errors copying to S3. Review the log at ${LogFile}"
SnsPublication 2
Errors=1
fi
fi
# Tear down the container, then report the overall result
CloseContainer
if [ "$Errors" == 1 ] ; then
  # Leave a marker so the next run knows this one failed and forces a full upload
  touch "${RunFailedFile}"
  exit 1
fi
# Success: clear any stale failure marker and notify via SNS
[ -e "$RunFailedFile" ] && rm "$RunFailedFile"
SnsPublication 0
echo 'All done successfully!'
exit 0
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment