Web server backup to S3 of files and databases


Backs up all MySQL databases and the following folders:

  • /etc/
  • /var/www/
  • /var/spool/cron/

Running the script regularly

30 1 * * * /bin/bash /etc/full-backup.sh >> /var/log/backuper.log
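The entry above runs the backup every day at 01:30 and appends the script's output to /var/log/backuper.log. One way to install it for root (a sketch; running as root is assumed so the script can read /etc and the cron spool):

sudo crontab -e
# then paste the 30 1 * * * line above into the editor and save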

full-backup.mysql.cnf (stored at /etc/full-backup.mysql.cnf, referenced by the script below):

[client]
user = <user>
password = <password>
host = <host>
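This file holds the database password in plain text, so it should be readable by root only. A minimal hardening sketch (the path matches MYSQL_CONF_FILE in the script below):

sudo chown root:root /etc/full-backup.mysql.cnf
sudo chmod 600 /etc/full-backup.mysql.cnf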
full-backup.sh

#!/bin/bash
# Check latest version on
# https://gist.github.com/SpartakusMd/9387ddfb6c1f2ccc1ce15c49d638e82a
BACKUPS_FOLDER="/backups"
SUFFIX=$(date +%F)
TARGET_FOLDER="$BACKUPS_FOLDER/$SUFFIX"
DAYS_TO_KEEP_THE_BACKUPS=60
MYSQL_CONF_FILE="/etc/full-backup.mysql.cnf"
S3_BUCKET="server-backups"
# Create the output directory (with parent directories, "-p") if it does not exist
if [ ! -d "$TARGET_FOLDER" ]; then
    sudo mkdir -p "$TARGET_FOLDER"
fi
# Remove backups older than the retention period.
# "-mindepth 1" keeps find from matching (and deleting) $BACKUPS_FOLDER itself.
function cleanupBackups {
    find "$BACKUPS_FOLDER" -mindepth 1 -maxdepth 1 -type d -mtime +"$DAYS_TO_KEEP_THE_BACKUPS" -exec rm -rf {} \;
}
# Create backups of the files
function backupFiles {
    # Archive from / with relative paths so the archives have no leading "/".
    # tar -czf writes to the archive file, not stdout, so no redirect is needed.
    cd /
    sudo tar -czf "$TARGET_FOLDER/etc.tar.gz" "etc/"
    sudo tar -czf "$TARGET_FOLDER/www.tar.gz" "var/www/"
    sudo tar -czf "$TARGET_FOLDER/cron.tar.gz" "var/spool/cron/"
}
# Create a backup of the databases
function backupDatabases {
    # Retrieve all database names except the system schemas. Use sudo here to skip the root password.
    # The pattern is anchored so user databases whose names merely contain "sys" or "mysql" are not skipped by accident.
    dbs=$(sudo mysql --defaults-extra-file="$MYSQL_CONF_FILE" --batch --skip-column-names -e "SHOW DATABASES;" | grep -E -v "^(information_schema|performance_schema|sys|mysql)$")
    # Create a temporary directory with the "-d" option
    tmp=$(mktemp -d)
    # Create the output file name
    out="$TARGET_FOLDER/databases.tar.gz"
    # Dump each database into the temporary directory as <database>.sql
    for db in $dbs; do
        sudo mysqldump --defaults-extra-file="$MYSQL_CONF_FILE" --databases "$db" > "$tmp/$db.sql"
    done
    cd "$tmp"
    sudo tar -czf "$out" ./*
    cd "/tmp/"
    sudo rm -rf "$tmp"
}
# Copy the backups to S3 (assumes the AWS CLI is installed and configured with write access to the bucket)
function syncWithS3 {
    aws s3 sync "$TARGET_FOLDER" "s3://$S3_BUCKET/$SUFFIX"
}
echo "Start backuping: $(date +'%F %T')"
cleanupBackups
backupFiles
backupDatabases
syncWithS3
echo "End backuping: $(date +'%F %T')"
echo ""
echo ""