Backup Web Server and Database to Google Drive - Bash Script
#!/bin/bash
#Please note - This script was tested on CentOS 6/7 and RHEL 6/7.
#Check Internet Connection
IS=$(/bin/ping -c 5 4.2.2.2 | grep -c "64 bytes")
if [ "$IS" -gt 2 ]; then
internet_conn="1"
#Check whether the gdrive client is installed
file="/usr/bin/gdrive"
if [ -f "$file" ]
then
echo "Backup Process Starting......................."
else
#Download and install gdrive
if [ "$(getconf LONG_BIT)" = "64" ]
then
wget "https://docs.google.com/uc?id=0B3X9GlR6EmbnQ0FtZmJJUXEyRTA&export=download" -O /usr/bin/gdrive
chmod 777 /usr/bin/gdrive
gdrive list
clear
echo "Backup Process Starting......................."
else
wget "https://docs.google.com/uc?id=0B3X9GlR6EmbnQ0FtZmJJUXEyRTA&export=download" -O /usr/bin/gdrive
chmod 777 /usr/bin/gdrive
gdrive list
clear
echo "Backup Process Starting......................."
fi
fi
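#Note: on its first run gdrive has no stored credentials; the "gdrive list"
#call above normally prints a Google authorization URL and asks for a
#verification code, so run this script interactively once before automating it.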
######################################################################
# Change the server details below to your own database credentials, local
# backup path, remote backup folder name, web server directory path, and
# email address.
######################################################################
#Database credentials
user="root"
password="admin@123"
host="localhost"
db_name="--all-databases"
#Define local path for backups
BACKUPPATH="/tmp/Web_Server_Backup"
#Define remote backup path
BACKUPPATHREM="Web-Backup"
#Web Server Folder Path
SITESTORE="/var/www"
#Email address for the backup status report
EMAIL="ashutoshsmaurya@gmail.com"
######################################################################
#Date prefix
DATEFORM=$(date +"%Y-%m-%d")
#Days to retain - In the DAYSKEEP variable you can specify how many days of
#backups you would like to keep; any older ones will be deleted from Google Drive.
DAYSKEEP=7
#Calculate days as filename prefix
DAYSKEPT=$(date +"%Y-%m-%d" -d "-$DAYSKEEP days")
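#Example (illustrative values): with DAYSKEEP=7 and today being 2019-06-15,
#DAYSKEPT evaluates to 2019-06-08, so backups prefixed 2019-06-08 are the ones removed below.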
#Use the web root folder name (e.g. www) as the site name - the whole web root is backed up as a single archive
SITELIST=($(cd $SITESTORE; echo $PWD | rev | cut -d '/' -f 1 | rev))
#Make sure the backup folder exists
mkdir -p $BACKUPPATH
#For Cleanup
CLEAN=rm
#Check remote backup folder exists on gdrive
BACKUPSID=$(gdrive list --no-header | grep $BACKUPPATHREM | grep dir | awk '{ print $1}')
if [ -z "$BACKUPSID" ]; then
gdrive mkdir $BACKUPPATHREM
BACKUPSID=$(gdrive list --no-header | grep $BACKUPPATHREM | grep dir | awk '{ print $1}')
fi
#Start the loop
for SITE in "${SITELIST[@]}"; do
#Delete old backup, get folder id and delete if exists
OLDBACKUP=$(gdrive list --no-header | grep $DAYSKEPT-$SITE | grep dir | awk '{ print $1}')
if [ ! -z "$OLDBACKUP" ]; then
gdrive delete $OLDBACKUP
fi
#Create the local backup folder if it doesn't exist
if [ ! -e $BACKUPPATH/$SITE ]; then
mkdir $BACKUPPATH/$SITE
fi
#Enter the Web folder
cd $SITESTORE/
#Back up the Web folder
tar -czf $BACKUPPATH/$SITE/$DATEFORM-$SITE.tar.gz .
#Back up the MySQL database
mysqldump --user=$user --password=$password --events --ignore-table=mysql.event --host=$host $db_name | gzip > $BACKUPPATH/$SITE/$DATEFORM-$SITE.sql.gz
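#Note (suggestion, not part of the original script): passing --password on the
#command line can expose it to other local users via "ps"; mysqldump can read
#the credentials from an option file instead, for example:
#  mysqldump --defaults-extra-file=/root/.my.cnf --events --host=$host $db_name
#where /root/.my.cnf is a hypothetical file with a [client] section containing
#user= and password= lines.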
#Get current folder ID
SITEFOLDERID=$(gdrive list --no-header | grep $SITE | grep dir | awk '{ print $1}')
#Create the remote folder if it doesn't exist
if [ -z "$SITEFOLDERID" ]; then
gdrive mkdir --parent $BACKUPSID $SITE
SITEFOLDERID=$(gdrive list --no-header | grep $SITE | grep dir | awk '{ print $1}')
fi
#Upload the web server data tarball
gdrive upload --parent $SITEFOLDERID --delete $BACKUPPATH/$SITE/$DATEFORM-$SITE.tar.gz
#Upload the MySQL database dump
gdrive upload --parent $SITEFOLDERID --delete $BACKUPPATH/$SITE/$DATEFORM-$SITE.sql.gz
#Web folder log and email format
echo "Hi," >> /tmp/log01
echo " " >> /tmp/log01
gdrive list --no-header | grep $DATEFORM-$SITE.tar.gz | awk '{ print $1}' > /tmp/web_log.txt
[ -s /tmp/web_log.txt ] && echo "Web Server Backup Successfully Done.. File Name - $DATEFORM-$SITE.tar.gz" >> /tmp/log01 || echo "Web Server Data Backup Error..!!" >> /tmp/log01
#Database log and email format
gdrive list --no-header | grep $DATEFORM-$SITE.sql.gz | awk '{ print $1}' > /tmp/database_log.txt
[ -s /tmp/database_log.txt ] && echo "Database Backup Successfully Done.. File Name - $DATEFORM-$SITE.sql.gz" >> /tmp/log01 || echo "Database Backup Error..!!" >> /tmp/log01
echo " " >> /tmp/log01
echo " " >> /tmp/log01
echo "Thanks," >> /tmp/log01
echo "Ashutosh" >> /tmp/log01
#Log Status - Send Mail
cat -v /tmp/log01 | mutt -s "Backup Server Status Log - $(date)" $EMAIL
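#Note: the status mail above relies on the mutt mail client being installed and
#configured (e.g. "yum install mutt" on CentOS/RHEL); without it no report is sent.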
#Final Cleanup
chmod -R 777 /tmp/*
$CLEAN /tmp/web_log.txt
$CLEAN /tmp/database_log.txt
$CLEAN /tmp/log01
done
#Display internet connection error message
else
internet_conn="0"
echo "------(ERROR)---------(ERROR)-------------(ERROR)------------(ERROR)-------------“
echo "############### Please Check Your Internet Connection. #############"
echo "---------------------------------------------------------------------------------------------------------"
fi
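
To run the backup unattended, the script can be scheduled with cron once gdrive has been authorized interactively. A minimal sketch, assuming the script is saved as /root/gdrive-backup.sh (the path, schedule, and log file are placeholders, not part of the original gist):

chmod +x /root/gdrive-backup.sh
crontab -e
# add a line such as the following to run the backup daily at 02:30
30 2 * * * /root/gdrive-backup.sh >> /var/log/gdrive-backup.log 2>&1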