@diegofcornejo
Last active November 17, 2023 02:53
Cloudclusters.io: Back up a PostgreSQL database and upload it to AWS S3
#!/bin/bash
CLOUDCLUSTERS_HOME="/cloudclusters"
LOG_FILE="$CLOUDCLUSTERS_HOME/logs/db-backup.log"
SCRIPT_DIR="$CLOUDCLUSTERS_HOME/scripts"
BACKUP_DIR="$CLOUDCLUSTERS_HOME/backups"
S3_BUCKET="s3://<bucket_name>"
DB_NAME="<dbname>"
DB_USER="<dbuser>"
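# Assumes the AWS CLI is installed and configured with credentials that allow ses:SendEmail and s3:PutObject on the bucket above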
# Logging function
log() {
  echo "$(date +'%Y-%m-%d %H:%M:%S') - $1" >> "$LOG_FILE"
}
# Function to send email
send_email() {
  local subject="$DB_NAME - $1"
  local body="$2"
  local message_json="$SCRIPT_DIR/message.json"
  echo "{
    \"Subject\": {
      \"Data\": \"$subject\",
      \"Charset\": \"UTF-8\"
    },
    \"Body\": {
      \"Text\": {
        \"Data\": \"$body\",
        \"Charset\": \"UTF-8\"
      }
    }
  }" > "$message_json"
  aws ses send-email --from "DB Backup System <notifications@diegocornejo.com>" --destination "file://$SCRIPT_DIR/destination.json" --message "file://$message_json" --no-cli-pager >> "$LOG_FILE" 2>&1
}
# Function to handle errors
handle_error() {
  local step_description="$1"
  local error_message="$(tail -n 1 "$LOG_FILE")"
  log "Error in $step_description: $error_message"
  send_email "Backup Failure" "An error occurred during $step_description. Please check the log file for details."
  exit 1
}
# Start script
log "-----------------------------------"
log "Starting backup of $DB_NAME database"
# Set output file name
output_filename="backup-$(date +'%Y-%m-%d-%R').sql"
log "Output filename: $output_filename"
# Dump database
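# -F c = custom-format archive (compressed, restorable with pg_restore), -b = include large objects, -v = verbose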
if ! pg_dump -U "$DB_USER" -d "$DB_NAME" -F c -b -v -f "$BACKUP_DIR/$output_filename" >> "$LOG_FILE" 2>&1; then
  handle_error "database dump"
fi
# Compress file
log "Compressing file..."
if ! gzip "$BACKUP_DIR/$output_filename" >> "$LOG_FILE" 2>&1; then
  handle_error "file compression"
fi
# Upload to S3
log "Uploading to S3..."
if ! aws s3 cp "$BACKUP_DIR/$output_filename.gz" "$S3_BUCKET/dbbackups/$output_filename.gz" >> "$LOG_FILE" 2>&1; then
  handle_error "S3 upload"
fi
# Remove local file
log "Removing file..."
if ! rm "$BACKUP_DIR/$output_filename.gz" >> "$LOG_FILE" 2>&1; then
  log "Warning: Failed to remove file - $(tail -n 1 "$LOG_FILE")"
fi
# Send success email
log "Backup finished successfully at $(date +'%Y-%m-%d-%R')"
send_email "Backup Success" "Backup completed successfully"
log "-----------------------------------"
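Since pg_dump runs with -F c, each upload is a custom-format archive that pg_restore can read after download and decompression. A rough restore sketch, with placeholder bucket, file, database, and user names:

aws s3 cp "s3://<bucket_name>/dbbackups/backup-2023-11-17-02:53.sql.gz" .
gunzip backup-2023-11-17-02:53.sql.gz
pg_restore -U <dbuser> -d <dbname> --clean --if-exists backup-2023-11-17-02:53.sql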