@scottwater
Created February 22, 2024 18:40
SQLite Backup Script
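
Both scripts read their configuration from environment variables: S3 credentials, the target bucket, a passphrase for symmetric GPG encryption, and the directory that holds the SQLite databases. A minimal sketch of that configuration, with placeholder values that are not part of the original gist:

export BACKUP_S3_KEY="placeholder-access-key-id"          # S3 access key id
export BACKUP_S3_SECRET="placeholder-secret-key"          # S3 secret access key
export BACKUP_S3_BUCKET="my-backup-bucket"                # target bucket name (placeholder)
export BACKUP_S3_DB_PASSPHRASE="placeholder-passphrase"   # passphrase for symmetric gpg encryption
export SQLITE_DATABASE_DIRECTORY="/path/to/databases"     # directory containing the *.sqlite3 files

The first script assumes these are already exported in the environment that runs it; the second loads them itself via Hatchbox and asdf.
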
#!/bin/bash
set -e

s3_key=$BACKUP_S3_KEY
s3_secret=$BACKUP_S3_SECRET
bucket=$BACKUP_S3_BUCKET
backup_db_passphrase=$BACKUP_S3_DB_PASSPHRASE
data_directory=$SQLITE_DATABASE_DIRECTORY

# Ensure every database backed up in this run shares the same date key.
date_key=$(date '+%Y-%m-%d-%H-%M-%S')

function backupToS3()
{
  database=$1
  database_file_name=$(basename -- "$database")
  database_name="${database_file_name%.*}"
  backup_file_name="/tmp/$database_name-backup-$date_key.sqlite3"
  gpg_backup_file_name="$database_name-$date_key.gpg"

  # Take a consistent snapshot via SQLite's online backup, then compress and encrypt it.
  sqlite3 "$database" ".backup $backup_file_name"
  gzip "$backup_file_name"
  gpg --yes --batch --passphrase="$backup_db_passphrase" --output "/tmp/$gpg_backup_file_name" -c "$backup_file_name.gz"

  # Sign the request (AWS Signature Version 2) and upload the encrypted archive to S3.
  date=$(date +"%a, %d %b %Y %T %z")
  content_type='application/tar+gzip'
  string="PUT\n\n$content_type\n$date\n/$bucket/$gpg_backup_file_name"
  signature=$(echo -en "${string}" | openssl sha1 -hmac "${s3_secret}" -binary | base64)
  curl -X PUT -T "/tmp/$gpg_backup_file_name" \
    -H "Host: $bucket.s3.amazonaws.com" \
    -H "Date: $date" \
    -H "Content-Type: $content_type" \
    -H "Authorization: AWS ${s3_key}:$signature" \
    "https://$bucket.s3.amazonaws.com/$gpg_backup_file_name"

  # Remove the temporary files once the upload succeeds.
  rm "$backup_file_name.gz"
  rm "/tmp/$gpg_backup_file_name"
}

# Back up every SQLite database in the data directory.
for file in "$data_directory"/*.sqlite3; do
  backupToS3 "$file"
done
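
Restoring reverses the last two steps of the backup: decrypt with the same passphrase, then decompress. A sketch, assuming the object has already been downloaded from S3 and using a hypothetical file name (newer GnuPG versions may also need --pinentry-mode loopback when the passphrase is supplied in batch mode):

# Decrypt with the passphrase used by the backup script, then decompress.
gpg --batch --yes --passphrase "$BACKUP_S3_DB_PASSPHRASE" --output mydb-restored.sqlite3.gz --decrypt mydb-2024-02-22-18-40-00.gpg
gunzip mydb-restored.sqlite3.gz

# Sanity-check the restored database.
sqlite3 mydb-restored.sqlite3 "PRAGMA integrity_check;"
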
#!/bin/bash
set -e

# Hatchbox variant: change into the current release and load the app's
# environment variables via asdf before running the backup.
cd /home/deploy/howivscode/current
eval "$(/home/deploy/.asdf/bin/asdf vars)"

s3_key=$BACKUP_S3_KEY
s3_secret=$BACKUP_S3_SECRET
bucket=$BACKUP_S3_BUCKET
backup_db_passphrase=$BACKUP_S3_DB_PASSPHRASE
data_directory=$SQLITE_DATABASE_DIRECTORY

# Ensure every database backed up in this run shares the same date key.
date_key=$(date '+%Y-%m-%d-%H-%M-%S')

function backupToS3()
{
  database=$1
  database_file_name=$(basename -- "$database")
  database_name="${database_file_name%.*}"
  backup_file_name="/tmp/$database_name-backup-$date_key.sqlite3"
  gpg_backup_file_name="$database_name-$date_key.gpg"

  # Take a consistent snapshot via SQLite's online backup, then compress and encrypt it.
  sqlite3 "$database" ".backup $backup_file_name"
  gzip "$backup_file_name"
  gpg --yes --batch --passphrase="$backup_db_passphrase" --output "/tmp/$gpg_backup_file_name" -c "$backup_file_name.gz"

  # Sign the request (AWS Signature Version 2) and upload the encrypted archive to S3.
  date=$(date +"%a, %d %b %Y %T %z")
  content_type='application/tar+gzip'
  string="PUT\n\n$content_type\n$date\n/$bucket/$gpg_backup_file_name"
  signature=$(echo -en "${string}" | openssl sha1 -hmac "${s3_secret}" -binary | base64)
  curl -X PUT -T "/tmp/$gpg_backup_file_name" \
    -H "Host: $bucket.s3.amazonaws.com" \
    -H "Date: $date" \
    -H "Content-Type: $content_type" \
    -H "Authorization: AWS ${s3_key}:$signature" \
    "https://$bucket.s3.amazonaws.com/$gpg_backup_file_name"

  # Remove the temporary files once the upload succeeds.
  rm "$backup_file_name.gz"
  rm "/tmp/$gpg_backup_file_name"
}

# Back up every SQLite database in the data directory.
for file in "$data_directory"/*.sqlite3; do
  backupToS3 "$file"
done
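
Because this second variant changes into the Hatchbox release directory and loads the app's environment itself, it can be scheduled straight from cron. A sketch, with a hypothetical script path, schedule, and log location:

# Run the backup nightly at 03:00; path, schedule, and log file are placeholders.
0 3 * * * /bin/bash /home/deploy/backup_sqlite.sh >> /home/deploy/backup_sqlite.log 2>&1
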
Hat tip to Paweł Urbanek and his guide for doing this with PostgreSQL + Heroku, as well as to Chris Parson’s gist, for helping me write this.
