Copies AWS RDS audit logs to a local dir and to S3. Handles rotation to timestamp-based naming (so nothing is lost), monthly dirs, and S3 server-side encryption. Keeps a readable, easily monitored admin log.
#!/bin/bash
# currently supports server_audit only!!
# Requires:
# working awscli
# apt-get install lockfile-progs
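# (a quick pre-flight check one might run before scheduling this script;
#  illustrative only, not part of the script itself)
#   aws sts get-caller-identity && which lockfile-create lockfile-remove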
# required monitoring:
# logwatch on $admin_log for the word 'ERROR'
# log integrity check
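# (illustrative stand-in for the logwatch rule: any ERROR line in the admin
#  log should raise an alert; path matches $admin_log below)
#   grep ERROR /var/log/rds/rds_getlogs.log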
# locations:
# script log - /var/log/rds/rds_getlogs.log
# rds rotated logs local - /var/log/rds/<rds name>/<year>-<month>/server_audit.<log LastWritten stamp converted to %Y%m%d%H%M%S>.log
# rds current log - /var/log/rds/<rds name>/<year>-<month>/server_audit.log
# S3 - /var/log/rds is completely synced to the S3 bucket
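# (example schedule; install path and interval are assumptions, adjust to taste)
#   /etc/cron.d/rds_getlogs:  */30 * * * * root /usr/local/sbin/rds_getlogs.sh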
encrypt=1          # 1 = server-side encrypt objects on S3 (SSE-KMS)
log_dir='/var/log/rds'
bucket='epifx-rds-logs'
log_name='server_audit'
rds_db='prd-main'
admin_log="$log_dir/rds_getlogs.log"
lock_file='/var/run/rds_getlogs'
lock_retry=0       # lockfile-create retries before giving up
# couldn't find any way of getting an md5sum on the RDS side :( so using a file size range as verification
min_size=1000000
max_size=1010000
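# (the bounds presumably reflect the typical size of a rotated server_audit
#  file in this setup; tune them per instance)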
[ ! -d $log_dir ] && mkdir -p $log_dir
[ ! -d $log_dir/$rds_db ] && mkdir -p $log_dir/$rds_db
lock(){
    # take an exclusive lock so overlapping runs (e.g. from cron) can't collide
    lockfile-create -r $lock_retry -p $lock_file && return 0
    lec=$?
    log_out "ERROR: Can't get lock"
    exit $lec
}
unlock(){
    lockfile-remove $lock_file | log_pipe "lockfile-remove"
}
log_out(){ echo -e "`date +'%Y-%m-%d %H:%M:%S'` (pid $$) -- $1" >> $admin_log; }
log_pipe(){
    # timestamp every line read from stdin, add an optional "tag: " prefix, append to the admin log
    [ -n "$1" ] && p="$1: " || p=""
    while read data; do
        echo -e "`date +'%Y-%m-%d %H:%M:%S'` (pid $$) -- ${p}$data" >> $admin_log
    done
}
rds_get_log(){
    # input files - 1:rds_source 2:local_target
    aws rds download-db-log-file-portion --output text --no-paginate --db-instance-identifier $rds_db --log-file-name $1 > $2
}
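# (illustrative call; the rotated-file name and timestamps are hypothetical,
#  real names are taken from describe-db-log-files in main below)
#   rds_get_log audit/server_audit.log.1 /var/log/rds/prd-main/2018-03/server_audit.20180322120000.log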
main(){
    case $encrypt in
        1) log_out "Started operation (encryption enabled)"
           cmd_enc='--sse aws:kms';;
        *) log_out "Started operation (encryption disabled)"
           cmd_enc='';;
    esac
    lock
    # download all rotated logs; describe-db-log-files text output columns
    # are: <row key> <LastWritten, ms epoch> <LogFileName> <Size>
    skip_count=0
    while read k T F S; do
        dir="$log_dir/$rds_db/`date -d @$(($T/1000)) +%Y-%m`"
        [ ! -d $dir ] && mkdir -p $dir
        file="$dir/${log_name}.`date -d @$(($T/1000)) +%Y%m%d%H%M%S`.log"
        if [ -f $file ] && [ `stat -c %s $file` -ne 0 ]; then
            #log_out " old log (skip) - $F ($T) > $file"
            skip_count=$(($skip_count+1))
        else
            log_out " new log - $F ($T) > $file"
            rds_get_log $F $file
        fi
        # verify by size range (see note above); re-download once if out of range
        size=`stat -c %s $file`
        if [ $size -lt $min_size ] || [ $size -gt $max_size ]; then
            bad="${file}.bad.`date +%s`"
            mv ${file} ${bad}
            log_out " ERROR: bad file size (${size}b), moved to ${bad} and d/l again"
            log_out " d/l again - $F ($T) > $file"
            rds_get_log $F $file
        fi
        sleep 0.01
    done < <(aws rds describe-db-log-files --db-instance-identifier $rds_db --output text |\
        grep 'audit/server_audit\.log\.'|\
        sort -k 2 -n)
log_out " skipped $skip_count old files"
# downloading active log
log_out " active log - audit/server_audit.log > $log_dir/$rds_db/${log_name}.log"
rds_get_log audit/server_audit.log $log_dir/$rds_db/${log_name}.log
log_out " sync to S3 (aws s3 sync $log_dir/ s3://$bucket)"
aws s3 sync $cmd_enc $log_dir/ s3://$bucket | sed 's#\r#\n#g;' | sed '/^Completed/d;' | log_pipe "aws_s3_sync"
unlock
log_out "Finished operation"
}
main 2>&1 | log_pipe "ERROR"
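# (what a healthy run looks like in $admin_log; format follows log_out above,
#  values are illustrative)
#   2018-03-23 10:15:00 (pid 12345) -- Started operation (encryption enabled)
#   2018-03-23 10:15:04 (pid 12345) --  skipped 42 old files
#   2018-03-23 10:15:09 (pid 12345) -- Finished operation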