## Steps
1. Run `./backup.sh <ip> [<ip> <ip> ...]` to deploy the scripts to each node.
2. In the OpsCenter UI, schedule a backup with the backup script path configured.
## Files
#!/bin/bash
# Deploy the backup scripts to each node given on the command line, then run
# the setup script there.
# Usage: ./backup.sh <ip> [<ip> ...]
set -u

SSH_USER=ubuntu
SSH_KEY=key.pem

if (( $# == 0 )); then
  echo "Usage: $0 <ip> [<ip> ...]" >&2
  exit 2
fi

for IP in "$@"; do
  echo "Copying scripts to $IP..."
  # *_script.sh intentionally globs to every local *_script.sh file.
  scp -i "$SSH_KEY" *_script.sh "$SSH_USER@$IP":~/ \
    || { echo "scp to $IP failed" >&2; exit 1; }
  echo "Executing setup script on $IP..."
  ssh -i "$SSH_KEY" "$SSH_USER@$IP" '~/setup_post_backup_script.sh' \
    || { echo "setup script failed on $IP" >&2; exit 1; }
done
#!/bin/bash
# Install the post-backup hook and its dependencies on a DataStax node.
# Run on the remote host by backup.sh; expects post_backup_script.sh in $HOME.
set -e

cd ~/
# Install the hook where the datastax-agent looks for backup scripts.
sudo mv post_backup_script.sh /usr/share/datastax-agent/bin/backup-scripts/post_backup_script
cd /usr/share/datastax-agent
# Executable by everyone, writable only by root (was 777 — world-writable).
sudo chmod 755 bin/backup-scripts/post_backup_script
# NOTE(review): --no-check-certificate disables TLS verification; kept from the
# original because the download host's certificate chain was broken, but verify
# the fetched file (or pin a checksum) if possible.
sudo wget https://s3afe.googlecode.com/files/s3afe.py --no-check-certificate
sudo chmod 755 s3afe.py
sudo apt-get install -y python-pip
sudo pip install -U boto
#!/bin/bash
# An example custom post-snapshot script.
#
# Post-snapshot scripts will not be called with any command line arguments,
# but will receive the list of files included in the snapshot through stdin,
# one file per line.
#
# The script should exit with a status of 0 if all operations complete
# successfully. Otherwise, it should exit with a non-zero status to indicate
# failure.

ACCESS_KEY_ID=__key__
SECRET_ACCESS_KEY=__secret_key__
BUCKET_ACL=private          # was misspelled BUCLET_ACL
BUCKET_NAME=__bucket_name__

status=0

# Each line from stdin contains the name of an SSTable file or a schema
# definition file that was in the most recent snapshot; read them in and
# back them up. IFS= / -r preserve whitespace and backslashes in filenames.
while IFS= read -r sstable_name; do
  OF="$sstable_name-$(date +%Y%m%d).tgz"
  # -z added: the file is named .tgz, so actually gzip it (original wrote a
  # plain tar). The -n argument was '$"OF"' — Bash locale-translation quoting
  # that uploaded under the literal name "OF"; it must be "$OF".
  if tar -czvf "$OF" "$sstable_name" \
      && python s3afe.py -f "$OF" -n "$OF" -b "$BUCKET_NAME" \
           -a "$BUCKET_ACL" -k "$ACCESS_KEY_ID" -s "$SECRET_ACCESS_KEY"; then
    echo "$sstable_name" >> /tmp/sstables
  else
    echo "backup of $sstable_name failed" >&2
    status=1
  fi
done

# Per the contract above: non-zero if any file failed to archive or upload.
exit "$status"