Created
May 12, 2021 11:47
-
-
Save patrickscottbest/0067acb258dcfe58059728eebbf5bb8e to your computer and use it in GitHub Desktop.
S3 Bucket Restore Script for Mixed Glacier Files
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/bin/bash
# Script to traverse a list of files and attempt to download if not present,
# and request a Glacier thaw if the object is unavailable.
# Can be run again once you suspect that Glacier has thawed your files.
# Ensure your "awsuser" account has the AWS credentials in place.
# Recommend running in nohup, this can be a long standing process.
# as root: nohup ./s3pullorthaw.bash > recovery.log 2>&1 &

# Set your bucket name
BUCKET=bucket_name

# Set the number of days you'd like to have the files thawed for.
DAYS=25

# Set the desired owner UID and GID for recovered files.
# NOTE: these are named TARGET_UID/TARGET_GID on purpose — $UID is a
# readonly builtin in bash, so the original "UID=1000" assignment failed
# and chown silently used the invoking user's UID instead.
TARGET_UID=1000
TARGET_GID=1000

# Set the user on the local system who holds the AWS credentials
AWSUSER=awsuser

# '|| [[ -n "$line" ]]' ensures a final line without a trailing newline
# is still processed rather than silently dropped by read.
while IFS= read -r line || [[ -n "$line" ]]; do
  if test -f "$line"; then
    echo
    echo "$line OK."
    echo
  else
    echo "$line NOT exists."
    # Dry Run
    #sudo -u "$AWSUSER" aws s3 cp "s3://$BUCKET/$line" "$line" --dryrun
    # Try a straight download first; if the object is still frozen in
    # Glacier the copy fails and we fall through to requesting a restore.
    # The restore-request JSON uses double quotes so $DAYS actually
    # expands — the original single-quoted form sent the literal text
    # '$DAYS' to AWS, which rejects it as invalid JSON.
    # chown runs directly since the script is documented to run as root.
    (sudo -u "$AWSUSER" aws s3 cp "s3://$BUCKET/$line" "$line" \
        && chown "$TARGET_UID:$TARGET_GID" "$line" \
        && echo Recovered.) \
      || sudo -u "$AWSUSER" aws s3api restore-object \
           --bucket "$BUCKET" --key "$line" \
           --restore-request "{\"Days\":$DAYS,\"GlacierJobParameters\":{\"Tier\":\"Expedited\"}}"
  fi
done < therelist.txt

echo "FINISHED SCRIPT"
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/bin/bash
# Script to traverse an AWS S3 bucket (prefix) and gather a list of object
# keys into therelist.txt, one key per line.
# Ensure your "awsuser" account has the AWS credentials in place.

# Set your bucket name
BUCKET=bucket_name

# Set the user on the local system who holds the AWS credentials
AWSUSER=awsuser

# 'aws s3 ls --recursive' prints: DATE TIME SIZE KEY. The original
# awk '{$1=$2=$3=""; print}' forced awk to rebuild $0 with single-space
# OFS, collapsing any run of spaces *inside* a key and corrupting the
# list. Instead, strip exactly the first three fields from the raw line
# so the key is preserved verbatim (no trailing sed cleanup needed).
sudo -u "$AWSUSER" aws s3 ls "s3://$BUCKET/folder_or_prefix/" --recursive \
  | awk '{ sub(/^[ \t]*[^ \t]+[ \t]+[^ \t]+[ \t]+[^ \t]+[ \t]+/, ""); print }' \
  > therelist.txt
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment.