Skip to content

Instantly share code, notes, and snippets.

@RubberChickenParadise
Last active November 27, 2022 21:09
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save RubberChickenParadise/90b5997930f39b249ab51ee6b3ac190c to your computer and use it in GitHub Desktop.
AWS S3 upload for Sql Server
USE [msdb]
GO
/****** Object: Job [Maintenance - Upload Backups to S3] Script Date: 11/14/2022 10:50:03 AM ******/
-- Creates a SQL Server Agent job that syncs the local backup folder to an S3
-- bucket every 4 hours via AWS CLI v2 in a PowerShell job step.
-- Before running, replace the placeholders in the job step command:
--   IdGoesHere / KeyGoesHere      -> the access key pair from the stack's
--                                    Secrets Manager secret
--   ServerBackupFolderGoesHere    -> local backup directory
--   S3BucketGoesHere              -> destination bucket name
-- The whole script runs in one transaction; any failing step jumps to
-- QuitWithRollback so a partially-created job is never left behind.
BEGIN TRANSACTION
DECLARE @ReturnCode INT
SELECT @ReturnCode = 0
/****** Object: JobCategory [[Uncategorized (Local)]] Script Date: 11/14/2022 10:50:03 AM ******/
-- Ensure the default local job category exists (category_class=1 = JOB).
IF NOT EXISTS (SELECT name FROM msdb.dbo.syscategories WHERE name=N'[Uncategorized (Local)]' AND category_class=1)
BEGIN
EXEC @ReturnCode = msdb.dbo.sp_add_category @class=N'JOB', @type=N'LOCAL', @name=N'[Uncategorized (Local)]'
IF (@@ERROR <> 0 OR @ReturnCode <> 0) GOTO QuitWithRollback
END
DECLARE @jobId BINARY(16)
EXEC @ReturnCode = msdb.dbo.sp_add_job @job_name=N'Maintenance - Upload Backups to S3',
@enabled=1,
@notify_level_eventlog=0,
@notify_level_email=2, -- 2 = e-mail the operator when the job fails
@notify_level_netsend=0,
@notify_level_page=0,
@delete_level=0,
@description=N'No description available.',
@category_name=N'[Uncategorized (Local)]',
@owner_login_name=N'sa',
-- Requires an existing operator named 'Dba Alerts'; sp_add_job fails otherwise.
@notify_email_operator_name=N'Dba Alerts', @job_id = @jobId OUTPUT
IF (@@ERROR <> 0 OR @ReturnCode <> 0) GOTO QuitWithRollback
/****** Object: Step [Sync Files] Script Date: 11/14/2022 10:50:04 AM ******/
-- Single PowerShell step: export the AWS key pair into the step's environment,
-- then run `aws s3 sync` from the backup folder to the bucket.
-- NOTE(review): credentials are stored in plain text inside the job step;
-- consider an AWS CLI profile or instance role instead.
EXEC @ReturnCode = msdb.dbo.sp_add_jobstep @job_id=@jobId, @step_name=N'Sync Files',
@step_id=1,
@cmdexec_success_code=0,
@on_success_action=1, -- 1 = quit the job reporting success
@on_success_step_id=0,
@on_fail_action=2, -- 2 = quit the job reporting failure
@on_fail_step_id=0,
@retry_attempts=0,
@retry_interval=0,
@os_run_priority=0, @subsystem=N'PowerShell',
@command=N'$env:AWS_ACCESS_KEY_ID = ''IdGoesHere''
$env:AWS_SECRET_ACCESS_KEY = ''KeyGoesHere''
& ''C:\Program Files\Amazon\AWSCLIV2\aws.exe'' s3 sync ''ServerBackupFolderGoesHere'' s3://S3BucketGoesHere/',
@database_name=N'master',
@flags=0
IF (@@ERROR <> 0 OR @ReturnCode <> 0) GOTO QuitWithRollback
EXEC @ReturnCode = msdb.dbo.sp_update_job @job_id = @jobId, @start_step_id = 1
IF (@@ERROR <> 0 OR @ReturnCode <> 0) GOTO QuitWithRollback
-- Schedule: daily (@freq_type=4, @freq_interval=1), repeating on an hour unit
-- (@freq_subday_type=8) every 4 hours (@freq_subday_interval=4),
-- first run at 12:00:00 (@active_start_time=120000), no end date.
EXEC @ReturnCode = msdb.dbo.sp_add_jobschedule @job_id=@jobId, @name=N'every 4 hours',
@enabled=1,
@freq_type=4,
@freq_interval=1,
@freq_subday_type=8,
@freq_subday_interval=4,
@freq_relative_interval=0,
@freq_recurrence_factor=0,
@active_start_date=20221017,
@active_end_date=99991231,
@active_start_time=120000,
@active_end_time=235959
IF (@@ERROR <> 0 OR @ReturnCode <> 0) GOTO QuitWithRollback
-- Target the local server so the Agent actually runs the job.
EXEC @ReturnCode = msdb.dbo.sp_add_jobserver @job_id = @jobId, @server_name = N'(local)'
IF (@@ERROR <> 0 OR @ReturnCode <> 0) GOTO QuitWithRollback
COMMIT TRANSACTION
GOTO EndSave
QuitWithRollback:
IF (@@TRANCOUNT > 0) ROLLBACK TRANSACTION
EndSave:
GO
AWSTemplateFormatVersion: "2010-09-09"
# Per-server backup stack (step 1 of 2): an S3 bucket whose lifecycle rules key
# off object tags applied by a shared tagging Lambda, an IAM service user for
# the SQL Agent upload job, its access key stored in Secrets Manager, and
# permission for the tagging Lambda to be invoked by this bucket.
# (The bucket NotificationConfiguration is added in the step-2 template to
# avoid a circular dependency on the Lambda permission.)
Parameters:
  ServerName:
    Type: String

Resources:
  PackageBucket:
    Type: AWS::S3::Bucket
    DeletionPolicy: Retain  # keep the backups even if the stack is deleted
    Properties:
      BucketName: !Join [ '-', [ !Ref ServerName, 'backups' ] ]
      LifecycleConfiguration:
        Rules:
          # Transaction-log backups (.trn): short retention.
          - Id: Logs Backup Cleanup
            Status: Enabled
            TagFilters:
              - Key: logBackup
                Value: ""
            ExpirationInDays: 7
          # Ordinary nightly fulls: short retention.
          - Id: Nightly Backup Cleanup
            Status: Enabled
            TagFilters:
              - Key: nightlyBackup
                Value: ""
            ExpirationInDays: 7
          # First-of-week fulls: cool down to IA after 30 days, keep 90.
          - Id: Weekly Backup Cleanup
            Status: Enabled
            TagFilters:
              - Key: firstOfWeekBackup
                Value: ""
            ExpirationInDays: 90
            Transitions:
              - TransitionInDays: 30
                StorageClass: STANDARD_IA
          # First-of-month fulls: IA at 30 days, Glacier at 90, expire at 365.
          - Id: Monthly Backup Cleanup
            Status: Enabled
            TagFilters:
              - Key: firstOfMonthBackup
                Value: ""
            Transitions:
              - TransitionInDays: 30
                StorageClass: STANDARD_IA
              - TransitionInDays: 90
                StorageClass: GLACIER
            ExpirationInDays: 365

  # Service user the SQL Agent job authenticates as.
  S3User:
    Type: AWS::IAM::User
    Properties:
      UserName: !Join [ '_', [ 'svc', !Ref ServerName, 'backup' ] ]
      Policies:
        - PolicyName: WriteToS3
          PolicyDocument:
            Version: "2012-10-17"
            Statement:
              - Effect: Allow
                # NOTE(review): "s3:*" is broader than the policy name implies;
                # consider narrowing to s3:PutObject / s3:ListBucket.
                Action: "s3:*"
                Resource:
                  - !Sub arn:aws:s3:::${PackageBucket}
                  - !Sub arn:aws:s3:::${PackageBucket}/*

  UserCredentials:
    Type: AWS::IAM::AccessKey
    Properties:
      Status: Active
      UserName: !Ref S3User

  # Keep the key pair retrievable without re-generating it.
  # (!Ref on an AccessKey yields the access key ID.)
  UserCredentialsStored:
    Type: AWS::SecretsManager::Secret
    Properties:
      Name: !Sub /sqlBackupCredentials/${S3User}
      SecretString: !Sub '{"ACCESS_KEY":"${UserCredentials}","SECRET_KEY":"${UserCredentials.SecretAccessKey}"}'

  # Let this bucket invoke the shared tagging Lambda exported by the
  # sqlbackup-taggingLambda stack.
  LambdaBucketPermission:
    Type: AWS::Lambda::Permission
    Properties:
      Action: lambda:InvokeFunction
      FunctionName: !ImportValue "sqlbackup-taggingLambda:LambdaArn"
      Principal: s3.amazonaws.com
      SourceArn: !GetAtt PackageBucket.Arn
AWSTemplateFormatVersion: "2010-09-09"
# Per-server backup stack (step 2 of 2): identical to step 1 except the bucket
# now wires s3:ObjectCreated:* notifications to the shared tagging Lambda.
# Deploy step 1 first so the Lambda permission exists before the notification
# configuration references the function.
Parameters:
  ServerName:
    Type: String

Resources:
  PackageBucket:
    Type: AWS::S3::Bucket
    DeletionPolicy: Retain  # keep the backups even if the stack is deleted
    Properties:
      BucketName: !Join [ '-', [ !Ref ServerName, 'backups' ] ]
      # This is update number 2. Comment this out for the first update.
      NotificationConfiguration:
        LambdaConfigurations:
          - Event: s3:ObjectCreated:*
            Function: !ImportValue "sqlbackup-taggingLambda:LambdaArn"
      LifecycleConfiguration:
        Rules:
          # Transaction-log backups (.trn): short retention.
          - Id: Logs Backup Cleanup
            Status: Enabled
            TagFilters:
              - Key: logBackup
                Value: ""
            ExpirationInDays: 7
          # Ordinary nightly fulls: short retention.
          - Id: Nightly Backup Cleanup
            Status: Enabled
            TagFilters:
              - Key: nightlyBackup
                Value: ""
            ExpirationInDays: 7
          # First-of-week fulls: cool down to IA after 30 days, keep 90.
          - Id: Weekly Backup Cleanup
            Status: Enabled
            TagFilters:
              - Key: firstOfWeekBackup
                Value: ""
            ExpirationInDays: 90
            Transitions:
              - TransitionInDays: 30
                StorageClass: STANDARD_IA
          # First-of-month fulls: IA at 30 days, Glacier at 90, expire at 365.
          - Id: Monthly Backup Cleanup
            Status: Enabled
            TagFilters:
              - Key: firstOfMonthBackup
                Value: ""
            Transitions:
              - TransitionInDays: 30
                StorageClass: STANDARD_IA
              - TransitionInDays: 90
                StorageClass: GLACIER
            ExpirationInDays: 365

  # Service user the SQL Agent job authenticates as.
  S3User:
    Type: AWS::IAM::User
    Properties:
      UserName: !Join [ '_', [ 'svc', !Ref ServerName, 'backup' ] ]
      Policies:
        - PolicyName: WriteToS3
          PolicyDocument:
            Version: "2012-10-17"
            Statement:
              - Effect: Allow
                # NOTE(review): "s3:*" is broader than the policy name implies;
                # consider narrowing to s3:PutObject / s3:ListBucket.
                Action: "s3:*"
                Resource:
                  - !Sub arn:aws:s3:::${PackageBucket}
                  - !Sub arn:aws:s3:::${PackageBucket}/*

  UserCredentials:
    Type: AWS::IAM::AccessKey
    Properties:
      Status: Active
      UserName: !Ref S3User

  # Keep the key pair retrievable without re-generating it.
  # (!Ref on an AccessKey yields the access key ID.)
  UserCredentialsStored:
    Type: AWS::SecretsManager::Secret
    Properties:
      Name: !Sub /sqlBackupCredentials/${S3User}
      SecretString: !Sub '{"ACCESS_KEY":"${UserCredentials}","SECRET_KEY":"${UserCredentials.SecretAccessKey}"}'

  # Let this bucket invoke the shared tagging Lambda exported by the
  # sqlbackup-taggingLambda stack.
  LambdaBucketPermission:
    Type: AWS::Lambda::Permission
    Properties:
      Action: lambda:InvokeFunction
      FunctionName: !ImportValue "sqlbackup-taggingLambda:LambdaArn"
      Principal: s3.amazonaws.com
      SourceArn: !GetAtt PackageBucket.Arn
# Heavily influenced by https://docs.aws.amazon.com/lambda/latest/dg/with-s3-example.html
# Shared stack: a Lambda that tags each newly uploaded backup object so the
# per-server bucket lifecycle rules (logBackup / nightlyBackup /
# firstOfWeekBackup / firstOfMonthBackup) can expire it on the right schedule.
AWSTemplateFormatVersion: "2010-09-09"
Resources:
  DatabaseBackupTaggingLambdaRole:
    Type: AWS::IAM::Role
    Properties:
      RoleName: DatabaseBackupTaggingLambdaRole
      AssumeRolePolicyDocument:
        Version: "2012-10-17"
        Statement:
          - Effect: "Allow"
            Principal:
              Service: lambda.amazonaws.com
            Action: sts:AssumeRole
      ManagedPolicyArns:
        - arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole
        # NOTE(review): S3FullAccess is broad; the function only needs
        # s3:GetObject(Tagging)/s3:PutObjectTagging on the backup buckets.
        - arn:aws:iam::aws:policy/AmazonS3FullAccess

  DatabaseBackupTagging:
    Type: AWS::Lambda::Function
    Properties:
      FunctionName: S3DatabaseBackupTagging
      Handler: index.handler
      MemorySize: 128
      Role: !GetAtt DatabaseBackupTaggingLambdaRole.Arn
      Runtime: nodejs16.x
      Code:
        ZipFile: |
          console.log('Loading function');

          const aws = require('aws-sdk');
          const s3 = new aws.S3({ apiVersion: '2006-03-01' });

          // Tag each uploaded object based on what kind of backup it is:
          //   *.trn                      -> logBackup
          //   uploaded on the 2nd        -> firstOfMonthBackup
          //   uploaded on a Monday       -> firstOfWeekBackup
          //   anything else              -> nightlyBackup
          exports.handler = async (event, context) => {
              // S3 keys arrive URL-encoded with '+' for spaces.
              const bucket = event.Records[0].s3.bucket.name;
              const key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
              const params = {
                  Bucket: bucket,
                  Key: key,
              };
              try {
                  if (key.endsWith(".trn")) {
                      await s3.putObjectTagging({Bucket: bucket, Key: key, Tagging:{TagSet:[{Key: "logBackup", Value: ""}]}}).promise();
                      return "added log tag";
                  }
                  const { LastModified } = await s3.headObject(params).promise();
                  console.log('LAST MODIFIED:', LastModified);
                  const dayOfMonth = LastModified.getDate()
                  // Since its UTC time, have to use the second of the month. In the
                  // grand scheme, it wont matter much if its the first or second.
                  if (dayOfMonth === 2) {
                      await s3.putObjectTagging({Bucket: bucket, Key: key, Tagging:{TagSet:[{Key: "firstOfMonthBackup", Value: ""}]}}).promise();
                      return "added first of month tag";
                  }
                  const dayOfWeek = LastModified.getDay();
                  // Since its UTC time, have to use monday.
                  if (dayOfWeek === 1) {
                      await s3.putObjectTagging({Bucket: bucket, Key: key, Tagging:{TagSet:[{Key: "firstOfWeekBackup", Value: ""}]}}).promise();
                      return "added first of week tag";
                  }
                  await s3.putObjectTagging({Bucket: bucket, Key: key, Tagging:{TagSet:[{Key: "nightlyBackup", Value: ""}]}}).promise();
                  // fixed typo: was "tab"
                  return "added Nightly Backup tag";
              } catch (err) {
                  console.log(err);
                  const message = `Error getting object ${key} from bucket ${bucket}. Make sure they exist and your bucket is in the same region as this function.`;
                  console.log(message);
                  throw new Error(message);
              }
          };

Outputs:
  DatabaseBackupTaggingLambda:
    # Fixed copy-pasted description that referred to an unrelated VPC/ECS export.
    Description: ARN of the backup-tagging Lambda; imported by each per-server backup bucket stack
    Value: !GetAtt DatabaseBackupTagging.Arn
    Export:
      Name: !Join [ ":", [ !Ref "AWS::StackName", LambdaArn ] ]
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment