Skip to content

Commit

Permalink
Add call to set_s3_lifecycle_rules method in iterate_and_log_notify_errors
Browse files Browse the repository at this point in the history
  • Loading branch information
BasilMawejje committed Oct 7, 2021
1 parent 631f7e0 commit 2cd76e3
Show file tree
Hide file tree
Showing 2 changed files with 17 additions and 7 deletions.
6 changes: 5 additions & 1 deletion app/models/conditions_response/backup.rb
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,10 @@ def self.condition_response(condition, log, use_slack_notification: true)

iterate_and_log_notify_errors(backup_files, 'in backup_files loop, uploading_file_to_s3', log) do |backup_file|
upload_file_to_s3(aws_s3, aws_s3_backup_bucket, aws_backup_bucket_full_prefix, backup_file)
# When we first upload our file to s3, the default storage class is STANDARD
# After 1 month, we want to transition the object to STANDARD_IA, then GLACIER after 3 months. This will help us save on costs.
# This however has effects on retrieval time for objects which you can see in this performance chart https://aws.amazon.com/s3/storage-classes/#Performance_across_the_S3_Storage_Classes
set_s3_lifecycle_rules(bucket_name: aws_s3_backup_bucket, bucket_full_prefix: aws_backup_bucket_full_prefix, status: 'enabled', storage_rules: [{days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'}])
end

log.record('info', 'Pruning older backups on local storage')
Expand Down Expand Up @@ -300,7 +304,7 @@ class << self
end

# s3_lifecycle_rules(bucket_name: 'bucket_name', bucket_full_prefix: 'bucket_full_prefix', status: 'enabled', storage_rules: [{days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'}])
def self.s3_lifecycle_rules(bucket_name:, bucket_full_prefix:, status:, storage_rules:)
def self.set_s3_lifecycle_rules(bucket_name:, bucket_full_prefix:, status:, storage_rules:)
client = Aws::S3::Client.new(region: ENV['SHF_AWS_S3_BACKUP_REGION'],
credentials: Aws::Credentials.new(ENV['SHF_AWS_S3_BACKUP_KEY_ID'], ENV['SHF_AWS_S3_BACKUP_SECRET_ACCESS_KEY']))

Expand Down
18 changes: 12 additions & 6 deletions spec/models/conditions_response/backup_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -692,7 +692,6 @@ def create_faux_backup_file(backups_dir, file_prefix)
let!(:temp_backups_dir) { Dir.mktmpdir('faux-backups-dir') }
let!(:faux_backup_fn) { create_faux_backup_file(temp_backups_dir, 'faux_backup.bak') }


it '.upload_file_to_s3 calls .upload_file for the bucket, full object name, and file to upload' do
expect(mock_bucket_object).to receive(:upload_file).with(faux_backup_fn, anything)
Backup.upload_file_to_s3(mock_s3, bucket_name, bucket_full_prefix, faux_backup_fn)
Expand Down Expand Up @@ -1251,7 +1250,9 @@ def create_faux_backup_file(backups_dir, file_prefix)


describe 'iterate_and_log_notify_errors(list, slack_error_details, log)' do

let(:status) { 'Enabled' }
let(:storage_rules) { [{days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'}] }

before(:each) do
allow(SHFNotifySlack).to receive(:failure_notification)
.with(anything, anything)
Expand Down Expand Up @@ -1303,9 +1304,14 @@ def create_faux_backup_file(backups_dir, file_prefix)
expect(@result_str).to eq 'ac'
end

it 'adds a bucket lifecycle policy to the object' do
expect(described_class).to receive(:set_s3_lifecycle_rules).with(bucket_name: bucket_name, bucket_full_prefix: bucket_full_prefix, status: status, storage_rules: storage_rules)
described_class.set_s3_lifecycle_rules(bucket_name: bucket_name, bucket_full_prefix: bucket_full_prefix, status: status, storage_rules: storage_rules)
end

end

describe 's3_lifecycle_rules(bucket, bucket_full_prefix, status, *storage_rules_kwargs)' do
describe 'set_s3_lifecycle_rules(bucket, bucket_full_prefix, status, *storage_rules_kwargs)' do
let(:invalid_storage_class_list) { ['INVALID_STORAGE_CLASS', 'OTHER_INVALID_STORAGE_CLASS'] }
let(:another_invalid_storage_class_list) { ['INVALID_STORAGE_CLASS', 'STANDARD_IA', 'GLACIER'] }
let(:status) { 'Enabled' }
Expand All @@ -1323,9 +1329,9 @@ def create_faux_backup_file(backups_dir, file_prefix)
client
end

it 'calls #s3_lifecycle_rules once' do
expect(described_class).to receive(:s3_lifecycle_rules).with(bucket_name: bucket_name, bucket_full_prefix: bucket_full_prefix, status: status, storage_rules: storage_rules)
described_class.s3_lifecycle_rules(bucket_name: bucket_name, bucket_full_prefix: bucket_full_prefix, status: status, storage_rules: storage_rules)
it 'calls #set_s3_lifecycle_rules once' do
expect(described_class).to receive(:set_s3_lifecycle_rules).with(bucket_name: bucket_name, bucket_full_prefix: bucket_full_prefix, status: status, storage_rules: storage_rules)
described_class.set_s3_lifecycle_rules(bucket_name: bucket_name, bucket_full_prefix: bucket_full_prefix, status: status, storage_rules: storage_rules)
end

it "returns 'Invalid storage class' for a list containing only invalid storage classes" do
Expand Down

0 comments on commit 2cd76e3

Please sign in to comment.