From 2cd76e3b6619675ea7ef1de3ec3d851592a4ad56 Mon Sep 17 00:00:00 2001 From: BasilMawejje Date: Thu, 7 Oct 2021 15:26:20 +0300 Subject: [PATCH] Add call to set_s3_lifecycle method in iterate_and_log_notify_errors --- app/models/conditions_response/backup.rb | 6 +++++- spec/models/conditions_response/backup_spec.rb | 18 ++++++++++++------ 2 files changed, 17 insertions(+), 7 deletions(-) diff --git a/app/models/conditions_response/backup.rb b/app/models/conditions_response/backup.rb index 1b000be17..a045dde1c 100644 --- a/app/models/conditions_response/backup.rb +++ b/app/models/conditions_response/backup.rb @@ -86,6 +86,10 @@ def self.condition_response(condition, log, use_slack_notification: true) iterate_and_log_notify_errors(backup_files, 'in backup_files loop, uploading_file_to_s3', log) do |backup_file| upload_file_to_s3(aws_s3, aws_s3_backup_bucket, aws_backup_bucket_full_prefix, backup_file) + # When we first upload our file to S3, the default storage class is STANDARD + # After 1 month, we want to transition the object to STANDARD_IA, then GLACIER after 3 months. This will help us save on costs.
+ # This however has effects on retrieval time for objects which you can see in this performance chart https://aws.amazon.com/s3/storage-classes/#Performance_across_the_S3_Storage_Classes + set_s3_lifecycle_rules(bucket_name: aws_s3_backup_bucket, bucket_full_prefix: aws_backup_bucket_full_prefix, status: 'Enabled', storage_rules: [{days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'}]) end log.record('info', 'Pruning older backups on local storage') @@ -300,7 +304,7 @@ class << self end # s3_lifecycle_rules(bucket_name: 'bucket_name', bucket_full_prefix: 'bucket_full_prefix', status: 'enabled', storage_rules: [{days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'}]) - def self.s3_lifecycle_rules(bucket_name:, bucket_full_prefix:, status:, storage_rules:) + def self.set_s3_lifecycle_rules(bucket_name:, bucket_full_prefix:, status:, storage_rules:) client = Aws::S3::Client.new(region: ENV['SHF_AWS_S3_BACKUP_REGION'], credentials: Aws::Credentials.new(ENV['SHF_AWS_S3_BACKUP_KEY_ID'], ENV['SHF_AWS_S3_BACKUP_SECRET_ACCESS_KEY'])) diff --git a/spec/models/conditions_response/backup_spec.rb b/spec/models/conditions_response/backup_spec.rb index a34f613ae..a461b6dfd 100644 --- a/spec/models/conditions_response/backup_spec.rb +++ b/spec/models/conditions_response/backup_spec.rb @@ -692,7 +692,6 @@ def create_faux_backup_file(backups_dir, file_prefix) let!(:temp_backups_dir) { Dir.mktmpdir('faux-backups-dir') } let!(:faux_backup_fn) { create_faux_backup_file(temp_backups_dir, 'faux_backup.bak') } - it '.upload_file_to_s3 calls .upload_file for the bucket, full object name, and file to upload' do expect(mock_bucket_object).to receive(:upload_file).with(faux_backup_fn, anything) Backup.upload_file_to_s3(mock_s3, bucket_name, bucket_full_prefix, faux_backup_fn) @@ -1251,7 +1250,9 @@ def create_faux_backup_file(backups_dir, file_prefix) describe 'iterate_and_log_notify_errors(list, slack_error_details, log)' do - let(:status) {
'Enabled' } + let(:storage_rules) { [{days: 30, storage_class: 'STANDARD_IA'}, {days: 90, storage_class: 'GLACIER'}] } + before(:each) do allow(SHFNotifySlack).to receive(:failure_notification) .with(anything, anything) @@ -1303,9 +1304,14 @@ def create_faux_backup_file(backups_dir, file_prefix) expect(@result_str).to eq 'ac' end + it 'adds a bucket lifecycle policy to the object' do + expect(described_class).to receive(:set_s3_lifecycle_rules).with(bucket_name: bucket_name, bucket_full_prefix: bucket_full_prefix, status: status, storage_rules: storage_rules) + described_class.set_s3_lifecycle_rules(bucket_name: bucket_name, bucket_full_prefix: bucket_full_prefix, status: status, storage_rules: storage_rules) + end + end - describe 's3_lifecycle_rules(bucket, bucket_full_prefix, status, *storage_rules_kwargs)' do + describe 'set_s3_lifecycle_rules(bucket, bucket_full_prefix, status, *storage_rules_kwargs)' do let(:invalid_storage_class_list) { ['INVALID_STORAGE_CLASS', 'OTHER_INVALID_STORAGE_CLASS'] } let(:another_invalid_storage_class_list) { ['INVALID_STORAGE_CLASS', 'STANDARD_IA', 'GLACIER'] } let(:status) { 'Enabled' } @@ -1323,9 +1329,9 @@ def create_faux_backup_file(backups_dir, file_prefix) client end - it 'calls #s3_lifecycle_rules once' do - expect(described_class).to receive(:s3_lifecycle_rules).with(bucket_name: bucket_name, bucket_full_prefix: bucket_full_prefix, status: status, storage_rules: storage_rules) - described_class.s3_lifecycle_rules(bucket_name: bucket_name, bucket_full_prefix: bucket_full_prefix, status: status, storage_rules: storage_rules) + it 'calls #set_s3_lifecycle_rules once' do + expect(described_class).to receive(:set_s3_lifecycle_rules).with(bucket_name: bucket_name, bucket_full_prefix: bucket_full_prefix, status: status, storage_rules: storage_rules) + described_class.set_s3_lifecycle_rules(bucket_name: bucket_name, bucket_full_prefix: bucket_full_prefix, status: status, storage_rules: storage_rules) end it "returns 'Invalid 
storage class' for a list containing only invalid storage classes" do