diff --git a/storage/transfer_service/aws_request.py b/storage/transfer_service/aws_request.py
index 6cff7740560c..9b5b0622551b 100644
--- a/storage/transfer_service/aws_request.py
+++ b/storage/transfer_service/aws_request.py
@@ -13,6 +13,7 @@
 #
 # [START all]
 import argparse
+import datetime
 import json
 import logging
 
@@ -26,80 +27,78 @@
 # [START main]
 def main(description, project_id, day, month, year, hours, minutes,
          source_bucket, access_key, secret_access_key, sink_bucket):
-    """Create a one-off transfer from Amazon S3 to GCS."""
+    """Create a one-off transfer from Amazon S3 to Google Cloud Storage."""
     credentials = GoogleCredentials.get_application_default()
     storagetransfer = discovery.build(
         'storagetransfer', 'v1', credentials=credentials)
 
     # Edit this template with desired parameters.
     # Specify times below using US Pacific Time Zone.
-    transfer_job = '''
-    {{
-        "description": "{description}",
-        "status": "ENABLED",
-        "projectId": "{project_id}",
-        "schedule": {{
-            "scheduleStartDate": {{
-                "day": {day},
-                "month": {month},
-                "year": {year}
-            }},
-            "scheduleEndDate": {{
-                "day": {day},
-                "month": {month},
-                "year": {year}
-            }},
-            "startTimeOfDay": {{
-                "hours": {hours},
-                "minutes": {minutes}
-            }}
-        }},
-        "transferSpec": {{
-            "awsS3DataSource": {{
-                "bucketName": "{source_bucket}",
-                "awsAccessKey": {{
-                    "accessKeyId": "{access_key}",
-                    "secretAccessKey": "{secret_access_key}"
-                }}
-            }},
-            "gcsDataSink": {{
-                "bucketName": "{sink_bucket}"
-            }}
-        }}
-    }}
-    '''.format(description=description, project_id=project_id, day=day,
-               month=month, year=year, hours=hours, minutes=minutes,
-               source_bucket=source_bucket, access_key=access_key,
-               secret_access_key=secret_access_key, sink_bucket=sink_bucket)
+    transfer_job = {
+        'description': description,
+        'status': 'ENABLED',
+        'projectId': project_id,
+        'schedule': {
+            'scheduleStartDate': {
+                'day': day,
+                'month': month,
+                'year': year
+            },
+            'scheduleEndDate': {
+                'day': day,
+                'month': month,
+                'year': year
+            },
+            'startTimeOfDay': {
+                'hours': hours,
+                'minutes': minutes
+            }
+        },
+        'transferSpec': {
+            'awsS3DataSource': {
+                'bucketName': source_bucket,
+                'awsAccessKey': {
+                    'accessKeyId': access_key,
+                    'secretAccessKey': secret_access_key
+                }
+            },
+            'gcsDataSink': {
+                'bucketName': sink_bucket
+            }
+        }
+    }
 
-    result = storagetransfer.transferJobs().create(body=json.loads(
-        transfer_job)).execute()
+    print(transfer_job)
+    result = storagetransfer.transferJobs().create(body=transfer_job).execute()
     logging.info('Returned transferJob: %s', json.dumps(result, indent=4))
 # [END main]
 
 if __name__ == '__main__':
     parser = argparse.ArgumentParser(
-        description='Create a one-off transfer from Amazon S3 to GCS.')
-    parser.add_argument('description', help='Your description')
-    parser.add_argument('project_id', help='Your Google Cloud project ID')
-    parser.add_argument('date', help='Date YYYY/MM/DD')
-    parser.add_argument('hours', help='Hours in 24hr (no leading 0s)')
-    parser.add_argument('minutes', help='Minutes (no leading 0s)')
-    parser.add_argument('source_bucket', help='Source bucket')
-    parser.add_argument('access_key', help='Your AWS access key id')
-    parser.add_argument('secret_access_key', help='Your AWS secret access key')
-    parser.add_argument('sink_bucket', help='Sink bucket')
+        description='Create a one-off transfer from Amazon S3 to Google Cloud '
+        'Storage.')
+    parser.add_argument('description', help='Transfer description.')
+    parser.add_argument('project_id', help='Your Google Cloud project ID.')
+    parser.add_argument('date', help='Date YYYY/MM/DD.')
+    parser.add_argument('time', help='Time (24hr) HH:MM.')
+    parser.add_argument('source_bucket', help='Source bucket name.')
+    parser.add_argument('access_key', help='Your AWS access key id.')
+    parser.add_argument('secret_access_key', help='Your AWS secret access '
+                        'key.')
+    parser.add_argument('sink_bucket', help='Sink bucket name.')
 
     args = parser.parse_args()
+    date = datetime.datetime.strptime(args.date, '%Y/%m/%d')
+    time = datetime.datetime.strptime(args.time, '%H:%M')
 
     main(
         args.description,
         args.project_id,
-        args.date.split('/')[2].lstrip('0'),
-        args.date.split('/')[1].lstrip('0'),
-        args.date.split('/')[0].lstrip('0'),
-        args.hours,
-        args.minutes,
+        date.year,
+        date.month,
+        date.day,
+        time.hour,
+        time.minute,
         args.source_bucket,
         args.access_key,
         args.secret_access_key,
diff --git a/storage/transfer_service/nearline_request.py b/storage/transfer_service/nearline_request.py
index 0b5e648d2166..8339a6f8036b 100644
--- a/storage/transfer_service/nearline_request.py
+++ b/storage/transfer_service/nearline_request.py
@@ -13,6 +13,7 @@
 #
 # [START all]
 import argparse
+import datetime
 import json
 import logging
 
@@ -26,74 +27,72 @@
 # [START main]
 def main(description, project_id, day, month, year, hours, minutes,
          source_bucket, sink_bucket):
-    """Transfer from standard Cloud Storage to Cloud Storage Nearline."""
+    """Create a transfer from the Google Cloud Storage Standard class to the
+    Nearline Storage class."""
     credentials = GoogleCredentials.get_application_default()
     storagetransfer = discovery.build(
         'storagetransfer', 'v1', credentials=credentials)
 
     # Edit this template with desired parameters.
     # Specify times below using US Pacific Time Zone.
-    transfer_job = '''
-    {{
-        "description": "{description}",
-        "status": "ENABLED",
-        "projectId": "{project_id}",
-        "schedule": {{
-            "scheduleStartDate": {{
-                "day": {day},
-                "month": {month},
-                "year": {year}
-            }},
-            "startTimeOfDay": {{
-                "hours": {hours},
-                "minutes": {minutes}
-            }}
-        }},
-        "transferSpec": {{
-            "gcsDataSource": {{
-                "bucketName": "{source_bucket}"
-            }},
-            "gcsDataSink": {{
-                "bucketName": "{sink_bucket}"
-            }},
-            "objectConditions": {{
-                "minTimeElapsedSinceLastModification": "2592000s"
-            }},
-            "transferOptions": {{
-                "deleteObjectsFromSourceAfterTransfer": true
-            }}
-        }}
-    }}
-    '''.format(description=description, project_id=project_id, day=day,
-               month=month, year=year, hours=hours, minutes=minutes,
-               source_bucket=source_bucket, sink_bucket=sink_bucket)
-    result = storagetransfer.transferJobs().create(body=json.loads(
-        transfer_job)).execute()
+    transfer_job = {
+        'description': description,
+        'status': 'ENABLED',
+        'projectId': project_id,
+        'schedule': {
+            'scheduleStartDate': {
+                'day': day,
+                'month': month,
+                'year': year
+            },
+            'startTimeOfDay': {
+                'hours': hours,
+                'minutes': minutes
+            }
+        },
+        'transferSpec': {
+            'gcsDataSource': {
+                'bucketName': source_bucket
+            },
+            'gcsDataSink': {
+                'bucketName': sink_bucket
+            },
+            'objectConditions': {
+                'minTimeElapsedSinceLastModification': '2592000s'
+            },
+            'transferOptions': {
+                'deleteObjectsFromSourceAfterTransfer': True
+            }
+        }
+    }
+
+    result = storagetransfer.transferJobs().create(body=transfer_job).execute()
     logging.info('Returned transferJob: %s', json.dumps(result, indent=4))
 # [END main]
 
 if __name__ == '__main__':
     parser = argparse.ArgumentParser(
-        description='Create a transfer from standard Cloud Storage to Cloud '
-        'Storage Nearline.')
-    parser.add_argument('description', help='Your description')
-    parser.add_argument('project_id', help='Your Google Cloud project ID')
-    parser.add_argument('date', help='Date YYYY/MM/DD')
-    parser.add_argument('hours', help='Hours in 24hr (no leading 0s)')
-    parser.add_argument('minutes', help='Minutes (no leading 0s)')
-    parser.add_argument('source_bucket', help='Source bucket')
-    parser.add_argument('sink_bucket', help='Sink bucket')
+        description='Create a transfer from the Google Cloud Storage Standard '
+        'class to the Nearline Storage class.')
+    parser.add_argument('description', help='Transfer description.')
+    parser.add_argument('project_id', help='Your Google Cloud project ID.')
+    parser.add_argument('date', help='Date YYYY/MM/DD.')
+    parser.add_argument('time', help='Time (24hr) HH:MM.')
+    parser.add_argument('source_bucket', help='Source bucket name.')
+    parser.add_argument('sink_bucket', help='Sink bucket name.')
 
     args = parser.parse_args()
+    date = datetime.datetime.strptime(args.date, '%Y/%m/%d')
+    time = datetime.datetime.strptime(args.time, '%H:%M')
 
     main(
         args.description,
         args.project_id,
-        args.date.split('/')[2].lstrip('0'),
-        args.date.split('/')[1].lstrip('0'),
-        args.date.split('/')[0].lstrip('0'),
-        args.hours,
-        args.minutes,
+        date.year,
+        date.month,
+        date.day,
+        time.hour,
+        time.minute,
         args.source_bucket,
         args.sink_bucket)
diff --git a/storage/transfer_service/transfer_check.py b/storage/transfer_service/transfer_check.py
index bccfa3a799be..e6ac67cfb154 100644
--- a/storage/transfer_service/transfer_check.py
+++ b/storage/transfer_service/transfer_check.py
@@ -46,8 +46,8 @@ def main(project_id, job_name):
     parser = argparse.ArgumentParser(
         description='Review the transfer operations associated with a transfer'
         ' job.')
-    parser.add_argument('project_id', help='Your Google Cloud project ID')
-    parser.add_argument('job_name', help='Your job name')
+    parser.add_argument('project_id', help='Your Google Cloud project ID.')
+    parser.add_argument('job_name', help='Your job name.')
 
     args = parser.parse_args()
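
For reference, the new schedule handling in both request scripts relies only on the standard library. A minimal sketch of what the strptime-based parsing produces; the date and time values below are placeholders, not taken from the change itself:

import datetime

# Placeholder inputs in the formats the scripts now expect: YYYY/MM/DD and 24-hour HH:MM.
date = datetime.datetime.strptime('2017/03/01', '%Y/%m/%d')
time = datetime.datetime.strptime('13:05', '%H:%M')

# The parsed attributes are plain ints, so the old split('/') / lstrip('0')
# string handling is no longer needed before passing the values to main().
assert (date.year, date.month, date.day) == (2017, 3, 1)
assert (time.hour, time.minute) == (13, 5)

In short, each request script now takes a single date argument and a single time argument instead of separate date, hours, and minutes values.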