Skip to content

Commit

Permalink
Switch to dictionary for JSON string
Browse files Browse the repository at this point in the history
Update description strings
  • Loading branch information
Shun Fan authored and Jon Wayne Parrott committed Sep 18, 2015
1 parent 7df75dc commit 84f0ee3
Show file tree
Hide file tree
Showing 3 changed files with 107 additions and 109 deletions.
111 changes: 55 additions & 56 deletions storage/transfer_service/aws_request.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
#
# [START all]
import argparse
import datetime
import json
import logging

Expand All @@ -26,80 +27,78 @@
# [START main]
def main(description, project_id, day, month, year, hours, minutes,
        source_bucket, access_key, secret_access_key, sink_bucket):
    """Create a one-off transfer from Amazon S3 to Google Cloud Storage.

    Args:
        description: Human-readable description shown on the transfer job.
        project_id: Google Cloud project ID that owns the transfer job.
        day, month, year: Date on which the one-off transfer runs; used for
            both scheduleStartDate and scheduleEndDate so the job runs once.
        hours, minutes: Start time of day (24-hour clock).
        source_bucket: Name of the AWS S3 source bucket.
        access_key: AWS access key id with read access to source_bucket.
        secret_access_key: AWS secret access key paired with access_key.
        sink_bucket: Name of the Google Cloud Storage destination bucket.
    """
    credentials = GoogleCredentials.get_application_default()
    storagetransfer = discovery.build(
        'storagetransfer', 'v1', credentials=credentials)

    # Edit this template with desired parameters.
    # Specify times below using US Pacific Time Zone.
    # Identical start and end dates make this a one-off (non-recurring) job.
    transfer_job = {
        'description': description,
        'status': 'ENABLED',
        'projectId': project_id,
        'schedule': {
            'scheduleStartDate': {
                'day': day,
                'month': month,
                'year': year
            },
            'scheduleEndDate': {
                'day': day,
                'month': month,
                'year': year
            },
            'startTimeOfDay': {
                'hours': hours,
                'minutes': minutes
            }
        },
        'transferSpec': {
            'awsS3DataSource': {
                'bucketName': source_bucket,
                'awsAccessKey': {
                    'accessKeyId': access_key,
                    'secretAccessKey': secret_access_key
                }
            },
            'gcsDataSink': {
                'bucketName': sink_bucket
            }
        }
    }

    result = storagetransfer.transferJobs().create(body=transfer_job).execute()
    logging.info('Returned transferJob: %s', json.dumps(result, indent=4))
# [END main]

if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Create a one-off transfer from Amazon S3 to GCS.')
parser.add_argument('description', help='Your description')
parser.add_argument('project_id', help='Your Google Cloud project ID')
parser.add_argument('date', help='Date YYYY/MM/DD')
parser.add_argument('hours', help='Hours in 24hr (no leading 0s)')
parser.add_argument('minutes', help='Minutes (no leading 0s)')
parser.add_argument('source_bucket', help='Source bucket')
parser.add_argument('access_key', help='Your AWS access key id')
parser.add_argument('secret_access_key', help='Your AWS secret access key')
parser.add_argument('sink_bucket', help='Sink bucket')
description='Create a one-off transfer from Amazon S3 to Google Cloud '
'Storage.')
parser.add_argument('description', help='Transfer description.')
parser.add_argument('project_id', help='Your Google Cloud project ID.')
parser.add_argument('date', help='Date YYYY/MM/DD.')
parser.add_argument('time', help='Time (24hr) HH:MM.')
parser.add_argument('source_bucket', help='Source bucket name.')
parser.add_argument('access_key', help='Your AWS access key id.')
parser.add_argument('secret_access_key', help='Your AWS secret access '
'key.')
parser.add_argument('sink_bucket', help='Sink bucket name.')

args = parser.parse_args()
date = datetime.datetime.strptime(args.date, '%Y/%m/%d')
time = datetime.datetime.strptime(args.time, '%H:%M')

main(
args.description,
args.project_id,
args.date.split('/')[2].lstrip('0'),
args.date.split('/')[1].lstrip('0'),
args.date.split('/')[0].lstrip('0'),
args.hours,
args.minutes,
date.year,
date.month,
date.day,
time.hour,
time.minute,
args.source_bucket,
args.access_key,
args.secret_access_key,
Expand Down
101 changes: 50 additions & 51 deletions storage/transfer_service/nearline_request.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
#
# [START all]
import argparse
import datetime
import json
import logging

Expand All @@ -26,74 +27,72 @@
# [START main]
def main(description, project_id, day, month, year, hours, minutes,
        source_bucket, sink_bucket):
    """Create a transfer from the Google Cloud Storage Standard class to the
    Nearline Storage class.

    Args:
        description: Human-readable description shown on the transfer job.
        project_id: Google Cloud project ID that owns the transfer job.
        day, month, year: Date on which the daily transfer schedule starts.
        hours, minutes: Start time of day (24-hour clock).
        source_bucket: Name of the Standard-class source bucket.
        sink_bucket: Name of the Nearline-class destination bucket.
    """
    credentials = GoogleCredentials.get_application_default()
    storagetransfer = discovery.build(
        'storagetransfer', 'v1', credentials=credentials)

    # Edit this template with desired parameters.
    # Specify times below using US Pacific Time Zone.
    transfer_job = {
        'description': description,
        'status': 'ENABLED',
        'projectId': project_id,
        'schedule': {
            'scheduleStartDate': {
                'day': day,
                'month': month,
                'year': year
            },
            'startTimeOfDay': {
                'hours': hours,
                'minutes': minutes
            }
        },
        'transferSpec': {
            'gcsDataSource': {
                'bucketName': source_bucket
            },
            'gcsDataSink': {
                'bucketName': sink_bucket
            },
            # Only move objects untouched for 30 days (2592000 seconds).
            'objectConditions': {
                'minTimeElapsedSinceLastModification': '2592000s'
            },
            'transferOptions': {
                # Must be a JSON boolean, not the string 'true': the string
                # would be rejected/misread by the Transfer Service API.
                'deleteObjectsFromSourceAfterTransfer': True
            }
        }
    }

    result = storagetransfer.transferJobs().create(body=transfer_job).execute()
    logging.info('Returned transferJob: %s', json.dumps(result, indent=4))

if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Create a transfer from the Google Cloud Storage Standard '
        'class to the Nearline Storage class.')
    parser.add_argument('description', help='Transfer description.')
    parser.add_argument('project_id', help='Your Google Cloud project ID.')
    parser.add_argument('date', help='Date YYYY/MM/DD.')
    parser.add_argument('time', help='Time (24hr) HH:MM.')
    parser.add_argument('source_bucket', help='Source bucket name.')
    parser.add_argument('sink_bucket', help='Sink bucket name.')

    args = parser.parse_args()
    date = datetime.datetime.strptime(args.date, '%Y/%m/%d')
    time = datetime.datetime.strptime(args.time, '%H:%M')

    # main() is declared as (description, project_id, day, month, year,
    # hours, minutes, ...), so pass day first — passing date.year here
    # would silently schedule the job with the year in the 'day' field.
    main(
        args.description,
        args.project_id,
        date.day,
        date.month,
        date.year,
        time.hour,
        time.minute,
        args.source_bucket,
        args.sink_bucket)

Expand Down
4 changes: 2 additions & 2 deletions storage/transfer_service/transfer_check.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,8 +46,8 @@ def main(project_id, job_name):
parser = argparse.ArgumentParser(
description='Review the transfer operations associated with a transfer'
' job.')
parser.add_argument('project_id', help='Your Google Cloud project ID')
parser.add_argument('job_name', help='Your job name')
parser.add_argument('project_id', help='Your Google Cloud project ID.')
parser.add_argument('job_name', help='Your job name.')

args = parser.parse_args()

Expand Down

0 comments on commit 84f0ee3

Please sign in to comment.