Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update samples to be run from command line without modification #100

Merged
merged 2 commits into from
Sep 18, 2015
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion storage/transfer_service/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ This app creates two types of transfers using the Transfer Service tool.
1. Select Add credentials > Service account > JSON key.
1. Set the environment variable GOOGLE_APPLICATION_CREDENTIALS to point to your JSON key.
1. Add the Storage Transfer service account as an editor of your project
storage-transfer-5031963314028297433@partnercontent.gserviceaccount.com
storage-transfer-<accountId>@partnercontent.gserviceaccount.com
1. Set up gcloud for application default credentials.
1. `gcloud components update`
1. `gcloud auth login`
Expand Down
93 changes: 62 additions & 31 deletions storage/transfer_service/aws_request.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@
# limitations under the License.
#
# [START all]
import argparse
import datetime
import json
import logging

Expand All @@ -22,54 +24,83 @@
logging.basicConfig(level=logging.DEBUG)


# [START main]
def main(description, project_id, day, month, year, hours, minutes,
         source_bucket, access_key, secret_access_key, sink_bucket):
    """Create a one-off transfer from Amazon S3 to Google Cloud Storage.

    Args:
        description: human-readable description shown in the console.
        project_id: Google Cloud project that owns the transfer job.
        day, month, year: date on which the transfer runs (US Pacific time).
        hours, minutes: 24-hour start time of day (US Pacific time).
        source_bucket: name of the S3 bucket to read from.
        access_key, secret_access_key: AWS credentials for the source bucket.
        sink_bucket: name of the destination GCS bucket.
    """
    credentials = GoogleCredentials.get_application_default()
    storagetransfer = discovery.build(
        'storagetransfer', 'v1', credentials=credentials)

    # Start and end on the same date so the job runs exactly once.
    run_date = {'day': day, 'month': month, 'year': year}

    transfer_job = {
        'description': description,
        'status': 'ENABLED',
        'projectId': project_id,
        'schedule': {
            'scheduleStartDate': run_date,
            'scheduleEndDate': run_date,
            'startTimeOfDay': {'hours': hours, 'minutes': minutes}
        },
        'transferSpec': {
            'awsS3DataSource': {
                'bucketName': source_bucket,
                'awsAccessKey': {
                    'accessKeyId': access_key,
                    'secretAccessKey': secret_access_key
                }
            },
            'gcsDataSink': {'bucketName': sink_bucket}
        }
    }

    result = storagetransfer.transferJobs().create(body=transfer_job).execute()
    logging.info('Returned transferJob: %s', json.dumps(result, indent=4))
# [END main]

if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Create a one-off transfer from Amazon S3 to Google Cloud '
        'Storage.')
    parser.add_argument('description', help='Transfer description.')
    parser.add_argument('project_id', help='Your Google Cloud project ID.')
    parser.add_argument('date', help='Date YYYY/MM/DD.')
    parser.add_argument('time', help='Time (24hr) HH:MM.')
    parser.add_argument('source_bucket', help='Source bucket name.')
    parser.add_argument('access_key', help='Your AWS access key id.')
    parser.add_argument('secret_access_key', help='Your AWS secret access '
                        'key.')
    parser.add_argument('sink_bucket', help='Sink bucket name.')

    args = parser.parse_args()
    date = datetime.datetime.strptime(args.date, '%Y/%m/%d')
    time = datetime.datetime.strptime(args.time, '%H:%M')

    # main() takes the date as (day, month, year), so pass the parsed fields
    # in that order. (Previously the call passed year, month, day, which
    # swapped the day and year values in the scheduled date.)
    main(
        args.description,
        args.project_id,
        date.day,
        date.month,
        date.year,
        time.hour,
        time.minute,
        args.source_bucket,
        args.access_key,
        args.secret_access_key,
        args.sink_bucket)

# [END all]
84 changes: 56 additions & 28 deletions storage/transfer_service/nearline_request.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@
# limitations under the License.
#
# [START all]
import argparse
import datetime
import json
import logging

Expand All @@ -22,50 +24,76 @@
logging.basicConfig(level=logging.DEBUG)


# [START main]
def main(description, project_id, day, month, year, hours, minutes,
         source_bucket, sink_bucket):
    """Create a transfer from the Google Cloud Storage Standard class to the
    Nearline Storage class.

    Args:
        description: human-readable description shown in the console.
        project_id: Google Cloud project that owns the transfer job.
        day, month, year: date on which the transfer starts (US Pacific time).
        hours, minutes: 24-hour start time of day (US Pacific time).
        source_bucket: name of the Standard-class source bucket.
        sink_bucket: name of the Nearline-class destination bucket.
    """
    credentials = GoogleCredentials.get_application_default()
    storagetransfer = discovery.build(
        'storagetransfer', 'v1', credentials=credentials)

    transfer_job = {
        'description': description,
        'status': 'ENABLED',
        'projectId': project_id,
        'schedule': {
            'scheduleStartDate': {
                'day': day,
                'month': month,
                'year': year
            },
            'startTimeOfDay': {
                'hours': hours,
                'minutes': minutes
            }
        },
        'transferSpec': {
            'gcsDataSource': {
                'bucketName': source_bucket
            },
            'gcsDataSink': {
                'bucketName': sink_bucket
            },
            # Only move objects untouched for 30 days (2592000 seconds).
            'objectConditions': {
                'minTimeElapsedSinceLastModification': '2592000s'
            },
            'transferOptions': {
                # This field is a JSON boolean. The previous value was the
                # string 'true', which is not a valid boolean for the API;
                # use the Python bool so it serializes as JSON true.
                'deleteObjectsFromSourceAfterTransfer': True
            }
        }
    }

    result = storagetransfer.transferJobs().create(body=transfer_job).execute()
    logging.info('Returned transferJob: %s', json.dumps(result, indent=4))
# [END main]

if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Create a transfer from the Google Cloud Storage Standard '
        'class to the Nearline Storage class.')
    parser.add_argument('description', help='Transfer description.')
    parser.add_argument('project_id', help='Your Google Cloud project ID.')
    parser.add_argument('date', help='Date YYYY/MM/DD.')
    parser.add_argument('time', help='Time (24hr) HH:MM.')
    parser.add_argument('source_bucket', help='Source bucket name.')
    parser.add_argument('sink_bucket', help='Sink bucket name.')

    args = parser.parse_args()
    date = datetime.datetime.strptime(args.date, '%Y/%m/%d')
    time = datetime.datetime.strptime(args.time, '%H:%M')

    # main() takes the date as (day, month, year), so pass the parsed fields
    # in that order. (Previously the call passed year, month, day, which
    # swapped the day and year values in the scheduled date.)
    main(
        args.description,
        args.project_id,
        date.day,
        date.month,
        date.year,
        time.hour,
        time.minute,
        args.source_bucket,
        args.sink_bucket)

# [END all]
35 changes: 20 additions & 15 deletions storage/transfer_service/transfer_check.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
# limitations under the License.
#
# [START all]
import argparse
import json
import logging

Expand All @@ -21,31 +22,35 @@

logging.basicConfig(level=logging.DEBUG)

# Edit these values with desired parameters.
PROJECT_ID = 'YOUR_PROJECT_ID'
JOB_NAME = 'YOUR_JOB_NAME'


# [START main]
def main(project_id, job_name):
    """Review the transfer operations associated with a transfer job.

    Args:
        project_id: Google Cloud project that owns the transfer job.
        job_name: name of the transfer job whose operations are listed.
    """
    credentials = GoogleCredentials.get_application_default()
    storagetransfer = discovery.build(
        'storagetransfer', 'v1', credentials=credentials)

    # Serialize the filter with json.dumps rather than hand-assembling the
    # JSON with str.format, so project IDs or job names containing quotes or
    # backslashes are escaped correctly.
    filterString = json.dumps({
        'project_id': project_id,
        'job_names': [job_name],
    })

    result = storagetransfer.transferOperations().list(
        name="transferOperations",
        filter=filterString).execute()
    logging.info('Result of transferOperations/list: %s',
                 json.dumps(result, indent=4, sort_keys=True))
# [END main]

if __name__ == '__main__':
    # Both command-line arguments are positional and required.
    arg_parser = argparse.ArgumentParser(
        description=('Review the transfer operations associated with a '
                     'transfer job.'))
    arg_parser.add_argument('project_id', help='Your Google Cloud project ID.')
    arg_parser.add_argument('job_name', help='Your job name.')
    parsed = arg_parser.parse_args()
    main(parsed.project_id, parsed.job_name)

# [END all]