From 1d6c49ab530b945f23538cea3d5a5441ef12cedd Mon Sep 17 00:00:00 2001
From: Modular Magician
Date: Fri, 4 Jun 2021 23:58:03 +0000
Subject: [PATCH] Add Support for Azure Blob Storage Transfer (#4751)

* mark field as updatable

Co-authored-by: upodroid

* add azure support

Co-authored-by: upodroid

* fix typo

* change path options

* revert doc change

Signed-off-by: Modular Magician
---
 .changelog/4751.txt                           |   3 +
 google/resource_storage_transfer_job.go       | 109 ++++++++++++++++--
 .../docs/r/storage_transfer_job.html.markdown |  20 +++-
 3 files changed, 123 insertions(+), 9 deletions(-)
 create mode 100644 .changelog/4751.txt

diff --git a/.changelog/4751.txt b/.changelog/4751.txt
new file mode 100644
index 00000000000..4d1f96057a0
--- /dev/null
+++ b/.changelog/4751.txt
@@ -0,0 +1,3 @@
+```release-note:enhancement
+storage-transfer: added support for `azure_blob_storage_data_source` to `google_storage_transfer_job`
+```

diff --git a/google/resource_storage_transfer_job.go b/google/resource_storage_transfer_job.go
index f456f49558a..e3a50985e90 100644
--- a/google/resource_storage_transfer_job.go
+++ b/google/resource_storage_transfer_job.go
@@ -30,6 +30,7 @@ var (
 		"transfer_spec.0.gcs_data_source",
 		"transfer_spec.0.aws_s3_data_source",
 		"transfer_spec.0.http_data_source",
+		"transfer_spec.0.azure_blob_storage_data_source",
 	}
 )
 
@@ -99,7 +100,15 @@ func resourceStorageTransferJob() *schema.Resource {
 							MaxItems:     1,
 							Elem:         httpDataSchema(),
 							ExactlyOneOf: transferSpecDataSourceKeys,
 							Description:  `An HTTP URL data source.`,
 						},
+						"azure_blob_storage_data_source": {
+							Type:         schema.TypeList,
+							Optional:     true,
+							MaxItems:     1,
+							Elem:         azureBlobStorageDataSchema(),
+							ExactlyOneOf: transferSpecDataSourceKeys,
+							Description:  `An Azure Blob Storage data source.`,
+						},
 					},
 				},
@@ -370,6 +379,45 @@ func httpDataSchema() *schema.Resource {
 	}
 }
 
+func azureBlobStorageDataSchema() *schema.Resource {
+	return &schema.Resource{
+		Schema: map[string]*schema.Schema{
+			"storage_account": {
+				Required:    true,
+				Type:        schema.TypeString,
+				Description: `The name of the Azure Storage account.`,
+			},
+			"container": {
+				Required:    true,
+				Type:        schema.TypeString,
+				Description: `The container to transfer from the Azure Storage account.`,
+			},
+			"path": {
+				Optional:    true,
+				Computed:    true,
+				Type:        schema.TypeString,
+				Description: `Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.`,
+			},
+			"azure_credentials": {
+				Type:     schema.TypeList,
+				Required: true,
+				MaxItems: 1,
+				Elem: &schema.Resource{
+					Schema: map[string]*schema.Schema{
+						"sas_token": {
+							Type:        schema.TypeString,
+							Required:    true,
+							Sensitive:   true,
+							Description: `Azure shared access signature.`,
+						},
+					},
+				},
+				Description: `Credentials used to authenticate API requests to Azure.`,
+			},
+		},
+	}
+}
+
 func diffSuppressEmptyStartTimeOfDay(k, old, new string, d *schema.ResourceData) bool {
 	return k == "schedule.0.start_time_of_day.#" && old == "1" && new == "0"
 }
 
@@ -769,6 +817,50 @@ func flattenHttpData(httpData *storagetransfer.HttpData) []map[string]interface{
 	return []map[string]interface{}{data}
 }
 
+func expandAzureCredentials(azureCredentials []interface{}) *storagetransfer.AzureCredentials {
+	if len(azureCredentials) == 0 || azureCredentials[0] == nil {
+		return nil
+	}
+
+	azureCredential := azureCredentials[0].(map[string]interface{})
+	return &storagetransfer.AzureCredentials{
+		SasToken: azureCredential["sas_token"].(string),
+	}
+}
+
+func flattenAzureCredentials(d *schema.ResourceData) []map[string]interface{} {
+	data := map[string]interface{}{
+		"sas_token": d.Get("transfer_spec.0.azure_blob_storage_data_source.0.azure_credentials.0.sas_token"),
+	}
+
+	return []map[string]interface{}{data}
+}
+
+func expandAzureBlobStorageData(azureBlobStorageDatas []interface{}) *storagetransfer.AzureBlobStorageData {
+	if len(azureBlobStorageDatas) == 0 || azureBlobStorageDatas[0] == nil {
+		return nil
+	}
+
+	azureBlobStorageData := azureBlobStorageDatas[0].(map[string]interface{})
+	return &storagetransfer.AzureBlobStorageData{
+		Container:        azureBlobStorageData["container"].(string),
+		Path:             azureBlobStorageData["path"].(string),
+		StorageAccount:   azureBlobStorageData["storage_account"].(string),
+		AzureCredentials: expandAzureCredentials(azureBlobStorageData["azure_credentials"].([]interface{})),
+	}
+}
+
+func flattenAzureBlobStorageData(azureBlobStorageData *storagetransfer.AzureBlobStorageData, d *schema.ResourceData) []map[string]interface{} {
+	data := map[string]interface{}{
+		"container":         azureBlobStorageData.Container,
+		"path":              azureBlobStorageData.Path,
+		"storage_account":   azureBlobStorageData.StorageAccount,
+		"azure_credentials": flattenAzureCredentials(d),
+	}
+
+	return []map[string]interface{}{data}
+}
+
 func expandObjectConditions(conditions []interface{}) *storagetransfer.ObjectConditions {
 	if len(conditions) == 0 || conditions[0] == nil {
 		return nil
@@ -823,12 +915,13 @@ func expandTransferSpecs(transferSpecs []interface{}) *storagetransfer.TransferS
 
 	transferSpec := transferSpecs[0].(map[string]interface{})
 	return &storagetransfer.TransferSpec{
-		GcsDataSink:      expandGcsData(transferSpec["gcs_data_sink"].([]interface{})),
-		ObjectConditions: expandObjectConditions(transferSpec["object_conditions"].([]interface{})),
-		TransferOptions:  expandTransferOptions(transferSpec["transfer_options"].([]interface{})),
-		GcsDataSource:    expandGcsData(transferSpec["gcs_data_source"].([]interface{})),
-		AwsS3DataSource:  expandAwsS3Data(transferSpec["aws_s3_data_source"].([]interface{})),
-		HttpDataSource:   expandHttpData(transferSpec["http_data_source"].([]interface{})),
+		GcsDataSink:                expandGcsData(transferSpec["gcs_data_sink"].([]interface{})),
+		ObjectConditions:           expandObjectConditions(transferSpec["object_conditions"].([]interface{})),
+		TransferOptions:            expandTransferOptions(transferSpec["transfer_options"].([]interface{})),
+		GcsDataSource:              expandGcsData(transferSpec["gcs_data_source"].([]interface{})),
+		AwsS3DataSource:            expandAwsS3Data(transferSpec["aws_s3_data_source"].([]interface{})),
+		HttpDataSource:             expandHttpData(transferSpec["http_data_source"].([]interface{})),
+		AzureBlobStorageDataSource: expandAzureBlobStorageData(transferSpec["azure_blob_storage_data_source"].([]interface{})),
 	}
 }
 
@@ -850,6 +943,8 @@ func flattenTransferSpec(transferSpec *storagetransfer.TransferSpec, d *schema.R
 		data["aws_s3_data_source"] = flattenAwsS3Data(transferSpec.AwsS3DataSource, d)
 	} else if transferSpec.HttpDataSource != nil {
 		data["http_data_source"] = flattenHttpData(transferSpec.HttpDataSource)
+	} else if transferSpec.AzureBlobStorageDataSource != nil {
+		data["azure_blob_storage_data_source"] = flattenAzureBlobStorageData(transferSpec.AzureBlobStorageDataSource, d)
 	}
 
 	return []map[string][]map[string]interface{}{data}
diff --git a/website/docs/r/storage_transfer_job.html.markdown b/website/docs/r/storage_transfer_job.html.markdown
index f299072580c..ab9050604db 100644
--- a/website/docs/r/storage_transfer_job.html.markdown
+++ b/website/docs/r/storage_transfer_job.html.markdown
@@ -14,7 +14,7 @@ Creates a new Transfer Job in Google Cloud Storage Transfer.
 To get more information about Google Cloud Storage Transfer, see:
 
 * [Overview](https://cloud.google.com/storage-transfer/docs/overview)
-* [API documentation](https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs#TransferJob)
+* [API documentation](https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs)
 * How-to Guides
     * [Configuring Access to Data Sources and Sinks](https://cloud.google.com/storage-transfer/docs/configure-access)
 
@@ -118,7 +118,9 @@ The `transfer_spec` block supports:
 
 * `aws_s3_data_source` - (Optional) An AWS S3 data source. Structure documented below.
 
 * `http_data_source` - (Optional) An HTTP URL data source. Structure documented below.
+
+* `azure_blob_storage_data_source` - (Optional) An Azure Blob Storage data source. Structure documented below.
 
 The `schedule` block supports:
 
@@ -172,6 +174,20 @@ The `http_data_source` block supports:
 
 * `list_url` - (Required) The URL that points to the file that stores the object list entries. This file must allow public access. Currently, only URLs with HTTP and HTTPS schemes are supported.
 
+The `azure_blob_storage_data_source` block supports:
+
+* `storage_account` - (Required) The name of the Azure Storage account.
+
+* `container` - (Required) The container to transfer from the Azure Storage account.
+
+* `path` - (Optional) Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.
+
+* `azure_credentials` - (Required) Credentials used to authenticate API requests to Azure. Structure documented below.
+
+The `azure_credentials` block supports:
+
+* `sas_token` - (Required) Azure shared access signature. See [Grant limited access to Azure Storage resources using shared access signatures (SAS)](https://docs.microsoft.com/en-us/azure/storage/common/storage-sas-overview).
+
 The `schedule_start_date` and `schedule_end_date` blocks support:
 
 * `year` - (Required) Year of date. Must be from 1 to 9999.
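
For reference, here is a minimal configuration sketch exercising the new `azure_blob_storage_data_source` block added by this patch. The project, storage account, container, and bucket names are hypothetical, the SAS token is assumed to be supplied through a sensitive variable, and the destination bucket is assumed to already be writable by the project's Storage Transfer service account:

```hcl
variable "azure_sas_token" {
  type      = string
  sensitive = true
}

resource "google_storage_transfer_job" "azure_to_gcs" {
  description = "Nightly transfer from Azure Blob Storage to GCS"
  project     = "my-project" # hypothetical project ID

  transfer_spec {
    azure_blob_storage_data_source {
      storage_account = "examplestorageaccount" # hypothetical Azure Storage account
      container       = "example-container"     # hypothetical container name
      path            = "backups/"              # optional prefix; must end with '/'

      azure_credentials {
        sas_token = var.azure_sas_token
      }
    }

    gcs_data_sink {
      bucket_name = "example-destination-bucket" # hypothetical GCS bucket
    }
  }

  schedule {
    schedule_start_date {
      year  = 2021
      month = 6
      day   = 15
    }
  }
}
```

Because `sas_token` is marked `Sensitive` in the schema, it is stored in state but redacted from CLI output; the flatten path reads it back from state (`flattenAzureCredentials` uses `d.Get`) rather than from the API, which never returns credentials.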