Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

add event_stream block to google_storage_transfer_job schema #8894

Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,35 @@ func ResourceStorageTransferJob() *schema.Resource {
ForceNew: true,
Description: `The project in which the resource belongs. If it is not provided, the provider project is used.`,
},
"event_stream": {
Type: schema.TypeList,
Optional: true,
MaxItems: 1,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"name": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
BBBmau marked this conversation as resolved.
Show resolved Hide resolved
Description: "Specifies a unique name of the resource such as AWS SQS ARN in the form 'arn:aws:sqs:region:account_id:queue_name', or Pub/Sub subscription resource name in the form 'projects/{project}/subscriptions/{sub}'",
},
"event_stream_start_time": {
Type: schema.TypeString,
Optional: true,
ForceNew: true,
Description: "Specifies the date and time that Storage Transfer Service starts listening for events from this stream. If no start time is specified or start time is in the past, Storage Transfer Service starts listening immediately",
ValidateFunc: validation.IsRFC3339Time,
BBBmau marked this conversation as resolved.
Show resolved Hide resolved
},
"event_stream_expiration_time": {
Type: schema.TypeString,
Optional: true,
ForceNew: true,
Description: "Specifies the data and time at which Storage Transfer Service stops listening for events from this stream. After this time, any transfers in progress will complete, but no new transfers are initiated",
ValidateFunc: validation.IsRFC3339Time,
},
},
},
},
"transfer_spec": {
Type: schema.TypeList,
Required: true,
Expand Down Expand Up @@ -565,6 +594,7 @@ func resourceStorageTransferJobCreate(d *schema.ResourceData, meta interface{})
ProjectId: project,
Status: d.Get("status").(string),
Schedule: expandTransferSchedules(d.Get("schedule").([]interface{})),
EventStream: expandEventStream(d.Get("event_stream").([]interface{})),
TransferSpec: expandTransferSpecs(d.Get("transfer_spec").([]interface{})),
NotificationConfig: expandTransferJobNotificationConfig(d.Get("notification_config").([]interface{})),
}
Expand Down Expand Up @@ -897,6 +927,39 @@ func flattenTransferSchedule(transferSchedule *storagetransfer.Schedule) []map[s
return []map[string]interface{}{data}
}

// expandEventStream converts the Terraform configuration value for the
// "event_stream" block (a zero-or-one element list) into the API's
// EventStream object. A missing or empty block yields nil so the field is
// omitted from the request entirely.
func expandEventStream(e []interface{}) *storagetransfer.EventStream {
	if len(e) == 0 || e[0] == nil {
		return nil
	}

	raw := e[0].(map[string]interface{})
	es := &storagetransfer.EventStream{}
	es.Name = raw["name"].(string)
	es.EventStreamStartTime = raw["event_stream_start_time"].(string)
	es.EventStreamExpirationTime = raw["event_stream_expiration_time"].(string)
	return es
}

// flattenEventStream converts an API EventStream into the schema's
// single-element list representation. It returns nil when the API object is
// absent or equal to its zero value, so the attribute reads back as unset.
// Optional timestamp keys are only populated when the API returned them.
func flattenEventStream(eventStream *storagetransfer.EventStream) []map[string]interface{} {
	if eventStream == nil || reflect.DeepEqual(eventStream, &storagetransfer.EventStream{}) {
		return nil
	}

	data := map[string]interface{}{"name": eventStream.Name}

	if start := eventStream.EventStreamStartTime; start != "" {
		data["event_stream_start_time"] = start
	}
	if expiration := eventStream.EventStreamExpirationTime; expiration != "" {
		data["event_stream_expiration_time"] = expiration
	}

	return []map[string]interface{}{data}
}

func expandGcsData(gcsDatas []interface{}) *storagetransfer.GcsData {
if len(gcsDatas) == 0 || gcsDatas[0] == nil {
return nil
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,14 @@ func TestAccStorageTransferJob_basic(t *testing.T) {
ImportState: true,
ImportStateVerify: true,
},
{
Config: testAccStorageTransferJob_eventStreamConfig(envvar.GetTestProjectFromEnv(), testDataSourceBucketName, testDataSinkName, testTransferJobDescription),
},
{
ResourceName: "google_storage_transfer_job.transfer_job",
ImportState: true,
ImportStateVerify: true,
},
{
Config: testAccStorageTransferJob_omitSchedule(envvar.GetTestProjectFromEnv(), testDataSourceBucketName, testDataSinkName, testTransferJobDescription),
},
Expand Down Expand Up @@ -381,6 +389,89 @@ resource "google_storage_transfer_job" "transfer_job" {
`, project, dataSourceBucketName, project, dataSinkBucketName, project, transferJobDescription, project)
}

func testAccStorageTransferJob_eventStreamConfig(project string, dataSourceBucketName string, dataSinkBucketName string, transferJobDescription string) string {
return fmt.Sprintf(`
SarahFrench marked this conversation as resolved.
Show resolved Hide resolved
data "google_storage_transfer_project_service_account" "default" {
project = "%s"
}

resource "google_storage_bucket" "data_source" {
name = "%s"
BBBmau marked this conversation as resolved.
Show resolved Hide resolved
project = "%s"
location = "US"
force_destroy = true
uniform_bucket_level_access = true
}

resource "google_storage_bucket_iam_member" "data_source" {
bucket = google_storage_bucket.data_source.name
role = "roles/storage.admin"
member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}"
}
BBBmau marked this conversation as resolved.
Show resolved Hide resolved

resource "google_storage_bucket" "data_sink" {
name = "%s"
project = "%s"
location = "US"
force_destroy = true
uniform_bucket_level_access = true
}

resource "google_storage_bucket_iam_member" "data_sink" {
bucket = google_storage_bucket.data_sink.name
role = "roles/storage.admin"
member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}"
}

resource "google_storage_transfer_job" "transfer_job" {
description = "%s"
project = "%s"

event_stream {
name = "event_stream"
BBBmau marked this conversation as resolved.
Show resolved Hide resolved
event_stream_start_time = "2014-10-02T15:01:23Z"
event_stream_expiration_time = "2023-10-02T15:01:23Z"
}

transfer_spec {
gcs_data_source {
bucket_name = google_storage_bucket.data_source.name
path = "foo/bar/"
}
gcs_data_sink {
bucket_name = google_storage_bucket.data_sink.name
path = "foo/bar/"
}
}

schedule {
schedule_start_date {
year = 2018
month = 10
day = 1
}
schedule_end_date {
year = 2019
month = 10
day = 1
}
start_time_of_day {
hours = 0
minutes = 30
seconds = 0
nanos = 0
}
repeat_interval = "604800s"
}

depends_on = [
google_storage_bucket_iam_member.data_source,
google_storage_bucket_iam_member.data_sink,
]
}
`, project, dataSourceBucketName, project, dataSinkBucketName, project, transferJobDescription, project)
}

func testAccStorageTransferJob_omitNotificationConfig(project string, dataSourceBucketName string, dataSinkBucketName string, transferJobDescription string) string {
return fmt.Sprintf(`
data "google_storage_transfer_project_service_account" "default" {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -120,6 +120,8 @@ The following arguments are supported:

- - -

* `event_stream` - (Optional) Specifies the Event-driven transfer options. Event-driven transfers listen to an event stream to transfer updated files. Structure [documented below](#nested_event_stream)

* `project` - (Optional) The project in which the resource belongs. If it
is not provided, the provider project is used.

Expand Down Expand Up @@ -161,6 +163,14 @@ The following arguments are supported:

* `repeat_interval` - (Optional) Interval between the start of each scheduled transfer. If unspecified, the default value is 24 hours. This value may not be less than 1 hour. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".

<a name="nested_event_stream"></a>The `event_stream` block supports:

* `name` - (Required) Specifies a unique name of the resource such as AWS SQS ARN in the form 'arn:aws:sqs:region:account_id:queue_name', or Pub/Sub subscription resource name in the form 'projects/{project}/subscriptions/{sub}'.

* `event_stream_start_time` - (Optional) Specifies the date and time that Storage Transfer Service starts listening for events from this stream. If no start time is specified or start time is in the past, Storage Transfer Service starts listening immediately. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".

* `event_stream_expiration_time` - (Optional) Specifies the date and time at which Storage Transfer Service stops listening for events from this stream. After this time, any transfers in progress will complete, but no new transfers are initiated. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".

<a name="nested_object_conditions"></a>The `object_conditions` block supports:

* `max_time_elapsed_since_last_modification` - (Optional) A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".
Expand Down