Commit

fix datastream_stream dataset id import (#7451) (#14003)
Signed-off-by: Modular Magician <magic-modules@google.com>
modular-magician authored Mar 15, 2023
1 parent ad26fb1 commit 3fbe198
Showing 4 changed files with 292 additions and 5 deletions.
3 changes: 3 additions & 0 deletions .changelog/7451.txt
@@ -0,0 +1,3 @@
```release-note:bug
datastream: fixed a bug where the field `dataset_id` could not use the `id` from `google_bigquery_dataset` directly
```
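
For context before the diff: `google_bigquery_dataset.id` is the resource name `projects/{project}/datasets/{dataset_id}`, while the Datastream API stores and returns `{project}:{dataset_id}`. The sketch below is a minimal standalone illustration of the normalization the commit applies; the `normalizeDatasetId` helper and the project/dataset values are illustrative, not names from the commit.

```go
package main

import (
	"fmt"
	"regexp"
)

// Same pattern as the commit: capture group 1 is the project, group 2 the dataset.
var datasetIdRe = regexp.MustCompile(`projects/(.+)/datasets/([^\.\?\#]+)`)

// normalizeDatasetId rewrites projects/{project}/datasets/{dataset_id} into
// the {project}:{dataset_id} form the Datastream API uses; any other input
// is returned unchanged.
func normalizeDatasetId(id string) string {
	if paths := datasetIdRe.FindStringSubmatch(id); len(paths) == 3 {
		return fmt.Sprintf("%s:%s", paths[1], paths[2])
	}
	return id
}

func main() {
	fromResource := "projects/my-project/datasets/postgres" // e.g. google_bigquery_dataset.id
	fromApi := "my-project:postgres"                        // what the API returns

	fmt.Println(normalizeDatasetId(fromResource)) // my-project:postgres
	fmt.Println(normalizeDatasetId(fromApi))      // my-project:postgres (unchanged)
	// This equality is what the new DiffSuppressFunc relies on.
	fmt.Println(normalizeDatasetId(fromResource) == fromApi) // true
}
```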
34 changes: 30 additions & 4 deletions google/resource_datastream_stream.go
@@ -19,6 +19,7 @@ import (
"fmt"
"log"
"reflect"
"regexp"
"strings"
"time"

@@ -78,6 +79,20 @@ func waitForDatastreamStreamReady(d *schema.ResourceData, config *Config, timeou
})
}

// resourceDatastreamStreamDatabaseIdDiffSuppress suppresses the diff between
// the two accepted dataset_id formats: the BigQuery resource name
// projects/{project}/datasets/{dataset_id} and the {project}:{dataset_id}
// form that the Datastream API returns.
func resourceDatastreamStreamDatabaseIdDiffSuppress(_, old, new string, _ *schema.ResourceData) bool {
	re := regexp.MustCompile(`projects/(.+)/datasets/([^\.\?\#]+)`)
	paths := re.FindStringSubmatch(new)

	// The API returns the value in the form <project>:<dataset_id>, so
	// normalize the config value to that form before comparing.
	if len(paths) == 3 {
		project := paths[1]
		datasetId := paths[2]
		new = fmt.Sprintf("%s:%s", project, datasetId)
	}

	return old == new
}

func ResourceDatastreamStream() *schema.Resource {
return &schema.Resource{
Create: resourceDatastreamStreamCreate,
@@ -135,9 +150,11 @@ A duration in seconds with up to nine fractional digits, terminated by 's'. Exam
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"dataset_id": {
-							Type:        schema.TypeString,
-							Required:    true,
-							Description: `Dataset ID in the format projects/{project}/datasets/{dataset_id}`,
+							Type:             schema.TypeString,
+							Required:         true,
+							DiffSuppressFunc: resourceDatastreamStreamDatabaseIdDiffSuppress,
+							Description: `Dataset ID in the format projects/{project}/datasets/{dataset_id} or
+{project}:{dataset_id}`,
},
},
},
@@ -5086,7 +5103,16 @@ func expandDatastreamStreamDestinationConfigBigqueryDestinationConfigSingleTarge
}

func expandDatastreamStreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetDatasetId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
-	return v, nil
+	s := v.(string)
+	re := regexp.MustCompile(`projects/(.+)/datasets/([^\.\?\#]+)`)
+	paths := re.FindStringSubmatch(s)
+	if len(paths) == 3 {
+		project := paths[1]
+		datasetId := paths[2]
+		return fmt.Sprintf("%s:%s", project, datasetId), nil
+	}
+
+	return s, nil
}

func expandDatastreamStreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasets(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
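The two Go changes above work together: the expander normalizes whatever form the configuration uses into the `{project}:{dataset_id}` string before the request is sent to the API, while the `DiffSuppressFunc` applies the same normalization at plan time, so a configuration written as `projects/{project}/datasets/{dataset_id}` does not show a permanent diff against the colon-separated value the API stores in state. For example (illustrative values), `projects/my-project/datasets/my_dataset` and `my-project:my_dataset` are treated as the same dataset.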
141 changes: 141 additions & 0 deletions google/resource_datastream_stream_generated_test.go
@@ -415,6 +415,147 @@ resource "google_datastream_stream" "default" {
`, context)
}

func TestAccDatastreamStream_datastreamStreamPostgresqlBigqueryDatasetIdExample(t *testing.T) {
SkipIfVcr(t)
t.Parallel()

context := map[string]interface{}{
"random_suffix": RandString(t, 10),
}

VcrTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: TestAccProviders,
ExternalProviders: map[string]resource.ExternalProvider{
"random": {},
"time": {},
},
CheckDestroy: testAccCheckDatastreamStreamDestroyProducer(t),
Steps: []resource.TestStep{
{
Config: testAccDatastreamStream_datastreamStreamPostgresqlBigqueryDatasetIdExample(context),
},
{
ResourceName: "google_datastream_stream.default",
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"stream_id", "location"},
},
},
})
}

func testAccDatastreamStream_datastreamStreamPostgresqlBigqueryDatasetIdExample(context map[string]interface{}) string {
return Nprintf(`
resource "google_bigquery_dataset" "postgres" {
dataset_id = "postgres%{random_suffix}"
friendly_name = "postgres"
description = "Database of postgres"
location = "us-central1"
}
resource "google_datastream_stream" "default" {
display_name = "postgres to bigQuery"
location = "us-central1"
stream_id = "postgres-to-big-query%{random_suffix}"
source_config {
source_connection_profile = google_datastream_connection_profile.source_connection_profile.id
mysql_source_config {}
}
destination_config {
destination_connection_profile = google_datastream_connection_profile.destination_connection_profile2.id
bigquery_destination_config {
data_freshness = "900s"
single_target_dataset {
dataset_id = google_bigquery_dataset.postgres.id
}
}
}
backfill_all {
}
}
resource "google_datastream_connection_profile" "destination_connection_profile2" {
display_name = "Connection profile"
location = "us-central1"
connection_profile_id = "tf-test-destination-profile%{random_suffix}"
bigquery_profile {}
}
resource "google_sql_database_instance" "instance" {
name = "tf-test-my-instance%{random_suffix}"
database_version = "MYSQL_8_0"
region = "us-central1"
settings {
tier = "db-f1-micro"
backup_configuration {
enabled = true
binary_log_enabled = true
}
ip_configuration {
// Datastream IPs will vary by region.
authorized_networks {
value = "34.71.242.81"
}
authorized_networks {
value = "34.72.28.29"
}
authorized_networks {
value = "34.67.6.157"
}
authorized_networks {
value = "34.67.234.134"
}
authorized_networks {
value = "34.72.239.218"
}
}
}
deletion_protection = false
}
resource "google_sql_database" "db" {
instance = google_sql_database_instance.instance.name
name = "db"
}
resource "random_password" "pwd" {
length = 16
special = false
}
resource "google_sql_user" "user" {
name = "user%{random_suffix}"
instance = google_sql_database_instance.instance.name
host = "%"
password = random_password.pwd.result
}
resource "google_datastream_connection_profile" "source_connection_profile" {
display_name = "Source connection profile"
location = "us-central1"
connection_profile_id = "tf-test-source-profile%{random_suffix}"
mysql_profile {
hostname = google_sql_database_instance.instance.public_ip_address
username = google_sql_user.user.name
password = google_sql_user.user.password
}
}
`, context)
}

func TestAccDatastreamStream_datastreamStreamBigqueryExample(t *testing.T) {
SkipIfVcr(t)
t.Parallel()
119 changes: 118 additions & 1 deletion website/docs/r/datastream_stream.html.markdown
@@ -413,6 +413,122 @@ resource "google_datastream_stream" "stream5" {
}
}
```
<div class = "oics-button" style="float: right; margin: 0 0 -15px">
<a href="https://console.cloud.google.com/cloudshell/open?cloudshell_git_repo=https%3A%2F%2Fgit.luolix.top%2Fterraform-google-modules%2Fdocs-examples.git&cloudshell_working_dir=datastream_stream_postgresql_bigquery_dataset_id&cloudshell_image=gcr.io%2Fgraphite-cloud-shell-images%2Fterraform%3Alatest&open_in_editor=main.tf&cloudshell_print=.%2Fmotd&cloudshell_tutorial=.%2Ftutorial.md" target="_blank">
<img alt="Open in Cloud Shell" src="//gstatic.com/cloudssh/images/open-btn.svg" style="max-height: 44px; margin: 32px auto; max-width: 100%;">
</a>
</div>
## Example Usage - Datastream Stream Postgresql Bigquery Dataset Id


```hcl
resource "google_bigquery_dataset" "postgres" {
dataset_id = "postgres"
friendly_name = "postgres"
description = "Database of postgres"
location = "us-central1"
}
resource "google_datastream_stream" "default" {
display_name = "postgres to bigQuery"
location = "us-central1"
stream_id = "postgres-to-big-query"
source_config {
source_connection_profile = google_datastream_connection_profile.source_connection_profile.id
mysql_source_config {}
}
destination_config {
destination_connection_profile = google_datastream_connection_profile.destination_connection_profile2.id
bigquery_destination_config {
data_freshness = "900s"
single_target_dataset {
dataset_id = google_bigquery_dataset.postgres.id
}
}
}
backfill_all {
}
}
resource "google_datastream_connection_profile" "destination_connection_profile2" {
display_name = "Connection profile"
location = "us-central1"
connection_profile_id = "destination-profile"
bigquery_profile {}
}
resource "google_sql_database_instance" "instance" {
name = "my-instance"
database_version = "MYSQL_8_0"
region = "us-central1"
settings {
tier = "db-f1-micro"
backup_configuration {
enabled = true
binary_log_enabled = true
}
ip_configuration {
// Datastream IPs will vary by region.
authorized_networks {
value = "34.71.242.81"
}
authorized_networks {
value = "34.72.28.29"
}
authorized_networks {
value = "34.67.6.157"
}
authorized_networks {
value = "34.67.234.134"
}
authorized_networks {
value = "34.72.239.218"
}
}
}
deletion_protection = false
}
resource "google_sql_database" "db" {
instance = google_sql_database_instance.instance.name
name = "db"
}
resource "random_password" "pwd" {
length = 16
special = false
}
resource "google_sql_user" "user" {
name = "user"
instance = google_sql_database_instance.instance.name
host = "%"
password = random_password.pwd.result
}
resource "google_datastream_connection_profile" "source_connection_profile" {
display_name = "Source connection profile"
location = "us-central1"
connection_profile_id = "source-profile"
mysql_profile {
hostname = google_sql_database_instance.instance.public_ip_address
username = google_sql_user.user.name
password = google_sql_user.user.password
}
}
```
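
Note that `google_bigquery_dataset.postgres.id` resolves to the `projects/{project}/datasets/{dataset_id}` form, which is exactly the case this change makes `dataset_id` accept directly.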
<div class = "oics-button" style="float: right; margin: 0 0 -15px">
<a href="https://console.cloud.google.com/cloudshell/open?cloudshell_git_repo=https%3A%2F%2Fgit.luolix.top%2Fterraform-google-modules%2Fdocs-examples.git&cloudshell_working_dir=datastream_stream_bigquery&cloudshell_image=gcr.io%2Fgraphite-cloud-shell-images%2Fterraform%3Alatest&open_in_editor=main.tf&cloudshell_print=.%2Fmotd&cloudshell_tutorial=.%2Ftutorial.md" target="_blank">
<img alt="Open in Cloud Shell" src="//gstatic.com/cloudssh/images/open-btn.svg" style="max-height: 44px; margin: 32px auto; max-width: 100%;">
@@ -1145,7 +1261,8 @@ The following arguments are supported:

* `dataset_id` -
(Required)
-  Dataset ID in the format projects/{project}/datasets/{dataset_id}
+  Dataset ID in the format projects/{project}/datasets/{dataset_id} or
+  {project}:{dataset_id}

<a name="nested_source_hierarchy_datasets"></a>The `source_hierarchy_datasets` block supports:

