Skip to content

Commit

Permalink
Enforce mutual exclusivity among view, materialized view, and schema …
Browse files Browse the repository at this point in the history
…in BigQuery table config (#7973)

* Enforce mutual exclusivity among view, materialized view, and schema in BigQuery table config

* fix merge conflict

* fix field specification and add a VCR skip for the new acceptance test

* skip VCR for MaterializedView_WithView test too
  • Loading branch information
wj-chen authored Aug 9, 2023
1 parent a102bc6 commit 87e4f29
Show file tree
Hide file tree
Showing 2 changed files with 159 additions and 49 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -761,6 +761,7 @@ func ResourceBigQueryTable() *schema.Resource {
},

// Schema: [Optional] Describes the schema of this table.
// Schema is mutually exclusive with View and Materialized View.
"schema": {
Type: schema.TypeString,
Optional: true,
Expand All @@ -772,8 +773,10 @@ func ResourceBigQueryTable() *schema.Resource {
},
DiffSuppressFunc: bigQueryTableSchemaDiffSuppress,
Description: `A JSON schema for the table.`,
ConflictsWith: []string{"view", "materialized_view"},
},
// View: [Optional] If specified, configures this table as a view.
// View is mutually exclusive with Schema and Materialized View.
"view": {
Type: schema.TypeList,
Optional: true,
Expand All @@ -800,9 +803,11 @@ func ResourceBigQueryTable() *schema.Resource {
},
},
},
ConflictsWith: []string{"schema", "materialized_view"},
},

// Materialized View: [Optional] If specified, configures this table as a materialized view.
// Materialized View is mutually exclusive with Schema and View.
"materialized_view": {
Type: schema.TypeList,
Optional: true,
Expand Down Expand Up @@ -839,6 +844,7 @@ func ResourceBigQueryTable() *schema.Resource {
},
},
},
ConflictsWith: []string{"schema", "view"},
},

// TimePartitioning: [Experimental] If specified, configures time-based
Expand Down Expand Up @@ -1170,41 +1176,16 @@ func resourceBigQueryTableCreate(d *schema.ResourceData, meta interface{}) error

datasetID := d.Get("dataset_id").(string)

if table.View != nil && table.Schema != nil {
log.Printf("[INFO] Creating BigQuery table: %s", table.TableReference.TableId)

log.Printf("[INFO] Removing schema from table definition because big query does not support setting schema on view creation")
schemaBack := table.Schema
table.Schema = nil

log.Printf("[INFO] Creating BigQuery table: %s without schema", table.TableReference.TableId)

res, err := config.NewBigQueryClient(userAgent).Tables.Insert(project, datasetID, table).Do()
if err != nil {
return err
}

log.Printf("[INFO] BigQuery table %s has been created", res.Id)
d.SetId(fmt.Sprintf("projects/%s/datasets/%s/tables/%s", res.TableReference.ProjectId, res.TableReference.DatasetId, res.TableReference.TableId))

table.Schema = schemaBack
log.Printf("[INFO] Updating BigQuery table: %s with schema", table.TableReference.TableId)
if _, err = config.NewBigQueryClient(userAgent).Tables.Update(project, datasetID, res.TableReference.TableId, table).Do(); err != nil {
return err
}

log.Printf("[INFO] BigQuery table %s has been update with schema", res.Id)
} else {
log.Printf("[INFO] Creating BigQuery table: %s", table.TableReference.TableId)

res, err := config.NewBigQueryClient(userAgent).Tables.Insert(project, datasetID, table).Do()
if err != nil {
return err
}

log.Printf("[INFO] BigQuery table %s has been created", res.Id)
d.SetId(fmt.Sprintf("projects/%s/datasets/%s/tables/%s", res.TableReference.ProjectId, res.TableReference.DatasetId, res.TableReference.TableId))
res, err := config.NewBigQueryClient(userAgent).Tables.Insert(project, datasetID, table).Do()
if err != nil {
return err
}

log.Printf("[INFO] BigQuery table %s has been created", res.Id)
d.SetId(fmt.Sprintf("projects/%s/datasets/%s/tables/%s", res.TableReference.ProjectId, res.TableReference.DatasetId, res.TableReference.TableId))

return resourceBigQueryTableRead(d, meta)
}

Expand Down
163 changes: 146 additions & 17 deletions mmv1/third_party/terraform/tests/resource_bigquery_table_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -367,22 +367,8 @@ func TestAccBigQueryTable_WithViewAndSchema(t *testing.T) {
CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t),
Steps: []resource.TestStep{
{
Config: testAccBigQueryTableWithViewAndSchema(datasetID, tableID, "table description1"),
},
{
ResourceName: "google_bigquery_table.test",
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"deletion_protection"},
},
{
Config: testAccBigQueryTableWithViewAndSchema(datasetID, tableID, "table description2"),
},
{
ResourceName: "google_bigquery_table.test",
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"deletion_protection"},
Config: testAccBigQueryTableWithViewAndSchema(datasetID, tableID, "table description"),
ExpectError: regexp.MustCompile("\"view\": conflicts with schema"),
},
},
})
Expand Down Expand Up @@ -487,6 +473,52 @@ func TestAccBigQueryTable_MaterializedView_DailyTimePartioning_Update(t *testing
})
}

// TestAccBigQueryTable_MaterializedView_WithSchema verifies that a config
// setting both "materialized_view" and "schema" on one table is rejected
// by the resource's ConflictsWith validation before any API call is made.
func TestAccBigQueryTable_MaterializedView_WithSchema(t *testing.T) {
	t.Parallel()
	// Pending VCR support in https://github.com/hashicorp/terraform-provider-google/issues/15427.
	acctest.SkipIfVcr(t)

	newID := func() string { return fmt.Sprintf("tf_test_%s", RandString(t, 10)) }
	dataset, baseTable, mvName := newID(), newID(), newID()
	mvQuery := fmt.Sprintf("SELECT some_int FROM `%s.%s`", dataset, baseTable)

	conflictStep := resource.TestStep{
		Config:      testAccBigQueryTableWithMatViewAndSchema(dataset, baseTable, mvName, mvQuery),
		ExpectError: regexp.MustCompile(`"materialized_view": conflicts with schema`),
	}

	VcrTest(t, resource.TestCase{
		PreCheck:                 func() { acctest.AccTestPreCheck(t) },
		ProtoV5ProviderFactories: ProtoV5ProviderFactories(t),
		CheckDestroy:             testAccCheckBigQueryTableDestroyProducer(t),
		Steps:                    []resource.TestStep{conflictStep},
	})
}

// TestAccBigQueryTable_MaterializedView_WithView verifies that a config
// setting both "materialized_view" and "view" on one table is rejected
// by the resource's ConflictsWith validation before any API call is made.
func TestAccBigQueryTable_MaterializedView_WithView(t *testing.T) {
	t.Parallel()
	// Pending VCR support in https://github.com/hashicorp/terraform-provider-google/issues/15427.
	acctest.SkipIfVcr(t)

	newID := func() string { return fmt.Sprintf("tf_test_%s", RandString(t, 10)) }
	dataset, baseTable, mvName := newID(), newID(), newID()
	mvQuery := fmt.Sprintf("SELECT some_int FROM `%s.%s`", dataset, baseTable)

	conflictStep := resource.TestStep{
		Config:      testAccBigQueryTableWithMatViewAndView(dataset, baseTable, mvName, mvQuery),
		ExpectError: regexp.MustCompile(`"materialized_view": conflicts with view`),
	}

	VcrTest(t, resource.TestCase{
		PreCheck:                 func() { acctest.AccTestPreCheck(t) },
		ProtoV5ProviderFactories: ProtoV5ProviderFactories(t),
		CheckDestroy:             testAccCheckBigQueryTableDestroyProducer(t),
		Steps:                    []resource.TestStep{conflictStep},
	})
}

func TestAccBigQueryExternalDataTable_parquet(t *testing.T) {
t.Parallel()

Expand Down Expand Up @@ -949,7 +981,7 @@ func TestAccBigQueryTable_emptySchema(t *testing.T) {

func TestAccBigQueryTable_invalidSchemas(t *testing.T) {
t.Parallel()
// Not an acceptance test.
// Pending VCR support in https://github.com/hashicorp/terraform-provider-google/issues/15427.
acctest.SkipIfVcr(t)

datasetID := fmt.Sprintf("tf_test_%s", RandString(t, 10))
Expand Down Expand Up @@ -1554,6 +1586,103 @@ resource "google_bigquery_table" "mv_test" {
`, datasetID, tableID, mViewID, enable_refresh, refresh_interval, query)
}

// testAccBigQueryTableWithMatViewAndSchema returns a Terraform config with a
// dataset, a base table ("test") carrying an explicit schema, and a second
// table ("mv_test") that sets BOTH a materialized_view block and a schema.
// Those two fields are mutually exclusive in the resource schema, so a test
// applying this config should expect a "conflicts with" validation error.
// The query argument is interpolated into the materialized_view block.
func testAccBigQueryTableWithMatViewAndSchema(datasetID, tableID, mViewID, query string) string {
	return fmt.Sprintf(`
resource "google_bigquery_dataset" "test" {
dataset_id = "%s"
}
resource "google_bigquery_table" "test" {
deletion_protection = false
table_id = "%s"
dataset_id = google_bigquery_dataset.test.dataset_id
schema = <<EOH
[
{
"name": "some_int",
"type": "INTEGER"
}
]
EOH
}
resource "google_bigquery_table" "mv_test" {
deletion_protection = false
table_id = "%s"
dataset_id = google_bigquery_dataset.test.dataset_id
materialized_view {
enable_refresh = true
refresh_interval_ms = 360000
query = "%s"
}
schema = <<EOH
[
{
"description": "special new description with capital letter Z",
"name": "some_int",
"type": "INTEGER"
}
]
EOH
depends_on = [
google_bigquery_table.test,
]
}
`, datasetID, tableID, mViewID, query)
}

// testAccBigQueryTableWithMatViewAndView returns a Terraform config with a
// dataset, a base table ("test") carrying an explicit schema, and a second
// table ("mv_test") that sets BOTH a view block and a materialized_view
// block. Those two fields are mutually exclusive in the resource schema, so
// a test applying this config should expect a "conflicts with" validation
// error. The query argument is interpolated into the materialized_view block.
func testAccBigQueryTableWithMatViewAndView(datasetID, tableID, mViewID, query string) string {
	return fmt.Sprintf(`
resource "google_bigquery_dataset" "test" {
dataset_id = "%s"
}
resource "google_bigquery_table" "test" {
deletion_protection = false
table_id = "%s"
dataset_id = google_bigquery_dataset.test.dataset_id
schema = <<EOH
[
{
"name": "some_int",
"type": "INTEGER"
}
]
EOH
}
resource "google_bigquery_table" "mv_test" {
deletion_protection = false
table_id = "%s"
dataset_id = google_bigquery_dataset.test.dataset_id
view {
query = <<SQL
select "val1" as col1, "val2" as col2
SQL
use_legacy_sql = false
}
materialized_view {
enable_refresh = true
refresh_interval_ms = 360000
query = "%s"
}
depends_on = [
google_bigquery_table.test,
]
}
`, datasetID, tableID, mViewID, query)
}

func testAccBigQueryTableUpdated(datasetID, tableID string) string {
return fmt.Sprintf(`
resource "google_bigquery_dataset" "test" {
Expand Down

0 comments on commit 87e4f29

Please sign in to comment.