Commit

Revert "Enforce mutual exclusivity among view, materialized view, and… (
Browse files Browse the repository at this point in the history
#9204) (#16193)

[upstream:713319558de3381c3945cde0c10ce2425043c6bb]

Signed-off-by: Modular Magician <magic-modules@google.com>
modular-magician committed Oct 10, 2023
1 parent 9d37e78 commit 07192b7
Showing 3 changed files with 51 additions and 157 deletions.
.changelog/9204.txt (3 changes: 3 additions & 0 deletions)
@@ -0,0 +1,3 @@
+```release-note:bug
+bigquery: removed mutual exclusivity checks for view, materialized view, and schema for the Table resource.
+```
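
In practical terms, the reverted validation had rejected configurations that set schema together with view (or materialized view) at plan time. With this change, a configuration along the lines of the following sketch is accepted; the resource names, IDs, and column definitions here are illustrative only and do not come from this commit. As the resourceBigQueryTableCreate change below shows, the provider now creates the view without the schema and then applies the schema in a follow-up table update, since BigQuery does not accept a schema at view creation time.

```hcl
resource "google_bigquery_dataset" "example" {
  dataset_id = "example_dataset" # illustrative dataset ID
}

# A view plus an explicit schema on the same table. Before this commit the
# provider rejected this combination at plan time via ConflictsWith; it is now
# accepted, which allows attaching column descriptions to a view's schema.
resource "google_bigquery_table" "example_view" {
  deletion_protection = false
  dataset_id          = google_bigquery_dataset.example.dataset_id
  table_id            = "example_view" # illustrative table ID

  view {
    query          = "SELECT 'val1' AS col1, 'val2' AS col2"
    use_legacy_sql = false
  }

  schema = <<EOF
[
  {
    "name": "col1",
    "type": "STRING",
    "description": "first column of the view"
  },
  {
    "name": "col2",
    "type": "STRING",
    "description": "second column of the view"
  }
]
EOF
}
```

The updated acceptance test below applies this kind of configuration twice and verifies it survives an import, instead of expecting a plan-time conflict error.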
google/services/bigquery/resource_bigquery_table.go (45 changes: 32 additions & 13 deletions)
@@ -823,7 +823,6 @@ func ResourceBigQueryTable() *schema.Resource {
                 Elem: &schema.Schema{Type: schema.TypeString},
             },
             // Schema: [Optional] Describes the schema of this table.
-            // Schema is mutually exclusive with View and Materialized View.
             "schema": {
                 Type: schema.TypeString,
                 Optional: true,
@@ -835,10 +834,8 @@
                 },
                 DiffSuppressFunc: bigQueryTableSchemaDiffSuppress,
                 Description: `A JSON schema for the table.`,
-                ConflictsWith: []string{"view", "materialized_view"},
             },
             // View: [Optional] If specified, configures this table as a view.
-            // View is mutually exclusive with Schema and Materialized View.
             "view": {
                 Type: schema.TypeList,
                 Optional: true,
@@ -865,11 +862,9 @@
                         },
                     },
                 },
-                ConflictsWith: []string{"schema", "materialized_view"},
             },

             // Materialized View: [Optional] If specified, configures this table as a materialized view.
-            // Materialized View is mutually exclusive with Schema and View.
             "materialized_view": {
                 Type: schema.TypeList,
                 Optional: true,
@@ -914,7 +909,6 @@ func ResourceBigQueryTable() *schema.Resource {
                         },
                     },
                 },
-                ConflictsWith: []string{"schema", "view"},
             },

             // TimePartitioning: [Experimental] If specified, configures time-based
@@ -1378,15 +1372,40 @@ func resourceBigQueryTableCreate(d *schema.ResourceData, meta interface{}) error

     datasetID := d.Get("dataset_id").(string)

-    log.Printf("[INFO] Creating BigQuery table: %s", table.TableReference.TableId)
+    if table.View != nil && table.Schema != nil {

-    res, err := config.NewBigQueryClient(userAgent).Tables.Insert(project, datasetID, table).Do()
-    if err != nil {
-        return err
-    }
+        log.Printf("[INFO] Removing schema from table definition because BigQuery does not support setting schema on view creation")
+        schemaBack := table.Schema
+        table.Schema = nil
+
+        log.Printf("[INFO] Creating BigQuery table: %s without schema", table.TableReference.TableId)
+
+        res, err := config.NewBigQueryClient(userAgent).Tables.Insert(project, datasetID, table).Do()
+        if err != nil {
+            return err
+        }

-    log.Printf("[INFO] BigQuery table %s has been created", res.Id)
-    d.SetId(fmt.Sprintf("projects/%s/datasets/%s/tables/%s", res.TableReference.ProjectId, res.TableReference.DatasetId, res.TableReference.TableId))
+        log.Printf("[INFO] BigQuery table %s has been created", res.Id)
+        d.SetId(fmt.Sprintf("projects/%s/datasets/%s/tables/%s", res.TableReference.ProjectId, res.TableReference.DatasetId, res.TableReference.TableId))
+
+        table.Schema = schemaBack
+        log.Printf("[INFO] Updating BigQuery table: %s with schema", table.TableReference.TableId)
+        if _, err = config.NewBigQueryClient(userAgent).Tables.Update(project, datasetID, res.TableReference.TableId, table).Do(); err != nil {
+            return err
+        }
+
+        log.Printf("[INFO] BigQuery table %s has been updated with schema", res.Id)
+    } else {
+        log.Printf("[INFO] Creating BigQuery table: %s", table.TableReference.TableId)
+
+        res, err := config.NewBigQueryClient(userAgent).Tables.Insert(project, datasetID, table).Do()
+        if err != nil {
+            return err
+        }
+
+        log.Printf("[INFO] BigQuery table %s has been created", res.Id)
+        d.SetId(fmt.Sprintf("projects/%s/datasets/%s/tables/%s", res.TableReference.ProjectId, res.TableReference.DatasetId, res.TableReference.TableId))
+    }

     return resourceBigQueryTableRead(d, meta)
 }
google/services/bigquery/resource_bigquery_table_test.go (160 changes: 16 additions & 144 deletions)
@@ -456,8 +456,22 @@ func TestAccBigQueryTable_WithViewAndSchema(t *testing.T) {
         CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t),
         Steps: []resource.TestStep{
             {
-                Config: testAccBigQueryTableWithViewAndSchema(datasetID, tableID, "table description"),
-                ExpectError: regexp.MustCompile("\"view\": conflicts with schema"),
+                Config: testAccBigQueryTableWithViewAndSchema(datasetID, tableID, "table description1"),
             },
+            {
+                ResourceName: "google_bigquery_table.test",
+                ImportState: true,
+                ImportStateVerify: true,
+                ImportStateVerifyIgnore: []string{"deletion_protection"},
+            },
+            {
+                Config: testAccBigQueryTableWithViewAndSchema(datasetID, tableID, "table description2"),
+            },
+            {
+                ResourceName: "google_bigquery_table.test",
+                ImportState: true,
+                ImportStateVerify: true,
+                ImportStateVerifyIgnore: []string{"deletion_protection"},
+            },
         },
     })
@@ -595,51 +609,6 @@ func TestAccBigQueryTable_MaterializedView_NonIncremental_basic(t *testing.T) {
     })
 }

-func TestAccBigQueryTable_MaterializedView_WithSchema(t *testing.T) {
-    t.Parallel()
-    // Pending VCR support in https://github.com/hashicorp/terraform-provider-google/issues/15427.
-    acctest.SkipIfVcr(t)
-
-    datasetID := fmt.Sprintf("tf_test_%s", acctest.RandString(t, 10))
-    tableID := fmt.Sprintf("tf_test_%s", acctest.RandString(t, 10))
-    materializedViewID := fmt.Sprintf("tf_test_%s", acctest.RandString(t, 10))
-    query := fmt.Sprintf("SELECT some_int FROM `%s.%s`", datasetID, tableID)
-
-    acctest.VcrTest(t, resource.TestCase{
-        PreCheck: func() { acctest.AccTestPreCheck(t) },
-        ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
-        CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t),
-        Steps: []resource.TestStep{
-            {
-                Config: testAccBigQueryTableWithMatViewAndSchema(datasetID, tableID, materializedViewID, query),
-                ExpectError: regexp.MustCompile("\"materialized_view\": conflicts with schema"),
-            },
-        },
-    })
-}
-
-func TestAccBigQueryTable_MaterializedView_WithView(t *testing.T) {
-    t.Parallel()
-    acctest.SkipIfVcr(t)
-
-    datasetID := fmt.Sprintf("tf_test_%s", acctest.RandString(t, 10))
-    tableID := fmt.Sprintf("tf_test_%s", acctest.RandString(t, 10))
-    materializedViewID := fmt.Sprintf("tf_test_%s", acctest.RandString(t, 10))
-    query := fmt.Sprintf("SELECT some_int FROM `%s.%s`", datasetID, tableID)
-
-    acctest.VcrTest(t, resource.TestCase{
-        PreCheck: func() { acctest.AccTestPreCheck(t) },
-        ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
-        CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t),
-        Steps: []resource.TestStep{
-            {
-                Config: testAccBigQueryTableWithMatViewAndView(datasetID, tableID, materializedViewID, query),
-                ExpectError: regexp.MustCompile("\"materialized_view\": conflicts with view"),
-            },
-        },
-    })
-}
-
 func TestAccBigQueryExternalDataTable_parquet(t *testing.T) {
     t.Parallel()

@@ -2176,103 +2145,6 @@ resource "google_bigquery_table" "mv_test" {
 `, datasetID, tableID, mViewID, enable_refresh, refresh_interval, query)
 }

-func testAccBigQueryTableWithMatViewAndSchema(datasetID, tableID, mViewID, query string) string {
-    return fmt.Sprintf(`
-resource "google_bigquery_dataset" "test" {
-  dataset_id = "%s"
-}
-resource "google_bigquery_table" "test" {
-  deletion_protection = false
-  table_id = "%s"
-  dataset_id = google_bigquery_dataset.test.dataset_id
-  schema = <<EOH
-[
-  {
-    "name": "some_int",
-    "type": "INTEGER"
-  }
-]
-EOH
-}
-resource "google_bigquery_table" "mv_test" {
-  deletion_protection = false
-  table_id = "%s"
-  dataset_id = google_bigquery_dataset.test.dataset_id
-  materialized_view {
-    enable_refresh = true
-    refresh_interval_ms = 360000
-    query = "%s"
-  }
-  schema = <<EOH
-[
-  {
-    "description": "special new description with capital letter Z",
-    "name": "some_int",
-    "type": "INTEGER"
-  }
-]
-EOH
-  depends_on = [
-    google_bigquery_table.test,
-  ]
-}
-`, datasetID, tableID, mViewID, query)
-}
-
-func testAccBigQueryTableWithMatViewAndView(datasetID, tableID, mViewID, query string) string {
-    return fmt.Sprintf(`
-resource "google_bigquery_dataset" "test" {
-  dataset_id = "%s"
-}
-resource "google_bigquery_table" "test" {
-  deletion_protection = false
-  table_id = "%s"
-  dataset_id = google_bigquery_dataset.test.dataset_id
-  schema = <<EOH
-[
-  {
-    "name": "some_int",
-    "type": "INTEGER"
-  }
-]
-EOH
-}
-resource "google_bigquery_table" "mv_test" {
-  deletion_protection = false
-  table_id = "%s"
-  dataset_id = google_bigquery_dataset.test.dataset_id
-  view {
-    query = <<SQL
-select "val1" as col1, "val2" as col2
-SQL
-    use_legacy_sql = false
-  }
-  materialized_view {
-    enable_refresh = true
-    refresh_interval_ms = 360000
-    query = "%s"
-  }
-  depends_on = [
-    google_bigquery_table.test,
-  ]
-}
-`, datasetID, tableID, mViewID, query)
-}
-
 func testAccBigQueryTableWithMatViewNonIncremental_basic(datasetID, tableID, mViewID, query, maxStaleness string) string {
     return fmt.Sprintf(`
 resource "google_bigquery_dataset" "test" {
