Add storage_billing_model for BigQuery datasets (#7615) (#15115)
* [Waiting for the feature to go GA] Add storage_billing_model for BigQuery datasets

* updating precheck test function name

---------

Signed-off-by: Modular Magician <magic-modules@google.com>
Co-authored-by: Luca Prete <lucaprete@google.com>
modular-magician and Luca Prete committed Jul 10, 2023
1 parent 8e58c0e commit 3894440
Showing 4 changed files with 84 additions and 0 deletions.
3 changes: 3 additions & 0 deletions .changelog/7615.txt
@@ -0,0 +1,3 @@
```release-note:enhancement
bigquery: add `storage_billing_model` argument to `google_bigquery_dataset`
```
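
For context, the new argument is set directly on a `google_bigquery_dataset` resource. A minimal sketch of a configuration that opts a dataset into physical-byte storage billing (the resource label and `dataset_id` below are illustrative, not taken from this change):

```hcl
resource "google_bigquery_dataset" "example" {
  dataset_id            = "example_dataset"
  location              = "EU"
  storage_billing_model = "PHYSICAL" # omit or set to "LOGICAL" for the default logical-byte billing
}
```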
41 changes: 41 additions & 0 deletions google/resource_big_query_dataset_test.go
@@ -169,6 +169,28 @@ func TestAccBigQueryDataset_cmek(t *testing.T) {
	})
}

func TestAccBigQueryDataset_storageBillModel(t *testing.T) {
	t.Parallel()

	datasetID := fmt.Sprintf("tf_test_%s", RandString(t, 10))

	VcrTest(t, resource.TestCase{
		PreCheck:                 func() { acctest.AccTestPreCheck(t) },
		ProtoV5ProviderFactories: ProtoV5ProviderFactories(t),
		CheckDestroy:             testAccCheckBigQueryDatasetDestroyProducer(t),
		Steps: []resource.TestStep{
			{
				Config: testAccBigQueryDatasetStorageBillingModel(datasetID),
			},
			{
				ResourceName:      "google_bigquery_dataset.test",
				ImportState:       true,
				ImportStateVerify: true,
			},
		},
	})
}

func testAccAddTable(t *testing.T, datasetID string, tableID string) resource.TestCheckFunc {
	// Not actually a check, but adds a table independently of terraform
	return func(s *terraform.State) error {
@@ -391,3 +413,22 @@ resource "google_bigquery_dataset" "test" {
}
`, pid, datasetID, kmsKey)
}

func testAccBigQueryDatasetStorageBillingModel(datasetID string) string {
	return fmt.Sprintf(`
resource "google_bigquery_dataset" "test" {
  dataset_id                      = "%s"
  friendly_name                   = "foo"
  description                     = "This is a foo description"
  location                        = "EU"
  default_partition_expiration_ms = 3600000
  default_table_expiration_ms     = 3600000
  storage_billing_model           = "PHYSICAL"
  labels = {
    env                         = "foo"
    default_table_expiration_ms = 3600000
  }
}
`, datasetID)
}
33 changes: 33 additions & 0 deletions google/services/bigquery/resource_bigquery_dataset.go
@@ -216,6 +216,16 @@ Changing this forces a new resource to be created.`,
				Optional:    true,
				Description: `Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).`,
			},
			"storage_billing_model": {
				Type:     schema.TypeString,
				Computed: true,
				Optional: true,
				Description: `Specifies the storage billing model for the dataset.
Set this flag value to LOGICAL to use logical bytes for storage billing,
or to PHYSICAL to use physical bytes instead.
LOGICAL is the default if this flag isn't specified.`,
			},
			"creation_time": {
				Type:     schema.TypeInt,
				Computed: true,
@@ -487,6 +497,12 @@ func resourceBigQueryDatasetCreate(d *schema.ResourceData, meta interface{}) err
	} else if v, ok := d.GetOkExists("default_collation"); !tpgresource.IsEmptyValue(reflect.ValueOf(defaultCollationProp)) && (ok || !reflect.DeepEqual(v, defaultCollationProp)) {
		obj["defaultCollation"] = defaultCollationProp
	}
	storageBillingModelProp, err := expandBigQueryDatasetStorageBillingModel(d.Get("storage_billing_model"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("storage_billing_model"); !tpgresource.IsEmptyValue(reflect.ValueOf(storageBillingModelProp)) && (ok || !reflect.DeepEqual(v, storageBillingModelProp)) {
		obj["storageBillingModel"] = storageBillingModelProp
	}

	url, err := tpgresource.ReplaceVars(d, config, "{{BigQueryBasePath}}projects/{{project}}/datasets")
	if err != nil {
@@ -635,6 +651,9 @@ func resourceBigQueryDatasetRead(d *schema.ResourceData, meta interface{}) error
	if err := d.Set("default_collation", flattenBigQueryDatasetDefaultCollation(res["defaultCollation"], d, config)); err != nil {
		return fmt.Errorf("Error reading Dataset: %s", err)
	}
	if err := d.Set("storage_billing_model", flattenBigQueryDatasetStorageBillingModel(res["storageBillingModel"], d, config)); err != nil {
		return fmt.Errorf("Error reading Dataset: %s", err)
	}
	if err := d.Set("self_link", tpgresource.ConvertSelfLinkToV1(res["selfLink"].(string))); err != nil {
		return fmt.Errorf("Error reading Dataset: %s", err)
	}
@@ -730,6 +749,12 @@ func resourceBigQueryDatasetUpdate(d *schema.ResourceData, meta interface{}) err
	} else if v, ok := d.GetOkExists("default_collation"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, defaultCollationProp)) {
		obj["defaultCollation"] = defaultCollationProp
	}
	storageBillingModelProp, err := expandBigQueryDatasetStorageBillingModel(d.Get("storage_billing_model"), d, config)
	if err != nil {
		return err
	} else if v, ok := d.GetOkExists("storage_billing_model"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, storageBillingModelProp)) {
		obj["storageBillingModel"] = storageBillingModelProp
	}

	url, err := tpgresource.ReplaceVars(d, config, "{{BigQueryBasePath}}projects/{{project}}/datasets/{{dataset_id}}")
	if err != nil {
@@ -1117,6 +1142,10 @@ func flattenBigQueryDatasetDefaultCollation(v interface{}, d *schema.ResourceDat
	return v
}

func flattenBigQueryDatasetStorageBillingModel(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} {
	return v
}

func expandBigQueryDatasetMaxTimeTravelHours(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {
	return v, nil
}
@@ -1444,3 +1473,7 @@ func expandBigQueryDatasetIsCaseInsensitive(v interface{}, d tpgresource.Terrafo
func expandBigQueryDatasetDefaultCollation(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {
	return v, nil
}

func expandBigQueryDatasetStorageBillingModel(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {
	return v, nil
}
7 changes: 7 additions & 0 deletions website/docs/r/bigquery_dataset.html.markdown
@@ -311,6 +311,13 @@ The following arguments are supported:
- 'und:ci': undetermined locale, case insensitive.
- '': empty string. Default to case-sensitive behavior.

* `storage_billing_model` - (Optional)
  Specifies the storage billing model for the dataset.
  Set this flag value to LOGICAL to use logical bytes for storage billing,
  or to PHYSICAL to use physical bytes instead.
  LOGICAL is the default if this flag isn't specified.

* `project` - (Optional) The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.

