From 9fb464ca878885ea9cac52fe7550f11402b713c1 Mon Sep 17 00:00:00 2001
From: The Magician
Date: Wed, 11 Jan 2023 14:05:45 -0800
Subject: [PATCH] Add identifying fields to DLP job trigger (#7037) (#13463)

* Add identifying fields to DLP job trigger

* Updated identifyingFields description.

* Removed unused variable and converted tabs to spaces in example file.

* Added DLP identifying fields update test

* Updated bigquery field name description

* Removed changes to terraform.yaml file

Signed-off-by: Modular Magician
Signed-off-by: Modular Magician
---
 .changelog/7037.txt                           |   3 +
 ...source_data_loss_prevention_job_trigger.go |  72 ++++++++++
 ...e_data_loss_prevention_job_trigger_test.go | 123 ++++++++++++++++++
 ..._loss_prevention_job_trigger.html.markdown |  12 ++
 4 files changed, 210 insertions(+)
 create mode 100644 .changelog/7037.txt

diff --git a/.changelog/7037.txt b/.changelog/7037.txt
new file mode 100644
index 00000000000..e20256e074e
--- /dev/null
+++ b/.changelog/7037.txt
@@ -0,0 +1,3 @@
+```release-note:enhancement
+dlp: Added field 'identifyingFields' to 'bigQueryOptions' for creating DLP jobs.
+```
diff --git a/google/resource_data_loss_prevention_job_trigger.go b/google/resource_data_loss_prevention_job_trigger.go
index bb0324f601a..d442170eee5 100644
--- a/google/resource_data_loss_prevention_job_trigger.go
+++ b/google/resource_data_loss_prevention_job_trigger.go
@@ -225,6 +225,21 @@ Only for use with external storage. Possible values: ["BASIC_COLUMNS", "GCS_COLU
 						},
 					},
 				},
+				"identifying_fields": {
+					Type:     schema.TypeList,
+					Optional: true,
+					Description: `Specifies the BigQuery fields that will be returned with findings.
+If not specified, no identifying fields will be returned for findings.`,
+					Elem: &schema.Resource{
+						Schema: map[string]*schema.Schema{
+							"name": {
+								Type:        schema.TypeString,
+								Required:    true,
+								Description: `Name of a BigQuery field to be returned with the findings.`,
+							},
+						},
+					},
+				},
 				"rows_limit": {
 					Type:     schema.TypeInt,
 					Optional: true,
@@ -1114,6 +1129,8 @@ func flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptions(v
 		flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsRowsLimitPercent(original["rowsLimitPercent"], d, config)
 	transformed["sample_method"] =
 		flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsSampleMethod(original["sampleMethod"], d, config)
+	transformed["identifying_fields"] =
+		flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFields(original["identifyingFields"], d, config)
 	return []interface{}{transformed}
 }
 func flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReference(v interface{}, d *schema.ResourceData, config *Config) interface{} {
@@ -1183,6 +1200,28 @@ func flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsSa
 	return v
 }
 
+func flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFields(v interface{}, d *schema.ResourceData, config *Config) interface{} {
+	if v == nil {
+		return v
+	}
+	l := v.([]interface{})
+	transformed := make([]interface{}, 0, len(l))
+	for _, raw := range l {
+		original := raw.(map[string]interface{})
+		if len(original) < 1 {
+			// Do not include empty json objects coming back from the api
+			continue
+		}
+		transformed = append(transformed, map[string]interface{}{
+			"name": flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFieldsName(original["name"], d, config),
+		})
+	}
+	return transformed
+}
+func flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFieldsName(v interface{}, d *schema.ResourceData, config *Config) interface{} {
+	return v
+}
+
 func flattenDataLossPreventionJobTriggerInspectJobActions(v interface{}, d *schema.ResourceData, config *Config) interface{} {
 	if v == nil {
 		return v
@@ -1758,6 +1797,13 @@ func expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptions(v
 		transformed["sampleMethod"] = transformedSampleMethod
 	}
 
+	transformedIdentifyingFields, err := expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFields(original["identifying_fields"], d, config)
+	if err != nil {
+		return nil, err
+	} else if val := reflect.ValueOf(transformedIdentifyingFields); val.IsValid() && !isEmptyValue(val) {
+		transformed["identifyingFields"] = transformedIdentifyingFields
+	}
+
 	return transformed, nil
 }
 
@@ -1818,6 +1864,32 @@ func expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsSam
 	return v, nil
 }
 
+func expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFields(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
+	l := v.([]interface{})
+	req := make([]interface{}, 0, len(l))
+	for _, raw := range l {
+		if raw == nil {
+			continue
+		}
+		original := raw.(map[string]interface{})
+		transformed := make(map[string]interface{})
+
+		transformedName, err := expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFieldsName(original["name"], d, config)
+		if err != nil {
+			return nil, err
+		} else if val := reflect.ValueOf(transformedName); val.IsValid() && !isEmptyValue(val) {
+			transformed["name"] = transformedName
+		}
+
+		req = append(req, transformed)
+	}
+	return req, nil
+}
+
+func expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFieldsName(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
+	return v, nil
+}
+
 func expandDataLossPreventionJobTriggerInspectJobActions(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
 	l := v.([]interface{})
 	req := make([]interface{}, 0, len(l))
diff --git a/google/resource_data_loss_prevention_job_trigger_test.go b/google/resource_data_loss_prevention_job_trigger_test.go
index b38fa383a25..1c4fa5ce092 100644
--- a/google/resource_data_loss_prevention_job_trigger_test.go
+++ b/google/resource_data_loss_prevention_job_trigger_test.go
@@ -41,6 +41,41 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerUpdateExample(t *testing.T
 	})
 }
 
+func TestAccDataLossPreventionJobTrigger_dlpJobTriggerUpdateExample2(t *testing.T) {
+	t.Parallel()
+
+	context := map[string]interface{}{
+		"project":       getTestProjectFromEnv(),
+		"random_suffix": randString(t, 10),
+	}
+
+	vcrTest(t, resource.TestCase{
+		PreCheck:     func() { testAccPreCheck(t) },
+		Providers:    testAccProviders,
+		CheckDestroy: testAccCheckDataLossPreventionJobTriggerDestroyProducer(t),
+		Steps: []resource.TestStep{
+			{
+				Config: testAccDataLossPreventionJobTrigger_dlpJobTriggerIdentifyingFields(context),
+			},
+			{
+				ResourceName:            "google_data_loss_prevention_job_trigger.identifying_fields",
+				ImportState:             true,
+				ImportStateVerify:       true,
+				ImportStateVerifyIgnore: []string{"parent"},
+			},
+			{
+				Config: testAccDataLossPreventionJobTrigger_dlpJobTriggerIdentifyingFieldsUpdate(context),
+			},
+			{
+				ResourceName:            "google_data_loss_prevention_job_trigger.identifying_fields_update",
+				ImportState:             true,
+				ImportStateVerify:       true,
+				ImportStateVerifyIgnore: []string{"parent"},
+			},
+		},
+	})
+}
+
 func TestAccDataLossPreventionJobTrigger_dlpJobTriggerPubsub(t *testing.T) {
 	t.Parallel()
 
@@ -103,6 +138,50 @@ resource "google_data_loss_prevention_job_trigger" "basic" {
 `, context)
 }
 
+func testAccDataLossPreventionJobTrigger_dlpJobTriggerIdentifyingFields(context map[string]interface{}) string {
+	return Nprintf(`
+resource "google_data_loss_prevention_job_trigger" "identifying_fields" {
+  parent       = "projects/%{project}"
+  description  = "Starting description"
+  display_name = "display"
+
+  triggers {
+    schedule {
+      recurrence_period_duration = "86400s"
+    }
+  }
+
+  inspect_job {
+    inspect_template_name = "fake"
+    actions {
+      save_findings {
+        output_config {
+          table {
+            project_id = "project"
+            dataset_id = "dataset123"
+          }
+        }
+      }
+    }
+    storage_config {
+      big_query_options {
+        table_reference {
+          project_id = "project"
+          dataset_id = "dataset"
+          table_id   = "table_to_scan"
+        }
+        rows_limit    = 1000
+        sample_method = "RANDOM_START"
+        identifying_fields {
+          name = "field"
+        }
+      }
+    }
+  }
+}
+`, context)
+}
+
 func testAccDataLossPreventionJobTrigger_dlpJobTriggerUpdate(context map[string]interface{}) string {
 	return Nprintf(`
 resource "google_data_loss_prevention_job_trigger" "basic" {
@@ -140,6 +219,50 @@ resource "google_data_loss_prevention_job_trigger" "basic" {
 `, context)
 }
 
+func testAccDataLossPreventionJobTrigger_dlpJobTriggerIdentifyingFieldsUpdate(context map[string]interface{}) string {
+	return Nprintf(`
+resource "google_data_loss_prevention_job_trigger" "identifying_fields_update" {
+  parent       = "projects/%{project}"
+  description  = "An updated description"
+  display_name = "Different"
+
+  triggers {
+    schedule {
+      recurrence_period_duration = "86400s"
+    }
+  }
+
+  inspect_job {
+    inspect_template_name = "fake"
+    actions {
+      save_findings {
+        output_config {
+          table {
+            project_id = "project"
+            dataset_id = "dataset123"
+          }
+        }
+      }
+    }
+    storage_config {
+      big_query_options {
+        table_reference {
+          project_id = "project"
+          dataset_id = "dataset"
+          table_id   = "table_to_scan"
+        }
+        rows_limit    = 1000
+        sample_method = "RANDOM_START"
+        identifying_fields {
+          name = "different"
+        }
+      }
+    }
+  }
+}
+`, context)
+}
+
 func testAccDataLossPreventionJobTrigger_publishToPubSub(context map[string]interface{}) string {
 	return Nprintf(`
 resource "google_data_loss_prevention_job_trigger" "pubsub" {
diff --git a/website/docs/r/data_loss_prevention_job_trigger.html.markdown b/website/docs/r/data_loss_prevention_job_trigger.html.markdown
index 02322c2b16a..575030d0e67 100644
--- a/website/docs/r/data_loss_prevention_job_trigger.html.markdown
+++ b/website/docs/r/data_loss_prevention_job_trigger.html.markdown
@@ -406,6 +406,12 @@ The following arguments are supported:
   Default value is `TOP`.
   Possible values are `TOP` and `RANDOM_START`.
 
+* `identifying_fields` -
+  (Optional)
+  Specifies the BigQuery fields that will be returned with findings.
+  If not specified, no identifying fields will be returned for findings.
+  Structure is [documented below](#nested_identifying_fields).
+
 
 The `table_reference` block supports:
 
@@ -421,6 +427,12 @@ The following arguments are supported:
   (Required)
   The name of the table.
 
+The `identifying_fields` block supports:
+
+* `name` -
+  (Required)
+  Name of a BigQuery field to be returned with the findings.
+
 The `actions` block supports:
 
 * `save_findings` -
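For anyone trying the new field end to end, a minimal standalone configuration might look like the sketch below. It is adapted directly from the acceptance-test fixture in this patch; the parent project, dataset, table, and field names are placeholders, and `inspect_template_name = "fake"` mirrors the test fixture rather than a real inspect template. In practice, each `identifying_fields.name` would refer to a column of the scanned table so that findings come back annotated with that column's values.

```hcl
resource "google_data_loss_prevention_job_trigger" "identifying_fields" {
  parent       = "projects/my-project-id" # placeholder project
  description  = "Scan with identifying fields"
  display_name = "display"

  triggers {
    schedule {
      recurrence_period_duration = "86400s" # run the inspect job daily
    }
  }

  inspect_job {
    inspect_template_name = "fake" # placeholder, as in the test fixture
    actions {
      save_findings {
        output_config {
          table {
            project_id = "project"
            dataset_id = "dataset"
          }
        }
      }
    }
    storage_config {
      big_query_options {
        table_reference {
          project_id = "project"
          dataset_id = "dataset"
          table_id   = "table_to_scan"
        }
        rows_limit    = 1000
        sample_method = "RANDOM_START"
        # New in this patch: return this column's value with each finding.
        identifying_fields {
          name = "field"
        }
      }
    }
  }
}
```

Note that `identifying_fields` lives under `big_query_options`, next to `sample_method` and `rows_limit`, since it only applies to BigQuery scans; the update test above exercises changing `name` in place.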