Commit

Add identifying fields to DLP job trigger (#7037) (#13463)
* Add identifying fields to DLP job trigger

* Updated identifyingFields description.

* Removed unused variable and converted tabs to spaces in example file.

* Added DLP identifying fields update test

* Updated BigQuery field name description.

* Removed changes to terraform.yaml file

Signed-off-by: Modular Magician <magic-modules@google.com>

modular-magician authored Jan 11, 2023
1 parent be45107 commit 9fb464c
Showing 4 changed files with 210 additions and 0 deletions.
3 changes: 3 additions & 0 deletions .changelog/7037.txt
@@ -0,0 +1,3 @@
```release-note:enhancement
dlp: Added field 'identifyingFields' to 'bigQueryOptions' for creating DLP jobs.
```
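For context, a sketch of the block this note refers to, as it would appear inside a job trigger's `storage_config` (an excerpt, not a complete resource; project, dataset, and column names are placeholders, not values from this change):

```hcl
# Excerpt from inspect_job > storage_config in a
# google_data_loss_prevention_job_trigger resource.
big_query_options {
  table_reference {
    project_id = "my-project"    # placeholder
    dataset_id = "my_dataset"    # placeholder
    table_id   = "table_to_scan" # placeholder
  }

  # New with this change: BigQuery columns echoed back with each finding.
  identifying_fields {
    name = "record_id" # placeholder column name
  }
}
```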
72 changes: 72 additions & 0 deletions google/resource_data_loss_prevention_job_trigger.go
@@ -225,6 +225,21 @@ Only for use with external storage. Possible values: ["BASIC_COLUMNS", "GCS_COLU
},
},
},
"identifying_fields": {
Type: schema.TypeList,
Optional: true,
Description: `Specifies the BigQuery fields that will be returned with findings.
If not specified, no identifying fields will be returned for findings.`,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"name": {
Type: schema.TypeString,
Required: true,
Description: `Name of a BigQuery field to be returned with the findings.`,
},
},
},
},
"rows_limit": {
Type: schema.TypeInt,
Optional: true,
@@ -1114,6 +1129,8 @@ func flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptions(v
flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsRowsLimitPercent(original["rowsLimitPercent"], d, config)
transformed["sample_method"] =
flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsSampleMethod(original["sampleMethod"], d, config)
transformed["identifying_fields"] =
flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFields(original["identifyingFields"], d, config)
return []interface{}{transformed}
}
func flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsTableReference(v interface{}, d *schema.ResourceData, config *Config) interface{} {
@@ -1183,6 +1200,28 @@ func flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsSa
return v
}

func flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFields(v interface{}, d *schema.ResourceData, config *Config) interface{} {
if v == nil {
return v
}
l := v.([]interface{})
transformed := make([]interface{}, 0, len(l))
for _, raw := range l {
original := raw.(map[string]interface{})
if len(original) < 1 {
// Do not include empty json objects coming back from the api
continue
}
transformed = append(transformed, map[string]interface{}{
"name": flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFieldsName(original["name"], d, config),
})
}
return transformed
}
func flattenDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFieldsName(v interface{}, d *schema.ResourceData, config *Config) interface{} {
return v
}

func flattenDataLossPreventionJobTriggerInspectJobActions(v interface{}, d *schema.ResourceData, config *Config) interface{} {
if v == nil {
return v
@@ -1758,6 +1797,13 @@ func expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptions(v
transformed["sampleMethod"] = transformedSampleMethod
}

transformedIdentifyingFields, err := expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFields(original["identifying_fields"], d, config)
if err != nil {
return nil, err
} else if val := reflect.ValueOf(transformedIdentifyingFields); val.IsValid() && !isEmptyValue(val) {
transformed["identifyingFields"] = transformedIdentifyingFields
}

return transformed, nil
}

@@ -1818,6 +1864,32 @@ func expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsSam
return v, nil
}

func expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFields(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
l := v.([]interface{})
req := make([]interface{}, 0, len(l))
for _, raw := range l {
if raw == nil {
continue
}
original := raw.(map[string]interface{})
transformed := make(map[string]interface{})

transformedName, err := expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFieldsName(original["name"], d, config)
if err != nil {
return nil, err
} else if val := reflect.ValueOf(transformedName); val.IsValid() && !isEmptyValue(val) {
transformed["name"] = transformedName
}

req = append(req, transformed)
}
return req, nil
}

func expandDataLossPreventionJobTriggerInspectJobStorageConfigBigQueryOptionsIdentifyingFieldsName(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
return v, nil
}

func expandDataLossPreventionJobTriggerInspectJobActions(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
l := v.([]interface{})
req := make([]interface{}, 0, len(l))
123 changes: 123 additions & 0 deletions google/resource_data_loss_prevention_job_trigger_test.go
@@ -41,6 +41,41 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerUpdateExample(t *testing.T
})
}

func TestAccDataLossPreventionJobTrigger_dlpJobTriggerUpdateExample2(t *testing.T) {
t.Parallel()

context := map[string]interface{}{
"project": getTestProjectFromEnv(),
"random_suffix": randString(t, 10),
}

vcrTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckDataLossPreventionJobTriggerDestroyProducer(t),
Steps: []resource.TestStep{
{
Config: testAccDataLossPreventionJobTrigger_dlpJobTriggerIdentifyingFields(context),
},
{
ResourceName: "google_data_loss_prevention_job_trigger.identifying_fields",
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"parent"},
},
{
Config: testAccDataLossPreventionJobTrigger_dlpJobTriggerIdentifyingFieldsUpdate(context),
},
{
ResourceName: "google_data_loss_prevention_job_trigger.identifying_fields_update",
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"parent"},
},
},
})
}

func TestAccDataLossPreventionJobTrigger_dlpJobTriggerPubsub(t *testing.T) {
t.Parallel()

@@ -103,6 +138,50 @@ resource "google_data_loss_prevention_job_trigger" "basic" {
`, context)
}

func testAccDataLossPreventionJobTrigger_dlpJobTriggerIdentifyingFields(context map[string]interface{}) string {
return Nprintf(`
resource "google_data_loss_prevention_job_trigger" "identifying_fields" {
parent = "projects/%{project}"
description = "Starting description"
display_name = "display"
triggers {
schedule {
recurrence_period_duration = "86400s"
}
}
inspect_job {
inspect_template_name = "fake"
actions {
save_findings {
output_config {
table {
project_id = "project"
dataset_id = "dataset123"
}
}
}
}
storage_config {
big_query_options {
table_reference {
project_id = "project"
dataset_id = "dataset"
table_id = "table_to_scan"
}
rows_limit = 1000
sample_method = "RANDOM_START"
identifying_fields {
name = "field"
}
}
}
}
}
`, context)
}

func testAccDataLossPreventionJobTrigger_dlpJobTriggerUpdate(context map[string]interface{}) string {
return Nprintf(`
resource "google_data_loss_prevention_job_trigger" "basic" {
@@ -140,6 +219,50 @@ resource "google_data_loss_prevention_job_trigger" "basic" {
`, context)
}

func testAccDataLossPreventionJobTrigger_dlpJobTriggerIdentifyingFieldsUpdate(context map[string]interface{}) string {
return Nprintf(`
resource "google_data_loss_prevention_job_trigger" "identifying_fields_update" {
parent = "projects/%{project}"
description = "An updated description"
display_name = "Different"
triggers {
schedule {
recurrence_period_duration = "86400s"
}
}
inspect_job {
inspect_template_name = "fake"
actions {
save_findings {
output_config {
table {
project_id = "project"
dataset_id = "dataset123"
}
}
}
}
storage_config {
big_query_options {
table_reference {
project_id = "project"
dataset_id = "dataset"
table_id = "table_to_scan"
}
rows_limit = 1000
sample_method = "RANDOM_START"
identifying_fields {
name = "different"
}
}
}
}
}
`, context)
}

func testAccDataLossPreventionJobTrigger_publishToPubSub(context map[string]interface{}) string {
return Nprintf(`
resource "google_data_loss_prevention_job_trigger" "pubsub" {
12 changes: 12 additions & 0 deletions website/docs/r/data_loss_prevention_job_trigger.html.markdown
@@ -406,6 +406,12 @@ The following arguments are supported:
Default value is `TOP`.
Possible values are `TOP` and `RANDOM_START`.

* `identifying_fields` -
(Optional)
Specifies the BigQuery fields that will be returned with findings.
If not specified, no identifying fields will be returned for findings.
Structure is [documented below](#nested_identifying_fields).


<a name="nested_table_reference"></a>The `table_reference` block supports:

@@ -421,6 +427,12 @@ The following arguments are supported:
(Required)
The name of the table.

<a name="nested_identifying_fields"></a>The `identifying_fields` block supports:

* `name` -
(Required)
Name of a BigQuery field to be returned with the findings.
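As a rough usage sketch (placeholder project, dataset, and column names; the layout loosely follows the test configuration added in this commit):

```hcl
resource "google_data_loss_prevention_job_trigger" "identifying_fields_example" {
  parent       = "projects/my-project" # placeholder project
  description  = "Scan with identifying fields"
  display_name = "bq-scan"

  triggers {
    schedule {
      recurrence_period_duration = "86400s"
    }
  }

  inspect_job {
    inspect_template_name = "fake"

    actions {
      save_findings {
        output_config {
          table {
            project_id = "my-project"       # placeholder
            dataset_id = "findings_dataset" # placeholder
          }
        }
      }
    }

    storage_config {
      big_query_options {
        table_reference {
          project_id = "my-project"     # placeholder
          dataset_id = "source_dataset" # placeholder
          table_id   = "table_to_scan"  # placeholder
        }

        # Columns returned with each finding so results can be joined
        # back to the scanned rows.
        identifying_fields {
          name = "record_id" # placeholder column name
        }
      }
    }
  }
}
```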

<a name="nested_actions"></a>The `actions` block supports:

* `save_findings` -
