Added job_notification_emails and deidentify actions in google_data_loss_prevention_job_trigger (#7687)

* Added job_notification_emails and deidentify actions in google_data_loss_prevention_job_trigger

* Updated the deidentify actions enum and added update tests for deidentify action
abheda-crest authored Apr 13, 2023
1 parent 636581c commit 943e8d4
Showing 4 changed files with 514 additions and 0 deletions.
121 changes: 121 additions & 0 deletions mmv1/products/dlp/JobTrigger.yaml
@@ -63,6 +63,20 @@ examples:
test_env_vars:
project: :PROJECT_NAME
skip_docs: true
- !ruby/object:Provider::Terraform::Examples
name: "dlp_job_trigger_job_notification_emails"
primary_resource_id: "job_notification_emails"
test_env_vars:
project: :PROJECT_NAME
- !ruby/object:Provider::Terraform::Examples
name: "dlp_job_trigger_deidentify"
primary_resource_id: "deidentify"
vars:
name: "tf_test"
test_env_vars:
project: :PROJECT_NAME
test_vars_overrides:
name: '"tf_test_" + RandString(t, 10)'
custom_code: !ruby/object:Provider::Terraform::CustomCode
encoder: templates/terraform/encoders/wrap_object.go.erb
custom_import: templates/terraform/custom_import/dlp_import.go.erb
@@ -371,6 +385,8 @@ properties:
- pub_sub
- publish_findings_to_cloud_data_catalog
- publish_summary_to_cscc
- job_notification_emails
- deidentify
description: |
If set, the detailed findings will be persisted to the specified OutputStorageConfig. Only a single instance of this action can be specified. Compatible with: Inspect, Risk
properties:
Expand Down Expand Up @@ -425,6 +441,8 @@ properties:
- pub_sub
- publish_findings_to_cloud_data_catalog
- publish_summary_to_cscc
- job_notification_emails
- deidentify
description: |
Publish a message into a given Pub/Sub topic when the job completes.
properties:
@@ -440,6 +458,8 @@ properties:
- pub_sub
- publish_findings_to_cloud_data_catalog
- publish_summary_to_cscc
- job_notification_emails
- deidentify
allow_empty_object: true
send_empty_value: true
properties: []
@@ -452,8 +472,109 @@ properties:
- pub_sub
- publish_findings_to_cloud_data_catalog
- publish_summary_to_cscc
- job_notification_emails
- deidentify
allow_empty_object: true
send_empty_value: true
properties: []
description: |
Publish findings of a DlpJob to Data Catalog.
- !ruby/object:Api::Type::NestedObject
name: 'jobNotificationEmails'
exactly_one_of:
- save_findings
- pub_sub
- publish_findings_to_cloud_data_catalog
- publish_summary_to_cscc
- job_notification_emails
- deidentify
allow_empty_object: true
send_empty_value: true
properties: []
description: |
Sends an email when the job completes. The email goes to IAM project owners and technical Essential Contacts.
- !ruby/object:Api::Type::NestedObject
name: 'deidentify'
exactly_one_of:
- save_findings
- pub_sub
- publish_findings_to_cloud_data_catalog
- publish_summary_to_cscc
- job_notification_emails
- deidentify
description: |
Create a de-identified copy of the requested table or files.
properties:
- !ruby/object:Api::Type::String
name: 'cloudStorageOutput'
required: true
description: |
User settable Cloud Storage bucket and folders to store de-identified files.
This field must be set for cloud storage deidentification.
The output Cloud Storage bucket must be different from the input bucket.
De-identified files will overwrite files in the output path.
Form of: gs://bucket/folder/ or gs://bucket
- !ruby/object:Api::Type::Array
name: 'fileTypesToTransform'
description: |
List of user-specified file type groups to transform. If specified, only the files with these filetypes will be transformed.
If empty, all supported files will be transformed. Supported types may be automatically added over time.
If a file type is set in this field that isn't supported by the Deidentify action then the job will fail and will not be successfully created/started.
item_type: !ruby/object:Api::Type::Enum
name: 'fileType'
description: |
This field only has a name and description because of MM
limitations. It should not appear in downstreams.
values:
- :IMAGE
- :TEXT_FILE
- :CSV
- :TSV
- !ruby/object:Api::Type::NestedObject
name: 'transformationConfig'
description: |
User specified deidentify templates and configs for structured, unstructured, and image files.
properties:
- !ruby/object:Api::Type::String
name: 'deidentifyTemplate'
description: |
If this template is specified, it will serve as the default de-identify template.
- !ruby/object:Api::Type::String
name: 'structuredDeidentifyTemplate'
description: |
If this template is specified, it will serve as the de-identify template for structured content such as delimited files and tables.
- !ruby/object:Api::Type::String
name: 'imageRedactTemplate'
description: |
If this template is specified, it will serve as the de-identify template for images.
- !ruby/object:Api::Type::NestedObject
name: 'transformationDetailsStorageConfig'
description: |
Config for storing transformation details.
properties:
- !ruby/object:Api::Type::NestedObject
name: 'table'
required: true
description: |
The BigQuery table in which to store the output.
properties:
- !ruby/object:Api::Type::String
name: 'datasetId'
description: The ID of the dataset containing this table.
required: true
- !ruby/object:Api::Type::String
name: 'projectId'
description: The ID of the project containing this table.
required: true
- !ruby/object:Api::Type::String
name: 'tableId'
description: |
The ID of the table. The ID must contain only letters (a-z,
A-Z), numbers (0-9), or underscores (_). The maximum length
is 1,024 characters.
@@ -0,0 +1,83 @@
resource "google_data_loss_prevention_job_trigger" "<%= ctx[:primary_resource_id] %>" {
parent = "projects/<%= ctx[:test_env_vars]['project'] %>"
description = "Description for the job_trigger created by terraform"
display_name = "TerraformDisplayName"

triggers {
schedule {
recurrence_period_duration = "86400s"
}
}

inspect_job {
inspect_template_name = "sample-inspect-template"
actions {
deidentify {
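        # Output location for the de-identified copies; the bucket must differ from
        # the input bucket, and existing files under this path are overwritten.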
cloud_storage_output = "gs://samplebucket/dir/"
file_types_to_transform = ["CSV", "TSV"]
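        # BigQuery table for storing details about the applied transformations.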
transformation_details_storage_config {
table {
project_id = "<%= ctx[:test_env_vars]['project'] %>"
dataset_id = google_bigquery_dataset.default.dataset_id
table_id = google_bigquery_table.default.table_id
}
}
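        # De-identify templates: default, structured content, and images.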
transformation_config {
deidentify_template = "sample-deidentify-template"
image_redact_template = "sample-image-redact-template"
structured_deidentify_template = "sample-structured-deidentify-template"
}
}
}
storage_config {
cloud_storage_options {
file_set {
url = "gs://mybucket/directory/"
}
}
}
}
}

resource "google_bigquery_dataset" "default" {
dataset_id = "<%= ctx[:vars]['name'] %>"
friendly_name = "terraform-test"
description = "Description for the dataset created by terraform"
location = "US"
default_table_expiration_ms = 3600000

labels = {
env = "default"
}
}

resource "google_bigquery_table" "default" {
dataset_id = google_bigquery_dataset.default.dataset_id
table_id = "<%= ctx[:vars]['name'] %>"
deletion_protection = false

time_partitioning {
type = "DAY"
}

labels = {
env = "default"
}

schema = <<EOF
[
{
"name": "quantity",
"type": "NUMERIC",
"mode": "NULLABLE",
"description": "The quantity"
},
{
"name": "name",
"type": "STRING",
"mode": "NULLABLE",
"description": "Name of the object"
}
]
EOF
}
@@ -0,0 +1,25 @@
resource "google_data_loss_prevention_job_trigger" "<%= ctx[:primary_resource_id] %>" {
parent = "projects/<%= ctx[:test_env_vars]['project'] %>"
description = "Description for the job_trigger created by terraform"
display_name = "TerraformDisplayName"

triggers {
schedule {
recurrence_period_duration = "86400s"
}
}

inspect_job {
inspect_template_name = "sample-inspect-template"
actions {
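      # Emails IAM project owners and technical Essential Contacts when the job completes.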
job_notification_emails {}
}
storage_config {
cloud_storage_options {
file_set {
url = "gs://mybucket/directory/"
}
}
}
}
}
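
Each generated example above attaches a single action to the trigger. Per the `exactly_one_of` constraints in the schema, an `actions` block carries exactly one action type, but the block can be repeated, so both new actions can be attached to the same trigger. A minimal hand-written sketch, not part of this change (resource, project, and bucket names are placeholders):

resource "google_data_loss_prevention_job_trigger" "combined_actions" {
  parent       = "projects/my-project"
  display_name = "CombinedActionsSketch"

  triggers {
    schedule {
      recurrence_period_duration = "86400s"
    }
  }

  inspect_job {
    inspect_template_name = "sample-inspect-template"

    # One action type per block; repeat the block for additional actions.
    actions {
      job_notification_emails {}
    }

    actions {
      deidentify {
        # cloud_storage_output is the only required field of this action;
        # templates and file-type filters are optional.
        cloud_storage_output = "gs://placeholder-output-bucket/deidentified/"
      }
    }

    storage_config {
      cloud_storage_options {
        file_set {
          url = "gs://placeholder-input-bucket/directory/"
        }
      }
    }
  }
}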