Add extra BigQuery options to DLP inspect job trigger (#6749)
Wallace99 authored Nov 8, 2022
1 parent c5063eb commit 50fa3ab
Showing 5 changed files with 111 additions and 2 deletions.
21 changes: 21 additions & 0 deletions mmv1/products/dlp/api.yaml
@@ -288,6 +288,27 @@ objects:
required: true
description: |
The name of the table.
- !ruby/object:Api::Type::Integer
name: 'rowsLimit'
description: |
Max number of rows to scan. If the table has more rows than this value, the rest of the rows are omitted.
If not set, or if set to 0, all rows will be scanned. Only one of rowsLimit and rowsLimitPercent can be
specified. Cannot be used in conjunction with TimespanConfig.
- !ruby/object:Api::Type::Integer
name: 'rowsLimitPercent'
description: |
Max percentage of rows to scan. The rest are omitted. The number of rows scanned is rounded down.
Must be between 0 and 100, inclusive. Both 0 and 100 mean no limit. Defaults to 0. Only one of
rowsLimit and rowsLimitPercent can be specified. Cannot be used in conjunction with TimespanConfig.
- !ruby/object:Api::Type::Enum
name: 'sampleMethod'
description: |
How to sample rows if not all rows are scanned. Meaningful only when used in conjunction with either
rowsLimit or rowsLimitPercent. If not specified, rows are scanned in the order BigQuery reads them.
values:
- :TOP
- :RANDOM_START
default_value: :TOP
- !ruby/object:Api::Type::Array
name: 'actions'
required: true
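The three fields added above surface in the provider as snake_case attributes of the big_query_options block, as the two new examples later in this commit show. The trimmed sketch below gathers just the relevant attributes in one place; the resource name, project, dataset, and table IDs are placeholders, and the triggers and actions blocks that a real configuration also needs are omitted for brevity.

resource "google_data_loss_prevention_job_trigger" "sampled_scan" {
  parent = "projects/my-project"
  # triggers { ... } omitted for brevity; see the full examples below.

  inspect_job {
    inspect_template_name = "fake"
    # actions { ... } omitted for brevity; see the full examples below.

    storage_config {
      big_query_options {
        table_reference {
          project_id = "my-project"
          dataset_id = "my_dataset"
          table_id   = "table_to_scan"
        }

        # Only one of rows_limit and rows_limit_percent may be set, and
        # neither can be combined with timespan_config.
        rows_limit = 1000           # scan at most 1,000 rows
        # rows_limit_percent = 50   # alternative: scan roughly 50% of rows

        # TOP (the default) scans rows in the order BigQuery reads them;
        # RANDOM_START picks the rows to scan at random instead.
        sample_method = "RANDOM_START"
      }
    }
  }
}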
14 changes: 14 additions & 0 deletions mmv1/products/dlp/terraform.yaml
@@ -49,6 +49,20 @@ overrides: !ruby/object:Overrides::ResourceOverrides
trigger: "trigger"
test_env_vars:
project: :PROJECT_NAME
- !ruby/object:Provider::Terraform::Examples
name: "dlp_job_trigger_bigquery_row_limit"
primary_resource_id: "bigquery_row_limit"
vars:
trigger: "trigger"
test_env_vars:
project: :PROJECT_NAME
- !ruby/object:Provider::Terraform::Examples
name: "dlp_job_trigger_bigquery_row_limit_percentage"
primary_resource_id: "bigquery_row_limit_percentage"
vars:
trigger: "trigger"
test_env_vars:
project: :PROJECT_NAME
custom_code: !ruby/object:Provider::Terraform::CustomCode
encoder: templates/terraform/encoders/wrap_object.go.erb
custom_import: templates/terraform/custom_import/dlp_import.go.erb
@@ -15,8 +15,8 @@ resource "google_data_loss_prevention_job_trigger" "<%= ctx[:primary_resource_id
save_findings {
output_config {
table {
project_id = "asdf"
dataset_id = "asdf"
project_id = "project"
dataset_id = "dataset"
}
}
}
@@ -0,0 +1,37 @@
resource "google_data_loss_prevention_job_trigger" "<%= ctx[:primary_resource_id] %>" {
parent = "projects/<%= ctx[:test_env_vars]['project'] %>"
description = "Description"
display_name = "Displayname"

triggers {
schedule {
recurrence_period_duration = "86400s"
}
}

inspect_job {
inspect_template_name = "fake"
actions {
save_findings {
output_config {
table {
project_id = "project"
dataset_id = "dataset"
}
}
}
}
storage_config {
big_query_options {
table_reference {
project_id = "project"
dataset_id = "dataset"
table_id = "table_to_scan"
}

rows_limit = 1000
sample_method = "RANDOM_START"
}
}
}
}
@@ -0,0 +1,37 @@
resource "google_data_loss_prevention_job_trigger" "<%= ctx[:primary_resource_id] %>" {
parent = "projects/<%= ctx[:test_env_vars]['project'] %>"
description = "Description"
display_name = "Displayname"

triggers {
schedule {
recurrence_period_duration = "86400s"
}
}

inspect_job {
inspect_template_name = "fake"
actions {
save_findings {
output_config {
table {
project_id = "project"
dataset_id = "dataset"
}
}
}
}
storage_config {
big_query_options {
table_reference {
project_id = "project"
dataset_id = "dataset"
table_id = "table_to_scan"
}

rows_limit_percent = 50
sample_method = "RANDOM_START"
}
}
}
}
