From b390762d7cf7d56154234c9ce643f7be26a0f33c Mon Sep 17 00:00:00 2001 From: The Magician Date: Thu, 21 May 2020 12:22:09 -0700 Subject: [PATCH] Data Catalog Entry (#3532) (#6444) Signed-off-by: Modular Magician --- .changelog/3532.txt | 3 + google/provider.go | 5 +- google/resource_data_catalog_entry.go | 848 ++++++++++++++++++ ...ource_data_catalog_entry_generated_test.go | 220 +++++ google/resource_data_catalog_entry_group.go | 14 +- .../resource_data_catalog_entry_group_test.go | 21 +- ...esource_data_catalog_entry_sweeper_test.go | 124 +++ google/resource_data_catalog_entry_test.go | 47 + .../docs/r/data_catalog_entry.html.markdown | 328 +++++++ .../r/data_catalog_entry_group.html.markdown | 3 - website/google.erb | 4 + 11 files changed, 1598 insertions(+), 19 deletions(-) create mode 100644 .changelog/3532.txt create mode 100644 google/resource_data_catalog_entry.go create mode 100644 google/resource_data_catalog_entry_generated_test.go create mode 100644 google/resource_data_catalog_entry_sweeper_test.go create mode 100644 google/resource_data_catalog_entry_test.go create mode 100644 website/docs/r/data_catalog_entry.html.markdown diff --git a/.changelog/3532.txt b/.changelog/3532.txt new file mode 100644 index 00000000000..d383f96d242 --- /dev/null +++ b/.changelog/3532.txt @@ -0,0 +1,3 @@ +```release-note:new-resource +`google_data_catalog_entry` +``` diff --git a/google/provider.go b/google/provider.go index 586b183c183..1d1848bd340 100644 --- a/google/provider.go +++ b/google/provider.go @@ -556,9 +556,9 @@ func Provider() terraform.ResourceProvider { return provider } -// Generated resources: 130 +// Generated resources: 131 // Generated IAM resources: 57 -// Total generated resources: 187 +// Total generated resources: 188 func ResourceMap() map[string]*schema.Resource { resourceMap, _ := ResourceMapWithErrors() return resourceMap @@ -662,6 +662,7 @@ func ResourceMapWithErrors() (map[string]*schema.Resource, error) { "google_data_catalog_entry_group_iam_binding": ResourceIamBinding(DataCatalogEntryGroupIamSchema, DataCatalogEntryGroupIamUpdaterProducer, DataCatalogEntryGroupIdParseFunc), "google_data_catalog_entry_group_iam_member": ResourceIamMember(DataCatalogEntryGroupIamSchema, DataCatalogEntryGroupIamUpdaterProducer, DataCatalogEntryGroupIdParseFunc), "google_data_catalog_entry_group_iam_policy": ResourceIamPolicy(DataCatalogEntryGroupIamSchema, DataCatalogEntryGroupIamUpdaterProducer, DataCatalogEntryGroupIdParseFunc), + "google_data_catalog_entry": resourceDataCatalogEntry(), "google_dataproc_autoscaling_policy": resourceDataprocAutoscalingPolicy(), "google_datastore_index": resourceDatastoreIndex(), "google_deployment_manager_deployment": resourceDeploymentManagerDeployment(), diff --git a/google/resource_data_catalog_entry.go b/google/resource_data_catalog_entry.go new file mode 100644 index 00000000000..68a07d522b3 --- /dev/null +++ b/google/resource_data_catalog_entry.go @@ -0,0 +1,848 @@ +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** AUTO GENERATED CODE *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. 
+// +// ---------------------------------------------------------------------------- + +package google + +import ( + "encoding/json" + "fmt" + "log" + "reflect" + "regexp" + "strconv" + "strings" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/structure" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" +) + +func resourceDataCatalogEntry() *schema.Resource { + return &schema.Resource{ + Create: resourceDataCatalogEntryCreate, + Read: resourceDataCatalogEntryRead, + Update: resourceDataCatalogEntryUpdate, + Delete: resourceDataCatalogEntryDelete, + + Importer: &schema.ResourceImporter{ + State: resourceDataCatalogEntryImport, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(4 * time.Minute), + Update: schema.DefaultTimeout(4 * time.Minute), + Delete: schema.DefaultTimeout(4 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "entry_group": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `The name of the entry group this entry is in.`, + }, + "entry_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `The id of the entry to create.`, + }, + "description": { + Type: schema.TypeString, + Optional: true, + Description: `Entry description, which can consist of several sentences or paragraphs that describe entry contents.`, + }, + "display_name": { + Type: schema.TypeString, + Optional: true, + Description: `Display information such as title and description. A short name to identify the entry, +for example, "Analytics Data - Jan 2011".`, + }, + "gcs_fileset_spec": { + Type: schema.TypeList, + Optional: true, + Description: `Specification that applies to a Cloud Storage fileset. This is only valid on entries of type FILESET.`, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "file_patterns": { + Type: schema.TypeList, + Required: true, + Description: `Patterns to identify a set of files in Google Cloud Storage. +See [Cloud Storage documentation](https://cloud.google.com/storage/docs/gsutil/addlhelp/WildcardNames) +for more information. Note that bucket wildcards are currently not supported. Examples of valid filePatterns: + +* gs://bucket_name/dir/*: matches all files within bucket_name/dir directory. +* gs://bucket_name/dir/**: matches all files in bucket_name/dir spanning all subdirectories. +* gs://bucket_name/file*: matches files prefixed by file in bucket_name +* gs://bucket_name/??.txt: matches files with two characters followed by .txt in bucket_name +* gs://bucket_name/[aeiou].txt: matches files that contain a single vowel character followed by .txt in bucket_name +* gs://bucket_name/[a-m].txt: matches files that contain a, b, ... 
or m followed by .txt in bucket_name +* gs://bucket_name/a/*/b: matches all files in bucket_name that match a/*/b pattern, such as a/c/b, a/d/b +* gs://another_bucket/a.txt: matches gs://another_bucket/a.txt`, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "sample_gcs_file_specs": { + Type: schema.TypeList, + Computed: true, + Description: `Sample files contained in this fileset, not all files contained in this fileset are represented here.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "file_path": { + Type: schema.TypeString, + Computed: true, + Description: `The full file path`, + }, + "size_bytes": { + Type: schema.TypeInt, + Computed: true, + Description: `The size of the file, in bytes.`, + }, + }, + }, + }, + }, + }, + }, + "linked_resource": { + Type: schema.TypeString, + Computed: true, + Optional: true, + Description: `The resource this metadata entry refers to. +For Google Cloud Platform resources, linkedResource is the full name of the resource. +For example, the linkedResource for a table resource from BigQuery is: +//bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId +Output only when Entry is of type in the EntryType enum. For entries with userSpecifiedType, +this field is optional and defaults to an empty string.`, + }, + "schema": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validation.ValidateJsonString, + StateFunc: func(v interface{}) string { s, _ := structure.NormalizeJsonString(v); return s }, + Description: `Schema of the entry (e.g. BigQuery, GoogleSQL, Avro schema), as a json string. An entry might not have any schema +attached to it. See +https://cloud.google.com/data-catalog/docs/reference/rest/v1/projects.locations.entryGroups.entries#schema +for what fields this schema can contain.`, + }, + "type": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateFunc: validation.StringInSlice([]string{"FILESET", ""}, false), + Description: `The type of the entry. Only used for Entries with types in the EntryType enum. +Currently, only FILESET enum value is allowed. All other entries created through Data Catalog must use userSpecifiedType. Possible values: ["FILESET"]`, + ExactlyOneOf: []string{"type", "user_specified_type"}, + }, + "user_specified_system": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validateRegexp(`^[A-z_][A-z0-9_]{0,63}$`), + Description: `This field indicates the entry's source system that Data Catalog does not integrate with. +userSpecifiedSystem strings must begin with a letter or underscore and can only contain letters, numbers, +and underscores; are case insensitive; must be at least 1 character and at most 64 characters long.`, + }, + "user_specified_type": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validateRegexp(`^[A-z_][A-z0-9_]{0,63}$`), + Description: `Entry type if it does not fit any of the input-allowed values listed in EntryType enum above. +When creating an entry, users should check the enum values first, if nothing matches the entry +to be created, then provide a custom value, for example "my_special_type". 
+userSpecifiedType strings must begin with a letter or underscore and can only contain letters, +numbers, and underscores; are case insensitive; must be at least 1 character and at most 64 characters long.`, + ExactlyOneOf: []string{"type", "user_specified_type"}, + }, + "bigquery_date_sharded_spec": { + Type: schema.TypeList, + Computed: true, + Description: `Specification for a group of BigQuery tables with name pattern [prefix]YYYYMMDD. +Context: https://cloud.google.com/bigquery/docs/partitioned-tables#partitioning_versus_sharding.`, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "dataset": { + Type: schema.TypeString, + Computed: true, + Description: `The Data Catalog resource name of the dataset entry the current table belongs to, for example, +projects/{project_id}/locations/{location}/entrygroups/{entryGroupId}/entries/{entryId}`, + }, + "shard_count": { + Type: schema.TypeInt, + Computed: true, + Description: `Total number of shards.`, + }, + "table_prefix": { + Type: schema.TypeString, + Computed: true, + Description: `The table name prefix of the shards. The name of any given shard is [tablePrefix]YYYYMMDD, +for example, for shard MyTable20180101, the tablePrefix is MyTable.`, + }, + }, + }, + }, + "bigquery_table_spec": { + Type: schema.TypeList, + Computed: true, + Description: `Specification that applies to a BigQuery table. This is only valid on entries of type TABLE.`, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "table_source_type": { + Type: schema.TypeString, + Computed: true, + Description: `The table source type.`, + }, + "table_spec": { + Type: schema.TypeList, + Computed: true, + Description: `Spec of a BigQuery table. This field should only be populated if tableSourceType is BIGQUERY_TABLE.`, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "grouped_entry": { + Type: schema.TypeString, + Computed: true, + Description: `If the table is a dated shard, i.e., with name pattern [prefix]YYYYMMDD, groupedEntry is the +Data Catalog resource name of the date sharded grouped entry, for example, +projects/{project_id}/locations/{location}/entrygroups/{entryGroupId}/entries/{entryId}. +Otherwise, groupedEntry is empty.`, + }, + }, + }, + }, + "view_spec": { + Type: schema.TypeList, + Computed: true, + Description: `Table view specification. This field should only be populated if tableSourceType is BIGQUERY_VIEW.`, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "view_query": { + Type: schema.TypeString, + Computed: true, + Description: `The query that defines the table view.`, + }, + }, + }, + }, + }, + }, + }, + "integrated_system": { + Type: schema.TypeString, + Computed: true, + Description: `This field indicates the entry's source system that Data Catalog integrates with, such as BigQuery or Pub/Sub.`, + }, + "name": { + Type: schema.TypeString, + Computed: true, + Description: `The Data Catalog resource name of the entry in URL format. +Example: projects/{project_id}/locations/{location}/entryGroups/{entryGroupId}/entries/{entryId}. 
+Note that this Entry and its child resources may not actually be stored in the location in this name.`, + }, + }, + } +} + +func resourceDataCatalogEntryCreate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + + obj := make(map[string]interface{}) + linkedResourceProp, err := expandDataCatalogEntryLinkedResource(d.Get("linked_resource"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("linked_resource"); !isEmptyValue(reflect.ValueOf(linkedResourceProp)) && (ok || !reflect.DeepEqual(v, linkedResourceProp)) { + obj["linkedResource"] = linkedResourceProp + } + displayNameProp, err := expandDataCatalogEntryDisplayName(d.Get("display_name"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("display_name"); !isEmptyValue(reflect.ValueOf(displayNameProp)) && (ok || !reflect.DeepEqual(v, displayNameProp)) { + obj["displayName"] = displayNameProp + } + descriptionProp, err := expandDataCatalogEntryDescription(d.Get("description"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("description"); !isEmptyValue(reflect.ValueOf(descriptionProp)) && (ok || !reflect.DeepEqual(v, descriptionProp)) { + obj["description"] = descriptionProp + } + schemaProp, err := expandDataCatalogEntrySchema(d.Get("schema"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("schema"); !isEmptyValue(reflect.ValueOf(schemaProp)) && (ok || !reflect.DeepEqual(v, schemaProp)) { + obj["schema"] = schemaProp + } + typeProp, err := expandDataCatalogEntryType(d.Get("type"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("type"); !isEmptyValue(reflect.ValueOf(typeProp)) && (ok || !reflect.DeepEqual(v, typeProp)) { + obj["type"] = typeProp + } + userSpecifiedTypeProp, err := expandDataCatalogEntryUserSpecifiedType(d.Get("user_specified_type"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("user_specified_type"); !isEmptyValue(reflect.ValueOf(userSpecifiedTypeProp)) && (ok || !reflect.DeepEqual(v, userSpecifiedTypeProp)) { + obj["userSpecifiedType"] = userSpecifiedTypeProp + } + userSpecifiedSystemProp, err := expandDataCatalogEntryUserSpecifiedSystem(d.Get("user_specified_system"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("user_specified_system"); !isEmptyValue(reflect.ValueOf(userSpecifiedSystemProp)) && (ok || !reflect.DeepEqual(v, userSpecifiedSystemProp)) { + obj["userSpecifiedSystem"] = userSpecifiedSystemProp + } + gcsFilesetSpecProp, err := expandDataCatalogEntryGcsFilesetSpec(d.Get("gcs_fileset_spec"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("gcs_fileset_spec"); !isEmptyValue(reflect.ValueOf(gcsFilesetSpecProp)) && (ok || !reflect.DeepEqual(v, gcsFilesetSpecProp)) { + obj["gcsFilesetSpec"] = gcsFilesetSpecProp + } + + url, err := replaceVars(d, config, "{{DataCatalogBasePath}}{{entry_group}}/entries?entryId={{entry_id}}") + if err != nil { + return err + } + + log.Printf("[DEBUG] Creating new Entry: %#v", obj) + var project string + if parts := regexp.MustCompile(`projects\/([^\/]+)\/`).FindStringSubmatch(url); parts != nil { + project = parts[1] + } + res, err := sendRequestWithTimeout(config, "POST", project, url, obj, d.Timeout(schema.TimeoutCreate)) + if err != nil { + return fmt.Errorf("Error creating Entry: %s", err) + } + if err := d.Set("name", flattenDataCatalogEntryName(res["name"], d, config)); err != nil { + return fmt.Errorf(`Error setting 
computed identity field "name": %s`, err) + } + + // Store the ID now + id, err := replaceVars(d, config, "{{name}}") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + + log.Printf("[DEBUG] Finished creating Entry %q: %#v", d.Id(), res) + + return resourceDataCatalogEntryRead(d, meta) +} + +func resourceDataCatalogEntryRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + + url, err := replaceVars(d, config, "{{DataCatalogBasePath}}{{name}}") + if err != nil { + return err + } + + var project string + if parts := regexp.MustCompile(`projects\/([^\/]+)\/`).FindStringSubmatch(url); parts != nil { + project = parts[1] + } + res, err := sendRequest(config, "GET", project, url, nil) + if err != nil { + return handleNotFoundError(err, d, fmt.Sprintf("DataCatalogEntry %q", d.Id())) + } + + if err := d.Set("name", flattenDataCatalogEntryName(res["name"], d, config)); err != nil { + return fmt.Errorf("Error reading Entry: %s", err) + } + if err := d.Set("linked_resource", flattenDataCatalogEntryLinkedResource(res["linkedResource"], d, config)); err != nil { + return fmt.Errorf("Error reading Entry: %s", err) + } + if err := d.Set("display_name", flattenDataCatalogEntryDisplayName(res["displayName"], d, config)); err != nil { + return fmt.Errorf("Error reading Entry: %s", err) + } + if err := d.Set("description", flattenDataCatalogEntryDescription(res["description"], d, config)); err != nil { + return fmt.Errorf("Error reading Entry: %s", err) + } + if err := d.Set("schema", flattenDataCatalogEntrySchema(res["schema"], d, config)); err != nil { + return fmt.Errorf("Error reading Entry: %s", err) + } + if err := d.Set("type", flattenDataCatalogEntryType(res["type"], d, config)); err != nil { + return fmt.Errorf("Error reading Entry: %s", err) + } + if err := d.Set("user_specified_type", flattenDataCatalogEntryUserSpecifiedType(res["userSpecifiedType"], d, config)); err != nil { + return fmt.Errorf("Error reading Entry: %s", err) + } + if err := d.Set("integrated_system", flattenDataCatalogEntryIntegratedSystem(res["integratedSystem"], d, config)); err != nil { + return fmt.Errorf("Error reading Entry: %s", err) + } + if err := d.Set("user_specified_system", flattenDataCatalogEntryUserSpecifiedSystem(res["userSpecifiedSystem"], d, config)); err != nil { + return fmt.Errorf("Error reading Entry: %s", err) + } + if err := d.Set("gcs_fileset_spec", flattenDataCatalogEntryGcsFilesetSpec(res["gcsFilesetSpec"], d, config)); err != nil { + return fmt.Errorf("Error reading Entry: %s", err) + } + if err := d.Set("bigquery_table_spec", flattenDataCatalogEntryBigqueryTableSpec(res["bigqueryTableSpec"], d, config)); err != nil { + return fmt.Errorf("Error reading Entry: %s", err) + } + if err := d.Set("bigquery_date_sharded_spec", flattenDataCatalogEntryBigqueryDateShardedSpec(res["bigqueryDateShardedSpec"], d, config)); err != nil { + return fmt.Errorf("Error reading Entry: %s", err) + } + + return nil +} + +func resourceDataCatalogEntryUpdate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + + obj := make(map[string]interface{}) + linkedResourceProp, err := expandDataCatalogEntryLinkedResource(d.Get("linked_resource"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("linked_resource"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, linkedResourceProp)) { + obj["linkedResource"] = linkedResourceProp + } + displayNameProp, err := 
expandDataCatalogEntryDisplayName(d.Get("display_name"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("display_name"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, displayNameProp)) { + obj["displayName"] = displayNameProp + } + descriptionProp, err := expandDataCatalogEntryDescription(d.Get("description"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("description"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, descriptionProp)) { + obj["description"] = descriptionProp + } + schemaProp, err := expandDataCatalogEntrySchema(d.Get("schema"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("schema"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, schemaProp)) { + obj["schema"] = schemaProp + } + userSpecifiedTypeProp, err := expandDataCatalogEntryUserSpecifiedType(d.Get("user_specified_type"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("user_specified_type"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, userSpecifiedTypeProp)) { + obj["userSpecifiedType"] = userSpecifiedTypeProp + } + userSpecifiedSystemProp, err := expandDataCatalogEntryUserSpecifiedSystem(d.Get("user_specified_system"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("user_specified_system"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, userSpecifiedSystemProp)) { + obj["userSpecifiedSystem"] = userSpecifiedSystemProp + } + gcsFilesetSpecProp, err := expandDataCatalogEntryGcsFilesetSpec(d.Get("gcs_fileset_spec"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("gcs_fileset_spec"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, gcsFilesetSpecProp)) { + obj["gcsFilesetSpec"] = gcsFilesetSpecProp + } + + url, err := replaceVars(d, config, "{{DataCatalogBasePath}}{{name}}") + if err != nil { + return err + } + + log.Printf("[DEBUG] Updating Entry %q: %#v", d.Id(), obj) + updateMask := []string{} + + if d.HasChange("linked_resource") { + updateMask = append(updateMask, "linkedResource") + } + + if d.HasChange("display_name") { + updateMask = append(updateMask, "displayName") + } + + if d.HasChange("description") { + updateMask = append(updateMask, "description") + } + + if d.HasChange("schema") { + updateMask = append(updateMask, "schema") + } + + if d.HasChange("user_specified_type") { + updateMask = append(updateMask, "userSpecifiedType") + } + + if d.HasChange("user_specified_system") { + updateMask = append(updateMask, "userSpecifiedSystem") + } + + if d.HasChange("gcs_fileset_spec") { + updateMask = append(updateMask, "gcsFilesetSpec") + } + // updateMask is a URL parameter but not present in the schema, so replaceVars + // won't set it + url, err = addQueryParams(url, map[string]string{"updateMask": strings.Join(updateMask, ",")}) + if err != nil { + return err + } + var project string + if parts := regexp.MustCompile(`projects\/([^\/]+)\/`).FindStringSubmatch(url); parts != nil { + project = parts[1] + } + _, err = sendRequestWithTimeout(config, "PATCH", project, url, obj, d.Timeout(schema.TimeoutUpdate)) + + if err != nil { + return fmt.Errorf("Error updating Entry %q: %s", d.Id(), err) + } + + return resourceDataCatalogEntryRead(d, meta) +} + +func resourceDataCatalogEntryDelete(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + + url, err := replaceVars(d, config, 
"{{DataCatalogBasePath}}{{name}}") + if err != nil { + return err + } + + var obj map[string]interface{} + var project string + if parts := regexp.MustCompile(`projects\/([^\/]+)\/`).FindStringSubmatch(url); parts != nil { + project = parts[1] + } + log.Printf("[DEBUG] Deleting Entry %q", d.Id()) + + res, err := sendRequestWithTimeout(config, "DELETE", project, url, obj, d.Timeout(schema.TimeoutDelete)) + if err != nil { + return handleNotFoundError(err, d, "Entry") + } + + log.Printf("[DEBUG] Finished deleting Entry %q: %#v", d.Id(), res) + return nil +} + +func resourceDataCatalogEntryImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { + config := meta.(*Config) + + // current import_formats can't import fields with forward slashes in their value + if err := parseImportId([]string{"(?P.+)"}, d, config); err != nil { + return nil, err + } + + name := d.Get("name").(string) + egRegex := regexp.MustCompile("(projects/.+/locations/.+/entryGroups/.+)/entries/(.+)") + + parts := egRegex.FindStringSubmatch(name) + if len(parts) != 3 { + return nil, fmt.Errorf("entry name does not fit the format %s", egRegex) + } + d.Set("entry_group", parts[1]) + d.Set("entry_id", parts[2]) + return []*schema.ResourceData{d}, nil +} + +func flattenDataCatalogEntryName(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenDataCatalogEntryLinkedResource(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenDataCatalogEntryDisplayName(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenDataCatalogEntryDescription(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenDataCatalogEntrySchema(v interface{}, d *schema.ResourceData, config *Config) interface{} { + if v == nil { + return nil + } + b, err := json.Marshal(v) + if err != nil { + // TODO: return error once https://github.com/GoogleCloudPlatform/magic-modules/issues/3257 is fixed. 
+ log.Printf("[ERROR] failed to marshal schema to JSON: %v", err) + } + return string(b) +} + +func flattenDataCatalogEntryType(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenDataCatalogEntryUserSpecifiedType(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenDataCatalogEntryIntegratedSystem(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenDataCatalogEntryUserSpecifiedSystem(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenDataCatalogEntryGcsFilesetSpec(v interface{}, d *schema.ResourceData, config *Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["file_patterns"] = + flattenDataCatalogEntryGcsFilesetSpecFilePatterns(original["filePatterns"], d, config) + transformed["sample_gcs_file_specs"] = + flattenDataCatalogEntryGcsFilesetSpecSampleGcsFileSpecs(original["sampleGcsFileSpecs"], d, config) + return []interface{}{transformed} +} +func flattenDataCatalogEntryGcsFilesetSpecFilePatterns(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenDataCatalogEntryGcsFilesetSpecSampleGcsFileSpecs(v interface{}, d *schema.ResourceData, config *Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "file_path": flattenDataCatalogEntryGcsFilesetSpecSampleGcsFileSpecsFilePath(original["filePath"], d, config), + "size_bytes": flattenDataCatalogEntryGcsFilesetSpecSampleGcsFileSpecsSizeBytes(original["sizeBytes"], d, config), + }) + } + return transformed +} +func flattenDataCatalogEntryGcsFilesetSpecSampleGcsFileSpecsFilePath(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenDataCatalogEntryGcsFilesetSpecSampleGcsFileSpecsSizeBytes(v interface{}, d *schema.ResourceData, config *Config) interface{} { + // Handles the string fixed64 format + if strVal, ok := v.(string); ok { + if intVal, err := strconv.ParseInt(strVal, 10, 64); err == nil { + return intVal + } + } + + // number values are represented as float64 + if floatVal, ok := v.(float64); ok { + intVal := int(floatVal) + return intVal + } + + return v // let terraform core handle it otherwise +} + +func flattenDataCatalogEntryBigqueryTableSpec(v interface{}, d *schema.ResourceData, config *Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["table_source_type"] = + flattenDataCatalogEntryBigqueryTableSpecTableSourceType(original["tableSourceType"], d, config) + transformed["view_spec"] = + flattenDataCatalogEntryBigqueryTableSpecViewSpec(original["viewSpec"], d, config) + transformed["table_spec"] = + flattenDataCatalogEntryBigqueryTableSpecTableSpec(original["tableSpec"], d, config) + return []interface{}{transformed} +} +func flattenDataCatalogEntryBigqueryTableSpecTableSourceType(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func 
flattenDataCatalogEntryBigqueryTableSpecViewSpec(v interface{}, d *schema.ResourceData, config *Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["view_query"] = + flattenDataCatalogEntryBigqueryTableSpecViewSpecViewQuery(original["viewQuery"], d, config) + return []interface{}{transformed} +} +func flattenDataCatalogEntryBigqueryTableSpecViewSpecViewQuery(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenDataCatalogEntryBigqueryTableSpecTableSpec(v interface{}, d *schema.ResourceData, config *Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["grouped_entry"] = + flattenDataCatalogEntryBigqueryTableSpecTableSpecGroupedEntry(original["groupedEntry"], d, config) + return []interface{}{transformed} +} +func flattenDataCatalogEntryBigqueryTableSpecTableSpecGroupedEntry(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenDataCatalogEntryBigqueryDateShardedSpec(v interface{}, d *schema.ResourceData, config *Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["dataset"] = + flattenDataCatalogEntryBigqueryDateShardedSpecDataset(original["dataset"], d, config) + transformed["table_prefix"] = + flattenDataCatalogEntryBigqueryDateShardedSpecTablePrefix(original["tablePrefix"], d, config) + transformed["shard_count"] = + flattenDataCatalogEntryBigqueryDateShardedSpecShardCount(original["shardCount"], d, config) + return []interface{}{transformed} +} +func flattenDataCatalogEntryBigqueryDateShardedSpecDataset(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenDataCatalogEntryBigqueryDateShardedSpecTablePrefix(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenDataCatalogEntryBigqueryDateShardedSpecShardCount(v interface{}, d *schema.ResourceData, config *Config) interface{} { + // Handles the string fixed64 format + if strVal, ok := v.(string); ok { + if intVal, err := strconv.ParseInt(strVal, 10, 64); err == nil { + return intVal + } + } + + // number values are represented as float64 + if floatVal, ok := v.(float64); ok { + intVal := int(floatVal) + return intVal + } + + return v // let terraform core handle it otherwise +} + +func expandDataCatalogEntryLinkedResource(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandDataCatalogEntryDisplayName(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandDataCatalogEntryDescription(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandDataCatalogEntrySchema(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + b := []byte(v.(string)) + if len(b) == 0 { + return nil, nil + } + m := make(map[string]interface{}) + if err := json.Unmarshal(b, &m); err != nil { + return nil, err + } + return m, nil +} + +func expandDataCatalogEntryType(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func 
expandDataCatalogEntryUserSpecifiedType(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandDataCatalogEntryUserSpecifiedSystem(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandDataCatalogEntryGcsFilesetSpec(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + l := v.([]interface{}) + if len(l) == 0 || l[0] == nil { + return nil, nil + } + raw := l[0] + original := raw.(map[string]interface{}) + transformed := make(map[string]interface{}) + + transformedFilePatterns, err := expandDataCatalogEntryGcsFilesetSpecFilePatterns(original["file_patterns"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedFilePatterns); val.IsValid() && !isEmptyValue(val) { + transformed["filePatterns"] = transformedFilePatterns + } + + transformedSampleGcsFileSpecs, err := expandDataCatalogEntryGcsFilesetSpecSampleGcsFileSpecs(original["sample_gcs_file_specs"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedSampleGcsFileSpecs); val.IsValid() && !isEmptyValue(val) { + transformed["sampleGcsFileSpecs"] = transformedSampleGcsFileSpecs + } + + return transformed, nil +} + +func expandDataCatalogEntryGcsFilesetSpecFilePatterns(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandDataCatalogEntryGcsFilesetSpecSampleGcsFileSpecs(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + l := v.([]interface{}) + req := make([]interface{}, 0, len(l)) + for _, raw := range l { + if raw == nil { + continue + } + original := raw.(map[string]interface{}) + transformed := make(map[string]interface{}) + + transformedFilePath, err := expandDataCatalogEntryGcsFilesetSpecSampleGcsFileSpecsFilePath(original["file_path"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedFilePath); val.IsValid() && !isEmptyValue(val) { + transformed["filePath"] = transformedFilePath + } + + transformedSizeBytes, err := expandDataCatalogEntryGcsFilesetSpecSampleGcsFileSpecsSizeBytes(original["size_bytes"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedSizeBytes); val.IsValid() && !isEmptyValue(val) { + transformed["sizeBytes"] = transformedSizeBytes + } + + req = append(req, transformed) + } + return req, nil +} + +func expandDataCatalogEntryGcsFilesetSpecSampleGcsFileSpecsFilePath(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandDataCatalogEntryGcsFilesetSpecSampleGcsFileSpecsSizeBytes(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} diff --git a/google/resource_data_catalog_entry_generated_test.go b/google/resource_data_catalog_entry_generated_test.go new file mode 100644 index 00000000000..2c43bc6142e --- /dev/null +++ b/google/resource_data_catalog_entry_generated_test.go @@ -0,0 +1,220 @@ +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** AUTO GENERATED CODE *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. 
+// +// ---------------------------------------------------------------------------- + +package google + +import ( + "fmt" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" +) + +func TestAccDataCatalogEntry_dataCatalogEntryBasicExample(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": randString(t, 10), + } + + vcrTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataCatalogEntryDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataCatalogEntry_dataCatalogEntryBasicExample(context), + }, + { + ResourceName: "google_data_catalog_entry.basic_entry", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"entry_group", "entry_id"}, + }, + }, + }) +} + +func testAccDataCatalogEntry_dataCatalogEntryBasicExample(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_catalog_entry" "basic_entry" { + entry_group = google_data_catalog_entry_group.entry_group.id + entry_id = "tf_test_my_entry%{random_suffix}" + + user_specified_type = "my_custom_type" + user_specified_system = "SomethingExternal" +} + +resource "google_data_catalog_entry_group" "entry_group" { + entry_group_id = "tf_test_my_group%{random_suffix}" +} +`, context) +} + +func TestAccDataCatalogEntry_dataCatalogEntryFilesetExample(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": randString(t, 10), + } + + vcrTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataCatalogEntryDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataCatalogEntry_dataCatalogEntryFilesetExample(context), + }, + { + ResourceName: "google_data_catalog_entry.basic_entry", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"entry_group", "entry_id"}, + }, + }, + }) +} + +func testAccDataCatalogEntry_dataCatalogEntryFilesetExample(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_catalog_entry" "basic_entry" { + entry_group = google_data_catalog_entry_group.entry_group.id + entry_id = "tf_test_my_entry%{random_suffix}" + + type = "FILESET" + + gcs_fileset_spec { + file_patterns = ["gs://fake_bucket/dir/*"] + } +} + +resource "google_data_catalog_entry_group" "entry_group" { + entry_group_id = "tf_test_my_group%{random_suffix}" +} +`, context) +} + +func TestAccDataCatalogEntry_dataCatalogEntryFullExample(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": randString(t, 10), + } + + vcrTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataCatalogEntryDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataCatalogEntry_dataCatalogEntryFullExample(context), + }, + { + ResourceName: "google_data_catalog_entry.basic_entry", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"entry_group", "entry_id"}, + }, + }, + }) +} + +func testAccDataCatalogEntry_dataCatalogEntryFullExample(context map[string]interface{}) string { + return Nprintf(` +resource "google_data_catalog_entry" "basic_entry" { + entry_group = google_data_catalog_entry_group.entry_group.id + entry_id = "tf_test_my_entry%{random_suffix}" + + 
user_specified_type = "my_user_specified_type" + user_specified_system = "Something_custom" + linked_resource = "my/linked/resource" + + display_name = "my custom type entry" + description = "a custom type entry for a user specified system" + + schema = <[^ ]+) (?P[^ ]+)", "(?P[^ ]+)"}, d, config); err != nil { + if err := parseImportId([]string{"(?P.+)"}, d, config); err != nil { return nil, err } + name := d.Get("name").(string) + egRegex := regexp.MustCompile("projects/(.+)/locations/(.+)/entryGroups/(.+)") + + parts := egRegex.FindStringSubmatch(name) + if len(parts) != 4 { + return nil, fmt.Errorf("entry group name does not fit the format %s", egRegex) + } + d.Set("project", parts[1]) + d.Set("region", parts[2]) + d.Set("entry_group_id", parts[3]) return []*schema.ResourceData{d}, nil } diff --git a/google/resource_data_catalog_entry_group_test.go b/google/resource_data_catalog_entry_group_test.go index e4f237fe38c..79b8c4e699a 100644 --- a/google/resource_data_catalog_entry_group_test.go +++ b/google/resource_data_catalog_entry_group_test.go @@ -22,28 +22,25 @@ func TestAccDataCatalogEntryGroup_update(t *testing.T) { Config: testAccDataCatalogEntryGroup_dataCatalogEntryGroupBasicExample(context), }, { - ResourceName: "google_data_catalog_entry_group.basic_entry_group", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"region", "entry_group_id"}, + ResourceName: "google_data_catalog_entry_group.basic_entry_group", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccDataCatalogEntryGroup_dataCatalogEntryGroupFullExample(context), }, { - ResourceName: "google_data_catalog_entry_group.basic_entry_group", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"region", "entry_group_id"}, + ResourceName: "google_data_catalog_entry_group.basic_entry_group", + ImportState: true, + ImportStateVerify: true, }, { Config: testAccDataCatalogEntryGroup_dataCatalogEntryGroupBasicExample(context), }, { - ResourceName: "google_data_catalog_entry_group.basic_entry_group", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"region", "entry_group_id"}, + ResourceName: "google_data_catalog_entry_group.basic_entry_group", + ImportState: true, + ImportStateVerify: true, }, }, }) diff --git a/google/resource_data_catalog_entry_sweeper_test.go b/google/resource_data_catalog_entry_sweeper_test.go new file mode 100644 index 00000000000..77d2c82eb19 --- /dev/null +++ b/google/resource_data_catalog_entry_sweeper_test.go @@ -0,0 +1,124 @@ +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** AUTO GENERATED CODE *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. 
+// +// ---------------------------------------------------------------------------- + +package google + +import ( + "context" + "log" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" +) + +func init() { + resource.AddTestSweepers("DataCatalogEntry", &resource.Sweeper{ + Name: "DataCatalogEntry", + F: testSweepDataCatalogEntry, + }) +} + +// At the time of writing, the CI only passes us-central1 as the region +func testSweepDataCatalogEntry(region string) error { + resourceName := "DataCatalogEntry" + log.Printf("[INFO][SWEEPER_LOG] Starting sweeper for %s", resourceName) + + config, err := sharedConfigForRegion(region) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error getting shared config for region: %s", err) + return err + } + + err = config.LoadAndValidate(context.Background()) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error loading: %s", err) + return err + } + + t := &testing.T{} + billingId := getTestBillingAccountFromEnv(t) + + // Setup variables to replace in list template + d := &ResourceDataMock{ + FieldsInSchema: map[string]interface{}{ + "project": config.Project, + "region": region, + "location": region, + "zone": "-", + "billing_account": billingId, + }, + } + + listTemplate := strings.Split("https://datacatalog.googleapis.com/v1/{{entry_group}}/entries", "?")[0] + listUrl, err := replaceVars(d, config, listTemplate) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error preparing sweeper list url: %s", err) + return nil + } + + res, err := sendRequest(config, "GET", config.Project, listUrl, nil) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] Error in response from request %s: %s", listUrl, err) + return nil + } + + resourceList, ok := res["entries"] + if !ok { + log.Printf("[INFO][SWEEPER_LOG] Nothing found in response.") + return nil + } + + rl := resourceList.([]interface{}) + + log.Printf("[INFO][SWEEPER_LOG] Found %d items in %s list response.", len(rl), resourceName) + // Keep count of items that aren't sweepable for logging. 
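+ // Only resources whose names match the test prefixes recognized by
+ // isSweepableTestResource (e.g. tf_test/tf-test) are deleted below; everything
+ // else is skipped and tallied.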
+ nonPrefixCount := 0 + for _, ri := range rl { + obj := ri.(map[string]interface{}) + if obj["name"] == nil { + log.Printf("[INFO][SWEEPER_LOG] %s resource name was nil", resourceName) + return nil + } + + name := GetResourceNameFromSelfLink(obj["name"].(string)) + // Skip resources that shouldn't be sweeped + if !isSweepableTestResource(name) { + nonPrefixCount++ + continue + } + + deleteTemplate := "https://datacatalog.googleapis.com/v1/{{name}}" + deleteUrl, err := replaceVars(d, config, deleteTemplate) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error preparing delete url: %s", err) + return nil + } + deleteUrl = deleteUrl + name + + // Don't wait on operations as we may have a lot to delete + _, err = sendRequest(config, "DELETE", config.Project, deleteUrl, nil) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] Error deleting for url %s : %s", deleteUrl, err) + } else { + log.Printf("[INFO][SWEEPER_LOG] Sent delete request for %s resource: %s", resourceName, name) + } + } + + if nonPrefixCount > 0 { + log.Printf("[INFO][SWEEPER_LOG] %d items were non-sweepable and skipped.", nonPrefixCount) + } + + return nil +} diff --git a/google/resource_data_catalog_entry_test.go b/google/resource_data_catalog_entry_test.go new file mode 100644 index 00000000000..dff03dc831f --- /dev/null +++ b/google/resource_data_catalog_entry_test.go @@ -0,0 +1,47 @@ +package google + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" +) + +func TestAccDataCatalogEntry_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": randString(t, 10), + } + + vcrTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckDataCatalogEntryDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataCatalogEntry_dataCatalogEntryBasicExample(context), + }, + { + ResourceName: "google_data_catalog_entry.basic_entry", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataCatalogEntry_dataCatalogEntryFullExample(context), + }, + { + ResourceName: "google_data_catalog_entry.basic_entry", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDataCatalogEntry_dataCatalogEntryBasicExample(context), + }, + { + ResourceName: "google_data_catalog_entry.basic_entry", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} diff --git a/website/docs/r/data_catalog_entry.html.markdown b/website/docs/r/data_catalog_entry.html.markdown new file mode 100644 index 00000000000..589e843fc16 --- /dev/null +++ b/website/docs/r/data_catalog_entry.html.markdown @@ -0,0 +1,328 @@ +--- +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** AUTO GENERATED CODE *** +# +# ---------------------------------------------------------------------------- +# +# This file is automatically generated by Magic Modules and manual +# changes will be clobbered when the file is regenerated. +# +# Please read more about how to change this file in +# .github/CONTRIBUTING.md. +# +# ---------------------------------------------------------------------------- +subcategory: "Data catalog" +layout: "google" +page_title: "Google: google_data_catalog_entry" +sidebar_current: "docs-google-data-catalog-entry" +description: |- + Entry Metadata. +--- + +# google\_data\_catalog\_entry + +Entry Metadata. 
A Data Catalog Entry resource represents another resource in Google Cloud Platform +(such as a BigQuery dataset or a Pub/Sub topic) or outside of Google Cloud Platform. Clients can use +the linkedResource field in the Entry resource to refer to the original resource ID of the source system. + +An Entry resource contains resource details, such as its schema. An Entry can also be used to attach +flexible metadata, such as a Tag. + + +To get more information about Entry, see: + +* [API documentation](https://cloud.google.com/data-catalog/docs/reference/rest/v1/projects.locations.entryGroups.entries) +* How-to Guides + * [Official Documentation](https://cloud.google.com/data-catalog/docs) + + +## Example Usage - Data Catalog Entry Basic + + +```hcl +resource "google_data_catalog_entry" "basic_entry" { + entry_group = google_data_catalog_entry_group.entry_group.id + entry_id = "my_entry" + + user_specified_type = "my_custom_type" + user_specified_system = "SomethingExternal" +} + +resource "google_data_catalog_entry_group" "entry_group" { + entry_group_id = "my_group" +} +``` + +## Example Usage - Data Catalog Entry Fileset + + +```hcl +resource "google_data_catalog_entry" "basic_entry" { + entry_group = google_data_catalog_entry_group.entry_group.id + entry_id = "my_entry" + + type = "FILESET" + + gcs_fileset_spec { + file_patterns = ["gs://fake_bucket/dir/*"] + } +} + +resource "google_data_catalog_entry_group" "entry_group" { + entry_group_id = "my_group" +} +``` + +## Example Usage - Data Catalog Entry Full + + +```hcl +resource "google_data_catalog_entry" "basic_entry" { + entry_group = google_data_catalog_entry_group.entry_group.id + entry_id = "my_entry" + + user_specified_type = "my_user_specified_type" + user_specified_system = "Something_custom" + linked_resource = "my/linked/resource" + + display_name = "my custom type entry" + description = "a custom type entry for a user specified system" + + schema = < If you're importing a resource with beta features, make sure to include `-provider=google-beta` +as an argument so that Terraform uses the correct provider to import your resource. + +## User Project Overrides + +This resource supports [User Project Overrides](https://www.terraform.io/docs/providers/google/guides/provider_reference.html#user_project_override). diff --git a/website/docs/r/data_catalog_entry_group.html.markdown b/website/docs/r/data_catalog_entry_group.html.markdown index bcaec9894ed..8e771d38018 100644 --- a/website/docs/r/data_catalog_entry_group.html.markdown +++ b/website/docs/r/data_catalog_entry_group.html.markdown @@ -115,9 +115,6 @@ This resource provides the following EntryGroup can be imported using any of these accepted formats: ``` -$ terraform import google_data_catalog_entry_group.default projects/{{project}}/locations/{{region}}/entryGroups/{{name}} -$ terraform import google_data_catalog_entry_group.default {{project}}/{{region}}/{{name}} -$ terraform import google_data_catalog_entry_group.default {{region}}/{{name}} $ terraform import google_data_catalog_entry_group.default {{name}} ``` diff --git a/website/google.erb b/website/google.erb index fd7c6fff8a7..cdef680d2e8 100644 --- a/website/google.erb +++ b/website/google.erb @@ -1491,6 +1491,10 @@ Resources
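As a quick sanity check of the new import path, the single accepted format documented above can be exercised as below. The project, location, entry group, and entry IDs are placeholders; the name format is the one parsed by `resourceDataCatalogEntryImport` (`projects/{project}/locations/{location}/entryGroups/{entryGroupId}/entries/{entryId}`).

```
$ terraform import google_data_catalog_entry.basic_entry projects/my-project/locations/us-central1/entryGroups/my_group/entries/my_entry
```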