diff --git a/google-beta/config.go b/google-beta/config.go
index 4daf8e81dc..8bb3652082 100644
--- a/google-beta/config.go
+++ b/google-beta/config.go
@@ -72,6 +72,7 @@ type Config struct {
 	AccessContextManagerBasePath string
 	AppEngineBasePath            string
+	BigqueryDataTransferBasePath string
 	BinaryAuthorizationBasePath  string
 	CloudBuildBasePath           string
 	CloudSchedulerBasePath       string
diff --git a/google-beta/provider.go b/google-beta/provider.go
index fe543449a2..f021703f06 100644
--- a/google-beta/provider.go
+++ b/google-beta/provider.go
@@ -100,6 +100,7 @@ func Provider() terraform.ResourceProvider {
 		// Generated Products
 		AccessContextManagerCustomEndpointEntryKey: AccessContextManagerCustomEndpointEntry,
 		AppEngineCustomEndpointEntryKey:            AppEngineCustomEndpointEntry,
+		BigqueryDataTransferCustomEndpointEntryKey: BigqueryDataTransferCustomEndpointEntry,
 		BinaryAuthorizationCustomEndpointEntryKey:  BinaryAuthorizationCustomEndpointEntry,
 		CloudBuildCustomEndpointEntryKey:           CloudBuildCustomEndpointEntry,
 		CloudSchedulerCustomEndpointEntryKey:       CloudSchedulerCustomEndpointEntry,
@@ -217,6 +218,7 @@ func ResourceMapWithErrors() (map[string]*schema.Resource, error) {
 	return mergeResourceMaps(
 		GeneratedAccessContextManagerResourcesMap,
 		GeneratedAppEngineResourcesMap,
+		GeneratedBigqueryDataTransferResourcesMap,
 		GeneratedBinaryAuthorizationResourcesMap,
 		GeneratedCloudBuildResourcesMap,
 		GeneratedCloudSchedulerResourcesMap,
@@ -406,6 +408,7 @@ func providerConfigure(d *schema.ResourceData) (interface{}, error) {
 	// Generated products
 	config.AccessContextManagerBasePath = d.Get(AccessContextManagerCustomEndpointEntryKey).(string)
 	config.AppEngineBasePath = d.Get(AppEngineCustomEndpointEntryKey).(string)
+	config.BigqueryDataTransferBasePath = d.Get(BigqueryDataTransferCustomEndpointEntryKey).(string)
 	config.BinaryAuthorizationBasePath = d.Get(BinaryAuthorizationCustomEndpointEntryKey).(string)
 	config.CloudBuildBasePath = d.Get(CloudBuildCustomEndpointEntryKey).(string)
 	config.CloudSchedulerBasePath = d.Get(CloudSchedulerCustomEndpointEntryKey).(string)
@@ -467,6 +470,7 @@ func ConfigureBasePaths(c *Config) {
 	// Generated Products
 	c.AccessContextManagerBasePath = AccessContextManagerDefaultBasePath
 	c.AppEngineBasePath = AppEngineDefaultBasePath
+	c.BigqueryDataTransferBasePath = BigqueryDataTransferDefaultBasePath
 	c.BinaryAuthorizationBasePath = BinaryAuthorizationDefaultBasePath
 	c.CloudBuildBasePath = CloudBuildDefaultBasePath
 	c.CloudSchedulerBasePath = CloudSchedulerDefaultBasePath
diff --git a/google-beta/provider_bigquery_data_transfer_gen.go b/google-beta/provider_bigquery_data_transfer_gen.go
new file mode 100644
index 0000000000..9bf4ef2b67
--- /dev/null
+++ b/google-beta/provider_bigquery_data_transfer_gen.go
@@ -0,0 +1,34 @@
+// ----------------------------------------------------------------------------
+//
+//     *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
+//
+// ----------------------------------------------------------------------------
+//
+//     This file is automatically generated by Magic Modules and manual
+//     changes will be clobbered when the file is regenerated.
+//
+//     Please read more about how to change this file in
+//     .github/CONTRIBUTING.md.
+//
+// ----------------------------------------------------------------------------
+
+package google
+
+import "github.com/hashicorp/terraform/helper/schema"
+
+// If the base path has changed as a result of your PR, make sure to update
+// the provider_reference page!
+var BigqueryDataTransferDefaultBasePath = "https://bigquerydatatransfer.googleapis.com/v1/"
+var BigqueryDataTransferCustomEndpointEntryKey = "bigquery_data_transfer_custom_endpoint"
+var BigqueryDataTransferCustomEndpointEntry = &schema.Schema{
+	Type:         schema.TypeString,
+	Optional:     true,
+	ValidateFunc: validateCustomEndpoint,
+	DefaultFunc: schema.MultiEnvDefaultFunc([]string{
+		"GOOGLE_BIGQUERY_DATA_TRANSFER_CUSTOM_ENDPOINT",
+	}, BigqueryDataTransferDefaultBasePath),
+}
+
+var GeneratedBigqueryDataTransferResourcesMap = map[string]*schema.Resource{
+	"google_bigquery_data_transfer_config": resourceBigqueryDataTransferConfig(),
+}
diff --git a/google-beta/resource_bigquery_data_transfer_config.go b/google-beta/resource_bigquery_data_transfer_config.go
new file mode 100644
index 0000000000..1ce2b03e92
--- /dev/null
+++ b/google-beta/resource_bigquery_data_transfer_config.go
@@ -0,0 +1,403 @@
+// ----------------------------------------------------------------------------
+//
+//     *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
+//
+// ----------------------------------------------------------------------------
+//
+//     This file is automatically generated by Magic Modules and manual
+//     changes will be clobbered when the file is regenerated.
+//
+//     Please read more about how to change this file in
+//     .github/CONTRIBUTING.md.
+//
+// ----------------------------------------------------------------------------
+
+package google
+
+import (
+	"fmt"
+	"log"
+	"reflect"
+	"strconv"
+	"strings"
+	"time"
+
+	"github.com/hashicorp/terraform/helper/schema"
+)
+
+func resourceBigqueryDataTransferConfig() *schema.Resource {
+	return &schema.Resource{
+		Create: resourceBigqueryDataTransferConfigCreate,
+		Read:   resourceBigqueryDataTransferConfigRead,
+		Update: resourceBigqueryDataTransferConfigUpdate,
+		Delete: resourceBigqueryDataTransferConfigDelete,
+
+		Importer: &schema.ResourceImporter{
+			State: resourceBigqueryDataTransferConfigImport,
+		},
+
+		Timeouts: &schema.ResourceTimeout{
+			Create: schema.DefaultTimeout(4 * time.Minute),
+			Update: schema.DefaultTimeout(4 * time.Minute),
+			Delete: schema.DefaultTimeout(4 * time.Minute),
+		},
+
+		Schema: map[string]*schema.Schema{
+			"data_source_id": {
+				Type:     schema.TypeString,
+				Required: true,
+				ForceNew: true,
+			},
+			"destination_dataset_id": {
+				Type:     schema.TypeString,
+				Required: true,
+			},
+			"display_name": {
+				Type:     schema.TypeString,
+				Required: true,
+				ForceNew: true,
+			},
+			"params": {
+				Type:     schema.TypeMap,
+				Required: true,
+				Elem:     &schema.Schema{Type: schema.TypeString},
+			},
+			"data_refresh_window_days": {
+				Type:     schema.TypeInt,
+				Optional: true,
+			},
+			"disabled": {
+				Type:     schema.TypeBool,
+				Optional: true,
+			},
+			"location": {
+				Type:     schema.TypeString,
+				Optional: true,
+				ForceNew: true,
+				Default:  "US",
+			},
+			"schedule": {
+				Type:     schema.TypeString,
+				Optional: true,
+			},
+			"name": {
+				Type:     schema.TypeString,
+				Computed: true,
+			},
+			"project": {
+				Type:     schema.TypeString,
+				Optional: true,
+				Computed: true,
+				ForceNew: true,
+			},
+		},
+	}
+}
+
+func resourceBigqueryDataTransferConfigCreate(d *schema.ResourceData, meta interface{}) error {
+	config := meta.(*Config)
+
+	obj := make(map[string]interface{})
+	displayNameProp, err := expandBigqueryDataTransferConfigDisplayName(d.Get("display_name"), d, config)
+	if err != nil {
+		return err
+	} else if v, ok := d.GetOkExists("display_name"); !isEmptyValue(reflect.ValueOf(displayNameProp)) && (ok || !reflect.DeepEqual(v, displayNameProp)) {
+		obj["displayName"] = displayNameProp
+	}
+	destinationDatasetIdProp, err := expandBigqueryDataTransferConfigDestinationDatasetId(d.Get("destination_dataset_id"), d, config)
+	if err != nil {
+		return err
+	} else if v, ok := d.GetOkExists("destination_dataset_id"); !isEmptyValue(reflect.ValueOf(destinationDatasetIdProp)) && (ok || !reflect.DeepEqual(v, destinationDatasetIdProp)) {
+		obj["destinationDatasetId"] = destinationDatasetIdProp
+	}
+	dataSourceIdProp, err := expandBigqueryDataTransferConfigDataSourceId(d.Get("data_source_id"), d, config)
+	if err != nil {
+		return err
+	} else if v, ok := d.GetOkExists("data_source_id"); !isEmptyValue(reflect.ValueOf(dataSourceIdProp)) && (ok || !reflect.DeepEqual(v, dataSourceIdProp)) {
+		obj["dataSourceId"] = dataSourceIdProp
+	}
+	scheduleProp, err := expandBigqueryDataTransferConfigSchedule(d.Get("schedule"), d, config)
+	if err != nil {
+		return err
+	} else if v, ok := d.GetOkExists("schedule"); !isEmptyValue(reflect.ValueOf(scheduleProp)) && (ok || !reflect.DeepEqual(v, scheduleProp)) {
+		obj["schedule"] = scheduleProp
+	}
+	dataRefreshWindowDaysProp, err := expandBigqueryDataTransferConfigDataRefreshWindowDays(d.Get("data_refresh_window_days"), d, config)
+	if err != nil {
+		return err
+	} else if v, ok := d.GetOkExists("data_refresh_window_days"); !isEmptyValue(reflect.ValueOf(dataRefreshWindowDaysProp)) && (ok || !reflect.DeepEqual(v, dataRefreshWindowDaysProp)) {
+		obj["dataRefreshWindowDays"] = dataRefreshWindowDaysProp
+	}
+	disabledProp, err := expandBigqueryDataTransferConfigDisabled(d.Get("disabled"), d, config)
+	if err != nil {
+		return err
+	} else if v, ok := d.GetOkExists("disabled"); !isEmptyValue(reflect.ValueOf(disabledProp)) && (ok || !reflect.DeepEqual(v, disabledProp)) {
+		obj["disabled"] = disabledProp
+	}
+	paramsProp, err := expandBigqueryDataTransferConfigParams(d.Get("params"), d, config)
+	if err != nil {
+		return err
+	} else if v, ok := d.GetOkExists("params"); !isEmptyValue(reflect.ValueOf(paramsProp)) && (ok || !reflect.DeepEqual(v, paramsProp)) {
+		obj["params"] = paramsProp
+	}
+
+	url, err := replaceVars(d, config, "{{BigqueryDataTransferBasePath}}projects/{{project}}/locations/{{location}}/transferConfigs")
+	if err != nil {
+		return err
+	}
+
+	log.Printf("[DEBUG] Creating new Config: %#v", obj)
+	res, err := sendRequestWithTimeout(config, "POST", url, obj, d.Timeout(schema.TimeoutCreate))
+	if err != nil {
+		return fmt.Errorf("Error creating Config: %s", err)
+	}
+
+	// Store the ID now
+	id, err := replaceVars(d, config, "{{name}}")
+	if err != nil {
+		return fmt.Errorf("Error constructing id: %s", err)
+	}
+	d.SetId(id)
+
+	log.Printf("[DEBUG] Finished creating Config %q: %#v", d.Id(), res)
+
+	// `name` is autogenerated from the API so needs to be set post-create
+	name, ok := res["name"]
+	if !ok {
+		return fmt.Errorf("Create response didn't contain critical fields. Create may not have succeeded.")
+	}
+	d.Set("name", name.(string))
+	d.SetId(name.(string))
+
+	return resourceBigqueryDataTransferConfigRead(d, meta)
+}
+
+func resourceBigqueryDataTransferConfigRead(d *schema.ResourceData, meta interface{}) error {
+	config := meta.(*Config)
+
+	url, err := replaceVars(d, config, "{{BigqueryDataTransferBasePath}}{{name}}")
+	if err != nil {
+		return err
+	}
+
+	res, err := sendRequest(config, "GET", url, nil)
+	if err != nil {
+		return handleNotFoundError(err, d, fmt.Sprintf("BigqueryDataTransferConfig %q", d.Id()))
+	}
+
+	project, err := getProject(d, config)
+	if err != nil {
+		return err
+	}
+	if err := d.Set("project", project); err != nil {
+		return fmt.Errorf("Error reading Config: %s", err)
+	}
+
+	if err := d.Set("display_name", flattenBigqueryDataTransferConfigDisplayName(res["displayName"], d)); err != nil {
+		return fmt.Errorf("Error reading Config: %s", err)
+	}
+	if err := d.Set("name", flattenBigqueryDataTransferConfigName(res["name"], d)); err != nil {
+		return fmt.Errorf("Error reading Config: %s", err)
+	}
+	if err := d.Set("destination_dataset_id", flattenBigqueryDataTransferConfigDestinationDatasetId(res["destinationDatasetId"], d)); err != nil {
+		return fmt.Errorf("Error reading Config: %s", err)
+	}
+	if err := d.Set("data_source_id", flattenBigqueryDataTransferConfigDataSourceId(res["dataSourceId"], d)); err != nil {
+		return fmt.Errorf("Error reading Config: %s", err)
+	}
+	if err := d.Set("schedule", flattenBigqueryDataTransferConfigSchedule(res["schedule"], d)); err != nil {
+		return fmt.Errorf("Error reading Config: %s", err)
+	}
+	if err := d.Set("data_refresh_window_days", flattenBigqueryDataTransferConfigDataRefreshWindowDays(res["dataRefreshWindowDays"], d)); err != nil {
+		return fmt.Errorf("Error reading Config: %s", err)
+	}
+	if err := d.Set("disabled", flattenBigqueryDataTransferConfigDisabled(res["disabled"], d)); err != nil {
+		return fmt.Errorf("Error reading Config: %s", err)
+	}
+	if err := d.Set("params", flattenBigqueryDataTransferConfigParams(res["params"], d)); err != nil {
+		return fmt.Errorf("Error reading Config: %s", err)
+	}
+
+	return nil
+}
+
+func resourceBigqueryDataTransferConfigUpdate(d *schema.ResourceData, meta interface{}) error {
+	config := meta.(*Config)
+
+	obj := make(map[string]interface{})
+	destinationDatasetIdProp, err := expandBigqueryDataTransferConfigDestinationDatasetId(d.Get("destination_dataset_id"), d, config)
+	if err != nil {
+		return err
+	} else if v, ok := d.GetOkExists("destination_dataset_id"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, destinationDatasetIdProp)) {
+		obj["destinationDatasetId"] = destinationDatasetIdProp
+	}
+	scheduleProp, err := expandBigqueryDataTransferConfigSchedule(d.Get("schedule"), d, config)
+	if err != nil {
+		return err
+	} else if v, ok := d.GetOkExists("schedule"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, scheduleProp)) {
+		obj["schedule"] = scheduleProp
+	}
+	dataRefreshWindowDaysProp, err := expandBigqueryDataTransferConfigDataRefreshWindowDays(d.Get("data_refresh_window_days"), d, config)
+	if err != nil {
+		return err
+	} else if v, ok := d.GetOkExists("data_refresh_window_days"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, dataRefreshWindowDaysProp)) {
+		obj["dataRefreshWindowDays"] = dataRefreshWindowDaysProp
+	}
+	disabledProp, err := expandBigqueryDataTransferConfigDisabled(d.Get("disabled"), d, config)
+	if err != nil {
+		return err
+	} else if v, ok := d.GetOkExists("disabled"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, disabledProp)) {
+		obj["disabled"] = disabledProp
+	}
+	paramsProp, err := expandBigqueryDataTransferConfigParams(d.Get("params"), d, config)
+	if err != nil {
+		return err
+	} else if v, ok := d.GetOkExists("params"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, paramsProp)) {
+		obj["params"] = paramsProp
+	}
+
+	url, err := replaceVars(d, config, "{{BigqueryDataTransferBasePath}}{{name}}")
+	if err != nil {
+		return err
+	}
+
+	log.Printf("[DEBUG] Updating Config %q: %#v", d.Id(), obj)
+	updateMask := []string{}
+
+	if d.HasChange("destination_dataset_id") {
+		updateMask = append(updateMask, "destinationDatasetId")
+	}
+
+	if d.HasChange("schedule") {
+		updateMask = append(updateMask, "schedule")
+	}
+
+	if d.HasChange("data_refresh_window_days") {
+		updateMask = append(updateMask, "dataRefreshWindowDays")
+	}
+
+	if d.HasChange("disabled") {
+		updateMask = append(updateMask, "disabled")
+	}
+
+	if d.HasChange("params") {
+		updateMask = append(updateMask, "params")
+	}
+	// updateMask is a URL parameter but not present in the schema, so replaceVars
+	// won't set it
+	url, err = addQueryParams(url, map[string]string{"updateMask": strings.Join(updateMask, ",")})
+	if err != nil {
+		return err
+	}
+	_, err = sendRequestWithTimeout(config, "PATCH", url, obj, d.Timeout(schema.TimeoutUpdate))
+
+	if err != nil {
+		return fmt.Errorf("Error updating Config %q: %s", d.Id(), err)
+	}
+
+	return resourceBigqueryDataTransferConfigRead(d, meta)
+}
+
+func resourceBigqueryDataTransferConfigDelete(d *schema.ResourceData, meta interface{}) error {
+	config := meta.(*Config)
+
+	url, err := replaceVars(d, config, "{{BigqueryDataTransferBasePath}}{{name}}")
+	if err != nil {
+		return err
+	}
+
+	var obj map[string]interface{}
+	log.Printf("[DEBUG] Deleting Config %q", d.Id())
+	res, err := sendRequestWithTimeout(config, "DELETE", url, obj, d.Timeout(schema.TimeoutDelete))
+	if err != nil {
+		return handleNotFoundError(err, d, "Config")
+	}
+
+	log.Printf("[DEBUG] Finished deleting Config %q: %#v", d.Id(), res)
+	return nil
+}
+
+func resourceBigqueryDataTransferConfigImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) {
+
+	config := meta.(*Config)
+
+	// current import_formats can't import fields with forward slashes in their value
+	if err := parseImportId([]string{"(?P<name>.+)"}, d, config); err != nil {
+		return nil, err
+	}
+
+	return []*schema.ResourceData{d}, nil
+}
+
+func flattenBigqueryDataTransferConfigDisplayName(v interface{}, d *schema.ResourceData) interface{} {
+	return v
+}
+
+func flattenBigqueryDataTransferConfigName(v interface{}, d *schema.ResourceData) interface{} {
+	return v
+}
+
+func flattenBigqueryDataTransferConfigDestinationDatasetId(v interface{}, d *schema.ResourceData) interface{} {
+	return v
+}
+
+func flattenBigqueryDataTransferConfigDataSourceId(v interface{}, d *schema.ResourceData) interface{} {
+	return v
+}
+
+func flattenBigqueryDataTransferConfigSchedule(v interface{}, d *schema.ResourceData) interface{} {
+	return v
+}
+
+func flattenBigqueryDataTransferConfigDataRefreshWindowDays(v interface{}, d *schema.ResourceData) interface{} {
+	// Handles the string fixed64 format
+	if strVal, ok := v.(string); ok {
+		if intVal, err := strconv.ParseInt(strVal, 10, 64); err == nil {
+			return intVal
+		} // let terraform core handle it if we can't convert the string to an int.
+	}
+	return v
+}
+
+func flattenBigqueryDataTransferConfigDisabled(v interface{}, d *schema.ResourceData) interface{} {
+	return v
+}
+
+func flattenBigqueryDataTransferConfigParams(v interface{}, d *schema.ResourceData) interface{} {
+	return v
+}
+
+func expandBigqueryDataTransferConfigDisplayName(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
+	return v, nil
+}
+
+func expandBigqueryDataTransferConfigDestinationDatasetId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
+	return v, nil
+}
+
+func expandBigqueryDataTransferConfigDataSourceId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
+	return v, nil
+}
+
+func expandBigqueryDataTransferConfigSchedule(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
+	return v, nil
+}
+
+func expandBigqueryDataTransferConfigDataRefreshWindowDays(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
+	return v, nil
+}
+
+func expandBigqueryDataTransferConfigDisabled(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
+	return v, nil
+}
+
+func expandBigqueryDataTransferConfigParams(v interface{}, d TerraformResourceData, config *Config) (map[string]string, error) {
+	if v == nil {
+		return map[string]string{}, nil
+	}
+	m := make(map[string]string)
+	for k, val := range v.(map[string]interface{}) {
+		m[k] = val.(string)
+	}
+	return m, nil
+}
diff --git a/google-beta/resource_bigquery_data_transfer_config_generated_test.go b/google-beta/resource_bigquery_data_transfer_config_generated_test.go
new file mode 100644
index 0000000000..52f8505171
--- /dev/null
+++ b/google-beta/resource_bigquery_data_transfer_config_generated_test.go
@@ -0,0 +1,99 @@
+// ----------------------------------------------------------------------------
+//
+//     *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
+//
+// ----------------------------------------------------------------------------
+//
+//     This file is automatically generated by Magic Modules and manual
+//     changes will be clobbered when the file is regenerated.
+//
+//     Please read more about how to change this file in
+//     .github/CONTRIBUTING.md.
+//
+// ----------------------------------------------------------------------------
+
+package google
+
+import (
+	"fmt"
+	"strings"
+	"testing"
+
+	"github.com/hashicorp/terraform/helper/acctest"
+	"github.com/hashicorp/terraform/helper/resource"
+	"github.com/hashicorp/terraform/terraform"
+)
+
+func TestAccBigqueryDataTransferConfig_scheduledQueryExample(t *testing.T) {
+	t.Parallel()
+
+	context := map[string]interface{}{
+		"random_suffix": acctest.RandString(10),
+	}
+
+	resource.Test(t, resource.TestCase{
+		PreCheck:     func() { testAccPreCheck(t) },
+		Providers:    testAccProviders,
+		CheckDestroy: testAccCheckBigqueryDataTransferConfigDestroy,
+		Steps: []resource.TestStep{
+			{
+				Config: testAccBigqueryDataTransferConfig_scheduledQueryExample(context),
+			},
+			{
+				ResourceName:            "google_bigquery_data_transfer_config.query_config",
+				ImportState:             true,
+				ImportStateVerify:       true,
+				ImportStateVerifyIgnore: []string{"location"},
+			},
+		},
+	})
+}
+
+func testAccBigqueryDataTransferConfig_scheduledQueryExample(context map[string]interface{}) string {
+	return Nprintf(`
+resource "google_bigquery_data_transfer_config" "query_config" {
+  display_name           = "my-query%{random_suffix}"
+  location               = "asia-northeast1"
+  data_source_id         = "scheduled_query"
+  schedule               = "first sunday of quarter 00:00"
+  destination_dataset_id = "${google_bigquery_dataset.my-dataset.dataset_id}"
+  params = {
+    destination_table_name_template = "my-table"
+    write_disposition               = "WRITE_APPEND"
+    query                           = "SELECT name FROM tabl WHERE x = 'y'"
+  }
+}
+
+resource "google_bigquery_dataset" "my-dataset" {
+  dataset_id    = "my_dataset%{random_suffix}"
+  friendly_name = "foo"
+  description   = "bar"
+  location      = "asia-northeast1"
+}
+`, context)
+}
+
+func testAccCheckBigqueryDataTransferConfigDestroy(s *terraform.State) error {
+	for name, rs := range s.RootModule().Resources {
+		if rs.Type != "google_bigquery_data_transfer_config" {
+			continue
+		}
+		if strings.HasPrefix(name, "data.") {
+			continue
+		}
+
+		config := testAccProvider.Meta().(*Config)
+
+		url, err := replaceVarsForTest(config, rs, "{{BigqueryDataTransferBasePath}}{{name}}")
+		if err != nil {
+			return err
+		}
+
+		_, err = sendRequest(config, "GET", url, nil)
+		if err == nil {
+			return fmt.Errorf("BigqueryDataTransferConfig still exists at %s", url)
+		}
+	}
+
+	return nil
+}
diff --git a/google-beta/resource_bigquery_data_transfer_config_test.go b/google-beta/resource_bigquery_data_transfer_config_test.go
new file mode 100644
index 0000000000..6f52e37d9c
--- /dev/null
+++ b/google-beta/resource_bigquery_data_transfer_config_test.go
@@ -0,0 +1,59 @@
+package google
+
+import (
+	"fmt"
+	"testing"
+
+	"github.com/hashicorp/terraform/helper/acctest"
+	"github.com/hashicorp/terraform/helper/resource"
+)
+
+func TestAccBigqueryDataTransferConfig_scheduledQueryUpdate(t *testing.T) {
+	t.Parallel()
+
+	random_suffix := acctest.RandString(10)
+
+	resource.Test(t, resource.TestCase{
+		PreCheck:     func() { testAccPreCheck(t) },
+		Providers:    testAccProviders,
+		CheckDestroy: testAccCheckBigqueryDataTransferConfigDestroy,
+		Steps: []resource.TestStep{
+			{
+				Config: testAccBigqueryDataTransferConfig_scheduledQueryUpdate(random_suffix, "first", "y"),
+			},
+			{
+				Config: testAccBigqueryDataTransferConfig_scheduledQueryUpdate(random_suffix, "second", "z"),
+			},
+			{
+				ResourceName:            "google_bigquery_data_transfer_config.query_config",
+				ImportState:             true,
+				ImportStateVerify:       true,
+				ImportStateVerifyIgnore: []string{"location"},
+			},
+		},
+	})
+}
+
+func testAccBigqueryDataTransferConfig_scheduledQueryUpdate(random_suffix, schedule, letter string) string {
+	return fmt.Sprintf(`
+resource "google_bigquery_data_transfer_config" "query_config" {
+  display_name           = "my-query-%s"
+  location               = "asia-northeast1"
+  data_source_id         = "scheduled_query"
+  schedule               = "%s sunday of quarter 00:00"
+  destination_dataset_id = "${google_bigquery_dataset.my-dataset.dataset_id}"
+  params = {
+    destination_table_name_template = "my-table"
+    write_disposition               = "WRITE_APPEND"
+    query                           = "SELECT name FROM tabl WHERE x = '%s'"
+  }
+}
+
+resource "google_bigquery_dataset" "my-dataset" {
+  dataset_id    = "my_dataset%s"
+  friendly_name = "foo"
+  description   = "bar"
+  location      = "asia-northeast1"
+}
+`, random_suffix, schedule, letter, random_suffix)
+}
diff --git a/website/docs/r/bigquery_data_transfer_config.html.markdown b/website/docs/r/bigquery_data_transfer_config.html.markdown
new file mode 100644
index 0000000000..9ca7f566dc
--- /dev/null
+++ b/website/docs/r/bigquery_data_transfer_config.html.markdown
@@ -0,0 +1,151 @@
+---
+# ----------------------------------------------------------------------------
+#
+#     *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
+#
+# ----------------------------------------------------------------------------
+#
+#     This file is automatically generated by Magic Modules and manual
+#     changes will be clobbered when the file is regenerated.
+#
+#     Please read more about how to change this file in
+#     .github/CONTRIBUTING.md.
+#
+# ----------------------------------------------------------------------------
+layout: "google"
+page_title: "Google: google_bigquery_data_transfer_config"
+sidebar_current: "docs-google-bigquery-data-transfer-config"
+description: |-
+  Represents a data transfer configuration.
+---
+
+# google\_bigquery\_data\_transfer\_config
+
+Represents a data transfer configuration. A transfer configuration
+contains all metadata needed to perform a data transfer.
+
+
+To get more information about Config, see:
+
+* [API documentation](https://cloud.google.com/bigquery/docs/reference/datatransfer/rest/v1/projects.locations.transferConfigs/create)
+* How-to Guides
+    * [Official Documentation](https://cloud.google.com/bigquery/docs/reference/datatransfer/rest/)
+
+## Example Usage - Scheduled Query
+
+
+```hcl
+resource "google_bigquery_data_transfer_config" "query_config" {
+  display_name           = "my-query"
+  location               = "asia-northeast1"
+  data_source_id         = "scheduled_query"
+  schedule               = "first sunday of quarter 00:00"
+  destination_dataset_id = "${google_bigquery_dataset.my-dataset.dataset_id}"
+  params = {
+    destination_table_name_template = "my-table"
+    write_disposition               = "WRITE_APPEND"
+    query                           = "SELECT name FROM tabl WHERE x = 'y'"
+  }
+}
+
+resource "google_bigquery_dataset" "my-dataset" {
+  dataset_id    = "my_dataset"
+  friendly_name = "foo"
+  description   = "bar"
+  location      = "asia-northeast1"
+}
+```
+
+## Argument Reference
+
+The following arguments are supported:
+
+
+* `display_name` -
+  (Required)
+  The user specified display name for the transfer config.
+
+* `destination_dataset_id` -
+  (Required)
+  The BigQuery target dataset id.
+
+* `data_source_id` -
+  (Required)
+  The data source id. Cannot be changed once the transfer config is created.
+
+* `params` -
+  (Required)
+  These parameters are specific to each data source.
+
+
+- - -
+
+
+* `schedule` -
+  (Optional)
+  Data transfer schedule. If the data source does not support a custom
+  schedule, this should be empty. If it is empty, the default value for
+  the data source will be used. The specified times are in UTC. Examples
+  of valid format: 1st,3rd monday of month 15:30, every wed,fri of jan,
+  jun 13:15, and first sunday of quarter 00:00. See more explanation
+  about the format here:
+  https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
+  NOTE: the granularity should be at least 8 hours, or less frequent.
+
+* `data_refresh_window_days` -
+  (Optional)
+  The number of days to look back to automatically refresh the data.
+  For example, if dataRefreshWindowDays = 10, then every day BigQuery
+  reingests data for [today-10, today-1], rather than ingesting data for
+  just [today-1]. Only valid if the data source supports the feature.
+  Set the value to 0 to use the default value.
+
+* `disabled` -
+  (Optional)
+  When set to true, no runs are scheduled for a given transfer.
+
+* `location` -
+  (Optional)
+  The geographic location where the transfer config should reside.
+  Examples: US, EU, asia-northeast1. The default value is US.
+
+* `project` - (Optional) The ID of the project in which the resource belongs.
+    If it is not provided, the provider project is used.
+
+
+## Attributes Reference
+
+In addition to the arguments listed above, the following computed attributes are exported:
+
+
+* `name` -
+  The resource name of the transfer config. Transfer config names have the
+  form projects/{projectId}/locations/{location}/transferConfigs/{configId}.
+  Where configId is usually a uuid, but this is not required.
+  The name is ignored when creating a transfer config.
+
+
+## Timeouts
+
+This resource provides the following
+[Timeouts](/docs/configuration/resources.html#timeouts) configuration options:
+
+- `create` - Default is 4 minutes.
+- `update` - Default is 4 minutes.
+- `delete` - Default is 4 minutes.
+
+## Import
+
+Config can be imported using any of these accepted formats:
+
+```
+$ terraform import google_bigquery_data_transfer_config.default {{name}}
+```
+
+-> If you're importing a resource with beta features, make sure to include `-provider=google-beta`
+as an argument so that Terraform uses the correct provider to import your resource.
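As a worked example of the `{{name}}` import format in the docs above, an import of a hypothetical config would look like the following. The project id `my-project`, location `us`, and config id `12345` are illustrative placeholders following the documented `projects/{projectId}/locations/{location}/transferConfigs/{configId}` form, not values taken from this change:

```
$ terraform import google_bigquery_data_transfer_config.default projects/my-project/locations/us/transferConfigs/12345
```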