Skip to content

Commit

Permalink
Add transfer configs (#2100)
Browse files Browse the repository at this point in the history
Merged PR #2100.
  • Loading branch information
Ty Larrabee authored and modular-magician committed Aug 13, 2019
1 parent 5cb8c58 commit c59fe71
Show file tree
Hide file tree
Showing 12 changed files with 370 additions and 23 deletions.
2 changes: 1 addition & 1 deletion build/terraform
2 changes: 1 addition & 1 deletion build/terraform-beta
2 changes: 1 addition & 1 deletion build/terraform-mapper
4 changes: 4 additions & 0 deletions overrides/terraform/resource_override.rb
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,9 @@ def self.attributes

:timeouts,

# An array of function names that determine whether an error is retryable.
:error_retry_predicates,

:schema_version
]
end
Expand All @@ -73,6 +76,7 @@ def validate
check :exclude_import, type: :boolean, default: false

check :timeouts, type: Api::Timeouts, default: Api::Timeouts.new
check :error_retry_predicates, type: Array, item_type: String
check :schema_version, type: Integer
end

Expand Down
103 changes: 103 additions & 0 deletions products/bigquerydatatransfer/api.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
# Copyright 2019 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

--- !ruby/object:Api::Product
name: BigqueryDataTransfer
display_name: BigQueryDataTransfer
versions:
- !ruby/object:Api::Product::Version
name: ga
base_url: https://bigquerydatatransfer.googleapis.com/v1/
scopes:
- https://www.googleapis.com/auth/bigquery
apis_required:
- !ruby/object:Api::Product::ApiReference
name: BigQueryDataTransfer API
url: https://console.cloud.google.com/apis/api/bigquerydatatransfer.googleapis.com/
objects:
- !ruby/object:Api::Resource
name: 'Config'
base_url: projects/{{project}}/locations/{{location}}/transferConfigs
self_link: "{{name}}"
update_verb: :PATCH
update_mask: true
description: |
Represents a data transfer configuration. A transfer configuration
contains all metadata needed to perform a data transfer.
references: !ruby/object:Api::Resource::ReferenceLinks
guides:
"Official Documentation": "https://cloud.google.com/bigquery/docs/reference/datatransfer/rest/"
api: "https://cloud.google.com/bigquery/docs/reference/datatransfer/rest/v1/projects.locations.transferConfigs/create"
parameters:
- !ruby/object:Api::Type::String
name: 'location'
url_param_only: true
input: true
default_value: US
description: |
The geographic location where the transfer config should reside.
Examples: US, EU, asia-northeast1. The default value is US.
properties:
- !ruby/object:Api::Type::String
name: 'displayName'
input: true
required: true
description: |
The user specified display name for the transfer config.
- !ruby/object:Api::Type::String
name: 'name'
output: true
description: |
The resource name of the transfer config. Transfer config names have the
form projects/{projectId}/locations/{location}/transferConfigs/{configId}.
Where configId is usually a uuid, but this is not required.
The name is ignored when creating a transfer config.
- !ruby/object:Api::Type::String
name: 'destinationDatasetId'
required: true
description: |
The BigQuery target dataset id.
- !ruby/object:Api::Type::String
name: 'dataSourceId'
input: true
required: true
description: |
The data source id. Cannot be changed once the transfer config is created.
- !ruby/object:Api::Type::String
name: 'schedule'
description: |
Data transfer schedule. If the data source does not support a custom
schedule, this should be empty. If it is empty, the default value for
the data source will be used. The specified times are in UTC. Examples
of valid format: 1st,3rd monday of month 15:30, every wed,fri of jan,
jun 13:15, and first sunday of quarter 00:00. See more explanation
about the format here:
https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
NOTE: the granularity should be at least 8 hours, or less frequent.
- !ruby/object:Api::Type::Integer
name: 'dataRefreshWindowDays'
description: |
The number of days to look back to automatically refresh the data.
For example, if dataRefreshWindowDays = 10, then every day BigQuery
reingests data for [today-10, today-1], rather than ingesting data for
just [today-1]. Only valid if the data source supports the feature.
Set the value to 0 to use the default value.
- !ruby/object:Api::Type::Boolean
name: 'disabled'
description: |
When set to true, no runs are scheduled for a given transfer.
- !ruby/object:Api::Type::KeyValuePairs
name: 'params'
required: true
description: |
These parameters are specific to each data source.
39 changes: 39 additions & 0 deletions products/bigquerydatatransfer/terraform.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
# Copyright 2019 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

--- !ruby/object:Provider::Terraform::Config
overrides: !ruby/object:Overrides::ResourceOverrides
Config: !ruby/object:Overrides::Terraform::ResourceOverride
import_format: ["{{name}}"]
id_format: "{{name}}"
error_retry_predicates: ["iamMemberMissing"]
custom_code: !ruby/object:Provider::Terraform::CustomCode
custom_import: templates/terraform/custom_import/self_link_as_name.erb
post_create: templates/terraform/post_create/set_computed_name.erb
properties:
location: !ruby/object:Overrides::Terraform::PropertyOverride
ignore_read: true
examples:
- !ruby/object:Provider::Terraform::Examples
skip_test: true
name: "scheduled_query"
primary_resource_id: "query_config"
vars:
display_name: "my-query"
dataset_id: "my_dataset"
# This is for copying files over
files: !ruby/object:Provider::Config::Files
# These files have templating (ERB) code that will be run.
# This is usually to add licensing info, autogeneration notices, etc.
compile:
<%= lines(indent(compile('provider/terraform/product~compile.yaml'), 4)) -%>
32 changes: 32 additions & 0 deletions templates/terraform/examples/scheduled_query.tf.erb
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
# Example: a BigQuery Data Transfer "scheduled query" config.
data "google_project" "project" {}

# Grant the BigQuery Data Transfer service agent the ability to mint
# short-term tokens; the transfer service needs this to run the query.
# NOTE(review): no `project` is set, so this presumably applies to the
# provider's default project — confirm.
resource "google_project_iam_member" "permissions" {
role = "roles/iam.serviceAccountShortTermTokenMinter"
member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com"
}

resource "google_bigquery_data_transfer_config" "<%= ctx[:primary_resource_id] %>" {

# The IAM grant above must exist before the transfer config is created,
# otherwise creation can fail on missing permissions.
depends_on = [google_project_iam_member.permissions]

display_name = "<%= ctx[:vars]['display_name'] %>"
location = "asia-northeast1"
data_source_id = "scheduled_query"
schedule = "first sunday of quarter 00:00"
destination_dataset_id = "${google_bigquery_dataset.my_dataset.dataset_id}"
# params are data-source specific; these are the scheduled_query params.
params = {
destination_table_name_template = "my-table"
write_disposition = "WRITE_APPEND"
query = "SELECT name FROM tabl WHERE x = 'y'"
}
}

# Destination dataset for the scheduled query's results.
resource "google_bigquery_dataset" "my_dataset" {

depends_on = [google_project_iam_member.permissions]

# Hyphens are stripped from the id — presumably because BigQuery dataset
# IDs do not allow '-'; confirm against the API docs.
dataset_id = "<%= ctx[:vars]['dataset_id'].delete("-") %>"
friendly_name = "foo"
description = "bar"
location = "asia-northeast1"
}
2 changes: 1 addition & 1 deletion templates/terraform/resource.erb
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,7 @@ func resource<%= resource_name -%>Create(d *schema.ResourceData, meta interface{
}

log.Printf("[DEBUG] Creating new <%= object.name -%>: %#v", obj)
res, err := sendRequestWithTimeout(config, "<%= object.create_verb.to_s.upcase -%>", url, obj, d.Timeout(schema.TimeoutCreate))
res, err := sendRequestWithTimeout(config, "<%= object.create_verb.to_s.upcase -%>", url, obj, d.Timeout(schema.TimeoutCreate) <%= object.error_retry_predicates ? ", " + object.error_retry_predicates.join(',') : "" -%>)
if err != nil {
return fmt.Errorf("Error creating <%= object.name -%>: %s", err)
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,137 @@
package google

import (
"fmt"
"strings"
"testing"

"github.com/hashicorp/terraform/helper/acctest"
"github.com/hashicorp/terraform/helper/resource"
"github.com/hashicorp/terraform/terraform"
)

// The service account TF uses needs the permission granted in the configs,
// but it will get deleted by parallel tests, so they need to be run serially.
func TestAccBigqueryDataTransferConfig(t *testing.T) {
	cases := []struct {
		name string
		fn   func(t *testing.T)
	}{
		{"basic", testAccBigqueryDataTransferConfig_scheduledQuery_basic},
		{"update", testAccBigqueryDataTransferConfig_scheduledQuery_update},
	}

	for _, c := range cases {
		// Shadow the loop variable so the closure passed to t.Run does
		// not race with the next iteration.
		// See https://github.com/golang/go/wiki/CommonMistakes#using-goroutines-on-loop-iterator-variables
		c := c
		t.Run(c.name, func(t *testing.T) {
			c.fn(t)
		})
	}
}

// testAccBigqueryDataTransferConfig_scheduledQuery_basic creates a
// scheduled-query transfer config and then verifies it via import.
// "location" is skipped on import verification (it is not returned by
// the read).
func testAccBigqueryDataTransferConfig_scheduledQuery_basic(t *testing.T) {
	suffix := acctest.RandString(10)

	resource.Test(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckBigqueryDataTransferConfigDestroy,
		Steps: []resource.TestStep{
			{
				Config: testAccBigqueryDataTransferConfig_scheduledQuery(suffix, "third", "y"),
			},
			{
				ResourceName:            "google_bigquery_data_transfer_config.query_config",
				ImportState:             true,
				ImportStateVerify:       true,
				ImportStateVerifyIgnore: []string{"location"},
			},
		},
	})
}

// testAccBigqueryDataTransferConfig_scheduledQuery_update creates a
// scheduled-query transfer config, updates its schedule and query in
// place, and verifies the final state via import ("location" excluded,
// as it is not returned by the read).
func testAccBigqueryDataTransferConfig_scheduledQuery_update(t *testing.T) {
	suffix := acctest.RandString(10)

	resource.Test(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckBigqueryDataTransferConfigDestroy,
		Steps: []resource.TestStep{
			{
				Config: testAccBigqueryDataTransferConfig_scheduledQuery(suffix, "first", "y"),
			},
			{
				Config: testAccBigqueryDataTransferConfig_scheduledQuery(suffix, "second", "z"),
			},
			{
				ResourceName:            "google_bigquery_data_transfer_config.query_config",
				ImportState:             true,
				ImportStateVerify:       true,
				ImportStateVerifyIgnore: []string{"location"},
			},
		},
	})
}

// testAccCheckBigqueryDataTransferConfigDestroy verifies that every
// google_bigquery_data_transfer_config in the test state has actually
// been deleted from the API: a successful GET on the resource's self
// link means it still exists, which is an error.
func testAccCheckBigqueryDataTransferConfigDestroy(s *terraform.State) error {
	// The provider meta is the same for every resource; fetch it once
	// rather than on each loop iteration.
	config := testAccProvider.Meta().(*Config)

	for name, rs := range s.RootModule().Resources {
		if rs.Type != "google_bigquery_data_transfer_config" {
			continue
		}
		// Data sources share the state map but have nothing to destroy.
		if strings.HasPrefix(name, "data.") {
			continue
		}

		url, err := replaceVarsForTest(config, rs, "{{BigqueryDataTransferBasePath}}{{name}}")
		if err != nil {
			return err
		}

		// If the GET succeeds the resource was not destroyed.
		if _, err := sendRequest(config, "GET", url, nil); err == nil {
			return fmt.Errorf("BigqueryDataTransferConfig still exists at %s", url)
		}
	}

	return nil
}

// testAccBigqueryDataTransferConfig_scheduledQuery renders the HCL for a
// scheduled-query transfer config plus its destination dataset and the
// IAM grant the transfer service needs.
//
// randomSuffix de-collides resource names across test runs, schedule
// selects the "<nth> sunday of quarter 00:00" schedule, and letter is
// substituted into the query so update steps produce a real diff.
// (Renamed parameters to Go MixedCaps style; behavior unchanged.)
func testAccBigqueryDataTransferConfig_scheduledQuery(randomSuffix, schedule, letter string) string {
	return fmt.Sprintf(`
data "google_project" "project" {}

resource "google_project_iam_member" "permissions" {
  role   = "roles/iam.serviceAccountShortTermTokenMinter"
  member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com"
}

resource "google_bigquery_dataset" "my_dataset" {
  depends_on = [google_project_iam_member.permissions]

  dataset_id    = "my_dataset%s"
  friendly_name = "foo"
  description   = "bar"
  location      = "asia-northeast1"
}

resource "google_bigquery_data_transfer_config" "query_config" {
  depends_on = [google_project_iam_member.permissions]

  display_name           = "my-query-%s"
  location               = "asia-northeast1"
  data_source_id         = "scheduled_query"
  schedule               = "%s sunday of quarter 00:00"
  destination_dataset_id = google_bigquery_dataset.my_dataset.dataset_id
  params = {
    destination_table_name_template = "my-table"
    write_disposition               = "WRITE_APPEND"
    query                           = "SELECT name FROM tabl WHERE x = '%s'"
  }
}
`, randomSuffix, randomSuffix, schedule, letter)
}
19 changes: 19 additions & 0 deletions third_party/terraform/utils/error_retry_predicates.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
package google

import (
"strings"

"google.golang.org/api/googleapi"
)

// iamMemberMissing is an error-retry predicate. If a permission necessary
// to provision a resource is created in the same config as the resource
// itself, the grant may not have propagated by the time Terraform attempts
// the create; this predicate lets such errors be retried until the timeout
// expires. It reports true for HTTP 400 responses whose body mentions
// "permission", along with a message describing why the retry is happening.
func iamMemberMissing(err error) (bool, string) {
	gerr, ok := err.(*googleapi.Error)
	if !ok {
		return false, ""
	}
	if gerr.Code != 400 || !strings.Contains(gerr.Body, "permission") {
		return false, ""
	}
	return true, "Waiting for IAM member permissions to propagate."
}
3 changes: 2 additions & 1 deletion third_party/terraform/utils/transport.go
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ func sendRequest(config *Config, method, rawurl string, body map[string]interfac
return sendRequestWithTimeout(config, method, rawurl, body, DefaultRequestTimeout)
}

func sendRequestWithTimeout(config *Config, method, rawurl string, body map[string]interface{}, timeout time.Duration) (map[string]interface{}, error) {
func sendRequestWithTimeout(config *Config, method, rawurl string, body map[string]interface{}, timeout time.Duration, errorRetryPredicates ...func(e error) (bool, string)) (map[string]interface{}, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("User-Agent", config.userAgent)
reqHeaders.Set("Content-Type", "application/json")
Expand Down Expand Up @@ -81,6 +81,7 @@ func sendRequestWithTimeout(config *Config, method, rawurl string, body map[stri
return nil
},
timeout,
errorRetryPredicates...,
)
if err != nil {
return nil, err
Expand Down
Loading

0 comments on commit c59fe71

Please sign in to comment.