Skip to content

Commit

Permalink
Add coverage to service factory methods
Browse files Browse the repository at this point in the history
Rename Backoff to GrpcBackoff, since this is now private.
Fix calls to Core::GCE.
  • Loading branch information
blowmage committed Aug 2, 2016
1 parent 39714cc commit 2c7ddd8
Show file tree
Hide file tree
Showing 23 changed files with 934 additions and 438 deletions.
133 changes: 93 additions & 40 deletions google-cloud-bigquery/test/google/cloud/bigquery_test.rb
Original file line number Diff line number Diff line change
Expand Up @@ -16,51 +16,104 @@
require "google/cloud/bigquery"

describe Google::Cloud do
it "calls out to Google::Cloud.bigquery" do
gcloud = Google::Cloud.new
stubbed_bigquery = ->(project, keyfile, scope: nil, retries: nil, timeout: nil) {
project.must_equal nil
keyfile.must_equal nil
scope.must_be :nil?
retries.must_be :nil?
timeout.must_be :nil?
"bigquery-project-object-empty"
}
Google::Cloud.stub :bigquery, stubbed_bigquery do
project = gcloud.bigquery
project.must_equal "bigquery-project-object-empty"
describe "#bigquery" do
it "calls out to Google::Cloud.bigquery" do
gcloud = Google::Cloud.new
stubbed_bigquery = ->(project, keyfile, scope: nil, retries: nil, timeout: nil) {
project.must_equal nil
keyfile.must_equal nil
scope.must_be :nil?
retries.must_be :nil?
timeout.must_be :nil?
"bigquery-project-object-empty"
}
Google::Cloud.stub :bigquery, stubbed_bigquery do
project = gcloud.bigquery
project.must_equal "bigquery-project-object-empty"
end
end

it "passes project and keyfile to Google::Cloud.bigquery" do
gcloud = Google::Cloud.new "project-id", "keyfile-path"
stubbed_bigquery = ->(project, keyfile, scope: nil, retries: nil, timeout: nil) {
project.must_equal "project-id"
keyfile.must_equal "keyfile-path"
scope.must_be :nil?
retries.must_be :nil?
timeout.must_be :nil?
"bigquery-project-object"
}
Google::Cloud.stub :bigquery, stubbed_bigquery do
project = gcloud.bigquery
project.must_equal "bigquery-project-object"
end
end
end

it "passes project and keyfile to Google::Cloud.bigquery" do
gcloud = Google::Cloud.new "project-id", "keyfile-path"
stubbed_bigquery = ->(project, keyfile, scope: nil, retries: nil, timeout: nil) {
project.must_equal "project-id"
keyfile.must_equal "keyfile-path"
scope.must_be :nil?
retries.must_be :nil?
timeout.must_be :nil?
"bigquery-project-object"
}
Google::Cloud.stub :bigquery, stubbed_bigquery do
project = gcloud.bigquery
project.must_equal "bigquery-project-object"
it "passes project and keyfile and options to Google::Cloud.bigquery" do
gcloud = Google::Cloud.new "project-id", "keyfile-path"
stubbed_bigquery = ->(project, keyfile, scope: nil, retries: nil, timeout: nil) {
project.must_equal "project-id"
keyfile.must_equal "keyfile-path"
scope.must_equal "http://example.com/scope"
retries.must_equal 5
timeout.must_equal 60
"bigquery-project-object-scoped"
}
Google::Cloud.stub :bigquery, stubbed_bigquery do
project = gcloud.bigquery scope: "http://example.com/scope", retries: 5, timeout: 60
project.must_equal "bigquery-project-object-scoped"
end
end
end

it "passes project and keyfile and options to Google::Cloud.bigquery" do
gcloud = Google::Cloud.new "project-id", "keyfile-path"
stubbed_bigquery = ->(project, keyfile, scope: nil, retries: nil, timeout: nil) {
project.must_equal "project-id"
keyfile.must_equal "keyfile-path"
scope.must_equal "http://example.com/scope"
retries.must_equal 5
timeout.must_equal 60
"bigquery-project-object-scoped"
}
Google::Cloud.stub :bigquery, stubbed_bigquery do
project = gcloud.bigquery scope: "http://example.com/scope", retries: 5, timeout: 60
project.must_equal "bigquery-project-object-scoped"
describe ".bigquery" do
let(:default_credentials) { OpenStruct.new empty: true }
let(:found_credentials) { "{}" }

it "gets defaults for project_id and keyfile" do
# Clear all environment variables
ENV.stub :[], nil do
# Get project_id from Google Compute Engine
Google::Cloud::Core::GCE.stub :project_id, "project-id" do
Google::Cloud::Bigquery::Credentials.stub :default, default_credentials do
bigquery = Google::Cloud.bigquery
bigquery.must_be_kind_of Google::Cloud::Bigquery::Project
bigquery.project.must_equal "project-id"
bigquery.service.credentials.must_equal default_credentials
end
end
end
end

it "uses provided project_id and keyfile" do
stubbed_credentials = ->(keyfile, scope: nil) {
keyfile.must_equal "path/to/keyfile.json"
scope.must_equal nil
"bigquery-credentials"
}
stubbed_service = ->(project, credentials, retries: nil, timeout: nil) {
project.must_equal "project-id"
credentials.must_equal "bigquery-credentials"
retries.must_equal nil
timeout.must_equal nil
OpenStruct.new project: project
}

# Clear all environment variables
ENV.stub :[], nil do
File.stub :file?, true, ["path/to/keyfile.json"] do
File.stub :read, found_credentials, ["path/to/keyfile.json"] do
Google::Cloud::Bigquery::Credentials.stub :new, stubbed_credentials do
Google::Cloud::Bigquery::Service.stub :new, stubbed_service do
bigquery = Google::Cloud.bigquery "project-id", "path/to/keyfile.json"
bigquery.must_be_kind_of Google::Cloud::Bigquery::Project
bigquery.project.must_equal "project-id"
bigquery.service.must_be_kind_of OpenStruct
end
end
end
end
end
end
end
end
77 changes: 8 additions & 69 deletions google-cloud-core/lib/google/cloud/core/backoff.rb
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ module Core
# increasing delay will be added between each retried call. The first
# retry will be delayed one second, the second retry will be delayed
# two seconds, and so on.
class Backoff
class GrpcBackoff
class << self
##
# The number of times a retriable API call should be retried.
Expand All @@ -42,19 +42,6 @@ def retries= new_retries
# The default values are `14`.
attr_accessor :grpc_codes

##
# The HTTP Status Codes that should be retried.
#
# The default values are `500` and `503`.
attr_accessor :http_codes

##
# The Google API error reasons that should be retried.
#
# The default values are `rateLimitExceeded` and
# `userRateLimitExceeded`.
attr_accessor :reasons

##
# The code to run when a backoff is handled.
# This must be a Proc and must take the number of
Expand All @@ -66,42 +53,24 @@ def retries= new_retries
# Set the default values
self.retries = 3
self.grpc_codes = [14]
self.http_codes = [500, 503]
self.reasons = %w(rateLimitExceeded userRateLimitExceeded)
self.backoff = ->(retries) { sleep retries.to_i }

##
# @private
# Creates a new Backoff object to catch common errors when calling
# Creates a new GrpcBackoff object to catch common errors when calling
# the Google API and handle the error by retrying the call.
#
# Google::Cloud::Backoff.new(options).execute_gapi do
# client.execute api_method: service.things.insert,
# parameters: { thing: @thing },
# body_object: { name: thing_name }
# Google::Cloud::Core::GrpcBackoff.new(options).execute do
# datastore.lookup lookup_req
# end
def initialize options = {}
@retries = (options[:retries] || Backoff.retries).to_i
@grpc_codes = (options[:grpc_codes] || Backoff.grpc_codes).to_a
@http_codes = (options[:http_codes] || Backoff.http_codes).to_a
@reasons = (options[:reasons] || Backoff.reasons).to_a
@backoff = options[:backoff] || Backoff.backoff
@retries = (options[:retries] || GrpcBackoff.retries).to_i
@grpc_codes = (options[:grpc_codes] || GrpcBackoff.grpc_codes).to_a
@backoff = options[:backoff] || GrpcBackoff.backoff
end

# @private
def execute_gapi
current_retries = 0
loop do
result = yield
return result unless result.is_a? Google::APIClient::Result
break result if result.success? || !retry?(result, current_retries)
current_retries += 1
@backoff.call current_retries
end
end

# @private
def execute_grpc
def execute
current_retries = 0
loop do
begin
Expand All @@ -114,36 +83,6 @@ def execute_grpc
end
end
end

protected

# @private
def retry? result, current_retries #:nodoc:
if current_retries < @retries
return true if retry_http_code? result
return true if retry_error_reason? result
end
false
end

# @private
def retry_http_code? result #:nodoc:
@http_codes.include? result.response.status
end

# @private
def retry_error_reason? result
if result.data &&
result.data["error"] &&
result.data["error"]["errors"]
Array(result.data["error"]["errors"]).each do |error|
if error["reason"] && @reasons.include?(error["reason"])
return true
end
end
end
false
end
end
end
end
Expand Down
89 changes: 89 additions & 0 deletions google-cloud-core/lib/google/cloud/core/grpc_backoff.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


module Google
  module Cloud
    module Core
      ##
      # @private
      # GrpcBackoff allows users to control how gRPC API calls are retried.
      # If an API call fails the error is checked to see if the call can be
      # retried. If the error matches the configured gRPC status codes, the
      # call is retried with an incremental backoff. This means that an
      # increasing delay will be added between each retried call. The first
      # retry will be delayed one second, the second retry will be delayed
      # two seconds, and so on.
      class GrpcBackoff
        class << self
          ##
          # The number of times a retriable API call should be retried.
          #
          # The default value is `3`.
          attr_accessor :retries

          ##
          # The gRPC status codes that should be retried.
          #
          # The default value is `[14]` (UNAVAILABLE).
          attr_accessor :grpc_codes

          ##
          # The code to run when a backoff is handled.
          # This must be a Proc and must take the number of
          # retries as an argument.
          #
          # Note: This method is undocumented and may change.
          attr_accessor :backoff # :nodoc:
        end
        # Set the default values
        self.retries = 3
        self.grpc_codes = [14]
        self.backoff = ->(retries) { sleep retries.to_i }

        ##
        # @private
        # Creates a new GrpcBackoff object to catch common errors when
        # calling the Google API and handle the error by retrying the call.
        #
        #   Google::Cloud::Core::GrpcBackoff.new(options).execute do
        #     datastore.lookup lookup_req
        #   end
        def initialize options = {}
          # Fall back to the class-level defaults for any option not given.
          @retries = (options[:retries] || GrpcBackoff.retries).to_i
          @grpc_codes = (options[:grpc_codes] || GrpcBackoff.grpc_codes).to_a
          @backoff = options[:backoff] || GrpcBackoff.backoff
        end

        # @private
        # Yields until the block returns without raising, returning its
        # value. A GRPC::BadStatus whose code is in @grpc_codes is retried
        # up to @retries times, invoking @backoff with the current attempt
        # count before each retry; any other error (or exhausted retries)
        # is re-raised to the caller.
        def execute
          current_retries = 0
          loop do
            begin
              return yield
            rescue GRPC::BadStatus => e
              # Bare raise re-raises the current exception, preserving the
              # original backtrace.
              raise unless @grpc_codes.include?(e.code) &&
                           (current_retries < @retries)
              current_retries += 1
              @backoff.call current_retries
            end
          end
        end
      end
    end
  end
end
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ def self.default_project
ENV["DATASTORE_PROJECT"] ||
ENV["GCLOUD_PROJECT"] ||
ENV["GOOGLE_CLOUD_PROJECT"] ||
Google::Cloud::GCE.project_id
Google::Cloud::Core::GCE.project_id
end

##
Expand Down
4 changes: 2 additions & 2 deletions google-cloud-datastore/lib/google/cloud/datastore/service.rb
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@

require "google/cloud/datastore/credentials"
require "google/datastore/v1beta3/datastore_services"
require "google/cloud/core/backoff"
require "google/cloud/core/grpc_backoff"

module Google
module Cloud
Expand Down Expand Up @@ -143,7 +143,7 @@ def inspect
##
# Performs backoff and error handling
def execute
Google::Cloud::Core::Backoff.new(retries: retries).execute_grpc do
Google::Cloud::Core::GrpcBackoff.new(retries: retries).execute do
yield
end
rescue GRPC::BadStatus => e
Expand Down
Loading

0 comments on commit 2c7ddd8

Please sign in to comment.