feat(spanner): Add support for Cloud Spanner Scheduled Backups (#540)
* chore: Update gapic-generator-python to v1.18.0

PiperOrigin-RevId: 638650618

Source-Link: googleapis/googleapis@6330f03

Source-Link: googleapis/googleapis-gen@44fa4f1
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDRmYTRmMTk3OWRjNDVjMTc3OGZkN2NhZjEzZjhlNjFjNmQxY2FlOCJ9

feat: New PropertyMask field which allows partial commits, lookups, and query results

PiperOrigin-RevId: 635449160

Source-Link: googleapis/googleapis@dde0ec1

Source-Link: googleapis/googleapis-gen@8caa60d
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOGNhYTYwZDlhZWE4Mjk2NGExOWNkZjhmYWY5MTM4NDkxMWRiOGJkZCJ9
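
As an editorial illustration of the PropertyMask feature noted above: a minimal lookup sketch that fetches only selected properties. The client and message names are taken from the public datastore_v1 surface, not from this commit's diff, so treat the exact fields as assumptions.

from google.cloud import datastore_v1

client = datastore_v1.DatastoreClient()

# Look up one entity but only materialize the "title" and "author" properties.
request = datastore_v1.LookupRequest(
    project_id="my-project",  # placeholder project
    keys=[
        datastore_v1.Key(
            partition_id=datastore_v1.PartitionId(project_id="my-project"),
            path=[datastore_v1.Key.PathElement(kind="Book", name="moby-dick")],
        )
    ],
    property_mask=datastore_v1.PropertyMask(paths=["title", "author"]),
)
response = client.lookup(request=request)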

chore: Update gapic-generator-python to v1.17.1

PiperOrigin-RevId: 629071173

Source-Link: googleapis/googleapis@4afa392

Source-Link: googleapis/googleapis-gen@16dbbb4
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTZkYmJiNGQwNDU3ZGI1ZTYxYWM5Zjk5YjBkNTJhNDYxNTQ0NTVhYyJ9

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* feat(spanner): Add support for Cloud Spanner Scheduled Backups

PiperOrigin-RevId: 649277844

Source-Link: googleapis/googleapis@fd7efa2

Source-Link: googleapis/googleapis-gen@50be251
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTBiZTI1MTMyOWQ4ZGI1YjU1NTYyNmViZDQ4ODY3MjFmNTQ3ZDNjYyJ9
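
For context on the scheduled-backups feature named in the commit title, a hypothetical sketch against the Spanner database-admin API. The message and field names (BackupSchedule, BackupScheduleSpec, CrontabSpec, FullBackupSpec) are assumptions based on the public spanner_admin_database_v1 surface and do not appear in this diff.

from google.cloud import spanner_admin_database_v1
from google.protobuf import duration_pb2

admin = spanner_admin_database_v1.DatabaseAdminClient()

# Assumed shape: a daily full backup at 02:00, retained for seven days.
schedule = spanner_admin_database_v1.BackupSchedule(
    spec=spanner_admin_database_v1.BackupScheduleSpec(
        cron_spec=spanner_admin_database_v1.CrontabSpec(text="0 2 * * *"),
    ),
    retention_duration=duration_pb2.Duration(seconds=7 * 24 * 3600),
    full_backup_spec=spanner_admin_database_v1.FullBackupSpec(),
)
admin.create_backup_schedule(
    parent="projects/my-project/instances/my-instance/databases/my-db",
    backup_schedule=schedule,
    backup_schedule_id="daily-backup",
)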

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

---------

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
gcf-owl-bot[bot] authored Jul 8, 2024
1 parent c86b51e commit 7fd218b
Showing 16 changed files with 2,203 additions and 303 deletions.
@@ -18,6 +18,7 @@
import re
from typing import (
Dict,
Callable,
Mapping,
MutableMapping,
MutableSequence,
@@ -37,6 +38,7 @@
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore


try:
OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None]
except AttributeError: # pragma: NO COVER
@@ -250,7 +252,9 @@ def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[str, DatastoreAdminTransport] = "grpc_asyncio",
transport: Optional[
Union[str, DatastoreAdminTransport, Callable[..., DatastoreAdminTransport]]
] = "grpc_asyncio",
client_options: Optional[ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -262,9 +266,11 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.DatastoreAdminTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
transport (Optional[Union[str,DatastoreAdminTransport,Callable[..., DatastoreAdminTransport]]]):
The transport to use, or a Callable that constructs and returns a new transport to use.
If a Callable is given, it will be called with the same set of initialization
arguments as used in the DatastoreAdminTransport constructor.
If set to None, a transport is chosen automatically.
client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
Custom options for the client.
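
A usage sketch of the Callable form documented above: the factory receives the same keyword arguments the client would pass to a transport constructor and returns a transport instance. The client and transport classes exist in this package; the factory itself is illustrative.

from google.cloud.datastore_admin_v1 import DatastoreAdminAsyncClient
from google.cloud.datastore_admin_v1.services.datastore_admin.transports import (
    DatastoreAdminGrpcAsyncIOTransport,
)

def transport_factory(**kwargs):
    # kwargs are the standard transport init arguments (credentials, host,
    # client_info, ...); a factory may inspect or adjust them before building.
    return DatastoreAdminGrpcAsyncIOTransport(**kwargs)

client = DatastoreAdminAsyncClient(transport=transport_factory)
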
@@ -429,8 +435,8 @@ async def sample_export_entities():
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
# - Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any(
[project_id, labels, entity_filter, output_url_prefix]
)
@@ -440,7 +446,10 @@ async def sample_export_entities():
"the individual field arguments should be set."
)

request = datastore_admin.ExportEntitiesRequest(request)
# - Use the request object if provided (there's no risk of modifying the input as
# there are no flattened fields), or create one.
if not isinstance(request, datastore_admin.ExportEntitiesRequest):
request = datastore_admin.ExportEntitiesRequest(request)

# If we have keyword arguments corresponding to fields on the
# request, apply these.
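
For reference, the two mutually exclusive call styles that this guard and coercion support look roughly like the following sketch (shown with the synchronous client; project and bucket names are placeholders):

from google.cloud import datastore_admin_v1

client = datastore_admin_v1.DatastoreAdminClient()

# Style 1: pass a fully formed request object and no flattened keyword fields.
op = client.export_entities(
    request=datastore_admin_v1.ExportEntitiesRequest(
        project_id="my-project",
        output_url_prefix="gs://my-bucket/exports",
    )
)

# Style 2: pass the flattened fields directly and no request object.
op = client.export_entities(
    project_id="my-project",
    output_url_prefix="gs://my-bucket/exports",
)
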
@@ -456,11 +465,9 @@ async def sample_export_entities():

# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.export_entities,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
rpc = self._client._transport._wrapped_methods[
self._client._transport.export_entities
]

# Certain fields should be provided within the metadata header;
# add these here.
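
The lookup above relies on methods that the transport pre-wraps once at construction time instead of wrapping on every call. A minimal, self-contained sketch of that pattern, not this library's actual transport code:

from google.api_core import gapic_v1

class SketchTransport:
    """Toy transport illustrating the _wrapped_methods lookup pattern."""

    def __init__(self, client_info=None):
        client_info = client_info or gapic_v1.client_info.ClientInfo()
        # Wrap each RPC exactly once, keyed by the bound method.
        self._wrapped_methods = {
            self.export_entities: gapic_v1.method.wrap_method(
                self.export_entities,
                default_timeout=60.0,
                client_info=client_info,
            ),
        }

    def export_entities(self, request, retry=None, timeout=None, metadata=()):
        return "operation"  # stand-in for the real RPC

transport = SketchTransport()
rpc = transport._wrapped_methods[transport.export_entities]  # already wrapped
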
@@ -613,16 +620,19 @@ async def sample_import_entities():
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
# - Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([project_id, labels, input_url, entity_filter])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)

request = datastore_admin.ImportEntitiesRequest(request)
# - Use the request object if provided (there's no risk of modifying the input as
# there are no flattened fields), or create one.
if not isinstance(request, datastore_admin.ImportEntitiesRequest):
request = datastore_admin.ImportEntitiesRequest(request)

# If we have keyword arguments corresponding to fields on the
# request, apply these.
Expand All @@ -638,11 +648,9 @@ async def sample_import_entities():

# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.import_entities,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
rpc = self._client._transport._wrapped_methods[
self._client._transport.import_entities
]

# Certain fields should be provided within the metadata header;
# add these here.
@@ -747,15 +755,16 @@ async def sample_create_index():
"""
# Create or coerce a protobuf request object.
request = datastore_admin.CreateIndexRequest(request)
# - Use the request object if provided (there's no risk of modifying the input as
# there are no flattened fields), or create one.
if not isinstance(request, datastore_admin.CreateIndexRequest):
request = datastore_admin.CreateIndexRequest(request)

# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_index,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
rpc = self._client._transport._wrapped_methods[
self._client._transport.create_index
]

# Certain fields should be provided within the metadata header;
# add these here.
@@ -859,15 +868,16 @@ async def sample_delete_index():
"""
# Create or coerce a protobuf request object.
request = datastore_admin.DeleteIndexRequest(request)
# - Use the request object if provided (there's no risk of modifying the input as
# there are no flattened fields), or create one.
if not isinstance(request, datastore_admin.DeleteIndexRequest):
request = datastore_admin.DeleteIndexRequest(request)

# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_index,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
rpc = self._client._transport._wrapped_methods[
self._client._transport.delete_index
]

# Certain fields should be provided within the metadata header;
# add these here.
@@ -952,25 +962,16 @@ async def sample_get_index():
Datastore composite index definition.
"""
# Create or coerce a protobuf request object.
request = datastore_admin.GetIndexRequest(request)
# - Use the request object if provided (there's no risk of modifying the input as
# there are no flattened fields), or create one.
if not isinstance(request, datastore_admin.GetIndexRequest):
request = datastore_admin.GetIndexRequest(request)

# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_index,
default_retry=retries.AsyncRetry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.DeadlineExceeded,
core_exceptions.ServiceUnavailable,
),
deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
rpc = self._client._transport._wrapped_methods[
self._client._transport.get_index
]

# Certain fields should be provided within the metadata header;
# add these here.
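
The inline AsyncRetry defaults removed here now come from the transport's pre-wrapped methods; callers can still override retry and timeout on a per-call basis. A hedged sketch with placeholder identifiers:

import asyncio

from google.api_core import retry_async
from google.cloud import datastore_admin_v1

async def fetch_index():
    client = datastore_admin_v1.DatastoreAdminAsyncClient()
    return await client.get_index(
        request=datastore_admin_v1.GetIndexRequest(
            project_id="my-project", index_id="my-index"
        ),
        retry=retry_async.AsyncRetry(initial=0.1, maximum=60.0, multiplier=1.3),
        timeout=60.0,
    )

asyncio.run(fetch_index())
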
@@ -1056,25 +1057,16 @@ async def sample_list_indexes():
"""
# Create or coerce a protobuf request object.
request = datastore_admin.ListIndexesRequest(request)
# - Use the request object if provided (there's no risk of modifying the input as
# there are no flattened fields), or create one.
if not isinstance(request, datastore_admin.ListIndexesRequest):
request = datastore_admin.ListIndexesRequest(request)

# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_indexes,
default_retry=retries.AsyncRetry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.DeadlineExceeded,
core_exceptions.ServiceUnavailable,
),
deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
rpc = self._client._transport._wrapped_methods[
self._client._transport.list_indexes
]

# Certain fields should be provided within the metadata header;
# add these here.
68 changes: 34 additions & 34 deletions google/cloud/datastore_admin_v1/services/datastore_admin/client.py
@@ -18,6 +18,7 @@
import re
from typing import (
Dict,
Callable,
Mapping,
MutableMapping,
MutableSequence,
@@ -565,7 +566,9 @@ def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Optional[Union[str, DatastoreAdminTransport]] = None,
transport: Optional[
Union[str, DatastoreAdminTransport, Callable[..., DatastoreAdminTransport]]
] = None,
client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -577,9 +580,11 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, DatastoreAdminTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
transport (Optional[Union[str,DatastoreAdminTransport,Callable[..., DatastoreAdminTransport]]]):
The transport to use, or a Callable that constructs and returns a new transport.
If a Callable is given, it will be called with the same set of initialization
arguments as used in the DatastoreAdminTransport constructor.
If set to None, a transport is chosen automatically.
client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
Custom options for the client.
@@ -688,8 +693,15 @@ def __init__(
api_key_value
)

Transport = type(self).get_transport_class(cast(str, transport))
self._transport = Transport(
transport_init: Union[
Type[DatastoreAdminTransport], Callable[..., DatastoreAdminTransport]
] = (
type(self).get_transport_class(transport)
if isinstance(transport, str) or transport is None
else cast(Callable[..., DatastoreAdminTransport], transport)
)
# initialize with the provided callable or the passed in class
self._transport = transport_init(
credentials=credentials,
credentials_file=self._client_options.credentials_file,
host=self._api_endpoint,
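
The resolution above means the transport argument may be a registered name, a transport-returning callable (including a transport class), or None for automatic selection. A brief sketch of the three accepted forms:

from google.cloud import datastore_admin_v1
from google.cloud.datastore_admin_v1.services.datastore_admin.transports import (
    DatastoreAdminGrpcTransport,
)

client_by_name = datastore_admin_v1.DatastoreAdminClient(transport="grpc")
client_by_callable = datastore_admin_v1.DatastoreAdminClient(
    transport=DatastoreAdminGrpcTransport  # a class is itself a callable
)
client_auto = datastore_admin_v1.DatastoreAdminClient(transport=None)
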
@@ -822,8 +834,8 @@ def sample_export_entities():
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
# - Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any(
[project_id, labels, entity_filter, output_url_prefix]
)
@@ -833,10 +845,8 @@
"the individual field arguments should be set."
)

# Minor optimization to avoid making a copy if the user passes
# in a datastore_admin.ExportEntitiesRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
# - Use the request object if provided (there's no risk of modifying the input as
# there are no flattened fields), or create one.
if not isinstance(request, datastore_admin.ExportEntitiesRequest):
request = datastore_admin.ExportEntitiesRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -1005,19 +1015,17 @@ def sample_import_entities():
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
# - Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([project_id, labels, input_url, entity_filter])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)

# Minor optimization to avoid making a copy if the user passes
# in a datastore_admin.ImportEntitiesRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
# - Use the request object if provided (there's no risk of modifying the input as
# there are no flattened fields), or create one.
if not isinstance(request, datastore_admin.ImportEntitiesRequest):
request = datastore_admin.ImportEntitiesRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -1138,10 +1146,8 @@ def sample_create_index():
"""
# Create or coerce a protobuf request object.
# Minor optimization to avoid making a copy if the user passes
# in a datastore_admin.CreateIndexRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
# - Use the request object if provided (there's no risk of modifying the input as
# there are no flattened fields), or create one.
if not isinstance(request, datastore_admin.CreateIndexRequest):
request = datastore_admin.CreateIndexRequest(request)

@@ -1251,10 +1257,8 @@ def sample_delete_index():
"""
# Create or coerce a protobuf request object.
# Minor optimization to avoid making a copy if the user passes
# in a datastore_admin.DeleteIndexRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
# - Use the request object if provided (there's no risk of modifying the input as
# there are no flattened fields), or create one.
if not isinstance(request, datastore_admin.DeleteIndexRequest):
request = datastore_admin.DeleteIndexRequest(request)

@@ -1345,10 +1349,8 @@ def sample_get_index():
Datastore composite index definition.
"""
# Create or coerce a protobuf request object.
# Minor optimization to avoid making a copy if the user passes
# in a datastore_admin.GetIndexRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
# - Use the request object if provided (there's no risk of modifying the input as
# there are no flattened fields), or create one.
if not isinstance(request, datastore_admin.GetIndexRequest):
request = datastore_admin.GetIndexRequest(request)

@@ -1440,10 +1442,8 @@ def sample_list_indexes():
"""
# Create or coerce a protobuf request object.
# Minor optimization to avoid making a copy if the user passes
# in a datastore_admin.ListIndexesRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
# - Use the request object if provided (there's no risk of modifying the input as
# there are no flattened fields), or create one.
if not isinstance(request, datastore_admin.ListIndexesRequest):
request = datastore_admin.ListIndexesRequest(request)

@@ -88,6 +88,8 @@ def __init__(

# Save the scopes.
self._scopes = scopes
if not hasattr(self, "_ignore_credentials"):
self._ignore_credentials: bool = False

# If no credentials are provided, then determine the appropriate
# defaults.
@@ -100,7 +102,7 @@
credentials, _ = google.auth.load_credentials_from_file(
credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
)
elif credentials is None:
elif credentials is None and not self._ignore_credentials:
credentials, _ = google.auth.default(
**scopes_kwargs, quota_project_id=quota_project_id
)
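
The new _ignore_credentials flag lets a concrete transport opt out of the application-default-credentials lookup, typically because it was handed a pre-built channel. A simplified, illustrative sketch of that interaction (not this library's actual transport classes):

import google.auth

class SketchBaseTransport:
    """Condensed version of the base-transport credential logic above."""

    def __init__(self, credentials=None, scopes=None):
        if not hasattr(self, "_ignore_credentials"):
            self._ignore_credentials = False
        if credentials is None and not self._ignore_credentials:
            # Only fall back to ADC when no subclass asked to skip it.
            credentials, _ = google.auth.default(scopes=scopes)
        self._credentials = credentials

class ChannelBackedTransport(SketchBaseTransport):
    def __init__(self, channel=None, **kwargs):
        if channel is not None:
            # A caller-supplied channel already carries its own credentials,
            # so skip the default lookup performed by the base class.
            self._ignore_credentials = True
            kwargs["credentials"] = None
        super().__init__(**kwargs)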