diff --git a/packages/google-cloud-bigquery-datatransfer/.kokoro/populate-secrets.sh b/packages/google-cloud-bigquery-datatransfer/.kokoro/populate-secrets.sh new file mode 100755 index 000000000000..f52514257ef0 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/.kokoro/populate-secrets.sh @@ -0,0 +1,43 @@ +#!/bin/bash +# Copyright 2020 Google LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} +function msg { println "$*" >&2 ;} +function println { printf '%s\n' "$(now) $*" ;} + + +# Populates requested secrets set in SECRET_MANAGER_KEYS from service account: +# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com +SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" +msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" +mkdir -p ${SECRET_LOCATION} +for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") +do + msg "Retrieving secret ${key}" + docker run --entrypoint=gcloud \ + --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \ + gcr.io/google.com/cloudsdktool/cloud-sdk \ + secrets versions access latest \ + --project cloud-devrel-kokoro-resources \ + --secret ${key} > \ + "${SECRET_LOCATION}/${key}" + if [[ $? == 0 ]]; then + msg "Secret written to ${SECRET_LOCATION}/${key}" + else + msg "Error retrieving secret ${key}" + fi +done diff --git a/packages/google-cloud-bigquery-datatransfer/.kokoro/release/common.cfg b/packages/google-cloud-bigquery-datatransfer/.kokoro/release/common.cfg index a9da659434be..437024a8bd54 100644 --- a/packages/google-cloud-bigquery-datatransfer/.kokoro/release/common.cfg +++ b/packages/google-cloud-bigquery-datatransfer/.kokoro/release/common.cfg @@ -23,42 +23,18 @@ env_vars: { value: "github/python-bigquery-datatransfer/.kokoro/release.sh" } -# Fetch the token needed for reporting release status to GitHub -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "yoshi-automation-github-key" - } - } -} - -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google_cloud_pypi_password" - } - } -} - -# Fetch magictoken to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "releasetool-magictoken" - } - } +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google_cloud_pypi_password" + } + } } -# Fetch api key to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "magic-github-proxy-api-key" - } - } -} +# Tokens needed to report release status back to GitHub +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" +} \ No newline at end of file diff --git 
a/packages/google-cloud-bigquery-datatransfer/.kokoro/trampoline.sh b/packages/google-cloud-bigquery-datatransfer/.kokoro/trampoline.sh index e8c4251f3ed4..f39236e943a8 100755 --- a/packages/google-cloud-bigquery-datatransfer/.kokoro/trampoline.sh +++ b/packages/google-cloud-bigquery-datatransfer/.kokoro/trampoline.sh @@ -15,9 +15,14 @@ set -eo pipefail -python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" || ret_code=$? +# Always run the cleanup script, regardless of the success of bouncing into +# the container. +function cleanup() { + chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh + ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh + echo "cleanup"; +} +trap cleanup EXIT -chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh -${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true - -exit ${ret_code} +$(dirname $0)/populate-secrets.sh # Secret Manager secrets. +python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/README.rst b/packages/google-cloud-bigquery-datatransfer/README.rst index 204f55d60721..cdb6ce319468 100644 --- a/packages/google-cloud-bigquery-datatransfer/README.rst +++ b/packages/google-cloud-bigquery-datatransfer/README.rst @@ -48,11 +48,14 @@ dependencies. Supported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.5 +Python >= 3.6 Deprecated Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python == 2.7. Python 2.7 support will be removed on January 1, 2020. +Python == 2.7. + +The last version of this library compatible with Python 2.7 is +``google-cloud-bigquery-datatransfer==1.1.1``. Mac/Linux diff --git a/packages/google-cloud-bigquery-datatransfer/UPGRADING.md b/packages/google-cloud-bigquery-datatransfer/UPGRADING.md new file mode 100644 index 000000000000..4c157b23f9a9 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/UPGRADING.md @@ -0,0 +1,211 @@ + + + +# 2.0.0 Migration Guide + +The 2.0 release of the `google-cloud-bigquery-datatransfer` client is a significant +upgrade based on a [next-gen code generator](https://github.com/googleapis/gapic-generator-python), +and includes substantial interface changes. Existing code written for earlier versions +of this library will likely require updates to use this version. This document +describes the changes that have been made, and what you need to do to update your usage. + +If you experience issues or have questions, please file an +[issue](https://github.com/googleapis/python-bigquery-datatransfer/issues). + + +## Supported Python Versions + +> **WARNING**: Breaking change + +The 2.0.0 release requires Python 3.6+. + + +## Import Path + +> **WARNING**: Breaking change + +The library was moved into `google.cloud.bigquery` namespace. Existing imports +need to be updated. + +**Before:** +```py +from google.cloud import bigquery_datatransfer +from google.cloud import bigquery_datatransfer_v1 +``` + +**After:** +```py +from google.cloud.bigquery import datatransfer +from google.cloud.bigquery import datatransfer_v1 +``` + + +## Method Calls + +> **WARNING**: Breaking change + +Methods that send requests to the backend expect request objects. We provide a script +that will convert most common use cases. + +* Install the library + +```py +python3 -m pip install google-cloud-bigquery-datatransfer +``` + +* The script `fixup_datatransfer_v1_keywords.py` is shipped with the library. It expects +an input directory (with the code to convert) and an empty destination directory. 
+ +```sh +$ scripts/fixup_datatransfer_v1_keywords.py --input-directory .samples/ --output-directory samples/ +``` + +**Before:** +```py +from google.cloud import bigquery_datatransfer + +client = bigquery_datatransfer.DataTransferServiceClient() + +parent_project = "..." +transfer_config = {...} +authorization_code = "..." + +response = client.create_transfer_config( + parent_project, transfer_config, authorization_code=authorization_code +) +``` + + +**After:** +```py +from google.cloud.bigquery import datatransfer + +client = datatransfer.DataTransferServiceClient() + +parent_project = "..." +transfer_config = {...} +authorization_code = "..." + +response = client.create_transfer_config( + request={ + "parent": parent_project, + "transfer_config": transfer_config, + "authorization_code": authorization_code, + } +) +``` + +### More Details + +In `google-cloud-bigquery-datatransfer<2.0.0`, parameters required by the API were positional +parameters and optional parameters were keyword parameters. + +**Before:** +```py +def create_transfer_config( + self, + parent, + transfer_config, + authorization_code=None, + version_info=None, + service_account_name=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, +): +``` + +In the `2.0.0` release, methods that interact with the backend have a single +positional parameter `request`. Method docstrings indicate whether a parameter is +required or optional. + +Some methods have additional keyword only parameters. The available parameters depend +on the [`google.api.method_signature` annotation](https://github.com/googleapis/python-bigquery-datatransfer/blob/master/google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto#L80) +specified by the API producer. + + +**After:** +```py +def create_transfer_config( + self, + request: datatransfer.CreateTransferConfigRequest = None, + *, + parent: str = None, + transfer_config: transfer.TransferConfig = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), +) -> transfer.TransferConfig: +``` + +> **NOTE:** The `request` parameter and flattened keyword parameters for the API are +> mutually exclusive. Passing both will result in an error. + + +Both of these calls are valid: + +```py +response = client.create_transfer_config( + request={ + "parent": project_path, + "transfer_config": {"foo": "bar"}, + } +) +``` + +```py +response = client.create_transfer_config( + parent=project_path, + transfer_config={"foo": "bar"}, +) +``` + +This call is _invalid_ because it mixes `request` with a keyword argument `transfer_config`. +Executing this code will result in an error: + +```py +response = client.create_transfer_config( + request={"parent": project_path}, + transfer_config= {"foo": "bar"}, +) +``` + +> **NOTE:** The `request` parameter of some methods can also contain a more rich set of +> options that are otherwise not available as explicit keyword only parameters, thus +> these _must_ be passed through `request`. + + +## Removed Utility Methods + +> **WARNING**: Breaking change + +Most utility methods such as `project_path()` have been removed. The paths must +now be constructed manually: + +```py +project_path = f"project/{PROJECT_ID}" +``` + + The only two that remained are `transfer_config_path()` and `parse_transfer_config_path()`. 
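As a quick sketch of what this looks like in practice (the project and config IDs below are placeholders), the remaining helper and plain string formatting can be combined like this:

```py
from google.cloud.bigquery import datatransfer

# transfer_config_path() is one of the two helpers that survived the upgrade.
config_path = datatransfer.DataTransferServiceClient.transfer_config_path(
    "my-project", "1234abcd"
)
# -> "projects/my-project/transferConfigs/1234abcd"

# Other resource names are now built manually, e.g. a transfer run path:
run_path = f"{config_path}/runs/5678efgh"
```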
+ + +## Removed `client_config` Parameter + +The client cannot be constructed with `client_config` argument anymore, this deprecated +argument has been removed. If you want to customize retry and timeout settings for a particular +method, you need to do it upon method invocation by passing the custom `timeout` and +`retry` arguments, respectively. diff --git a/packages/google-cloud-bigquery-datatransfer/docs/UPGRADING.md b/packages/google-cloud-bigquery-datatransfer/docs/UPGRADING.md new file mode 120000 index 000000000000..01097c8c0fb8 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/docs/UPGRADING.md @@ -0,0 +1 @@ +../UPGRADING.md \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/docs/datatransfer_v1/services.rst b/packages/google-cloud-bigquery-datatransfer/docs/datatransfer_v1/services.rst new file mode 100644 index 000000000000..46c2ba6f8cfa --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/docs/datatransfer_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Bigquery Datatransfer v1 API +====================================================== + +.. automodule:: google.cloud.bigquery.datatransfer_v1.services.data_transfer_service + :members: + :inherited-members: diff --git a/packages/google-cloud-bigquery-datatransfer/docs/datatransfer_v1/types.rst b/packages/google-cloud-bigquery-datatransfer/docs/datatransfer_v1/types.rst new file mode 100644 index 000000000000..6e8bc0008011 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/docs/datatransfer_v1/types.rst @@ -0,0 +1,5 @@ +Types for Google Cloud Bigquery Datatransfer v1 API +=================================================== + +.. automodule:: google.cloud.bigquery.datatransfer_v1.types + :members: diff --git a/packages/google-cloud-bigquery-datatransfer/docs/gapic/v1/api.rst b/packages/google-cloud-bigquery-datatransfer/docs/gapic/v1/api.rst deleted file mode 100644 index a8b855bb4cc4..000000000000 --- a/packages/google-cloud-bigquery-datatransfer/docs/gapic/v1/api.rst +++ /dev/null @@ -1,6 +0,0 @@ -Client for BigQuery Data Transfer API -===================================== - -.. automodule:: google.cloud.bigquery_datatransfer_v1 - :members: - :inherited-members: \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/docs/gapic/v1/types.rst b/packages/google-cloud-bigquery-datatransfer/docs/gapic/v1/types.rst deleted file mode 100644 index 2a77ee73ecef..000000000000 --- a/packages/google-cloud-bigquery-datatransfer/docs/gapic/v1/types.rst +++ /dev/null @@ -1,5 +0,0 @@ -Types for BigQuery Data Transfer API Client -=========================================== - -.. automodule:: google.cloud.bigquery_datatransfer_v1.types - :members: \ No newline at end of file diff --git a/packages/google-cloud-bigquery-datatransfer/docs/index.rst b/packages/google-cloud-bigquery-datatransfer/docs/index.rst index fa973e38cbba..824b52076097 100644 --- a/packages/google-cloud-bigquery-datatransfer/docs/index.rst +++ b/packages/google-cloud-bigquery-datatransfer/docs/index.rst @@ -8,8 +8,19 @@ API Reference .. toctree:: :maxdepth: 2 - gapic/v1/api - gapic/v1/types + Client + Types + + +Migration Guide +--------------- + +See the guide below for instructions on migrating to the 2.x release of this library. + +.. 
toctree:: + :maxdepth: 2 + + UPGRADING Changelog diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer/__init__.py new file mode 100644 index 000000000000..55860e3fd5e7 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer/__init__.py @@ -0,0 +1,130 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.cloud.bigquery.datatransfer_v1.services.data_transfer_service.async_client import ( + DataTransferServiceAsyncClient, +) +from google.cloud.bigquery.datatransfer_v1.services.data_transfer_service.client import ( + DataTransferServiceClient, +) +from google.cloud.bigquery.datatransfer_v1.types.datatransfer import ( + CheckValidCredsRequest, +) +from google.cloud.bigquery.datatransfer_v1.types.datatransfer import ( + CheckValidCredsResponse, +) +from google.cloud.bigquery.datatransfer_v1.types.datatransfer import ( + CreateTransferConfigRequest, +) +from google.cloud.bigquery.datatransfer_v1.types.datatransfer import DataSource +from google.cloud.bigquery.datatransfer_v1.types.datatransfer import DataSourceParameter +from google.cloud.bigquery.datatransfer_v1.types.datatransfer import ( + DeleteTransferConfigRequest, +) +from google.cloud.bigquery.datatransfer_v1.types.datatransfer import ( + DeleteTransferRunRequest, +) +from google.cloud.bigquery.datatransfer_v1.types.datatransfer import ( + GetDataSourceRequest, +) +from google.cloud.bigquery.datatransfer_v1.types.datatransfer import ( + GetTransferConfigRequest, +) +from google.cloud.bigquery.datatransfer_v1.types.datatransfer import ( + GetTransferRunRequest, +) +from google.cloud.bigquery.datatransfer_v1.types.datatransfer import ( + ListDataSourcesRequest, +) +from google.cloud.bigquery.datatransfer_v1.types.datatransfer import ( + ListDataSourcesResponse, +) +from google.cloud.bigquery.datatransfer_v1.types.datatransfer import ( + ListTransferConfigsRequest, +) +from google.cloud.bigquery.datatransfer_v1.types.datatransfer import ( + ListTransferConfigsResponse, +) +from google.cloud.bigquery.datatransfer_v1.types.datatransfer import ( + ListTransferLogsRequest, +) +from google.cloud.bigquery.datatransfer_v1.types.datatransfer import ( + ListTransferLogsResponse, +) +from google.cloud.bigquery.datatransfer_v1.types.datatransfer import ( + ListTransferRunsRequest, +) +from google.cloud.bigquery.datatransfer_v1.types.datatransfer import ( + ListTransferRunsResponse, +) +from google.cloud.bigquery.datatransfer_v1.types.datatransfer import ( + ScheduleTransferRunsRequest, +) +from google.cloud.bigquery.datatransfer_v1.types.datatransfer import ( + ScheduleTransferRunsResponse, +) +from google.cloud.bigquery.datatransfer_v1.types.datatransfer import ( + StartManualTransferRunsRequest, +) +from google.cloud.bigquery.datatransfer_v1.types.datatransfer import ( + StartManualTransferRunsResponse, +) +from 
google.cloud.bigquery.datatransfer_v1.types.datatransfer import ( + UpdateTransferConfigRequest, +) +from google.cloud.bigquery.datatransfer_v1.types.transfer import EmailPreferences +from google.cloud.bigquery.datatransfer_v1.types.transfer import ScheduleOptions +from google.cloud.bigquery.datatransfer_v1.types.transfer import TransferConfig +from google.cloud.bigquery.datatransfer_v1.types.transfer import TransferMessage +from google.cloud.bigquery.datatransfer_v1.types.transfer import TransferRun +from google.cloud.bigquery.datatransfer_v1.types.transfer import TransferState +from google.cloud.bigquery.datatransfer_v1.types.transfer import TransferType + +__all__ = ( + "CheckValidCredsRequest", + "CheckValidCredsResponse", + "CreateTransferConfigRequest", + "DataSource", + "DataSourceParameter", + "DataTransferServiceAsyncClient", + "DataTransferServiceClient", + "DeleteTransferConfigRequest", + "DeleteTransferRunRequest", + "EmailPreferences", + "GetDataSourceRequest", + "GetTransferConfigRequest", + "GetTransferRunRequest", + "ListDataSourcesRequest", + "ListDataSourcesResponse", + "ListTransferConfigsRequest", + "ListTransferConfigsResponse", + "ListTransferLogsRequest", + "ListTransferLogsResponse", + "ListTransferRunsRequest", + "ListTransferRunsResponse", + "ScheduleOptions", + "ScheduleTransferRunsRequest", + "ScheduleTransferRunsResponse", + "StartManualTransferRunsRequest", + "StartManualTransferRunsResponse", + "TransferConfig", + "TransferMessage", + "TransferRun", + "TransferState", + "TransferType", + "UpdateTransferConfigRequest", +) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer/py.typed b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer/py.typed new file mode 100644 index 000000000000..1bd9d383cee6 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-bigquery-datatransfer package uses inline types. diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/__init__.py new file mode 100644 index 000000000000..258e1f36e868 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/__init__.py @@ -0,0 +1,85 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from .services.data_transfer_service import DataTransferServiceClient +from .services.data_transfer_service import DataTransferServiceAsyncClient +from .types.datatransfer import CheckValidCredsRequest +from .types.datatransfer import CheckValidCredsResponse +from .types.datatransfer import CreateTransferConfigRequest +from .types.datatransfer import DataSource +from .types.datatransfer import DataSourceParameter +from .types.datatransfer import DeleteTransferConfigRequest +from .types.datatransfer import DeleteTransferRunRequest +from .types.datatransfer import GetDataSourceRequest +from .types.datatransfer import GetTransferConfigRequest +from .types.datatransfer import GetTransferRunRequest +from .types.datatransfer import ListDataSourcesRequest +from .types.datatransfer import ListDataSourcesResponse +from .types.datatransfer import ListTransferConfigsRequest +from .types.datatransfer import ListTransferConfigsResponse +from .types.datatransfer import ListTransferLogsRequest +from .types.datatransfer import ListTransferLogsResponse +from .types.datatransfer import ListTransferRunsRequest +from .types.datatransfer import ListTransferRunsResponse +from .types.datatransfer import ScheduleTransferRunsRequest +from .types.datatransfer import ScheduleTransferRunsResponse +from .types.datatransfer import StartManualTransferRunsRequest +from .types.datatransfer import StartManualTransferRunsResponse +from .types.datatransfer import UpdateTransferConfigRequest +from .types.transfer import EmailPreferences +from .types.transfer import ScheduleOptions +from .types.transfer import TransferConfig +from .types.transfer import TransferMessage +from .types.transfer import TransferRun +from .types.transfer import TransferState +from .types.transfer import TransferType + + +__all__ = ( + "CheckValidCredsRequest", + "CheckValidCredsResponse", + "CreateTransferConfigRequest", + "DataSource", + "DataSourceParameter", + "DeleteTransferConfigRequest", + "DeleteTransferRunRequest", + "EmailPreferences", + "GetDataSourceRequest", + "GetTransferConfigRequest", + "GetTransferRunRequest", + "ListDataSourcesRequest", + "ListDataSourcesResponse", + "ListTransferConfigsRequest", + "ListTransferConfigsResponse", + "ListTransferLogsRequest", + "ListTransferLogsResponse", + "ListTransferRunsRequest", + "ListTransferRunsResponse", + "ScheduleOptions", + "ScheduleTransferRunsRequest", + "ScheduleTransferRunsResponse", + "StartManualTransferRunsRequest", + "StartManualTransferRunsResponse", + "TransferConfig", + "TransferMessage", + "TransferRun", + "TransferState", + "TransferType", + "UpdateTransferConfigRequest", + "DataTransferServiceClient", + "DataTransferServiceAsyncClient", +) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/py.typed b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/py.typed new file mode 100644 index 000000000000..1bd9d383cee6 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-bigquery-datatransfer package uses inline types. 
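To illustrate the regenerated `datatransfer_v1` surface added above, here is a minimal sketch that lists data sources through the versioned namespace; the project ID is a placeholder and credentials are assumed to come from the environment:

```py
from google.cloud.bigquery import datatransfer_v1

client = datatransfer_v1.DataTransferServiceClient()

# Generated methods accept a single request object (or flattened keyword args).
request = datatransfer_v1.ListDataSourcesRequest(parent="projects/my-project")

# list_data_sources() returns a pager that resolves additional pages lazily.
for data_source in client.list_data_sources(request=request):
    print(data_source.data_source_id, data_source.display_name)
```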
diff --git a/packages/google-cloud-bigquery-datatransfer/google/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/__init__.py similarity index 71% rename from packages/google-cloud-bigquery-datatransfer/google/__init__.py rename to packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/__init__.py index 9a1b64a6d586..42ffdf2bc43d 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/__init__.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/__init__.py @@ -1,24 +1,16 @@ # -*- coding: utf-8 -*- -# + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) +# diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/data_transfer_service/__init__.py similarity index 70% rename from packages/google-cloud-bigquery-datatransfer/google/cloud/__init__.py rename to packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/data_transfer_service/__init__.py index 9a1b64a6d586..b64f150a5896 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/__init__.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/data_transfer_service/__init__.py @@ -1,24 +1,24 @@ # -*- coding: utf-8 -*- -# + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+# -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil +from .client import DataTransferServiceClient +from .async_client import DataTransferServiceAsyncClient - __path__ = pkgutil.extend_path(__path__, __name__) +__all__ = ( + "DataTransferServiceClient", + "DataTransferServiceAsyncClient", +) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/data_transfer_service/async_client.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/data_transfer_service/async_client.py new file mode 100644 index 000000000000..2bbd0f2f053c --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/data_transfer_service/async_client.py @@ -0,0 +1,1321 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.bigquery.datatransfer_v1.services.data_transfer_service import pagers +from google.cloud.bigquery.datatransfer_v1.types import datatransfer +from google.cloud.bigquery.datatransfer_v1.types import transfer +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import struct_pb2 as struct # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.rpc import status_pb2 as status # type: ignore + +from .transports.base import DataTransferServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import DataTransferServiceGrpcAsyncIOTransport +from .client import DataTransferServiceClient + + +class DataTransferServiceAsyncClient: + """The Google BigQuery Data Transfer Service API enables + BigQuery users to configure the transfer of their data from + other Google Products into BigQuery. This service contains + methods that are end user exposed. It backs up the frontend. 
+ """ + + _client: DataTransferServiceClient + + DEFAULT_ENDPOINT = DataTransferServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DataTransferServiceClient.DEFAULT_MTLS_ENDPOINT + + transfer_config_path = staticmethod(DataTransferServiceClient.transfer_config_path) + parse_transfer_config_path = staticmethod( + DataTransferServiceClient.parse_transfer_config_path + ) + + from_service_account_file = DataTransferServiceClient.from_service_account_file + from_service_account_json = from_service_account_file + + get_transport_class = functools.partial( + type(DataTransferServiceClient).get_transport_class, + type(DataTransferServiceClient), + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, DataTransferServiceTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the data transfer service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DataTransferServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + + self._client = DataTransferServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def get_data_source( + self, + request: datatransfer.GetDataSourceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.DataSource: + r"""Retrieves a supported data source and returns its + settings, which can be used for UI rendering. + + Args: + request (:class:`~.datatransfer.GetDataSourceRequest`): + The request object. A request to get data source info. + name (:class:`str`): + Required. The field will contain name of the resource + requested, for example: + ``projects/{project_id}/dataSources/{data_source_id}`` + or + ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datatransfer.DataSource: + Represents data source metadata. + Metadata is sufficient to render UI and + request proper OAuth tokens. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datatransfer.GetDataSourceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_data_source, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_data_sources( + self, + request: datatransfer.ListDataSourcesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataSourcesAsyncPager: + r"""Lists supported data sources and returns their + settings, which can be used for UI rendering. + + Args: + request (:class:`~.datatransfer.ListDataSourcesRequest`): + The request object. Request to list supported data + sources and their data transfer settings. + parent (:class:`str`): + Required. The BigQuery project id for which data sources + should be returned. Must be in the form: + ``projects/{project_id}`` or + \`projects/{project_id}/locations/{location_id} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListDataSourcesAsyncPager: + Returns list of supported data + sources and their metadata. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = datatransfer.ListDataSourcesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_data_sources, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDataSourcesAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_transfer_config( + self, + request: datatransfer.CreateTransferConfigRequest = None, + *, + parent: str = None, + transfer_config: transfer.TransferConfig = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transfer.TransferConfig: + r"""Creates a new data transfer configuration. + + Args: + request (:class:`~.datatransfer.CreateTransferConfigRequest`): + The request object. A request to create a data transfer + configuration. If new credentials are needed for this + transfer configuration, an authorization code must be + provided. If an authorization code is provided, the + transfer configuration will be associated with the user + id corresponding to the authorization code. Otherwise, + the transfer configuration will be associated with the + calling user. + parent (:class:`str`): + Required. The BigQuery project id where the transfer + configuration should be created. Must be in the format + projects/{project_id}/locations/{location_id} or + projects/{project_id}. If specified location and + location of the destination bigquery dataset do not + match - the request will fail. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + transfer_config (:class:`~.transfer.TransferConfig`): + Required. Data transfer configuration + to create. + This corresponds to the ``transfer_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.transfer.TransferConfig: + Represents a data transfer configuration. A transfer + configuration contains all metadata needed to perform a + data transfer. For example, ``destination_dataset_id`` + specifies where data should be stored. When a new + transfer configuration is created, the specified + ``destination_dataset_id`` is created when needed and + shared with the appropriate data source service account. + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, transfer_config]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datatransfer.CreateTransferConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if transfer_config is not None: + request.transfer_config = transfer_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_transfer_config, + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def update_transfer_config( + self, + request: datatransfer.UpdateTransferConfigRequest = None, + *, + transfer_config: transfer.TransferConfig = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transfer.TransferConfig: + r"""Updates a data transfer configuration. + All fields must be set, even if they are not updated. + + Args: + request (:class:`~.datatransfer.UpdateTransferConfigRequest`): + The request object. A request to update a transfer + configuration. To update the user id of the transfer + configuration, an authorization code needs to be + provided. + transfer_config (:class:`~.transfer.TransferConfig`): + Required. Data transfer configuration + to create. + This corresponds to the ``transfer_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`~.field_mask.FieldMask`): + Required. Required list of fields to + be updated in this request. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.transfer.TransferConfig: + Represents a data transfer configuration. A transfer + configuration contains all metadata needed to perform a + data transfer. For example, ``destination_dataset_id`` + specifies where data should be stored. When a new + transfer configuration is created, the specified + ``destination_dataset_id`` is created when needed and + shared with the appropriate data source service account. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([transfer_config, update_mask]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = datatransfer.UpdateTransferConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if transfer_config is not None: + request.transfer_config = transfer_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_transfer_config, + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("transfer_config.name", request.transfer_config.name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_transfer_config( + self, + request: datatransfer.DeleteTransferConfigRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a data transfer configuration, + including any associated transfer runs and logs. + + Args: + request (:class:`~.datatransfer.DeleteTransferConfigRequest`): + The request object. A request to delete data transfer + information. All associated transfer runs and log + messages will be deleted as well. + name (:class:`str`): + Required. The field will contain name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datatransfer.DeleteTransferConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_transfer_config, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def get_transfer_config( + self, + request: datatransfer.GetTransferConfigRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transfer.TransferConfig: + r"""Returns information about a data transfer config. + + Args: + request (:class:`~.datatransfer.GetTransferConfigRequest`): + The request object. A request to get data transfer + information. + name (:class:`str`): + Required. The field will contain name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.transfer.TransferConfig: + Represents a data transfer configuration. A transfer + configuration contains all metadata needed to perform a + data transfer. For example, ``destination_dataset_id`` + specifies where data should be stored. When a new + transfer configuration is created, the specified + ``destination_dataset_id`` is created when needed and + shared with the appropriate data source service account. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datatransfer.GetTransferConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_transfer_config, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_transfer_configs( + self, + request: datatransfer.ListTransferConfigsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTransferConfigsAsyncPager: + r"""Returns information about all data transfers in the + project. + + Args: + request (:class:`~.datatransfer.ListTransferConfigsRequest`): + The request object. A request to list data transfers + configured for a BigQuery project. + parent (:class:`str`): + Required. 
The BigQuery project id for which data sources + should be returned: ``projects/{project_id}`` or + ``projects/{project_id}/locations/{location_id}`` + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListTransferConfigsAsyncPager: + The returned list of pipelines in the + project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datatransfer.ListTransferConfigsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_transfer_configs, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTransferConfigsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def schedule_transfer_runs( + self, + request: datatransfer.ScheduleTransferRunsRequest = None, + *, + parent: str = None, + start_time: timestamp.Timestamp = None, + end_time: timestamp.Timestamp = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.ScheduleTransferRunsResponse: + r"""Creates transfer runs for a time range [start_time, end_time]. + For each date - or whatever granularity the data source supports + - in the range, one transfer run is created. Note that runs are + created per UTC time in the time range. DEPRECATED: use + StartManualTransferRuns instead. + + Args: + request (:class:`~.datatransfer.ScheduleTransferRunsRequest`): + The request object. A request to schedule transfer runs + for a time range. + parent (:class:`str`): + Required. Transfer configuration name in the form: + ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ start_time (:class:`~.timestamp.Timestamp`): + Required. Start time of the range of transfer runs. For + example, ``"2017-05-25T00:00:00+00:00"``. + This corresponds to the ``start_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + end_time (:class:`~.timestamp.Timestamp`): + Required. End time of the range of transfer runs. For + example, ``"2017-05-30T00:00:00+00:00"``. + This corresponds to the ``end_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datatransfer.ScheduleTransferRunsResponse: + A response to schedule transfer runs + for a time range. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, start_time, end_time]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datatransfer.ScheduleTransferRunsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if start_time is not None: + request.start_time = start_time + if end_time is not None: + request.end_time = end_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.schedule_transfer_runs, + default_timeout=30.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def start_manual_transfer_runs( + self, + request: datatransfer.StartManualTransferRunsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.StartManualTransferRunsResponse: + r"""Start manual transfer runs to be executed now with schedule_time + equal to current time. The transfer runs can be created for a + time range where the run_time is between start_time (inclusive) + and end_time (exclusive), or for a specific run_time. + + Args: + request (:class:`~.datatransfer.StartManualTransferRunsRequest`): + The request object. A request to start manual transfer + runs. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datatransfer.StartManualTransferRunsResponse: + A response to start manual transfer + runs. + + """ + # Create or coerce a protobuf request object. + + request = datatransfer.StartManualTransferRunsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.start_manual_transfer_runs, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_transfer_run( + self, + request: datatransfer.GetTransferRunRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transfer.TransferRun: + r"""Returns information about the particular transfer + run. + + Args: + request (:class:`~.datatransfer.GetTransferRunRequest`): + The request object. A request to get data transfer run + information. + name (:class:`str`): + Required. The field will contain name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` + or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.transfer.TransferRun: + Represents a data transfer run. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datatransfer.GetTransferRunRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_transfer_run, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_transfer_run( + self, + request: datatransfer.DeleteTransferRunRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified transfer run. + + Args: + request (:class:`~.datatransfer.DeleteTransferRunRequest`): + The request object. A request to delete data transfer + run information. + name (:class:`str`): + Required. 
The field will contain name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` + or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datatransfer.DeleteTransferRunRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_transfer_run, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def list_transfer_runs( + self, + request: datatransfer.ListTransferRunsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTransferRunsAsyncPager: + r"""Returns information about running and completed jobs. + + Args: + request (:class:`~.datatransfer.ListTransferRunsRequest`): + The request object. A request to list data transfer + runs. UI can use this method to show/filter specific + data transfer runs. The data source can use this method + to request all scheduled transfer runs. + parent (:class:`str`): + Required. Name of transfer configuration for which + transfer runs should be retrieved. Format of transfer + configuration resource name is: + ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListTransferRunsAsyncPager: + The returned list of pipelines in the + project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
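+        # `request` may be given as a ListTransferRunsRequest instance or as an
+        # equivalent dict; the constructor call below coerces either form into
+        # the proto request type.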
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datatransfer.ListTransferRunsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_transfer_runs, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTransferRunsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_transfer_logs( + self, + request: datatransfer.ListTransferLogsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTransferLogsAsyncPager: + r"""Returns user facing log messages for the data + transfer run. + + Args: + request (:class:`~.datatransfer.ListTransferLogsRequest`): + The request object. A request to get user facing log + messages associated with data transfer run. + parent (:class:`str`): + Required. Transfer run name in the form: + ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` + or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListTransferLogsAsyncPager: + The returned list transfer run + messages. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datatransfer.ListTransferLogsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
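+        # The default policy below retries ServiceUnavailable and DeadlineExceeded
+        # errors with exponential backoff (0.1s initial delay, 1.3x multiplier,
+        # 60s cap between attempts) and applies a 20-second default timeout.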
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_transfer_logs, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTransferLogsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def check_valid_creds( + self, + request: datatransfer.CheckValidCredsRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.CheckValidCredsResponse: + r"""Returns true if valid credentials exist for the given + data source and requesting user. + Some data sources doesn't support service account, so we + need to talk to them on behalf of the end user. This API + just checks whether we have OAuth token for the + particular user, which is a pre-requisite before user + can create a transfer config. + + Args: + request (:class:`~.datatransfer.CheckValidCredsRequest`): + The request object. A request to determine whether the + user has valid credentials. This method is used to limit + the number of OAuth popups in the user interface. The + user id is inferred from the API call context. + If the data source has the Google+ authorization type, + this method returns false, as it cannot be determined + whether the credentials are already valid merely based + on the user id. + name (:class:`str`): + Required. The data source in the form: + ``projects/{project_id}/dataSources/{data_source_id}`` + or + ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datatransfer.CheckValidCredsResponse: + A response indicating whether the + credentials exist and are valid. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = datatransfer.CheckValidCredsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.check_valid_creds, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=20.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-bigquery-datatransfer", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("DataTransferServiceAsyncClient",) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/data_transfer_service/client.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/data_transfer_service/client.py new file mode 100644 index 000000000000..2c3c755bdd62 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/data_transfer_service/client.py @@ -0,0 +1,1430 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+from collections import OrderedDict
+from distutils import util
+import os
+import re
+from typing import Callable, Dict, Sequence, Tuple, Type, Union
+import pkg_resources
+
+import google.api_core.client_options as ClientOptions  # type: ignore
+from google.api_core import exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.auth import credentials  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.bigquery.datatransfer_v1.services.data_transfer_service import pagers
+from google.cloud.bigquery.datatransfer_v1.types import datatransfer
+from google.cloud.bigquery.datatransfer_v1.types import transfer
+from google.protobuf import duration_pb2 as duration  # type: ignore
+from google.protobuf import field_mask_pb2 as field_mask  # type: ignore
+from google.protobuf import struct_pb2 as struct  # type: ignore
+from google.protobuf import timestamp_pb2 as timestamp  # type: ignore
+from google.rpc import status_pb2 as status  # type: ignore
+
+from .transports.base import DataTransferServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import DataTransferServiceGrpcTransport
+from .transports.grpc_asyncio import DataTransferServiceGrpcAsyncIOTransport
+
+
+class DataTransferServiceClientMeta(type):
+    """Metaclass for the DataTransferService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[DataTransferServiceTransport]]
+    _transport_registry["grpc"] = DataTransferServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = DataTransferServiceGrpcAsyncIOTransport
+
+    def get_transport_class(
+        cls, label: str = None,
+    ) -> Type[DataTransferServiceTransport]:
+        """Return an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class DataTransferServiceClient(metaclass=DataTransferServiceClientMeta):
+    """The Google BigQuery Data Transfer Service API enables
+    BigQuery users to configure the transfer of their data from
+    other Google Products into BigQuery. This service contains
+    methods that are end user exposed. It backs up the frontend.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Convert api endpoint to mTLS endpoint.
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "bigquerydatatransfer.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            {@api.name}: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @staticmethod
+    def transfer_config_path(project: str, transfer_config: str,) -> str:
+        """Return a fully-qualified transfer_config string."""
+        return "projects/{project}/transferConfigs/{transfer_config}".format(
+            project=project, transfer_config=transfer_config,
+        )
+
+    @staticmethod
+    def parse_transfer_config_path(path: str) -> Dict[str, str]:
+        """Parse a transfer_config path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/transferConfigs/(?P<transfer_config>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    def __init__(
+        self,
+        *,
+        credentials: credentials.Credentials = None,
+        transport: Union[str, DataTransferServiceTransport] = None,
+        client_options: ClientOptions = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the data transfer service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.DataTransferServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests.
If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = ClientOptions.from_dict(client_options) + if client_options is None: + client_options = ClientOptions.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + ssl_credentials = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + import grpc # type: ignore + + cert, key = client_options.client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + is_mtls = True + else: + creds = SslCredentials() + is_mtls = creds.is_mtls + ssl_credentials = creds.ssl_credentials if is_mtls else None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, DataTransferServiceTransport): + # transport is a DataTransferServiceTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + ssl_channel_credentials=ssl_credentials, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def get_data_source( + self, + request: datatransfer.GetDataSourceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.DataSource: + r"""Retrieves a supported data source and returns its + settings, which can be used for UI rendering. + + Args: + request (:class:`~.datatransfer.GetDataSourceRequest`): + The request object. A request to get data source info. + name (:class:`str`): + Required. The field will contain name of the resource + requested, for example: + ``projects/{project_id}/dataSources/{data_source_id}`` + or + ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datatransfer.DataSource: + Represents data source metadata. + Metadata is sufficient to render UI and + request proper OAuth tokens. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.GetDataSourceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datatransfer.GetDataSourceRequest): + request = datatransfer.GetDataSourceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_data_source] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_data_sources( + self, + request: datatransfer.ListDataSourcesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDataSourcesPager: + r"""Lists supported data sources and returns their + settings, which can be used for UI rendering. + + Args: + request (:class:`~.datatransfer.ListDataSourcesRequest`): + The request object. Request to list supported data + sources and their data transfer settings. + parent (:class:`str`): + Required. The BigQuery project id for which data sources + should be returned. Must be in the form: + ``projects/{project_id}`` or + \`projects/{project_id}/locations/{location_id} + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListDataSourcesPager: + Returns list of supported data + sources and their metadata. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.ListDataSourcesRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datatransfer.ListDataSourcesRequest): + request = datatransfer.ListDataSourcesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_data_sources] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDataSourcesPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def create_transfer_config( + self, + request: datatransfer.CreateTransferConfigRequest = None, + *, + parent: str = None, + transfer_config: transfer.TransferConfig = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transfer.TransferConfig: + r"""Creates a new data transfer configuration. + + Args: + request (:class:`~.datatransfer.CreateTransferConfigRequest`): + The request object. A request to create a data transfer + configuration. If new credentials are needed for this + transfer configuration, an authorization code must be + provided. If an authorization code is provided, the + transfer configuration will be associated with the user + id corresponding to the authorization code. Otherwise, + the transfer configuration will be associated with the + calling user. + parent (:class:`str`): + Required. The BigQuery project id where the transfer + configuration should be created. Must be in the format + projects/{project_id}/locations/{location_id} or + projects/{project_id}. If specified location and + location of the destination bigquery dataset do not + match - the request will fail. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + transfer_config (:class:`~.transfer.TransferConfig`): + Required. Data transfer configuration + to create. + This corresponds to the ``transfer_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.transfer.TransferConfig: + Represents a data transfer configuration. A transfer + configuration contains all metadata needed to perform a + data transfer. For example, ``destination_dataset_id`` + specifies where data should be stored. When a new + transfer configuration is created, the specified + ``destination_dataset_id`` is created when needed and + shared with the appropriate data source service account. + + """ + # Create or coerce a protobuf request object. 
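+        # The flattened `parent` and `transfer_config` arguments are a convenience
+        # for simple calls and are mutually exclusive with a full `request` object.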
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, transfer_config]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.CreateTransferConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datatransfer.CreateTransferConfigRequest): + request = datatransfer.CreateTransferConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if transfer_config is not None: + request.transfer_config = transfer_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_transfer_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def update_transfer_config( + self, + request: datatransfer.UpdateTransferConfigRequest = None, + *, + transfer_config: transfer.TransferConfig = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transfer.TransferConfig: + r"""Updates a data transfer configuration. + All fields must be set, even if they are not updated. + + Args: + request (:class:`~.datatransfer.UpdateTransferConfigRequest`): + The request object. A request to update a transfer + configuration. To update the user id of the transfer + configuration, an authorization code needs to be + provided. + transfer_config (:class:`~.transfer.TransferConfig`): + Required. Data transfer configuration + to create. + This corresponds to the ``transfer_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`~.field_mask.FieldMask`): + Required. Required list of fields to + be updated in this request. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.transfer.TransferConfig: + Represents a data transfer configuration. A transfer + configuration contains all metadata needed to perform a + data transfer. For example, ``destination_dataset_id`` + specifies where data should be stored. When a new + transfer configuration is created, the specified + ``destination_dataset_id`` is created when needed and + shared with the appropriate data source service account. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
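+        # For this method the flattened fields are `transfer_config` and
+        # `update_mask`; supplying either together with `request` is an error.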
+ has_flattened_params = any([transfer_config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.UpdateTransferConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datatransfer.UpdateTransferConfigRequest): + request = datatransfer.UpdateTransferConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if transfer_config is not None: + request.transfer_config = transfer_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_transfer_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("transfer_config.name", request.transfer_config.name),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_transfer_config( + self, + request: datatransfer.DeleteTransferConfigRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a data transfer configuration, + including any associated transfer runs and logs. + + Args: + request (:class:`~.datatransfer.DeleteTransferConfigRequest`): + The request object. A request to delete data transfer + information. All associated transfer runs and log + messages will be deleted as well. + name (:class:`str`): + Required. The field will contain name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.DeleteTransferConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datatransfer.DeleteTransferConfigRequest): + request = datatransfer.DeleteTransferConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_transfer_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def get_transfer_config( + self, + request: datatransfer.GetTransferConfigRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transfer.TransferConfig: + r"""Returns information about a data transfer config. + + Args: + request (:class:`~.datatransfer.GetTransferConfigRequest`): + The request object. A request to get data transfer + information. + name (:class:`str`): + Required. The field will contain name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.transfer.TransferConfig: + Represents a data transfer configuration. A transfer + configuration contains all metadata needed to perform a + data transfer. For example, ``destination_dataset_id`` + specifies where data should be stored. When a new + transfer configuration is created, the specified + ``destination_dataset_id`` is created when needed and + shared with the appropriate data source service account. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.GetTransferConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datatransfer.GetTransferConfigRequest): + request = datatransfer.GetTransferConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_transfer_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def list_transfer_configs( + self, + request: datatransfer.ListTransferConfigsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTransferConfigsPager: + r"""Returns information about all data transfers in the + project. + + Args: + request (:class:`~.datatransfer.ListTransferConfigsRequest`): + The request object. A request to list data transfers + configured for a BigQuery project. + parent (:class:`str`): + Required. The BigQuery project id for which data sources + should be returned: ``projects/{project_id}`` or + ``projects/{project_id}/locations/{location_id}`` + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListTransferConfigsPager: + The returned list of pipelines in the + project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.ListTransferConfigsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datatransfer.ListTransferConfigsRequest): + request = datatransfer.ListTransferConfigsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_transfer_configs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTransferConfigsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def schedule_transfer_runs( + self, + request: datatransfer.ScheduleTransferRunsRequest = None, + *, + parent: str = None, + start_time: timestamp.Timestamp = None, + end_time: timestamp.Timestamp = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.ScheduleTransferRunsResponse: + r"""Creates transfer runs for a time range [start_time, end_time]. + For each date - or whatever granularity the data source supports + - in the range, one transfer run is created. 
Note that runs are + created per UTC time in the time range. DEPRECATED: use + StartManualTransferRuns instead. + + Args: + request (:class:`~.datatransfer.ScheduleTransferRunsRequest`): + The request object. A request to schedule transfer runs + for a time range. + parent (:class:`str`): + Required. Transfer configuration name in the form: + ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + start_time (:class:`~.timestamp.Timestamp`): + Required. Start time of the range of transfer runs. For + example, ``"2017-05-25T00:00:00+00:00"``. + This corresponds to the ``start_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + end_time (:class:`~.timestamp.Timestamp`): + Required. End time of the range of transfer runs. For + example, ``"2017-05-30T00:00:00+00:00"``. + This corresponds to the ``end_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datatransfer.ScheduleTransferRunsResponse: + A response to schedule transfer runs + for a time range. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, start_time, end_time]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.ScheduleTransferRunsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datatransfer.ScheduleTransferRunsRequest): + request = datatransfer.ScheduleTransferRunsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if start_time is not None: + request.start_time = start_time + if end_time is not None: + request.end_time = end_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.schedule_transfer_runs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def start_manual_transfer_runs( + self, + request: datatransfer.StartManualTransferRunsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.StartManualTransferRunsResponse: + r"""Start manual transfer runs to be executed now with schedule_time + equal to current time. 
The transfer runs can be created for a + time range where the run_time is between start_time (inclusive) + and end_time (exclusive), or for a specific run_time. + + Args: + request (:class:`~.datatransfer.StartManualTransferRunsRequest`): + The request object. A request to start manual transfer + runs. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datatransfer.StartManualTransferRunsResponse: + A response to start manual transfer + runs. + + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.StartManualTransferRunsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datatransfer.StartManualTransferRunsRequest): + request = datatransfer.StartManualTransferRunsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.start_manual_transfer_runs + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_transfer_run( + self, + request: datatransfer.GetTransferRunRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transfer.TransferRun: + r"""Returns information about the particular transfer + run. + + Args: + request (:class:`~.datatransfer.GetTransferRunRequest`): + The request object. A request to get data transfer run + information. + name (:class:`str`): + Required. The field will contain name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` + or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.transfer.TransferRun: + Represents a data transfer run. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.GetTransferRunRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, datatransfer.GetTransferRunRequest): + request = datatransfer.GetTransferRunRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_transfer_run] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_transfer_run( + self, + request: datatransfer.DeleteTransferRunRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the specified transfer run. + + Args: + request (:class:`~.datatransfer.DeleteTransferRunRequest`): + The request object. A request to delete data transfer + run information. + name (:class:`str`): + Required. The field will contain name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` + or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.DeleteTransferRunRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datatransfer.DeleteTransferRunRequest): + request = datatransfer.DeleteTransferRunRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_transfer_run] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
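+        # DeleteTransferRun returns an empty message, so the RPC result is
+        # discarded and this method simply returns None.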
+ rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def list_transfer_runs( + self, + request: datatransfer.ListTransferRunsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTransferRunsPager: + r"""Returns information about running and completed jobs. + + Args: + request (:class:`~.datatransfer.ListTransferRunsRequest`): + The request object. A request to list data transfer + runs. UI can use this method to show/filter specific + data transfer runs. The data source can use this method + to request all scheduled transfer runs. + parent (:class:`str`): + Required. Name of transfer configuration for which + transfer runs should be retrieved. Format of transfer + configuration resource name is: + ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListTransferRunsPager: + The returned list of pipelines in the + project. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.ListTransferRunsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datatransfer.ListTransferRunsRequest): + request = datatransfer.ListTransferRunsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_transfer_runs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTransferRunsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def list_transfer_logs( + self, + request: datatransfer.ListTransferLogsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTransferLogsPager: + r"""Returns user facing log messages for the data + transfer run. 
+ + Args: + request (:class:`~.datatransfer.ListTransferLogsRequest`): + The request object. A request to get user facing log + messages associated with data transfer run. + parent (:class:`str`): + Required. Transfer run name in the form: + ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` + or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListTransferLogsPager: + The returned list transfer run + messages. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.ListTransferLogsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datatransfer.ListTransferLogsRequest): + request = datatransfer.ListTransferLogsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_transfer_logs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTransferLogsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def check_valid_creds( + self, + request: datatransfer.CheckValidCredsRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.CheckValidCredsResponse: + r"""Returns true if valid credentials exist for the given + data source and requesting user. + Some data sources doesn't support service account, so we + need to talk to them on behalf of the end user. This API + just checks whether we have OAuth token for the + particular user, which is a pre-requisite before user + can create a transfer config. + + Args: + request (:class:`~.datatransfer.CheckValidCredsRequest`): + The request object. A request to determine whether the + user has valid credentials. This method is used to limit + the number of OAuth popups in the user interface. The + user id is inferred from the API call context. 
+ If the data source has the Google+ authorization type, + this method returns false, as it cannot be determined + whether the credentials are already valid merely based + on the user id. + name (:class:`str`): + Required. The data source in the form: + ``projects/{project_id}/dataSources/{data_source_id}`` + or + ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datatransfer.CheckValidCredsResponse: + A response indicating whether the + credentials exist and are valid. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a datatransfer.CheckValidCredsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, datatransfer.CheckValidCredsRequest): + request = datatransfer.CheckValidCredsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.check_valid_creds] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-bigquery-datatransfer", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("DataTransferServiceClient",) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/data_transfer_service/pagers.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/data_transfer_service/pagers.py new file mode 100644 index 000000000000..922e208624e1 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/data_transfer_service/pagers.py @@ -0,0 +1,533 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple + +from google.cloud.bigquery.datatransfer_v1.types import datatransfer +from google.cloud.bigquery.datatransfer_v1.types import transfer + + +class ListDataSourcesPager: + """A pager for iterating through ``list_data_sources`` requests. + + This class thinly wraps an initial + :class:`~.datatransfer.ListDataSourcesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``data_sources`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDataSources`` requests and continue to iterate + through the ``data_sources`` field on the + corresponding responses. + + All the usual :class:`~.datatransfer.ListDataSourcesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., datatransfer.ListDataSourcesResponse], + request: datatransfer.ListDataSourcesRequest, + response: datatransfer.ListDataSourcesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.datatransfer.ListDataSourcesRequest`): + The initial request object. + response (:class:`~.datatransfer.ListDataSourcesResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = datatransfer.ListDataSourcesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[datatransfer.ListDataSourcesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[datatransfer.DataSource]: + for page in self.pages: + yield from page.data_sources + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDataSourcesAsyncPager: + """A pager for iterating through ``list_data_sources`` requests. + + This class thinly wraps an initial + :class:`~.datatransfer.ListDataSourcesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``data_sources`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDataSources`` requests and continue to iterate + through the ``data_sources`` field on the + corresponding responses. + + All the usual :class:`~.datatransfer.ListDataSourcesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
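+
+    Example (illustrative sketch; assumes a ``DataTransferServiceAsyncClient``
+    named ``client`` and a placeholder project)::
+
+        pager = await client.list_data_sources(parent="projects/my-project")
+        async for data_source in pager:
+            print(data_source.name)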
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[datatransfer.ListDataSourcesResponse]], + request: datatransfer.ListDataSourcesRequest, + response: datatransfer.ListDataSourcesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.datatransfer.ListDataSourcesRequest`): + The initial request object. + response (:class:`~.datatransfer.ListDataSourcesResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = datatransfer.ListDataSourcesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[datatransfer.ListDataSourcesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[datatransfer.DataSource]: + async def async_generator(): + async for page in self.pages: + for response in page.data_sources: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTransferConfigsPager: + """A pager for iterating through ``list_transfer_configs`` requests. + + This class thinly wraps an initial + :class:`~.datatransfer.ListTransferConfigsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``transfer_configs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTransferConfigs`` requests and continue to iterate + through the ``transfer_configs`` field on the + corresponding responses. + + All the usual :class:`~.datatransfer.ListTransferConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., datatransfer.ListTransferConfigsResponse], + request: datatransfer.ListTransferConfigsRequest, + response: datatransfer.ListTransferConfigsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.datatransfer.ListTransferConfigsRequest`): + The initial request object. + response (:class:`~.datatransfer.ListTransferConfigsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
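+
+        Note (illustrative): this pager is normally obtained from
+        ``DataTransferServiceClient.list_transfer_configs`` rather than
+        constructed directly, e.g.::
+
+            for config in client.list_transfer_configs(parent="projects/my-project"):
+                print(config.display_name)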
+ """ + self._method = method + self._request = datatransfer.ListTransferConfigsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[datatransfer.ListTransferConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[transfer.TransferConfig]: + for page in self.pages: + yield from page.transfer_configs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTransferConfigsAsyncPager: + """A pager for iterating through ``list_transfer_configs`` requests. + + This class thinly wraps an initial + :class:`~.datatransfer.ListTransferConfigsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``transfer_configs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTransferConfigs`` requests and continue to iterate + through the ``transfer_configs`` field on the + corresponding responses. + + All the usual :class:`~.datatransfer.ListTransferConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[datatransfer.ListTransferConfigsResponse]], + request: datatransfer.ListTransferConfigsRequest, + response: datatransfer.ListTransferConfigsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.datatransfer.ListTransferConfigsRequest`): + The initial request object. + response (:class:`~.datatransfer.ListTransferConfigsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = datatransfer.ListTransferConfigsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[datatransfer.ListTransferConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[transfer.TransferConfig]: + async def async_generator(): + async for page in self.pages: + for response in page.transfer_configs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTransferRunsPager: + """A pager for iterating through ``list_transfer_runs`` requests. + + This class thinly wraps an initial + :class:`~.datatransfer.ListTransferRunsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``transfer_runs`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListTransferRuns`` requests and continue to iterate + through the ``transfer_runs`` field on the + corresponding responses. + + All the usual :class:`~.datatransfer.ListTransferRunsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., datatransfer.ListTransferRunsResponse], + request: datatransfer.ListTransferRunsRequest, + response: datatransfer.ListTransferRunsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.datatransfer.ListTransferRunsRequest`): + The initial request object. + response (:class:`~.datatransfer.ListTransferRunsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = datatransfer.ListTransferRunsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[datatransfer.ListTransferRunsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[transfer.TransferRun]: + for page in self.pages: + yield from page.transfer_runs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTransferRunsAsyncPager: + """A pager for iterating through ``list_transfer_runs`` requests. + + This class thinly wraps an initial + :class:`~.datatransfer.ListTransferRunsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``transfer_runs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTransferRuns`` requests and continue to iterate + through the ``transfer_runs`` field on the + corresponding responses. + + All the usual :class:`~.datatransfer.ListTransferRunsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[datatransfer.ListTransferRunsResponse]], + request: datatransfer.ListTransferRunsRequest, + response: datatransfer.ListTransferRunsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.datatransfer.ListTransferRunsRequest`): + The initial request object. + response (:class:`~.datatransfer.ListTransferRunsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = datatransfer.ListTransferRunsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[datatransfer.ListTransferRunsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[transfer.TransferRun]: + async def async_generator(): + async for page in self.pages: + for response in page.transfer_runs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTransferLogsPager: + """A pager for iterating through ``list_transfer_logs`` requests. + + This class thinly wraps an initial + :class:`~.datatransfer.ListTransferLogsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``transfer_messages`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTransferLogs`` requests and continue to iterate + through the ``transfer_messages`` field on the + corresponding responses. + + All the usual :class:`~.datatransfer.ListTransferLogsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., datatransfer.ListTransferLogsResponse], + request: datatransfer.ListTransferLogsRequest, + response: datatransfer.ListTransferLogsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.datatransfer.ListTransferLogsRequest`): + The initial request object. + response (:class:`~.datatransfer.ListTransferLogsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = datatransfer.ListTransferLogsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[datatransfer.ListTransferLogsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[transfer.TransferMessage]: + for page in self.pages: + yield from page.transfer_messages + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTransferLogsAsyncPager: + """A pager for iterating through ``list_transfer_logs`` requests. + + This class thinly wraps an initial + :class:`~.datatransfer.ListTransferLogsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``transfer_messages`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTransferLogs`` requests and continue to iterate + through the ``transfer_messages`` field on the + corresponding responses. 
+ + All the usual :class:`~.datatransfer.ListTransferLogsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[datatransfer.ListTransferLogsResponse]], + request: datatransfer.ListTransferLogsRequest, + response: datatransfer.ListTransferLogsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.datatransfer.ListTransferLogsRequest`): + The initial request object. + response (:class:`~.datatransfer.ListTransferLogsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = datatransfer.ListTransferLogsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[datatransfer.ListTransferLogsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[transfer.TransferMessage]: + async def async_generator(): + async for page in self.pages: + for response in page.transfer_messages: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/data_transfer_service/transports/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/data_transfer_service/transports/__init__.py new file mode 100644 index 000000000000..2b71d0f3021b --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/data_transfer_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +from typing import Dict, Type + +from .base import DataTransferServiceTransport +from .grpc import DataTransferServiceGrpcTransport +from .grpc_asyncio import DataTransferServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. 
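+# Illustrative lookup (not part of the public surface): the keys mirror the
+# ``transport`` argument accepted by the client, so a caller could resolve
+# ``_transport_registry["grpc"]`` to ``DataTransferServiceGrpcTransport`` and
+# ``_transport_registry["grpc_asyncio"]`` to the asyncio variant.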
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[DataTransferServiceTransport]] +_transport_registry["grpc"] = DataTransferServiceGrpcTransport +_transport_registry["grpc_asyncio"] = DataTransferServiceGrpcAsyncIOTransport + + +__all__ = ( + "DataTransferServiceTransport", + "DataTransferServiceGrpcTransport", + "DataTransferServiceGrpcAsyncIOTransport", +) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/data_transfer_service/transports/base.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/data_transfer_service/transports/base.py new file mode 100644 index 000000000000..d4d82fa2b8d7 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/data_transfer_service/transports/base.py @@ -0,0 +1,419 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import abc +import typing +import pkg_resources + +from google import auth # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore + +from google.cloud.bigquery.datatransfer_v1.types import datatransfer +from google.cloud.bigquery.datatransfer_v1.types import transfer +from google.protobuf import empty_pb2 as empty # type: ignore + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-bigquery-datatransfer", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +class DataTransferServiceTransport(abc.ABC): + """Abstract transport class for DataTransferService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + def __init__( + self, + *, + host: str = "bigquerydatatransfer.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: typing.Optional[str] = None, + scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, + quota_project_id: typing.Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scope (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = auth.load_credentials_from_file( + credentials_file, scopes=scopes, quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = auth.default( + scopes=scopes, quota_project_id=quota_project_id + ) + + # Save the credentials. + self._credentials = credentials + + # Lifted into its own function so it can be stubbed out during tests. + self._prep_wrapped_messages(client_info) + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_data_source: gapic_v1.method.wrap_method( + self.get_data_source, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=20.0, + client_info=client_info, + ), + self.list_data_sources: gapic_v1.method.wrap_method( + self.list_data_sources, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=20.0, + client_info=client_info, + ), + self.create_transfer_config: gapic_v1.method.wrap_method( + self.create_transfer_config, + default_timeout=30.0, + client_info=client_info, + ), + self.update_transfer_config: gapic_v1.method.wrap_method( + self.update_transfer_config, + default_timeout=30.0, + client_info=client_info, + ), + self.delete_transfer_config: gapic_v1.method.wrap_method( + self.delete_transfer_config, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=20.0, + client_info=client_info, + ), + self.get_transfer_config: gapic_v1.method.wrap_method( + self.get_transfer_config, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=20.0, + client_info=client_info, + ), + self.list_transfer_configs: gapic_v1.method.wrap_method( + self.list_transfer_configs, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=20.0, + client_info=client_info, + ), + self.schedule_transfer_runs: gapic_v1.method.wrap_method( + self.schedule_transfer_runs, + default_timeout=30.0, + client_info=client_info, + ), + self.start_manual_transfer_runs: gapic_v1.method.wrap_method( + self.start_manual_transfer_runs, + default_timeout=None, + client_info=client_info, + ), + self.get_transfer_run: gapic_v1.method.wrap_method( + self.get_transfer_run, 
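+                # GetTransferRun is a read-only call, so transient
+                # UNAVAILABLE / DEADLINE_EXCEEDED errors are retried with
+                # exponential backoff: 0.1s initial delay, 1.3x multiplier,
+                # delays capped at 60s, under a 20s default timeout (see the
+                # Retry configuration just below).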
+ default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=20.0, + client_info=client_info, + ), + self.delete_transfer_run: gapic_v1.method.wrap_method( + self.delete_transfer_run, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=20.0, + client_info=client_info, + ), + self.list_transfer_runs: gapic_v1.method.wrap_method( + self.list_transfer_runs, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=20.0, + client_info=client_info, + ), + self.list_transfer_logs: gapic_v1.method.wrap_method( + self.list_transfer_logs, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=20.0, + client_info=client_info, + ), + self.check_valid_creds: gapic_v1.method.wrap_method( + self.check_valid_creds, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + ), + ), + default_timeout=20.0, + client_info=client_info, + ), + } + + @property + def get_data_source( + self, + ) -> typing.Callable[ + [datatransfer.GetDataSourceRequest], + typing.Union[ + datatransfer.DataSource, typing.Awaitable[datatransfer.DataSource] + ], + ]: + raise NotImplementedError() + + @property + def list_data_sources( + self, + ) -> typing.Callable[ + [datatransfer.ListDataSourcesRequest], + typing.Union[ + datatransfer.ListDataSourcesResponse, + typing.Awaitable[datatransfer.ListDataSourcesResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_transfer_config( + self, + ) -> typing.Callable[ + [datatransfer.CreateTransferConfigRequest], + typing.Union[ + transfer.TransferConfig, typing.Awaitable[transfer.TransferConfig] + ], + ]: + raise NotImplementedError() + + @property + def update_transfer_config( + self, + ) -> typing.Callable[ + [datatransfer.UpdateTransferConfigRequest], + typing.Union[ + transfer.TransferConfig, typing.Awaitable[transfer.TransferConfig] + ], + ]: + raise NotImplementedError() + + @property + def delete_transfer_config( + self, + ) -> typing.Callable[ + [datatransfer.DeleteTransferConfigRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def get_transfer_config( + self, + ) -> typing.Callable[ + [datatransfer.GetTransferConfigRequest], + typing.Union[ + transfer.TransferConfig, typing.Awaitable[transfer.TransferConfig] + ], + ]: + raise NotImplementedError() + + @property + def list_transfer_configs( + self, + ) -> typing.Callable[ + [datatransfer.ListTransferConfigsRequest], + typing.Union[ + datatransfer.ListTransferConfigsResponse, + typing.Awaitable[datatransfer.ListTransferConfigsResponse], + ], + ]: + raise NotImplementedError() + + @property + def schedule_transfer_runs( + self, + ) -> typing.Callable[ + [datatransfer.ScheduleTransferRunsRequest], + typing.Union[ + datatransfer.ScheduleTransferRunsResponse, + typing.Awaitable[datatransfer.ScheduleTransferRunsResponse], + ], 
+ ]: + raise NotImplementedError() + + @property + def start_manual_transfer_runs( + self, + ) -> typing.Callable[ + [datatransfer.StartManualTransferRunsRequest], + typing.Union[ + datatransfer.StartManualTransferRunsResponse, + typing.Awaitable[datatransfer.StartManualTransferRunsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_transfer_run( + self, + ) -> typing.Callable[ + [datatransfer.GetTransferRunRequest], + typing.Union[transfer.TransferRun, typing.Awaitable[transfer.TransferRun]], + ]: + raise NotImplementedError() + + @property + def delete_transfer_run( + self, + ) -> typing.Callable[ + [datatransfer.DeleteTransferRunRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def list_transfer_runs( + self, + ) -> typing.Callable[ + [datatransfer.ListTransferRunsRequest], + typing.Union[ + datatransfer.ListTransferRunsResponse, + typing.Awaitable[datatransfer.ListTransferRunsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_transfer_logs( + self, + ) -> typing.Callable[ + [datatransfer.ListTransferLogsRequest], + typing.Union[ + datatransfer.ListTransferLogsResponse, + typing.Awaitable[datatransfer.ListTransferLogsResponse], + ], + ]: + raise NotImplementedError() + + @property + def check_valid_creds( + self, + ) -> typing.Callable[ + [datatransfer.CheckValidCredsRequest], + typing.Union[ + datatransfer.CheckValidCredsResponse, + typing.Awaitable[datatransfer.CheckValidCredsResponse], + ], + ]: + raise NotImplementedError() + + +__all__ = ("DataTransferServiceTransport",) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/data_transfer_service/transports/grpc.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/data_transfer_service/transports/grpc.py new file mode 100644 index 000000000000..282f8eb7776d --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/data_transfer_service/transports/grpc.py @@ -0,0 +1,639 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.bigquery.datatransfer_v1.types import datatransfer +from google.cloud.bigquery.datatransfer_v1.types import transfer +from google.protobuf import empty_pb2 as empty # type: ignore + +from .base import DataTransferServiceTransport, DEFAULT_CLIENT_INFO + + +class DataTransferServiceGrpcTransport(DataTransferServiceTransport): + """gRPC backend transport for DataTransferService. 
+
+    The Google BigQuery Data Transfer Service API enables
+    BigQuery users to configure the transfer of their data from
+    other Google Products into BigQuery. This service contains
+    methods that are end user exposed. It backs up the frontend.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "bigquerydatatransfer.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Sequence[str] = None,
+        channel: grpc.Channel = None,
+        api_mtls_endpoint: str = None,
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]): The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the gRPC channel. It is ignored if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        if channel:
+            # Sanity check: Ensure that channel and credentials are not both
+            # provided.
+            credentials = False
+
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+        elif api_mtls_endpoint:
+            warnings.warn(
+                "api_mtls_endpoint and client_cert_source are deprecated",
+                DeprecationWarning,
+            )
+
+            host = (
+                api_mtls_endpoint
+                if ":" in api_mtls_endpoint
+                else api_mtls_endpoint + ":443"
+            )
+
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
+            # Create SSL credentials with client_cert_source or application
+            # default SSL credentials.
+            if client_cert_source:
+                cert, key = client_cert_source()
+                ssl_credentials = grpc.ssl_channel_credentials(
+                    certificate_chain=cert, private_key=key
+                )
+            else:
+                ssl_credentials = SslCredentials().ssl_credentials
+
+            # Create a new channel. The provided one is ignored.
+            self._grpc_channel = type(self).create_channel(
+                host,
+                credentials=credentials,
+                credentials_file=credentials_file,
+                ssl_credentials=ssl_credentials,
+                scopes=scopes or self.AUTH_SCOPES,
+                quota_project_id=quota_project_id,
+            )
+        else:
+            host = host if ":" in host else host + ":443"
+
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
+            # Create a new channel. The provided one is ignored.
+            self._grpc_channel = type(self).create_channel(
+                host,
+                credentials=credentials,
+                credentials_file=credentials_file,
+                ssl_credentials=ssl_channel_credentials,
+                scopes=scopes or self.AUTH_SCOPES,
+                quota_project_id=quota_project_id,
+            )
+
+        self._stubs = {}  # type: Dict[str, Callable]
+
+        # Run the base constructor.
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes or self.AUTH_SCOPES,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+        )
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "bigquerydatatransfer.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        scopes = scopes or cls.AUTH_SCOPES
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Create the channel designed to connect to this service.
+ + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_data_source( + self, + ) -> Callable[[datatransfer.GetDataSourceRequest], datatransfer.DataSource]: + r"""Return a callable for the get data source method over gRPC. + + Retrieves a supported data source and returns its + settings, which can be used for UI rendering. + + Returns: + Callable[[~.GetDataSourceRequest], + ~.DataSource]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_data_source" not in self._stubs: + self._stubs["get_data_source"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetDataSource", + request_serializer=datatransfer.GetDataSourceRequest.serialize, + response_deserializer=datatransfer.DataSource.deserialize, + ) + return self._stubs["get_data_source"] + + @property + def list_data_sources( + self, + ) -> Callable[ + [datatransfer.ListDataSourcesRequest], datatransfer.ListDataSourcesResponse + ]: + r"""Return a callable for the list data sources method over gRPC. + + Lists supported data sources and returns their + settings, which can be used for UI rendering. + + Returns: + Callable[[~.ListDataSourcesRequest], + ~.ListDataSourcesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_data_sources" not in self._stubs: + self._stubs["list_data_sources"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListDataSources", + request_serializer=datatransfer.ListDataSourcesRequest.serialize, + response_deserializer=datatransfer.ListDataSourcesResponse.deserialize, + ) + return self._stubs["list_data_sources"] + + @property + def create_transfer_config( + self, + ) -> Callable[[datatransfer.CreateTransferConfigRequest], transfer.TransferConfig]: + r"""Return a callable for the create transfer config method over gRPC. + + Creates a new data transfer configuration. + + Returns: + Callable[[~.CreateTransferConfigRequest], + ~.TransferConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_transfer_config" not in self._stubs: + self._stubs["create_transfer_config"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/CreateTransferConfig", + request_serializer=datatransfer.CreateTransferConfigRequest.serialize, + response_deserializer=transfer.TransferConfig.deserialize, + ) + return self._stubs["create_transfer_config"] + + @property + def update_transfer_config( + self, + ) -> Callable[[datatransfer.UpdateTransferConfigRequest], transfer.TransferConfig]: + r"""Return a callable for the update transfer config method over gRPC. + + Updates a data transfer configuration. + All fields must be set, even if they are not updated. 
+ + Returns: + Callable[[~.UpdateTransferConfigRequest], + ~.TransferConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_transfer_config" not in self._stubs: + self._stubs["update_transfer_config"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/UpdateTransferConfig", + request_serializer=datatransfer.UpdateTransferConfigRequest.serialize, + response_deserializer=transfer.TransferConfig.deserialize, + ) + return self._stubs["update_transfer_config"] + + @property + def delete_transfer_config( + self, + ) -> Callable[[datatransfer.DeleteTransferConfigRequest], empty.Empty]: + r"""Return a callable for the delete transfer config method over gRPC. + + Deletes a data transfer configuration, + including any associated transfer runs and logs. + + Returns: + Callable[[~.DeleteTransferConfigRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_transfer_config" not in self._stubs: + self._stubs["delete_transfer_config"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferConfig", + request_serializer=datatransfer.DeleteTransferConfigRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_transfer_config"] + + @property + def get_transfer_config( + self, + ) -> Callable[[datatransfer.GetTransferConfigRequest], transfer.TransferConfig]: + r"""Return a callable for the get transfer config method over gRPC. + + Returns information about a data transfer config. + + Returns: + Callable[[~.GetTransferConfigRequest], + ~.TransferConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_transfer_config" not in self._stubs: + self._stubs["get_transfer_config"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetTransferConfig", + request_serializer=datatransfer.GetTransferConfigRequest.serialize, + response_deserializer=transfer.TransferConfig.deserialize, + ) + return self._stubs["get_transfer_config"] + + @property + def list_transfer_configs( + self, + ) -> Callable[ + [datatransfer.ListTransferConfigsRequest], + datatransfer.ListTransferConfigsResponse, + ]: + r"""Return a callable for the list transfer configs method over gRPC. + + Returns information about all data transfers in the + project. + + Returns: + Callable[[~.ListTransferConfigsRequest], + ~.ListTransferConfigsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
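+        # The stub is created lazily on first access and cached in
+        # ``self._stubs``, so repeated reads of this property reuse the same
+        # multi-callable rather than re-registering it on the channel.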
+ if "list_transfer_configs" not in self._stubs: + self._stubs["list_transfer_configs"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferConfigs", + request_serializer=datatransfer.ListTransferConfigsRequest.serialize, + response_deserializer=datatransfer.ListTransferConfigsResponse.deserialize, + ) + return self._stubs["list_transfer_configs"] + + @property + def schedule_transfer_runs( + self, + ) -> Callable[ + [datatransfer.ScheduleTransferRunsRequest], + datatransfer.ScheduleTransferRunsResponse, + ]: + r"""Return a callable for the schedule transfer runs method over gRPC. + + Creates transfer runs for a time range [start_time, end_time]. + For each date - or whatever granularity the data source supports + - in the range, one transfer run is created. Note that runs are + created per UTC time in the time range. DEPRECATED: use + StartManualTransferRuns instead. + + Returns: + Callable[[~.ScheduleTransferRunsRequest], + ~.ScheduleTransferRunsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "schedule_transfer_runs" not in self._stubs: + self._stubs["schedule_transfer_runs"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ScheduleTransferRuns", + request_serializer=datatransfer.ScheduleTransferRunsRequest.serialize, + response_deserializer=datatransfer.ScheduleTransferRunsResponse.deserialize, + ) + return self._stubs["schedule_transfer_runs"] + + @property + def start_manual_transfer_runs( + self, + ) -> Callable[ + [datatransfer.StartManualTransferRunsRequest], + datatransfer.StartManualTransferRunsResponse, + ]: + r"""Return a callable for the start manual transfer runs method over gRPC. + + Start manual transfer runs to be executed now with schedule_time + equal to current time. The transfer runs can be created for a + time range where the run_time is between start_time (inclusive) + and end_time (exclusive), or for a specific run_time. + + Returns: + Callable[[~.StartManualTransferRunsRequest], + ~.StartManualTransferRunsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "start_manual_transfer_runs" not in self._stubs: + self._stubs["start_manual_transfer_runs"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/StartManualTransferRuns", + request_serializer=datatransfer.StartManualTransferRunsRequest.serialize, + response_deserializer=datatransfer.StartManualTransferRunsResponse.deserialize, + ) + return self._stubs["start_manual_transfer_runs"] + + @property + def get_transfer_run( + self, + ) -> Callable[[datatransfer.GetTransferRunRequest], transfer.TransferRun]: + r"""Return a callable for the get transfer run method over gRPC. + + Returns information about the particular transfer + run. + + Returns: + Callable[[~.GetTransferRunRequest], + ~.TransferRun]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_transfer_run" not in self._stubs: + self._stubs["get_transfer_run"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetTransferRun", + request_serializer=datatransfer.GetTransferRunRequest.serialize, + response_deserializer=transfer.TransferRun.deserialize, + ) + return self._stubs["get_transfer_run"] + + @property + def delete_transfer_run( + self, + ) -> Callable[[datatransfer.DeleteTransferRunRequest], empty.Empty]: + r"""Return a callable for the delete transfer run method over gRPC. + + Deletes the specified transfer run. + + Returns: + Callable[[~.DeleteTransferRunRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_transfer_run" not in self._stubs: + self._stubs["delete_transfer_run"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferRun", + request_serializer=datatransfer.DeleteTransferRunRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_transfer_run"] + + @property + def list_transfer_runs( + self, + ) -> Callable[ + [datatransfer.ListTransferRunsRequest], datatransfer.ListTransferRunsResponse + ]: + r"""Return a callable for the list transfer runs method over gRPC. + + Returns information about running and completed jobs. + + Returns: + Callable[[~.ListTransferRunsRequest], + ~.ListTransferRunsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_transfer_runs" not in self._stubs: + self._stubs["list_transfer_runs"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferRuns", + request_serializer=datatransfer.ListTransferRunsRequest.serialize, + response_deserializer=datatransfer.ListTransferRunsResponse.deserialize, + ) + return self._stubs["list_transfer_runs"] + + @property + def list_transfer_logs( + self, + ) -> Callable[ + [datatransfer.ListTransferLogsRequest], datatransfer.ListTransferLogsResponse + ]: + r"""Return a callable for the list transfer logs method over gRPC. + + Returns user facing log messages for the data + transfer run. + + Returns: + Callable[[~.ListTransferLogsRequest], + ~.ListTransferLogsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_transfer_logs" not in self._stubs: + self._stubs["list_transfer_logs"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferLogs", + request_serializer=datatransfer.ListTransferLogsRequest.serialize, + response_deserializer=datatransfer.ListTransferLogsResponse.deserialize, + ) + return self._stubs["list_transfer_logs"] + + @property + def check_valid_creds( + self, + ) -> Callable[ + [datatransfer.CheckValidCredsRequest], datatransfer.CheckValidCredsResponse + ]: + r"""Return a callable for the check valid creds method over gRPC. + + Returns true if valid credentials exist for the given + data source and requesting user. + Some data sources doesn't support service account, so we + need to talk to them on behalf of the end user. This API + just checks whether we have OAuth token for the + particular user, which is a pre-requisite before user + can create a transfer config. + + Returns: + Callable[[~.CheckValidCredsRequest], + ~.CheckValidCredsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "check_valid_creds" not in self._stubs: + self._stubs["check_valid_creds"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/CheckValidCreds", + request_serializer=datatransfer.CheckValidCredsRequest.serialize, + response_deserializer=datatransfer.CheckValidCredsResponse.deserialize, + ) + return self._stubs["check_valid_creds"] + + +__all__ = ("DataTransferServiceGrpcTransport",) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..32f5706c2eb7 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py @@ -0,0 +1,653 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.bigquery.datatransfer_v1.types import datatransfer +from google.cloud.bigquery.datatransfer_v1.types import transfer +from google.protobuf import empty_pb2 as empty # type: ignore + +from .base import DataTransferServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import DataTransferServiceGrpcTransport + + +class DataTransferServiceGrpcAsyncIOTransport(DataTransferServiceTransport): + """gRPC AsyncIO backend transport for DataTransferService. + + The Google BigQuery Data Transfer Service API enables + BigQuery users to configure the transfer of their data from + other Google Products into BigQuery. This service contains + methods that are end user exposed. It backs up the frontend. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "bigquerydatatransfer.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + address (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
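For the AsyncIO transport the same properties return awaitable callables, so calls have to run inside an event loop. A rough usage sketch, assuming application default credentials, the default endpoint, and a placeholder project id:

```py
# Illustrative sketch of the AsyncIO transport; names are placeholders.
import asyncio

from google.cloud.bigquery.datatransfer_v1.services.data_transfer_service.transports.grpc_asyncio import (
    DataTransferServiceGrpcAsyncIOTransport,
)
from google.cloud.bigquery.datatransfer_v1.types import datatransfer

async def show_data_sources(project_id: str) -> None:
    # With no channel argument, the transport builds its own aio.Channel via
    # create_channel() using default credentials and scopes.
    transport = DataTransferServiceGrpcAsyncIOTransport()
    response = await transport.list_data_sources(
        datatransfer.ListDataSourcesRequest(parent=f"projects/{project_id}")
    )
    for source in response.data_sources:
        print(source.data_source_id, "-", source.display_name)

asyncio.run(show_data_sources("my-project"))
```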
+ """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "bigquerydatatransfer.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + warnings.warn( + "api_mtls_endpoint and client_cert_source are deprecated", + DeprecationWarning, + ) + + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + self._stubs = {} + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_data_source( + self, + ) -> Callable[ + [datatransfer.GetDataSourceRequest], Awaitable[datatransfer.DataSource] + ]: + r"""Return a callable for the get data source method over gRPC. + + Retrieves a supported data source and returns its + settings, which can be used for UI rendering. + + Returns: + Callable[[~.GetDataSourceRequest], + Awaitable[~.DataSource]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_data_source" not in self._stubs: + self._stubs["get_data_source"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetDataSource", + request_serializer=datatransfer.GetDataSourceRequest.serialize, + response_deserializer=datatransfer.DataSource.deserialize, + ) + return self._stubs["get_data_source"] + + @property + def list_data_sources( + self, + ) -> Callable[ + [datatransfer.ListDataSourcesRequest], + Awaitable[datatransfer.ListDataSourcesResponse], + ]: + r"""Return a callable for the list data sources method over gRPC. + + Lists supported data sources and returns their + settings, which can be used for UI rendering. + + Returns: + Callable[[~.ListDataSourcesRequest], + Awaitable[~.ListDataSourcesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_data_sources" not in self._stubs: + self._stubs["list_data_sources"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListDataSources", + request_serializer=datatransfer.ListDataSourcesRequest.serialize, + response_deserializer=datatransfer.ListDataSourcesResponse.deserialize, + ) + return self._stubs["list_data_sources"] + + @property + def create_transfer_config( + self, + ) -> Callable[ + [datatransfer.CreateTransferConfigRequest], Awaitable[transfer.TransferConfig] + ]: + r"""Return a callable for the create transfer config method over gRPC. + + Creates a new data transfer configuration. + + Returns: + Callable[[~.CreateTransferConfigRequest], + Awaitable[~.TransferConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_transfer_config" not in self._stubs: + self._stubs["create_transfer_config"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/CreateTransferConfig", + request_serializer=datatransfer.CreateTransferConfigRequest.serialize, + response_deserializer=transfer.TransferConfig.deserialize, + ) + return self._stubs["create_transfer_config"] + + @property + def update_transfer_config( + self, + ) -> Callable[ + [datatransfer.UpdateTransferConfigRequest], Awaitable[transfer.TransferConfig] + ]: + r"""Return a callable for the update transfer config method over gRPC. + + Updates a data transfer configuration. + All fields must be set, even if they are not updated. + + Returns: + Callable[[~.UpdateTransferConfigRequest], + Awaitable[~.TransferConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_transfer_config" not in self._stubs: + self._stubs["update_transfer_config"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/UpdateTransferConfig", + request_serializer=datatransfer.UpdateTransferConfigRequest.serialize, + response_deserializer=transfer.TransferConfig.deserialize, + ) + return self._stubs["update_transfer_config"] + + @property + def delete_transfer_config( + self, + ) -> Callable[[datatransfer.DeleteTransferConfigRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete transfer config method over gRPC. + + Deletes a data transfer configuration, + including any associated transfer runs and logs. + + Returns: + Callable[[~.DeleteTransferConfigRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_transfer_config" not in self._stubs: + self._stubs["delete_transfer_config"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferConfig", + request_serializer=datatransfer.DeleteTransferConfigRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_transfer_config"] + + @property + def get_transfer_config( + self, + ) -> Callable[ + [datatransfer.GetTransferConfigRequest], Awaitable[transfer.TransferConfig] + ]: + r"""Return a callable for the get transfer config method over gRPC. + + Returns information about a data transfer config. + + Returns: + Callable[[~.GetTransferConfigRequest], + Awaitable[~.TransferConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_transfer_config" not in self._stubs: + self._stubs["get_transfer_config"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetTransferConfig", + request_serializer=datatransfer.GetTransferConfigRequest.serialize, + response_deserializer=transfer.TransferConfig.deserialize, + ) + return self._stubs["get_transfer_config"] + + @property + def list_transfer_configs( + self, + ) -> Callable[ + [datatransfer.ListTransferConfigsRequest], + Awaitable[datatransfer.ListTransferConfigsResponse], + ]: + r"""Return a callable for the list transfer configs method over gRPC. + + Returns information about all data transfers in the + project. + + Returns: + Callable[[~.ListTransferConfigsRequest], + Awaitable[~.ListTransferConfigsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_transfer_configs" not in self._stubs: + self._stubs["list_transfer_configs"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferConfigs", + request_serializer=datatransfer.ListTransferConfigsRequest.serialize, + response_deserializer=datatransfer.ListTransferConfigsResponse.deserialize, + ) + return self._stubs["list_transfer_configs"] + + @property + def schedule_transfer_runs( + self, + ) -> Callable[ + [datatransfer.ScheduleTransferRunsRequest], + Awaitable[datatransfer.ScheduleTransferRunsResponse], + ]: + r"""Return a callable for the schedule transfer runs method over gRPC. + + Creates transfer runs for a time range [start_time, end_time]. + For each date - or whatever granularity the data source supports + - in the range, one transfer run is created. Note that runs are + created per UTC time in the time range. DEPRECATED: use + StartManualTransferRuns instead. + + Returns: + Callable[[~.ScheduleTransferRunsRequest], + Awaitable[~.ScheduleTransferRunsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "schedule_transfer_runs" not in self._stubs: + self._stubs["schedule_transfer_runs"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ScheduleTransferRuns", + request_serializer=datatransfer.ScheduleTransferRunsRequest.serialize, + response_deserializer=datatransfer.ScheduleTransferRunsResponse.deserialize, + ) + return self._stubs["schedule_transfer_runs"] + + @property + def start_manual_transfer_runs( + self, + ) -> Callable[ + [datatransfer.StartManualTransferRunsRequest], + Awaitable[datatransfer.StartManualTransferRunsResponse], + ]: + r"""Return a callable for the start manual transfer runs method over gRPC. + + Start manual transfer runs to be executed now with schedule_time + equal to current time. The transfer runs can be created for a + time range where the run_time is between start_time (inclusive) + and end_time (exclusive), or for a specific run_time. + + Returns: + Callable[[~.StartManualTransferRunsRequest], + Awaitable[~.StartManualTransferRunsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "start_manual_transfer_runs" not in self._stubs: + self._stubs["start_manual_transfer_runs"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/StartManualTransferRuns", + request_serializer=datatransfer.StartManualTransferRunsRequest.serialize, + response_deserializer=datatransfer.StartManualTransferRunsResponse.deserialize, + ) + return self._stubs["start_manual_transfer_runs"] + + @property + def get_transfer_run( + self, + ) -> Callable[ + [datatransfer.GetTransferRunRequest], Awaitable[transfer.TransferRun] + ]: + r"""Return a callable for the get transfer run method over gRPC. + + Returns information about the particular transfer + run. + + Returns: + Callable[[~.GetTransferRunRequest], + Awaitable[~.TransferRun]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_transfer_run" not in self._stubs: + self._stubs["get_transfer_run"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetTransferRun", + request_serializer=datatransfer.GetTransferRunRequest.serialize, + response_deserializer=transfer.TransferRun.deserialize, + ) + return self._stubs["get_transfer_run"] + + @property + def delete_transfer_run( + self, + ) -> Callable[[datatransfer.DeleteTransferRunRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete transfer run method over gRPC. + + Deletes the specified transfer run. + + Returns: + Callable[[~.DeleteTransferRunRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_transfer_run" not in self._stubs: + self._stubs["delete_transfer_run"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferRun", + request_serializer=datatransfer.DeleteTransferRunRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_transfer_run"] + + @property + def list_transfer_runs( + self, + ) -> Callable[ + [datatransfer.ListTransferRunsRequest], + Awaitable[datatransfer.ListTransferRunsResponse], + ]: + r"""Return a callable for the list transfer runs method over gRPC. + + Returns information about running and completed jobs. + + Returns: + Callable[[~.ListTransferRunsRequest], + Awaitable[~.ListTransferRunsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_transfer_runs" not in self._stubs: + self._stubs["list_transfer_runs"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferRuns", + request_serializer=datatransfer.ListTransferRunsRequest.serialize, + response_deserializer=datatransfer.ListTransferRunsResponse.deserialize, + ) + return self._stubs["list_transfer_runs"] + + @property + def list_transfer_logs( + self, + ) -> Callable[ + [datatransfer.ListTransferLogsRequest], + Awaitable[datatransfer.ListTransferLogsResponse], + ]: + r"""Return a callable for the list transfer logs method over gRPC. + + Returns user facing log messages for the data + transfer run. + + Returns: + Callable[[~.ListTransferLogsRequest], + Awaitable[~.ListTransferLogsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_transfer_logs" not in self._stubs: + self._stubs["list_transfer_logs"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferLogs", + request_serializer=datatransfer.ListTransferLogsRequest.serialize, + response_deserializer=datatransfer.ListTransferLogsResponse.deserialize, + ) + return self._stubs["list_transfer_logs"] + + @property + def check_valid_creds( + self, + ) -> Callable[ + [datatransfer.CheckValidCredsRequest], + Awaitable[datatransfer.CheckValidCredsResponse], + ]: + r"""Return a callable for the check valid creds method over gRPC. + + Returns true if valid credentials exist for the given + data source and requesting user. + Some data sources doesn't support service account, so we + need to talk to them on behalf of the end user. This API + just checks whether we have OAuth token for the + particular user, which is a pre-requisite before user + can create a transfer config. + + Returns: + Callable[[~.CheckValidCredsRequest], + Awaitable[~.CheckValidCredsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "check_valid_creds" not in self._stubs: + self._stubs["check_valid_creds"] = self.grpc_channel.unary_unary( + "/google.cloud.bigquery.datatransfer.v1.DataTransferService/CheckValidCreds", + request_serializer=datatransfer.CheckValidCredsRequest.serialize, + response_deserializer=datatransfer.CheckValidCredsResponse.deserialize, + ) + return self._stubs["check_valid_creds"] + + +__all__ = ("DataTransferServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/types/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/types/__init__.py new file mode 100644 index 000000000000..c4f07ee6d7dd --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/types/__init__.py @@ -0,0 +1,81 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .transfer import ( + EmailPreferences, + ScheduleOptions, + TransferConfig, + TransferRun, + TransferMessage, +) +from .datatransfer import ( + DataSourceParameter, + DataSource, + GetDataSourceRequest, + ListDataSourcesRequest, + ListDataSourcesResponse, + CreateTransferConfigRequest, + UpdateTransferConfigRequest, + GetTransferConfigRequest, + DeleteTransferConfigRequest, + GetTransferRunRequest, + DeleteTransferRunRequest, + ListTransferConfigsRequest, + ListTransferConfigsResponse, + ListTransferRunsRequest, + ListTransferRunsResponse, + ListTransferLogsRequest, + ListTransferLogsResponse, + CheckValidCredsRequest, + CheckValidCredsResponse, + ScheduleTransferRunsRequest, + ScheduleTransferRunsResponse, + StartManualTransferRunsRequest, + StartManualTransferRunsResponse, +) + + +__all__ = ( + "EmailPreferences", + "ScheduleOptions", + "TransferConfig", + "TransferRun", + "TransferMessage", + "DataSourceParameter", + "DataSource", + "GetDataSourceRequest", + "ListDataSourcesRequest", + "ListDataSourcesResponse", + "CreateTransferConfigRequest", + "UpdateTransferConfigRequest", + "GetTransferConfigRequest", + "DeleteTransferConfigRequest", + "GetTransferRunRequest", + "DeleteTransferRunRequest", + "ListTransferConfigsRequest", + "ListTransferConfigsResponse", + "ListTransferRunsRequest", + "ListTransferRunsResponse", + "ListTransferLogsRequest", + "ListTransferLogsResponse", + "CheckValidCredsRequest", + "CheckValidCredsResponse", + "ScheduleTransferRunsRequest", + "ScheduleTransferRunsResponse", + "StartManualTransferRunsRequest", + "StartManualTransferRunsResponse", +) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/types/datatransfer.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/types/datatransfer.py new file mode 100644 index 000000000000..f067a49af802 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/types/datatransfer.py @@ -0,0 +1,839 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# 
Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.cloud.bigquery.datatransfer_v1.types import transfer +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import wrappers_pb2 as wrappers # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.bigquery.datatransfer.v1", + manifest={ + "DataSourceParameter", + "DataSource", + "GetDataSourceRequest", + "ListDataSourcesRequest", + "ListDataSourcesResponse", + "CreateTransferConfigRequest", + "UpdateTransferConfigRequest", + "GetTransferConfigRequest", + "DeleteTransferConfigRequest", + "GetTransferRunRequest", + "DeleteTransferRunRequest", + "ListTransferConfigsRequest", + "ListTransferConfigsResponse", + "ListTransferRunsRequest", + "ListTransferRunsResponse", + "ListTransferLogsRequest", + "ListTransferLogsResponse", + "CheckValidCredsRequest", + "CheckValidCredsResponse", + "ScheduleTransferRunsRequest", + "ScheduleTransferRunsResponse", + "StartManualTransferRunsRequest", + "StartManualTransferRunsResponse", + }, +) + + +class DataSourceParameter(proto.Message): + r"""Represents a data source parameter with validation rules, so + that parameters can be rendered in the UI. These parameters are + given to us by supported data sources, and include all needed + information for rendering and validation. + Thus, whoever uses this api can decide to generate either + generic ui, or custom data source specific forms. + + Attributes: + param_id (str): + Parameter identifier. + display_name (str): + Parameter display name in the user interface. + description (str): + Parameter description. + type (~.datatransfer.DataSourceParameter.Type): + Parameter type. + required (bool): + Is parameter required. + repeated (bool): + Deprecated. This field has no effect. + validation_regex (str): + Regular expression which can be used for + parameter validation. + allowed_values (Sequence[str]): + All possible values for the parameter. + min_value (~.wrappers.DoubleValue): + For integer and double values specifies + minimum allowed value. + max_value (~.wrappers.DoubleValue): + For integer and double values specifies + maxminum allowed value. + fields (Sequence[~.datatransfer.DataSourceParameter]): + Deprecated. This field has no effect. + validation_description (str): + Description of the requirements for this + field, in case the user input does not fulfill + the regex pattern or min/max values. + validation_help_url (str): + URL to a help document to further explain the + naming requirements. + immutable (bool): + Cannot be changed after initial creation. + recurse (bool): + Deprecated. This field has no effect. + deprecated (bool): + If true, it should not be used in new + transfers, and it should not be visible to + users. 
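Since these parameters exist to drive UI rendering and validation, a short sketch of turning them into a plain-text form description may help; `parameters` is assumed to be the sequence of `DataSourceParameter` messages carried on a `DataSource` (defined below).

```py
# Illustrative only: summarize renderable, non-deprecated parameters.
def describe_parameters(parameters):
    for param in parameters:
        if param.deprecated:
            continue  # hidden from new transfers, per the docstring above
        line = f"{param.display_name or param.param_id} ({param.type.name})"
        if param.required:
            line += " [required]"
        if param.validation_regex:
            line += f", must match /{param.validation_regex}/"
        if param.validation_description:
            line += f"; {param.validation_description}"
        print(line)
```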
+ """ + + class Type(proto.Enum): + r"""Parameter type.""" + TYPE_UNSPECIFIED = 0 + STRING = 1 + INTEGER = 2 + DOUBLE = 3 + BOOLEAN = 4 + RECORD = 5 + PLUS_PAGE = 6 + + param_id = proto.Field(proto.STRING, number=1) + + display_name = proto.Field(proto.STRING, number=2) + + description = proto.Field(proto.STRING, number=3) + + type = proto.Field(proto.ENUM, number=4, enum=Type,) + + required = proto.Field(proto.BOOL, number=5) + + repeated = proto.Field(proto.BOOL, number=6) + + validation_regex = proto.Field(proto.STRING, number=7) + + allowed_values = proto.RepeatedField(proto.STRING, number=8) + + min_value = proto.Field(proto.MESSAGE, number=9, message=wrappers.DoubleValue,) + + max_value = proto.Field(proto.MESSAGE, number=10, message=wrappers.DoubleValue,) + + fields = proto.RepeatedField( + proto.MESSAGE, number=11, message="DataSourceParameter", + ) + + validation_description = proto.Field(proto.STRING, number=12) + + validation_help_url = proto.Field(proto.STRING, number=13) + + immutable = proto.Field(proto.BOOL, number=14) + + recurse = proto.Field(proto.BOOL, number=15) + + deprecated = proto.Field(proto.BOOL, number=20) + + +class DataSource(proto.Message): + r"""Represents data source metadata. Metadata is sufficient to + render UI and request proper OAuth tokens. + + Attributes: + name (str): + Output only. Data source resource name. + data_source_id (str): + Data source id. + display_name (str): + User friendly data source name. + description (str): + User friendly data source description string. + client_id (str): + Data source client id which should be used to + receive refresh token. + scopes (Sequence[str]): + Api auth scopes for which refresh token needs + to be obtained. These are scopes needed by a + data source to prepare data and ingest them into + BigQuery, e.g., + https://www.googleapis.com/auth/bigquery + transfer_type (~.transfer.TransferType): + Deprecated. This field has no effect. + supports_multiple_transfers (bool): + Deprecated. This field has no effect. + update_deadline_seconds (int): + The number of seconds to wait for an update + from the data source before the Data Transfer + Service marks the transfer as FAILED. + default_schedule (str): + Default data transfer schedule. Examples of valid schedules + include: ``1st,3rd monday of month 15:30``, + ``every wed,fri of jan,jun 13:15``, and + ``first sunday of quarter 00:00``. + supports_custom_schedule (bool): + Specifies whether the data source supports a user defined + schedule, or operates on the default schedule. When set to + ``true``, user can override default schedule. + parameters (Sequence[~.datatransfer.DataSourceParameter]): + Data source parameters. + help_url (str): + Url for the help document for this data + source. + authorization_type (~.datatransfer.DataSource.AuthorizationType): + Indicates the type of authorization. + data_refresh_type (~.datatransfer.DataSource.DataRefreshType): + Specifies whether the data source supports + automatic data refresh for the past few days, + and how it's supported. For some data sources, + data might not be complete until a few days + later, so it's useful to refresh data + automatically. + default_data_refresh_window_days (int): + Default data refresh window on days. Only meaningful when + ``data_refresh_type`` = ``SLIDING_WINDOW``. + manual_runs_disabled (bool): + Disables backfilling and manual run + scheduling for the data source. + minimum_schedule_interval (~.duration.Duration): + The minimum interval for scheduler to + schedule runs. 
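`supports_custom_schedule` and `default_schedule` together determine what schedule a new transfer configuration may use. A hedged sketch of that decision, with `data_source` assumed to be a `DataSource` message returned by `GetDataSource`:

```py
# Illustrative helper: fall back to the data source's default schedule unless
# custom schedules are supported and one was requested.
def choose_schedule(data_source, requested_schedule=None):
    if requested_schedule and data_source.supports_custom_schedule:
        return requested_schedule
    # e.g. "1st,3rd monday of month 15:30", per the default_schedule examples above
    return data_source.default_schedule
```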
+ """ + + class AuthorizationType(proto.Enum): + r"""The type of authorization needed for this data source.""" + AUTHORIZATION_TYPE_UNSPECIFIED = 0 + AUTHORIZATION_CODE = 1 + GOOGLE_PLUS_AUTHORIZATION_CODE = 2 + FIRST_PARTY_OAUTH = 3 + + class DataRefreshType(proto.Enum): + r"""Represents how the data source supports data auto refresh.""" + DATA_REFRESH_TYPE_UNSPECIFIED = 0 + SLIDING_WINDOW = 1 + CUSTOM_SLIDING_WINDOW = 2 + + name = proto.Field(proto.STRING, number=1) + + data_source_id = proto.Field(proto.STRING, number=2) + + display_name = proto.Field(proto.STRING, number=3) + + description = proto.Field(proto.STRING, number=4) + + client_id = proto.Field(proto.STRING, number=5) + + scopes = proto.RepeatedField(proto.STRING, number=6) + + transfer_type = proto.Field(proto.ENUM, number=7, enum=transfer.TransferType,) + + supports_multiple_transfers = proto.Field(proto.BOOL, number=8) + + update_deadline_seconds = proto.Field(proto.INT32, number=9) + + default_schedule = proto.Field(proto.STRING, number=10) + + supports_custom_schedule = proto.Field(proto.BOOL, number=11) + + parameters = proto.RepeatedField( + proto.MESSAGE, number=12, message=DataSourceParameter, + ) + + help_url = proto.Field(proto.STRING, number=13) + + authorization_type = proto.Field(proto.ENUM, number=14, enum=AuthorizationType,) + + data_refresh_type = proto.Field(proto.ENUM, number=15, enum=DataRefreshType,) + + default_data_refresh_window_days = proto.Field(proto.INT32, number=16) + + manual_runs_disabled = proto.Field(proto.BOOL, number=17) + + minimum_schedule_interval = proto.Field( + proto.MESSAGE, number=18, message=duration.Duration, + ) + + +class GetDataSourceRequest(proto.Message): + r"""A request to get data source info. + + Attributes: + name (str): + Required. The field will contain name of the resource + requested, for example: + ``projects/{project_id}/dataSources/{data_source_id}`` or + ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}`` + """ + + name = proto.Field(proto.STRING, number=1) + + +class ListDataSourcesRequest(proto.Message): + r"""Request to list supported data sources and their data + transfer settings. + + Attributes: + parent (str): + Required. The BigQuery project id for which data sources + should be returned. Must be in the form: + ``projects/{project_id}`` or + \`projects/{project_id}/locations/{location_id} + page_token (str): + Pagination token, which can be used to request a specific + page of ``ListDataSourcesRequest`` list results. For + multiple-page results, ``ListDataSourcesResponse`` outputs a + ``next_page`` token, which can be used as the ``page_token`` + value to request the next page of list results. + page_size (int): + Page size. The default page size is the + maximum value of 1000 results. + """ + + parent = proto.Field(proto.STRING, number=1) + + page_token = proto.Field(proto.STRING, number=3) + + page_size = proto.Field(proto.INT32, number=4) + + +class ListDataSourcesResponse(proto.Message): + r"""Returns list of supported data sources and their metadata. + + Attributes: + data_sources (Sequence[~.datatransfer.DataSource]): + List of supported data sources and their + transfer settings. + next_page_token (str): + Output only. The next-pagination token. For multiple-page + list results, this token can be used as the + ``ListDataSourcesRequest.page_token`` to request the next + page of list results. 
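The pagination contract here is the usual token loop: pass the previous response's `next_page_token` back as `page_token` until it comes back empty. A sketch expressed purely in terms of these messages; `call` stands for any callable that accepts a `ListDataSourcesRequest` and returns a `ListDataSourcesResponse`, such as the transport property shown earlier.

```py
# Illustrative pagination loop over data sources.
from google.cloud.bigquery.datatransfer_v1.types import datatransfer

def iter_data_sources(call, parent, page_size=1000):
    page_token = ""
    while True:
        response = call(
            datatransfer.ListDataSourcesRequest(
                parent=parent, page_size=page_size, page_token=page_token
            )
        )
        yield from response.data_sources
        page_token = response.next_page_token
        if not page_token:
            break
```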
+ """ + + @property + def raw_page(self): + return self + + data_sources = proto.RepeatedField(proto.MESSAGE, number=1, message=DataSource,) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class CreateTransferConfigRequest(proto.Message): + r"""A request to create a data transfer configuration. If new + credentials are needed for this transfer configuration, an + authorization code must be provided. If an authorization code is + provided, the transfer configuration will be associated with the + user id corresponding to the authorization code. Otherwise, the + transfer configuration will be associated with the calling user. + + Attributes: + parent (str): + Required. The BigQuery project id where the transfer + configuration should be created. Must be in the format + projects/{project_id}/locations/{location_id} or + projects/{project_id}. If specified location and location of + the destination bigquery dataset do not match - the request + will fail. + transfer_config (~.transfer.TransferConfig): + Required. Data transfer configuration to + create. + authorization_code (str): + Optional OAuth2 authorization code to use with this transfer + configuration. This is required if new credentials are + needed, as indicated by ``CheckValidCreds``. In order to + obtain authorization_code, please make a request to + https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=&scope=&redirect_uri= + + - client_id should be OAuth client_id of BigQuery DTS API + for the given data source returned by ListDataSources + method. + - data_source_scopes are the scopes returned by + ListDataSources method. + - redirect_uri is an optional parameter. If not specified, + then authorization code is posted to the opener of + authorization flow window. Otherwise it will be sent to + the redirect uri. A special value of + urn:ietf:wg:oauth:2.0:oob means that authorization code + should be returned in the title bar of the browser, with + the page text prompting the user to copy the code and + paste it in the application. + version_info (str): + Optional version info. If users want to find a very recent + access token, that is, immediately after approving access, + users have to set the version_info claim in the token + request. To obtain the version_info, users must use the + "none+gsession" response type. which be return a + version_info back in the authorization response which be be + put in a JWT claim in the token request. + service_account_name (str): + Optional service account name. If this field + is set, transfer config will be created with + this service account credentials. It requires + that requesting user calling this API has + permissions to act as this service account. + """ + + parent = proto.Field(proto.STRING, number=1) + + transfer_config = proto.Field( + proto.MESSAGE, number=2, message=transfer.TransferConfig, + ) + + authorization_code = proto.Field(proto.STRING, number=3) + + version_info = proto.Field(proto.STRING, number=5) + + service_account_name = proto.Field(proto.STRING, number=6) + + +class UpdateTransferConfigRequest(proto.Message): + r"""A request to update a transfer configuration. To update the + user id of the transfer configuration, an authorization code + needs to be provided. + + Attributes: + transfer_config (~.transfer.TransferConfig): + Required. Data transfer configuration to + create. + authorization_code (str): + Optional OAuth2 authorization code to use with this transfer + configuration. 
If it is provided, the transfer configuration + will be associated with the authorizing user. In order to + obtain authorization_code, please make a request to + https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=&scope=&redirect_uri= + + - client_id should be OAuth client_id of BigQuery DTS API + for the given data source returned by ListDataSources + method. + - data_source_scopes are the scopes returned by + ListDataSources method. + - redirect_uri is an optional parameter. If not specified, + then authorization code is posted to the opener of + authorization flow window. Otherwise it will be sent to + the redirect uri. A special value of + urn:ietf:wg:oauth:2.0:oob means that authorization code + should be returned in the title bar of the browser, with + the page text prompting the user to copy the code and + paste it in the application. + update_mask (~.field_mask.FieldMask): + Required. Required list of fields to be + updated in this request. + version_info (str): + Optional version info. If users want to find a very recent + access token, that is, immediately after approving access, + users have to set the version_info claim in the token + request. To obtain the version_info, users must use the + "none+gsession" response type. which be return a + version_info back in the authorization response which be be + put in a JWT claim in the token request. + service_account_name (str): + Optional service account name. If this field is set and + "service_account_name" is set in update_mask, transfer + config will be updated to use this service account + credentials. It requires that requesting user calling this + API has permissions to act as this service account. + """ + + transfer_config = proto.Field( + proto.MESSAGE, number=1, message=transfer.TransferConfig, + ) + + authorization_code = proto.Field(proto.STRING, number=3) + + update_mask = proto.Field(proto.MESSAGE, number=4, message=field_mask.FieldMask,) + + version_info = proto.Field(proto.STRING, number=5) + + service_account_name = proto.Field(proto.STRING, number=6) + + +class GetTransferConfigRequest(proto.Message): + r"""A request to get data transfer information. + + Attributes: + name (str): + Required. The field will contain name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` + """ + + name = proto.Field(proto.STRING, number=1) + + +class DeleteTransferConfigRequest(proto.Message): + r"""A request to delete data transfer information. All associated + transfer runs and log messages will be deleted as well. + + Attributes: + name (str): + Required. The field will contain name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` + """ + + name = proto.Field(proto.STRING, number=1) + + +class GetTransferRunRequest(proto.Message): + r"""A request to get data transfer run information. + + Attributes: + name (str): + Required. The field will contain name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` + or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` + """ + + name = proto.Field(proto.STRING, number=1) + + +class DeleteTransferRunRequest(proto.Message): + r"""A request to delete data transfer run information. + + Attributes: + name (str): + Required. 
The field will contain name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` + or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` + """ + + name = proto.Field(proto.STRING, number=1) + + +class ListTransferConfigsRequest(proto.Message): + r"""A request to list data transfers configured for a BigQuery + project. + + Attributes: + parent (str): + Required. The BigQuery project id for which data sources + should be returned: ``projects/{project_id}`` or + ``projects/{project_id}/locations/{location_id}`` + data_source_ids (Sequence[str]): + When specified, only configurations of + requested data sources are returned. + page_token (str): + Pagination token, which can be used to request a specific + page of ``ListTransfersRequest`` list results. For + multiple-page results, ``ListTransfersResponse`` outputs a + ``next_page`` token, which can be used as the ``page_token`` + value to request the next page of list results. + page_size (int): + Page size. The default page size is the + maximum value of 1000 results. + """ + + parent = proto.Field(proto.STRING, number=1) + + data_source_ids = proto.RepeatedField(proto.STRING, number=2) + + page_token = proto.Field(proto.STRING, number=3) + + page_size = proto.Field(proto.INT32, number=4) + + +class ListTransferConfigsResponse(proto.Message): + r"""The returned list of pipelines in the project. + + Attributes: + transfer_configs (Sequence[~.transfer.TransferConfig]): + Output only. The stored pipeline transfer + configurations. + next_page_token (str): + Output only. The next-pagination token. For multiple-page + list results, this token can be used as the + ``ListTransferConfigsRequest.page_token`` to request the + next page of list results. + """ + + @property + def raw_page(self): + return self + + transfer_configs = proto.RepeatedField( + proto.MESSAGE, number=1, message=transfer.TransferConfig, + ) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class ListTransferRunsRequest(proto.Message): + r"""A request to list data transfer runs. UI can use this method + to show/filter specific data transfer runs. The data source can + use this method to request all scheduled transfer runs. + + Attributes: + parent (str): + Required. Name of transfer configuration for which transfer + runs should be retrieved. Format of transfer configuration + resource name is: + ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. + states (Sequence[~.transfer.TransferState]): + When specified, only transfer runs with + requested states are returned. + page_token (str): + Pagination token, which can be used to request a specific + page of ``ListTransferRunsRequest`` list results. For + multiple-page results, ``ListTransferRunsResponse`` outputs + a ``next_page`` token, which can be used as the + ``page_token`` value to request the next page of list + results. + page_size (int): + Page size. The default page size is the + maximum value of 1000 results. + run_attempt (~.datatransfer.ListTransferRunsRequest.RunAttempt): + Indicates how run attempts are to be pulled. 
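A short sketch of combining the `states` filter with the `RunAttempt` enum defined just below; the parent path is a placeholder.

```py
# Illustrative request: only the latest attempt of failed runs for one config.
from google.cloud.bigquery.datatransfer_v1.types import datatransfer, transfer

request = datatransfer.ListTransferRunsRequest(
    parent="projects/my-project/transferConfigs/my-config",  # placeholder
    states=[transfer.TransferState.FAILED],
    run_attempt=datatransfer.ListTransferRunsRequest.RunAttempt.LATEST,
    page_size=100,
)
```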
+ """ + + class RunAttempt(proto.Enum): + r"""Represents which runs should be pulled.""" + RUN_ATTEMPT_UNSPECIFIED = 0 + LATEST = 1 + + parent = proto.Field(proto.STRING, number=1) + + states = proto.RepeatedField(proto.ENUM, number=2, enum=transfer.TransferState,) + + page_token = proto.Field(proto.STRING, number=3) + + page_size = proto.Field(proto.INT32, number=4) + + run_attempt = proto.Field(proto.ENUM, number=5, enum=RunAttempt,) + + +class ListTransferRunsResponse(proto.Message): + r"""The returned list of pipelines in the project. + + Attributes: + transfer_runs (Sequence[~.transfer.TransferRun]): + Output only. The stored pipeline transfer + runs. + next_page_token (str): + Output only. The next-pagination token. For multiple-page + list results, this token can be used as the + ``ListTransferRunsRequest.page_token`` to request the next + page of list results. + """ + + @property + def raw_page(self): + return self + + transfer_runs = proto.RepeatedField( + proto.MESSAGE, number=1, message=transfer.TransferRun, + ) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class ListTransferLogsRequest(proto.Message): + r"""A request to get user facing log messages associated with + data transfer run. + + Attributes: + parent (str): + Required. Transfer run name in the form: + ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` + or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` + page_token (str): + Pagination token, which can be used to request a specific + page of ``ListTransferLogsRequest`` list results. For + multiple-page results, ``ListTransferLogsResponse`` outputs + a ``next_page`` token, which can be used as the + ``page_token`` value to request the next page of list + results. + page_size (int): + Page size. The default page size is the + maximum value of 1000 results. + message_types (Sequence[~.transfer.TransferMessage.MessageSeverity]): + Message types to return. If not populated - + INFO, WARNING and ERROR messages are returned. + """ + + parent = proto.Field(proto.STRING, number=1) + + page_token = proto.Field(proto.STRING, number=4) + + page_size = proto.Field(proto.INT32, number=5) + + message_types = proto.RepeatedField( + proto.ENUM, number=6, enum=transfer.TransferMessage.MessageSeverity, + ) + + +class ListTransferLogsResponse(proto.Message): + r"""The returned list transfer run messages. + + Attributes: + transfer_messages (Sequence[~.transfer.TransferMessage]): + Output only. The stored pipeline transfer + messages. + next_page_token (str): + Output only. The next-pagination token. For multiple-page + list results, this token can be used as the + ``GetTransferRunLogRequest.page_token`` to request the next + page of list results. + """ + + @property + def raw_page(self): + return self + + transfer_messages = proto.RepeatedField( + proto.MESSAGE, number=1, message=transfer.TransferMessage, + ) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class CheckValidCredsRequest(proto.Message): + r"""A request to determine whether the user has valid + credentials. This method is used to limit the number of OAuth + popups in the user interface. The user id is inferred from the + API call context. + If the data source has the Google+ authorization type, this + method returns false, as it cannot be determined whether the + credentials are already valid merely based on the user id. + + Attributes: + name (str): + Required. 
The data source in the form: + ``projects/{project_id}/dataSources/{data_source_id}`` or + ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}``. + """ + + name = proto.Field(proto.STRING, number=1) + + +class CheckValidCredsResponse(proto.Message): + r"""A response indicating whether the credentials exist and are + valid. + + Attributes: + has_valid_creds (bool): + If set to ``true``, the credentials exist and are valid. + """ + + has_valid_creds = proto.Field(proto.BOOL, number=1) + + +class ScheduleTransferRunsRequest(proto.Message): + r"""A request to schedule transfer runs for a time range. + + Attributes: + parent (str): + Required. Transfer configuration name in the form: + ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. + start_time (~.timestamp.Timestamp): + Required. Start time of the range of transfer runs. For + example, ``"2017-05-25T00:00:00+00:00"``. + end_time (~.timestamp.Timestamp): + Required. End time of the range of transfer runs. For + example, ``"2017-05-30T00:00:00+00:00"``. + """ + + parent = proto.Field(proto.STRING, number=1) + + start_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) + + end_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) + + +class ScheduleTransferRunsResponse(proto.Message): + r"""A response to schedule transfer runs for a time range. + + Attributes: + runs (Sequence[~.transfer.TransferRun]): + The transfer runs that were scheduled. + """ + + runs = proto.RepeatedField(proto.MESSAGE, number=1, message=transfer.TransferRun,) + + +class StartManualTransferRunsRequest(proto.Message): + r"""A request to start manual transfer runs. + + Attributes: + parent (str): + Transfer configuration name in the form: + ``projects/{project_id}/transferConfigs/{config_id}`` or + ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. + requested_time_range (~.datatransfer.StartManualTransferRunsRequest.TimeRange): + Time range for the transfer runs that should + be started. + requested_run_time (~.timestamp.Timestamp): + Specific run_time for a transfer run to be started. The + requested_run_time must not be in the future. + """ + + class TimeRange(proto.Message): + r"""A specification for a time range, this will request transfer runs + with run_time between start_time (inclusive) and end_time + (exclusive). + + Attributes: + start_time (~.timestamp.Timestamp): + Start time of the range of transfer runs. For example, + ``"2017-05-25T00:00:00+00:00"``. The start_time must be + strictly less than the end_time. Creates transfer runs where + run_time is in the range betwen start_time (inclusive) and + end_time (exlusive). + end_time (~.timestamp.Timestamp): + End time of the range of transfer runs. For example, + ``"2017-05-30T00:00:00+00:00"``. The end_time must not be in + the future. Creates transfer runs where run_time is in the + range betwen start_time (inclusive) and end_time (exlusive). 
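
Backfills can be requested either for an explicit time range (inclusive start, exclusive end) or for a single `requested_run_time`. A hedged sketch of both calls, assuming the same 2.0 client surface; the resource name is a placeholder and the dates are taken from the docstring examples:

```py
from google.cloud.bigquery import datatransfer_v1
from google.protobuf import timestamp_pb2

client = datatransfer_v1.DataTransferServiceClient()
parent = "projects/my-project/transferConfigs/my-config"  # placeholder config name

# Schedule runs for the docstring's example window.
start, end = timestamp_pb2.Timestamp(), timestamp_pb2.Timestamp()
start.FromJsonString("2017-05-25T00:00:00Z")
end.FromJsonString("2017-05-30T00:00:00Z")
response = client.schedule_transfer_runs(
    request=datatransfer_v1.ScheduleTransferRunsRequest(
        parent=parent, start_time=start, end_time=end,
    )
)
for run in response.runs:
    print(run.name)

# Or start manual runs for the same window via the requested_time_range oneof.
client.start_manual_transfer_runs(
    request=datatransfer_v1.StartManualTransferRunsRequest(
        parent=parent,
        requested_time_range=datatransfer_v1.StartManualTransferRunsRequest.TimeRange(
            start_time=start, end_time=end,
        ),
    )
)
```
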
+ """ + + start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) + + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) + + parent = proto.Field(proto.STRING, number=1) + + requested_time_range = proto.Field( + proto.MESSAGE, number=3, oneof="time", message=TimeRange, + ) + + requested_run_time = proto.Field( + proto.MESSAGE, number=4, oneof="time", message=timestamp.Timestamp, + ) + + +class StartManualTransferRunsResponse(proto.Message): + r"""A response to start manual transfer runs. + + Attributes: + runs (Sequence[~.transfer.TransferRun]): + The transfer runs that were created. + """ + + runs = proto.RepeatedField(proto.MESSAGE, number=1, message=transfer.TransferRun,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/types/transfer.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/types/transfer.py new file mode 100644 index 000000000000..86f085e49b7a --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery/datatransfer_v1/types/transfer.py @@ -0,0 +1,332 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.protobuf import struct_pb2 as struct # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.rpc import status_pb2 as status # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.bigquery.datatransfer.v1", + manifest={ + "TransferType", + "TransferState", + "EmailPreferences", + "ScheduleOptions", + "TransferConfig", + "TransferRun", + "TransferMessage", + }, +) + + +class TransferType(proto.Enum): + r"""DEPRECATED. Represents data transfer type.""" + TRANSFER_TYPE_UNSPECIFIED = 0 + BATCH = 1 + STREAMING = 2 + + +class TransferState(proto.Enum): + r"""Represents data transfer run state.""" + TRANSFER_STATE_UNSPECIFIED = 0 + PENDING = 2 + RUNNING = 3 + SUCCEEDED = 4 + FAILED = 5 + CANCELLED = 6 + + +class EmailPreferences(proto.Message): + r"""Represents preferences for sending email notifications for + transfer run events. + + Attributes: + enable_failure_email (bool): + If true, email notifications will be sent on + transfer run failures. + """ + + enable_failure_email = proto.Field(proto.BOOL, number=1) + + +class ScheduleOptions(proto.Message): + r"""Options customizing the data transfer schedule. + + Attributes: + disable_auto_scheduling (bool): + If true, automatic scheduling of data + transfer runs for this configuration will be + disabled. The runs can be started on ad-hoc + basis using StartManualTransferRuns API. When + automatic scheduling is disabled, the + TransferConfig.schedule field will be ignored. + start_time (~.timestamp.Timestamp): + Specifies time to start scheduling transfer + runs. 
The first run will be scheduled at or + after the start time according to a recurrence + pattern defined in the schedule string. The + start time can be changed at any moment. The + time when a data transfer can be trigerred + manually is not limited by this option. + end_time (~.timestamp.Timestamp): + Defines time to stop scheduling transfer + runs. A transfer run cannot be scheduled at or + after the end time. The end time can be changed + at any moment. The time when a data transfer can + be trigerred manually is not limited by this + option. + """ + + disable_auto_scheduling = proto.Field(proto.BOOL, number=3) + + start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) + + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) + + +class TransferConfig(proto.Message): + r"""Represents a data transfer configuration. A transfer configuration + contains all metadata needed to perform a data transfer. For + example, ``destination_dataset_id`` specifies where data should be + stored. When a new transfer configuration is created, the specified + ``destination_dataset_id`` is created when needed and shared with + the appropriate data source service account. + + Attributes: + name (str): + The resource name of the transfer config. Transfer config + names have the form of + ``projects/{project_id}/locations/{region}/transferConfigs/{config_id}``. + The name is automatically generated based on the config_id + specified in CreateTransferConfigRequest along with + project_id and region. If config_id is not provided, usually + a uuid, even though it is not guaranteed or required, will + be generated for config_id. + destination_dataset_id (str): + The BigQuery target dataset id. + display_name (str): + User specified display name for the data + transfer. + data_source_id (str): + Data source id. Cannot be changed once data + transfer is created. + params (~.struct.Struct): + Data transfer specific parameters. + schedule (str): + Data transfer schedule. If the data source does not support + a custom schedule, this should be empty. If it is empty, the + default value for the data source will be used. The + specified times are in UTC. Examples of valid format: + ``1st,3rd monday of month 15:30``, + ``every wed,fri of jan,jun 13:15``, and + ``first sunday of quarter 00:00``. See more explanation + about the format here: + https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format + NOTE: the granularity should be at least 8 hours, or less + frequent. + schedule_options (~.transfer.ScheduleOptions): + Options customizing the data transfer + schedule. + data_refresh_window_days (int): + The number of days to look back to automatically refresh the + data. For example, if ``data_refresh_window_days = 10``, + then every day BigQuery reingests data for [today-10, + today-1], rather than ingesting data for just [today-1]. + Only valid if the data source supports the feature. Set the + value to 0 to use the default value. + disabled (bool): + Is this config disabled. When set to true, no + runs are scheduled for a given transfer. + update_time (~.timestamp.Timestamp): + Output only. Data transfer modification time. + Ignored by server on input. + next_run_time (~.timestamp.Timestamp): + Output only. Next time when data transfer + will run. + state (~.transfer.TransferState): + Output only. State of the most recently + updated transfer run. + user_id (int): + Deprecated. 
Unique ID of the user on whose + behalf transfer is done. + dataset_region (str): + Output only. Region in which BigQuery dataset + is located. + notification_pubsub_topic (str): + Pub/Sub topic where notifications will be + sent after transfer runs associated with this + transfer config finish. + email_preferences (~.transfer.EmailPreferences): + Email notifications will be sent according to + these preferences to the email address of the + user who owns this transfer config. + """ + + name = proto.Field(proto.STRING, number=1) + + destination_dataset_id = proto.Field(proto.STRING, number=2, oneof="destination") + + display_name = proto.Field(proto.STRING, number=3) + + data_source_id = proto.Field(proto.STRING, number=5) + + params = proto.Field(proto.MESSAGE, number=9, message=struct.Struct,) + + schedule = proto.Field(proto.STRING, number=7) + + schedule_options = proto.Field(proto.MESSAGE, number=24, message=ScheduleOptions,) + + data_refresh_window_days = proto.Field(proto.INT32, number=12) + + disabled = proto.Field(proto.BOOL, number=13) + + update_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) + + next_run_time = proto.Field(proto.MESSAGE, number=8, message=timestamp.Timestamp,) + + state = proto.Field(proto.ENUM, number=10, enum="TransferState",) + + user_id = proto.Field(proto.INT64, number=11) + + dataset_region = proto.Field(proto.STRING, number=14) + + notification_pubsub_topic = proto.Field(proto.STRING, number=15) + + email_preferences = proto.Field(proto.MESSAGE, number=18, message=EmailPreferences,) + + +class TransferRun(proto.Message): + r"""Represents a data transfer run. + + Attributes: + name (str): + The resource name of the transfer run. Transfer run names + have the form + ``projects/{project_id}/locations/{location}/transferConfigs/{config_id}/runs/{run_id}``. + The name is ignored when creating a transfer run. + schedule_time (~.timestamp.Timestamp): + Minimum time after which a transfer run can + be started. + run_time (~.timestamp.Timestamp): + For batch transfer runs, specifies the date + and time of the data should be ingested. + error_status (~.status.Status): + Status of the transfer run. + start_time (~.timestamp.Timestamp): + Output only. Time when transfer run was + started. Parameter ignored by server for input + requests. + end_time (~.timestamp.Timestamp): + Output only. Time when transfer run ended. + Parameter ignored by server for input requests. + update_time (~.timestamp.Timestamp): + Output only. Last time the data transfer run + state was updated. + params (~.struct.Struct): + Output only. Data transfer specific + parameters. + destination_dataset_id (str): + Output only. The BigQuery target dataset id. + data_source_id (str): + Output only. Data source id. + state (~.transfer.TransferState): + Data transfer run state. Ignored for input + requests. + user_id (int): + Deprecated. Unique ID of the user on whose + behalf transfer is done. + schedule (str): + Output only. Describes the schedule of this transfer run if + it was created as part of a regular schedule. For batch + transfer runs that are scheduled manually, this is empty. + NOTE: the system might choose to delay the schedule + depending on the current load, so ``schedule_time`` doesn't + always match this. + notification_pubsub_topic (str): + Output only. Pub/Sub topic where a + notification will be sent after this transfer + run finishes + email_preferences (~.transfer.EmailPreferences): + Output only. 
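
Since `TransferConfig` is the payload for creating and updating configurations, a small construction sketch may help. It follows the common scheduled-query pattern, assuming `params` accepts a plain dict that proto-plus marshals into the `Struct` field; every name and the SQL text are placeholders:

```py
from google.cloud.bigquery import datatransfer_v1

# A scheduled-query style configuration; all values here are illustrative only.
transfer_config = datatransfer_v1.TransferConfig(
    destination_dataset_id="my_dataset",
    display_name="Nightly example query",
    data_source_id="scheduled_query",
    params={"query": "SELECT CURRENT_TIMESTAMP() AS ts"},
    schedule="every 24 hours",
    schedule_options=datatransfer_v1.ScheduleOptions(disable_auto_scheduling=False),
)
# The message would then typically be passed to create_transfer_config or
# update_transfer_config on the service client.
```
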
Email notifications will be sent + according to these preferences to the email + address of the user who owns the transfer config + this run was derived from. + """ + + name = proto.Field(proto.STRING, number=1) + + schedule_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) + + run_time = proto.Field(proto.MESSAGE, number=10, message=timestamp.Timestamp,) + + error_status = proto.Field(proto.MESSAGE, number=21, message=status.Status,) + + start_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) + + end_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) + + update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) + + params = proto.Field(proto.MESSAGE, number=9, message=struct.Struct,) + + destination_dataset_id = proto.Field(proto.STRING, number=2, oneof="destination") + + data_source_id = proto.Field(proto.STRING, number=7) + + state = proto.Field(proto.ENUM, number=8, enum="TransferState",) + + user_id = proto.Field(proto.INT64, number=11) + + schedule = proto.Field(proto.STRING, number=12) + + notification_pubsub_topic = proto.Field(proto.STRING, number=23) + + email_preferences = proto.Field(proto.MESSAGE, number=25, message=EmailPreferences,) + + +class TransferMessage(proto.Message): + r"""Represents a user facing message for a particular data + transfer run. + + Attributes: + message_time (~.timestamp.Timestamp): + Time when message was logged. + severity (~.transfer.TransferMessage.MessageSeverity): + Message severity. + message_text (str): + Message text. + """ + + class MessageSeverity(proto.Enum): + r"""Represents data transfer user facing message severity.""" + MESSAGE_SEVERITY_UNSPECIFIED = 0 + INFO = 1 + WARNING = 2 + ERROR = 3 + + message_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) + + severity = proto.Field(proto.ENUM, number=2, enum=MessageSeverity,) + + message_text = proto.Field(proto.STRING, number=3) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer.py deleted file mode 100644 index ea41f623fa43..000000000000 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer.py +++ /dev/null @@ -1,29 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
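
To see `TransferMessage` and its `MessageSeverity` enum in use, here is a hedged sketch that pulls only error-level log lines for a run, again assuming the 2.0 client surface; the run name is a placeholder:

```py
from google.cloud.bigquery import datatransfer_v1

client = datatransfer_v1.DataTransferServiceClient()
run_name = "projects/my-project/transferConfigs/my-config/runs/my-run"  # placeholder

logs_request = datatransfer_v1.ListTransferLogsRequest(
    parent=run_name,
    message_types=[datatransfer_v1.TransferMessage.MessageSeverity.ERROR],
)
for message in client.list_transfer_logs(request=logs_request):
    print(message.message_time, message.severity.name, message.message_text)
```
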
- - -from __future__ import absolute_import - -from google.cloud.bigquery_datatransfer_v1 import DataTransferServiceClient -from google.cloud.bigquery_datatransfer_v1 import enums -from google.cloud.bigquery_datatransfer_v1 import types - - -__all__ = ( - "enums", - "types", - "DataTransferServiceClient", -) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/__init__.py deleted file mode 100644 index 8334830b3625..000000000000 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/__init__.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from __future__ import absolute_import -import sys -import warnings - -from google.cloud.bigquery_datatransfer_v1 import types -from google.cloud.bigquery_datatransfer_v1.gapic import data_transfer_service_client -from google.cloud.bigquery_datatransfer_v1.gapic import enums - - -if sys.version_info[:2] == (2, 7): - message = ( - "A future version of this library will drop support for Python 2.7. " - "More details about Python 2 support for Google Cloud Client Libraries " - "can be found at https://cloud.google.com/python/docs/python2-sunset/" - ) - warnings.warn(message, DeprecationWarning) - - -class DataTransferServiceClient(data_transfer_service_client.DataTransferServiceClient): - __doc__ = data_transfer_service_client.DataTransferServiceClient.__doc__ - enums = enums - - -__all__ = ( - "enums", - "types", - "DataTransferServiceClient", -) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client.py deleted file mode 100644 index fe05c758688e..000000000000 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client.py +++ /dev/null @@ -1,1769 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Accesses the google.cloud.bigquery.datatransfer.v1 DataTransferService API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.path_template -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import google.api_core.protobuf_helpers -import grpc - -from google.cloud.bigquery_datatransfer_v1.gapic import ( - data_transfer_service_client_config, -) -from google.cloud.bigquery_datatransfer_v1.gapic import enums -from google.cloud.bigquery_datatransfer_v1.gapic.transports import ( - data_transfer_service_grpc_transport, -) -from google.cloud.bigquery_datatransfer_v1.proto import datatransfer_pb2 -from google.cloud.bigquery_datatransfer_v1.proto import datatransfer_pb2_grpc -from google.cloud.bigquery_datatransfer_v1.proto import transfer_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import timestamp_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-bigquery-datatransfer", -).version - - -class DataTransferServiceClient(object): - """ - The Google BigQuery Data Transfer Service API enables BigQuery users to - configure the transfer of their data from other Google Products into - BigQuery. This service contains methods that are end user exposed. It backs - up the frontend. - """ - - SERVICE_ADDRESS = "bigquerydatatransfer.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.cloud.bigquery.datatransfer.v1.DataTransferService" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataTransferServiceClient: The constructed client. 
- """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def data_source_path(cls, project, data_source): - """Return a fully-qualified data_source string.""" - return google.api_core.path_template.expand( - "projects/{project}/dataSources/{data_source}", - project=project, - data_source=data_source, - ) - - @classmethod - def location_path(cls, project, location): - """Return a fully-qualified location string.""" - return google.api_core.path_template.expand( - "projects/{project}/locations/{location}", - project=project, - location=location, - ) - - @classmethod - def location_data_source_path(cls, project, location, data_source): - """Return a fully-qualified location_data_source string.""" - return google.api_core.path_template.expand( - "projects/{project}/locations/{location}/dataSources/{data_source}", - project=project, - location=location, - data_source=data_source, - ) - - @classmethod - def location_run_path(cls, project, location, transfer_config, run): - """Return a fully-qualified location_run string.""" - return google.api_core.path_template.expand( - "projects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}", - project=project, - location=location, - transfer_config=transfer_config, - run=run, - ) - - @classmethod - def location_transfer_config_path(cls, project, location, transfer_config): - """Return a fully-qualified location_transfer_config string.""" - return google.api_core.path_template.expand( - "projects/{project}/locations/{location}/transferConfigs/{transfer_config}", - project=project, - location=location, - transfer_config=transfer_config, - ) - - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project, - ) - - @classmethod - def project_data_source_path(cls, project, data_source): - """Return a fully-qualified project_data_source string.""" - return google.api_core.path_template.expand( - "projects/{project}/dataSources/{data_source}", - project=project, - data_source=data_source, - ) - - @classmethod - def project_run_path(cls, project, transfer_config, run): - """Return a fully-qualified project_run string.""" - return google.api_core.path_template.expand( - "projects/{project}/transferConfigs/{transfer_config}/runs/{run}", - project=project, - transfer_config=transfer_config, - run=run, - ) - - @classmethod - def project_transfer_config_path(cls, project, transfer_config): - """Return a fully-qualified project_transfer_config string.""" - return google.api_core.path_template.expand( - "projects/{project}/transferConfigs/{transfer_config}", - project=project, - transfer_config=transfer_config, - ) - - @classmethod - def run_path(cls, project, transfer_config, run): - """Return a fully-qualified run string.""" - return google.api_core.path_template.expand( - "projects/{project}/transferConfigs/{transfer_config}/runs/{run}", - project=project, - transfer_config=transfer_config, - run=run, - ) - - @classmethod - def transfer_config_path(cls, project, transfer_config): - """Return a fully-qualified transfer_config string.""" - return google.api_core.path_template.expand( - "projects/{project}/transferConfigs/{transfer_config}", - project=project, - transfer_config=transfer_config, - ) - - def __init__( - self, - transport=None, - 
channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.DataTransferServiceGrpcTransport, - Callable[[~.Credentials, type], ~.DataTransferServiceGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = data_transfer_service_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=data_transfer_service_grpc_transport.DataTransferServiceGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = data_transfer_service_grpc_transport.DataTransferServiceGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials, - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. 
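
The deprecated `channel` and `client_config` constructor arguments shown here are not part of the regenerated client; as with other gapic-generator-python clients, endpoint overrides go through `client_options` and authentication through `credentials`. A hedged sketch using an endpoint override, with credentials falling back to Application Default Credentials:

```py
from google.api_core.client_options import ClientOptions
from google.cloud.bigquery import datatransfer_v1

# Override the API endpoint; credentials are discovered from the environment
# (ADC) when not supplied explicitly.
options = ClientOptions(api_endpoint="bigquerydatatransfer.googleapis.com")
client = datatransfer_v1.DataTransferServiceClient(client_options=options)
```
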
- # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME], - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def get_data_source( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Retrieves a supported data source and returns its settings, - which can be used for UI rendering. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> name = client.project_data_source_path('[PROJECT]', '[DATA_SOURCE]') - >>> - >>> response = client.get_data_source(name) - - Args: - name (str): Start time of the range of transfer runs. For example, - ``"2017-05-25T00:00:00+00:00"``. The start_time must be strictly less - than the end_time. Creates transfer runs where run_time is in the range - betwen start_time (inclusive) and end_time (exlusive). - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.bigquery_datatransfer_v1.types.DataSource` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_data_source" not in self._inner_api_calls: - self._inner_api_calls[ - "get_data_source" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_data_source, - default_retry=self._method_configs["GetDataSource"].retry, - default_timeout=self._method_configs["GetDataSource"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.GetDataSourceRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_data_source"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_data_sources( - self, - parent, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists supported data sources and returns their settings, - which can be used for UI rendering. 
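
For comparison with the positional call being deleted above, the request-object form of `get_data_source` on the 2.0 surface would look roughly like this, assuming `GetDataSourceRequest` is exported alongside the other request types; the resource name is a placeholder:

```py
from google.cloud.bigquery import datatransfer_v1

client = datatransfer_v1.DataTransferServiceClient()

request = datatransfer_v1.GetDataSourceRequest(
    name="projects/my-project/dataSources/scheduled_query"
)
data_source = client.get_data_source(request=request)
print(data_source.display_name)
```
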
- - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_data_sources(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_data_sources(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Signed seconds of the span of time. Must be from -315,576,000,000 to - +315,576,000,000 inclusive. Note: these bounds are computed from: 60 - sec/min \* 60 min/hr \* 24 hr/day \* 365.25 days/year \* 10000 years - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.bigquery_datatransfer_v1.types.DataSource` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_data_sources" not in self._inner_api_calls: - self._inner_api_calls[ - "list_data_sources" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_data_sources, - default_retry=self._method_configs["ListDataSources"].retry, - default_timeout=self._method_configs["ListDataSources"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.ListDataSourcesRequest( - parent=parent, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_data_sources"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="data_sources", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def create_transfer_config( - self, - parent, - transfer_config, - authorization_code=None, - version_info=None, - service_account_name=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a new data transfer configuration. 
- - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `transfer_config`: - >>> transfer_config = {} - >>> - >>> response = client.create_transfer_config(parent, transfer_config) - - Args: - parent (str): Signed fractions of a second at nanosecond resolution of the span of - time. Durations less than one second are represented with a 0 - ``seconds`` field and a positive or negative ``nanos`` field. For - durations of one second or more, a non-zero value for the ``nanos`` - field must be of the same sign as the ``seconds`` field. Must be from - -999,999,999 to +999,999,999 inclusive. - transfer_config (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.TransferConfig]): Required. Data transfer configuration to create. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferConfig` - authorization_code (str): Required. Name of transfer configuration for which transfer runs - should be retrieved. Format of transfer configuration resource name is: - ``projects/{project_id}/transferConfigs/{config_id}`` or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. - version_info (str): Should this field be parsed lazily? Lazy applies only to - message-type fields. It means that when the outer message is initially - parsed, the inner message's contents will not be parsed but instead - stored in encoded form. The inner message will actually be parsed when - it is first accessed. - - This is only a hint. Implementations are free to choose whether to use - eager or lazy parsing regardless of the value of this option. However, - setting this option true suggests that the protocol author believes that - using lazy parsing on this field is worth the additional bookkeeping - overhead typically needed to implement it. - - This option does not affect the public interface of any generated code; - all method signatures remain the same. Furthermore, thread-safety of the - interface is not affected by this option; const methods remain safe to - call from multiple threads concurrently, while non-const methods - continue to require exclusive access. - - Note that implementations may choose not to check required fields within - a lazy sub-message. That is, calling IsInitialized() on the outer - message may return true even if the inner message has missing required - fields. This is necessary because otherwise the inner message would have - to be parsed in order to perform the check, defeating the purpose of - lazy parsing. An implementation which chooses not to check required - fields must be consistent about it. That is, for any particular - sub-message, the implementation must either *always* check its required - fields, or *never* check its required fields, regardless of whether or - not the message has been parsed. - service_account_name (str): Optional service account name. If this field is set, transfer config will - be created with this service account credentials. It requires that - requesting user calling this API has permissions to act as this service - account. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. 
- timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferConfig` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "create_transfer_config" not in self._inner_api_calls: - self._inner_api_calls[ - "create_transfer_config" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_transfer_config, - default_retry=self._method_configs["CreateTransferConfig"].retry, - default_timeout=self._method_configs["CreateTransferConfig"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.CreateTransferConfigRequest( - parent=parent, - transfer_config=transfer_config, - authorization_code=authorization_code, - version_info=version_info, - service_account_name=service_account_name, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_transfer_config"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_transfer_config( - self, - transfer_config, - update_mask, - authorization_code=None, - version_info=None, - service_account_name=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates a data transfer configuration. - All fields must be set, even if they are not updated. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> # TODO: Initialize `transfer_config`: - >>> transfer_config = {} - >>> - >>> # TODO: Initialize `update_mask`: - >>> update_mask = {} - >>> - >>> response = client.update_transfer_config(transfer_config, update_mask) - - Args: - transfer_config (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.TransferConfig]): Required. Data transfer configuration to create. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferConfig` - update_mask (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.FieldMask]): Required. Required list of fields to be updated in this request. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.bigquery_datatransfer_v1.types.FieldMask` - authorization_code (str): Optional. The historical or future-looking state of the resource - pattern. - - Example: - - :: - - // The InspectTemplate message originally only supported resource - // names with organization, and project was added later. 
- message InspectTemplate { - option (google.api.resource) = { - type: "dlp.googleapis.com/InspectTemplate" - pattern: - "organizations/{organization}/inspectTemplates/{inspect_template}" - pattern: "projects/{project}/inspectTemplates/{inspect_template}" - history: ORIGINALLY_SINGLE_PATTERN - }; - } - version_info (str): Transfer configuration name in the form: - ``projects/{project_id}/transferConfigs/{config_id}`` or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. - service_account_name (str): Required. Start time of the range of transfer runs. For example, - ``"2017-05-25T00:00:00+00:00"``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferConfig` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "update_transfer_config" not in self._inner_api_calls: - self._inner_api_calls[ - "update_transfer_config" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_transfer_config, - default_retry=self._method_configs["UpdateTransferConfig"].retry, - default_timeout=self._method_configs["UpdateTransferConfig"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.UpdateTransferConfigRequest( - transfer_config=transfer_config, - update_mask=update_mask, - authorization_code=authorization_code, - version_info=version_info, - service_account_name=service_account_name, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("transfer_config.name", transfer_config.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_transfer_config"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_transfer_config( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes a data transfer configuration, - including any associated transfer runs and logs. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> name = client.project_transfer_config_path('[PROJECT]', '[TRANSFER_CONFIG]') - >>> - >>> client.delete_transfer_config(name) - - Args: - name (str): The resource name of the transfer config. Transfer config names have - the form of - ``projects/{project_id}/locations/{region}/transferConfigs/{config_id}``. - The name is automatically generated based on the config_id specified in - CreateTransferConfigRequest along with project_id and region. 
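
Updates keep their field-mask semantics in the request-object style, and deletes only need the resource name. A hedged sketch, assuming `UpdateTransferConfigRequest` and `DeleteTransferConfigRequest` are exported like the other types; all names are placeholders:

```py
from google.cloud.bigquery import datatransfer_v1
from google.protobuf import field_mask_pb2

client = datatransfer_v1.DataTransferServiceClient()
config_name = "projects/my-project/transferConfigs/my-config"  # placeholder

# Rename a config, touching only the display_name field.
updated = client.update_transfer_config(
    request=datatransfer_v1.UpdateTransferConfigRequest(
        transfer_config=datatransfer_v1.TransferConfig(
            name=config_name, display_name="Renamed config"
        ),
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )
)

# Delete the config along with its associated runs and logs.
client.delete_transfer_config(
    request=datatransfer_v1.DeleteTransferConfigRequest(name=config_name)
)
```
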
If - config_id is not provided, usually a uuid, even though it is not - guaranteed or required, will be generated for config_id. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_transfer_config" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_transfer_config" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_transfer_config, - default_retry=self._method_configs["DeleteTransferConfig"].retry, - default_timeout=self._method_configs["DeleteTransferConfig"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.DeleteTransferConfigRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_transfer_config"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_transfer_config( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Returns information about a data transfer config. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> name = client.project_transfer_config_path('[PROJECT]', '[TRANSFER_CONFIG]') - >>> - >>> response = client.get_transfer_config(name) - - Args: - name (str): Protocol Buffers - Google's data interchange format Copyright 2008 - Google Inc. All rights reserved. - https://developers.google.com/protocol-buffers/ - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are - met: - - :: - - * Redistributions of source code must retain the above copyright - - notice, this list of conditions and the following disclaimer. \* - Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. \* - Neither the name of Google Inc. nor the names of its contributors may be - used to endorse or promote products derived from this software without - specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS - IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED - TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A - PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER - OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, - EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR - PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF - LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING - NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS - SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferConfig` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_transfer_config" not in self._inner_api_calls: - self._inner_api_calls[ - "get_transfer_config" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_transfer_config, - default_retry=self._method_configs["GetTransferConfig"].retry, - default_timeout=self._method_configs["GetTransferConfig"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.GetTransferConfigRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_transfer_config"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_transfer_configs( - self, - parent, - data_source_ids=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Returns information about all data transfers in the project. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_transfer_configs(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_transfer_configs(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): See ``HttpRule``. - data_source_ids (list[str]): When specified, only configurations of requested data sources are returned. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. 
- retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferConfig` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_transfer_configs" not in self._inner_api_calls: - self._inner_api_calls[ - "list_transfer_configs" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_transfer_configs, - default_retry=self._method_configs["ListTransferConfigs"].retry, - default_timeout=self._method_configs["ListTransferConfigs"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.ListTransferConfigsRequest( - parent=parent, data_source_ids=data_source_ids, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_transfer_configs"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="transfer_configs", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def schedule_transfer_runs( - self, - parent, - start_time, - end_time, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - If type_name is set, this need not be set. If both this and - type_name are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or - TYPE_GROUP. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> parent = client.project_transfer_config_path('[PROJECT]', '[TRANSFER_CONFIG]') - >>> - >>> # TODO: Initialize `start_time`: - >>> start_time = {} - >>> - >>> # TODO: Initialize `end_time`: - >>> end_time = {} - >>> - >>> response = client.schedule_transfer_runs(parent, start_time, end_time) - - Args: - parent (str): The name of the uninterpreted option. Each string represents a - segment in a dot-separated name. is_extension is true iff a segment - represents an extension (denoted with parentheses in options specs in - .proto files). E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] - } represents "foo.(bar.baz).qux". - start_time (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.Timestamp]): A generic empty message that you can re-use to avoid defining - duplicated empty messages in your APIs. 
A typical example is to use it - as the request or the response type of an API method. For instance: - - :: - - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } - - The JSON representation for ``Empty`` is empty JSON object ``{}``. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.bigquery_datatransfer_v1.types.Timestamp` - end_time (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.Timestamp]): The resource has one pattern, but the API owner expects to add more - later. (This is the inverse of ORIGINALLY_SINGLE_PATTERN, and prevents - that from being necessary once there are multiple patterns.) - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.bigquery_datatransfer_v1.types.Timestamp` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.bigquery_datatransfer_v1.types.ScheduleTransferRunsResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "schedule_transfer_runs" not in self._inner_api_calls: - self._inner_api_calls[ - "schedule_transfer_runs" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.schedule_transfer_runs, - default_retry=self._method_configs["ScheduleTransferRuns"].retry, - default_timeout=self._method_configs["ScheduleTransferRuns"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.ScheduleTransferRunsRequest( - parent=parent, start_time=start_time, end_time=end_time, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["schedule_transfer_runs"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def start_manual_transfer_runs( - self, - parent=None, - requested_time_range=None, - requested_run_time=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Pagination token, which can be used to request a specific page of - ``ListTransferRunsRequest`` list results. For multiple-page results, - ``ListTransferRunsResponse`` outputs a ``next_page`` token, which can be - used as the ``page_token`` value to request the next page of list - results. 
- - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> response = client.start_manual_transfer_runs() - - Args: - parent (str): A Timestamp represents a point in time independent of any time zone - or local calendar, encoded as a count of seconds and fractions of - seconds at nanosecond resolution. The count is relative to an epoch at - UTC midnight on January 1, 1970, in the proleptic Gregorian calendar - which extends the Gregorian calendar backwards to year one. - - All minutes are 60 seconds long. Leap seconds are "smeared" so that no - leap second table is needed for interpretation, using a `24-hour linear - smear `__. - - The range is from 0001-01-01T00:00:00Z to - 9999-12-31T23:59:59.999999999Z. By restricting to that range, we ensure - that we can convert to and from `RFC - 3339 `__ date strings. - - # Examples - - Example 1: Compute Timestamp from POSIX ``time()``. - - :: - - Timestamp timestamp; - timestamp.set_seconds(time(NULL)); - timestamp.set_nanos(0); - - Example 2: Compute Timestamp from POSIX ``gettimeofday()``. - - :: - - struct timeval tv; - gettimeofday(&tv, NULL); - - Timestamp timestamp; - timestamp.set_seconds(tv.tv_sec); - timestamp.set_nanos(tv.tv_usec * 1000); - - Example 3: Compute Timestamp from Win32 ``GetSystemTimeAsFileTime()``. - - :: - - FILETIME ft; - GetSystemTimeAsFileTime(&ft); - UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; - - // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z - // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. - Timestamp timestamp; - timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); - timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); - - Example 4: Compute Timestamp from Java ``System.currentTimeMillis()``. - - :: - - long millis = System.currentTimeMillis(); - - Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) - .setNanos((int) ((millis % 1000) * 1000000)).build(); - - Example 5: Compute Timestamp from current time in Python. - - :: - - timestamp = Timestamp() - timestamp.GetCurrentTime() - - # JSON Mapping - - In JSON format, the Timestamp type is encoded as a string in the `RFC - 3339 `__ format. That is, the - format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" where - {year} is always expressed using four digits while {month}, {day}, - {hour}, {min}, and {sec} are zero-padded to two digits each. The - fractional seconds, which can go up to 9 digits (i.e. up to 1 nanosecond - resolution), are optional. The "Z" suffix indicates the timezone - ("UTC"); the timezone is required. A proto3 JSON serializer should - always use UTC (as indicated by "Z") when printing the Timestamp type - and a proto3 JSON parser should be able to accept both UTC and other - timezones (as indicated by an offset). - - For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past 01:30 - UTC on January 15, 2017. - - In JavaScript, one can convert a Date object to this format using the - standard - `toISOString() `__ - method. In Python, a standard ``datetime.datetime`` object can be - converted to this format using - ```strftime`` `__ - with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, - one can use the Joda Time's - ```ISODateTimeFormat.dateTime()`` `__ - to obtain a formatter capable of generating timestamps in this format. 
- requested_time_range (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.TimeRange]): Time range for the transfer runs that should be started. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.bigquery_datatransfer_v1.types.TimeRange` - requested_run_time (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.Timestamp]): Protocol Buffers - Google's data interchange format Copyright 2008 - Google Inc. All rights reserved. - https://developers.google.com/protocol-buffers/ - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are - met: - - :: - - * Redistributions of source code must retain the above copyright - - notice, this list of conditions and the following disclaimer. \* - Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. \* - Neither the name of Google Inc. nor the names of its contributors may be - used to endorse or promote products derived from this software without - specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS - IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED - TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A - PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER - OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, - EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR - PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF - LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING - NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS - SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.bigquery_datatransfer_v1.types.Timestamp` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
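Each of the deleted client methods wires retry and timeout defaults onto its transport call through `google.api_core.gapic_v1.method.wrap_method`, so a bare stub becomes a callable that honors per-call `retry=`/`timeout=` overrides. Roughly, assuming only `google-api-core` is installed (the `transport_method` below is a hypothetical stand-in, and the numbers mirror `retry_policy_1_params` from the deleted client config):

```py
# Not part of the diff: a minimal sketch of the retry/timeout wrapping, using
# values that mirror ``retry_policy_1_params`` from the deleted
# data_transfer_service_client_config.py. ``transport_method`` is a
# hypothetical stand-in for a gRPC stub call.
from google.api_core import exceptions
from google.api_core import retry as retries
from google.api_core.gapic_v1 import method

default_retry = retries.Retry(
    predicate=retries.if_exception_type(
        exceptions.ServiceUnavailable,   # UNAVAILABLE
        exceptions.DeadlineExceeded,     # DEADLINE_EXCEEDED
    ),
    initial=0.1,     # initial_retry_delay_millis=100
    multiplier=1.3,  # retry_delay_multiplier=1.3
    maximum=60.0,    # max_retry_delay_millis=60000
    deadline=20.0,   # total_timeout_millis=20000
)

def transport_method(request, timeout=None, metadata=None):
    """Hypothetical stand-in for a transport stub; just echoes the request."""
    return request

wrapped = method.wrap_method(
    transport_method, default_retry=default_retry, default_timeout=20.0
)

# Callers can still override retry/timeout per call, exactly as the deleted
# client methods do via their ``retry=`` and ``timeout=`` parameters.
response = wrapped("fake-request")
```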
- if "start_manual_transfer_runs" not in self._inner_api_calls: - self._inner_api_calls[ - "start_manual_transfer_runs" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.start_manual_transfer_runs, - default_retry=self._method_configs["StartManualTransferRuns"].retry, - default_timeout=self._method_configs["StartManualTransferRuns"].timeout, - client_info=self._client_info, - ) - - # Sanity check: We have some fields which are mutually exclusive; - # raise ValueError if more than one is sent. - google.api_core.protobuf_helpers.check_oneof( - requested_time_range=requested_time_range, - requested_run_time=requested_run_time, - ) - - request = datatransfer_pb2.StartManualTransferRunsRequest( - parent=parent, - requested_time_range=requested_time_range, - requested_run_time=requested_run_time, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["start_manual_transfer_runs"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_transfer_run( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Returns information about the particular transfer run. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> name = client.project_run_path('[PROJECT]', '[TRANSFER_CONFIG]', '[RUN]') - >>> - >>> response = client.get_transfer_run(name) - - Args: - name (str): An annotation that describes a resource definition, see - ``ResourceDescriptor``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferRun` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "get_transfer_run" not in self._inner_api_calls: - self._inner_api_calls[ - "get_transfer_run" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_transfer_run, - default_retry=self._method_configs["GetTransferRun"].retry, - default_timeout=self._method_configs["GetTransferRun"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.GetTransferRunRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_transfer_run"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_transfer_run( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes the specified transfer run. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> name = client.project_run_path('[PROJECT]', '[TRANSFER_CONFIG]', '[RUN]') - >>> - >>> client.delete_transfer_run(name) - - Args: - name (str): Optional OAuth2 authorization code to use with this transfer - configuration. This is required if new credentials are needed, as - indicated by ``CheckValidCreds``. In order to obtain authorization_code, - please make a request to - https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=&scope=&redirect_uri= - - - client_id should be OAuth client_id of BigQuery DTS API for the given - data source returned by ListDataSources method. - - data_source_scopes are the scopes returned by ListDataSources method. - - redirect_uri is an optional parameter. If not specified, then - authorization code is posted to the opener of authorization flow - window. Otherwise it will be sent to the redirect uri. A special - value of urn:ietf:wg:oauth:2.0:oob means that authorization code - should be returned in the title bar of the browser, with the page - text prompting the user to copy the code and paste it in the - application. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "delete_transfer_run" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_transfer_run" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_transfer_run, - default_retry=self._method_configs["DeleteTransferRun"].retry, - default_timeout=self._method_configs["DeleteTransferRun"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.DeleteTransferRunRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_transfer_run"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_transfer_runs( - self, - parent, - states=None, - page_size=None, - run_attempt=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Returns information about running and completed jobs. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> parent = client.project_transfer_config_path('[PROJECT]', '[TRANSFER_CONFIG]') - >>> - >>> # Iterate over all results - >>> for element in client.list_transfer_runs(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_transfer_runs(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Optional version info. If users want to find a very recent access - token, that is, immediately after approving access, users have to set - the version_info claim in the token request. To obtain the version_info, - users must use the "none+gsession" response type. which be return a - version_info back in the authorization response which be be put in a JWT - claim in the token request. - states (list[~google.cloud.bigquery_datatransfer_v1.types.TransferState]): When specified, only transfer runs with requested states are returned. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - run_attempt (~google.cloud.bigquery_datatransfer_v1.types.RunAttempt): Indicates how run attempts are to be pulled. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferRun` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. 
- google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_transfer_runs" not in self._inner_api_calls: - self._inner_api_calls[ - "list_transfer_runs" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_transfer_runs, - default_retry=self._method_configs["ListTransferRuns"].retry, - default_timeout=self._method_configs["ListTransferRuns"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.ListTransferRunsRequest( - parent=parent, states=states, page_size=page_size, run_attempt=run_attempt, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_transfer_runs"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="transfer_runs", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def list_transfer_logs( - self, - parent, - page_size=None, - message_types=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Returns user facing log messages for the data transfer run. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> parent = client.project_run_path('[PROJECT]', '[TRANSFER_CONFIG]', '[RUN]') - >>> - >>> # Iterate over all results - >>> for element in client.list_transfer_logs(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_transfer_logs(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): ``ListValue`` is a wrapper around a repeated field of values. - - The JSON representation for ``ListValue`` is JSON array. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - message_types (list[~google.cloud.bigquery_datatransfer_v1.types.MessageSeverity]): Message types to return. If not populated - INFO, WARNING and ERROR - messages are returned. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferMessage` instances. 
- You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_transfer_logs" not in self._inner_api_calls: - self._inner_api_calls[ - "list_transfer_logs" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_transfer_logs, - default_retry=self._method_configs["ListTransferLogs"].retry, - default_timeout=self._method_configs["ListTransferLogs"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.ListTransferLogsRequest( - parent=parent, page_size=page_size, message_types=message_types, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_transfer_logs"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="transfer_messages", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def check_valid_creds( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Returns true if valid credentials exist for the given data source and - requesting user. - Some data sources doesn't support service account, so we need to talk to - them on behalf of the end user. This API just checks whether we have OAuth - token for the particular user, which is a pre-requisite before user can - create a transfer config. - - Example: - >>> from google.cloud import bigquery_datatransfer_v1 - >>> - >>> client = bigquery_datatransfer_v1.DataTransferServiceClient() - >>> - >>> name = client.project_data_source_path('[PROJECT]', '[DATA_SOURCE]') - >>> - >>> response = client.check_valid_creds(name) - - Args: - name (str): Output only. The next-pagination token. For multiple-page list - results, this token can be used as the - ``ListTransferConfigsRequest.page_token`` to request the next page of - list results. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.bigquery_datatransfer_v1.types.CheckValidCredsResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
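The deleted `list_*` methods hand the wrapped call to `google.api_core.page_iterator.GRPCIterator`, which keeps re-issuing the request with each response's `next_page_token` until the token comes back empty. A self-contained sketch of that loop, with a fake transport method and made-up page contents:

```py
# Not part of the diff: a self-contained sketch of the paging loop, using a
# fake transport method and made-up pages.
from types import SimpleNamespace

from google.api_core import page_iterator

# Two hypothetical response "pages", keyed by the page token that requests them.
pages = {
    "": SimpleNamespace(transfer_configs=["cfg-1", "cfg-2"], next_page_token="page-2"),
    "page-2": SimpleNamespace(transfer_configs=["cfg-3"], next_page_token=""),
}

def fake_method(request):
    """Stand-in for the wrapped RPC; returns the page selected by request.page_token."""
    return pages[request.page_token]

request = SimpleNamespace(page_token="")
iterator = page_iterator.GRPCIterator(
    client=None,
    method=fake_method,
    request=request,
    items_field="transfer_configs",
    request_token_field="page_token",
    response_token_field="next_page_token",
)

print(list(iterator))  # ['cfg-1', 'cfg-2', 'cfg-3']
```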
- if "check_valid_creds" not in self._inner_api_calls: - self._inner_api_calls[ - "check_valid_creds" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.check_valid_creds, - default_retry=self._method_configs["CheckValidCreds"].retry, - default_timeout=self._method_configs["CheckValidCreds"].timeout, - client_info=self._client_info, - ) - - request = datatransfer_pb2.CheckValidCredsRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["check_valid_creds"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client_config.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client_config.py deleted file mode 100644 index 5d57aacd6ada..000000000000 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client_config.py +++ /dev/null @@ -1,112 +0,0 @@ -config = { - "interfaces": { - "google.cloud.bigquery.datatransfer.v1.DataTransferService": { - "retry_codes": { - "retry_policy_1_codes": ["UNAVAILABLE", "DEADLINE_EXCEEDED"], - "no_retry_codes": [], - "no_retry_1_codes": [], - }, - "retry_params": { - "retry_policy_1_params": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 20000, - }, - "no_retry_params": { - "initial_retry_delay_millis": 0, - "retry_delay_multiplier": 0.0, - "max_retry_delay_millis": 0, - "initial_rpc_timeout_millis": 0, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 0, - "total_timeout_millis": 0, - }, - "no_retry_1_params": { - "initial_retry_delay_millis": 0, - "retry_delay_multiplier": 0.0, - "max_retry_delay_millis": 0, - "initial_rpc_timeout_millis": 30000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 30000, - "total_timeout_millis": 30000, - }, - }, - "methods": { - "GetDataSource": { - "timeout_millis": 20000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "ListDataSources": { - "timeout_millis": 20000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "CreateTransferConfig": { - "timeout_millis": 30000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "UpdateTransferConfig": { - "timeout_millis": 30000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "DeleteTransferConfig": { - "timeout_millis": 20000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "GetTransferConfig": { - "timeout_millis": 20000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "ListTransferConfigs": { - "timeout_millis": 20000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "ScheduleTransferRuns": { - "timeout_millis": 30000, - "retry_codes_name": "no_retry_1_codes", - 
"retry_params_name": "no_retry_1_params", - }, - "StartManualTransferRuns": { - "timeout_millis": 60000, - "retry_codes_name": "no_retry_codes", - "retry_params_name": "no_retry_params", - }, - "GetTransferRun": { - "timeout_millis": 20000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "DeleteTransferRun": { - "timeout_millis": 20000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "ListTransferRuns": { - "timeout_millis": 20000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "ListTransferLogs": { - "timeout_millis": 20000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "CheckValidCreds": { - "timeout_millis": 20000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - }, - } - } -} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/enums.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/enums.py deleted file mode 100644 index eb5d11cad902..000000000000 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/enums.py +++ /dev/null @@ -1,174 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Wrappers for protocol buffer enum types.""" - -import enum - - -class NullValue(enum.IntEnum): - """ - A designation of a specific field behavior (required, output only, - etc.) in protobuf messages. - - Examples: - - string name = 1 [(google.api.field_behavior) = REQUIRED]; State state = - 1 [(google.api.field_behavior) = OUTPUT_ONLY]; google.protobuf.Duration - ttl = 1 [(google.api.field_behavior) = INPUT_ONLY]; - google.protobuf.Timestamp expire_time = 1 [(google.api.field_behavior) = - OUTPUT_ONLY, (google.api.field_behavior) = IMMUTABLE]; - - Attributes: - NULL_VALUE (int): Null value. - """ - - NULL_VALUE = 0 - - -class TransferState(enum.IntEnum): - """ - Represents data transfer run state. - - Attributes: - TRANSFER_STATE_UNSPECIFIED (int): State placeholder. - PENDING (int): Data transfer is scheduled and is waiting to be picked up by - data transfer backend. - RUNNING (int): Data transfer is in progress. - SUCCEEDED (int): Data transfer completed successfully. - FAILED (int): Data transfer failed. - CANCELLED (int): Data transfer is cancelled. - """ - - TRANSFER_STATE_UNSPECIFIED = 0 - PENDING = 2 - RUNNING = 3 - SUCCEEDED = 4 - FAILED = 5 - CANCELLED = 6 - - -class TransferType(enum.IntEnum): - """ - DEPRECATED. Represents data transfer type. - - Attributes: - TRANSFER_TYPE_UNSPECIFIED (int): Invalid or Unknown transfer type placeholder. - BATCH (int): Batch data transfer. - STREAMING (int): Streaming data transfer. Streaming data source currently doesn't - support multiple transfer configs per project. 
- """ - - TRANSFER_TYPE_UNSPECIFIED = 0 - BATCH = 1 - STREAMING = 2 - - -class DataSource(object): - class AuthorizationType(enum.IntEnum): - """ - The type of authorization needed for this data source. - - Attributes: - AUTHORIZATION_TYPE_UNSPECIFIED (int): Type unspecified. - AUTHORIZATION_CODE (int): Use OAuth 2 authorization codes that can be exchanged - for a refresh token on the backend. - GOOGLE_PLUS_AUTHORIZATION_CODE (int): Return an authorization code for a given Google+ page that can then be - exchanged for a refresh token on the backend. - FIRST_PARTY_OAUTH (int): Use First Party Client OAuth. First Party Client OAuth doesn't require a - refresh token to get an offline access token. Instead, it uses a - client-signed JWT assertion to retrieve an access token. - """ - - AUTHORIZATION_TYPE_UNSPECIFIED = 0 - AUTHORIZATION_CODE = 1 - GOOGLE_PLUS_AUTHORIZATION_CODE = 2 - FIRST_PARTY_OAUTH = 3 - - class DataRefreshType(enum.IntEnum): - """ - Represents how the data source supports data auto refresh. - - Attributes: - DATA_REFRESH_TYPE_UNSPECIFIED (int): The data source won't support data auto refresh, which is default value. - SLIDING_WINDOW (int): The data source supports data auto refresh, and runs will be scheduled - for the past few days. Does not allow custom values to be set for each - transfer config. - CUSTOM_SLIDING_WINDOW (int): The data source supports data auto refresh, and runs will be scheduled - for the past few days. Allows custom values to be set for each transfer - config. - """ - - DATA_REFRESH_TYPE_UNSPECIFIED = 0 - SLIDING_WINDOW = 1 - CUSTOM_SLIDING_WINDOW = 2 - - -class DataSourceParameter(object): - class Type(enum.IntEnum): - """ - Parameter type. - - Attributes: - TYPE_UNSPECIFIED (int): Type unspecified. - STRING (int): String parameter. - INTEGER (int): Integer parameter (64-bits). - Will be serialized to json as string. - DOUBLE (int): Double precision floating point parameter. - BOOLEAN (int): Boolean parameter. - RECORD (int): Deprecated. This field has no effect. - PLUS_PAGE (int): Page ID for a Google+ Page. - """ - - TYPE_UNSPECIFIED = 0 - STRING = 1 - INTEGER = 2 - DOUBLE = 3 - BOOLEAN = 4 - RECORD = 5 - PLUS_PAGE = 6 - - -class ListTransferRunsRequest(object): - class RunAttempt(enum.IntEnum): - """ - Represents which runs should be pulled. - - Attributes: - RUN_ATTEMPT_UNSPECIFIED (int): All runs should be returned. - LATEST (int): Only latest run per day should be returned. - """ - - RUN_ATTEMPT_UNSPECIFIED = 0 - LATEST = 1 - - -class TransferMessage(object): - class MessageSeverity(enum.IntEnum): - """ - Represents data transfer user facing message severity. - - Attributes: - MESSAGE_SEVERITY_UNSPECIFIED (int): No severity specified. - INFO (int): Informational message. - WARNING (int): Warning message. - ERROR (int): Error message. 
- """ - - MESSAGE_SEVERITY_UNSPECIFIED = 0 - INFO = 1 - WARNING = 2 - ERROR = 3 diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/transports/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/transports/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/transports/data_transfer_service_grpc_transport.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/transports/data_transfer_service_grpc_transport.py deleted file mode 100644 index f372a1d483b9..000000000000 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/transports/data_transfer_service_grpc_transport.py +++ /dev/null @@ -1,313 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from google.cloud.bigquery_datatransfer_v1.proto import datatransfer_pb2_grpc - - -class DataTransferServiceGrpcTransport(object): - """gRPC transport class providing stubs for - google.cloud.bigquery.datatransfer.v1 DataTransferService API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - - def __init__( - self, - channel=None, - credentials=None, - address="bigquerydatatransfer.googleapis.com:443", - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive.", - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. 
- self._stubs = { - "data_transfer_service_stub": datatransfer_pb2_grpc.DataTransferServiceStub( - channel - ), - } - - @classmethod - def create_channel( - cls, - address="bigquerydatatransfer.googleapis.com:443", - credentials=None, - **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def get_data_source(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.get_data_source`. - - Retrieves a supported data source and returns its settings, - which can be used for UI rendering. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].GetDataSource - - @property - def list_data_sources(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.list_data_sources`. - - Lists supported data sources and returns their settings, - which can be used for UI rendering. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].ListDataSources - - @property - def create_transfer_config(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.create_transfer_config`. - - Creates a new data transfer configuration. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].CreateTransferConfig - - @property - def update_transfer_config(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.update_transfer_config`. - - Updates a data transfer configuration. - All fields must be set, even if they are not updated. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].UpdateTransferConfig - - @property - def delete_transfer_config(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.delete_transfer_config`. - - Deletes a data transfer configuration, - including any associated transfer runs and logs. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].DeleteTransferConfig - - @property - def get_transfer_config(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.get_transfer_config`. - - Returns information about a data transfer config. 
- - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].GetTransferConfig - - @property - def list_transfer_configs(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.list_transfer_configs`. - - Returns information about all data transfers in the project. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].ListTransferConfigs - - @property - def schedule_transfer_runs(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.schedule_transfer_runs`. - - If type_name is set, this need not be set. If both this and - type_name are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or - TYPE_GROUP. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].ScheduleTransferRuns - - @property - def start_manual_transfer_runs(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.start_manual_transfer_runs`. - - Pagination token, which can be used to request a specific page of - ``ListTransferRunsRequest`` list results. For multiple-page results, - ``ListTransferRunsResponse`` outputs a ``next_page`` token, which can be - used as the ``page_token`` value to request the next page of list - results. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].StartManualTransferRuns - - @property - def get_transfer_run(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.get_transfer_run`. - - Returns information about the particular transfer run. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].GetTransferRun - - @property - def delete_transfer_run(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.delete_transfer_run`. - - Deletes the specified transfer run. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].DeleteTransferRun - - @property - def list_transfer_runs(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.list_transfer_runs`. - - Returns information about running and completed jobs. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].ListTransferRuns - - @property - def list_transfer_logs(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.list_transfer_logs`. - - Returns user facing log messages for the data transfer run. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].ListTransferLogs - - @property - def check_valid_creds(self): - """Return the gRPC stub for :meth:`DataTransferServiceClient.check_valid_creds`. 
- - Returns true if valid credentials exist for the given data source and - requesting user. - Some data sources doesn't support service account, so we need to talk to - them on behalf of the end user. This API just checks whether we have OAuth - token for the particular user, which is a pre-requisite before user can - create a transfer config. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["data_transfer_service_stub"].CheckValidCreds diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/__init__.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datasource.proto b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datasource.proto deleted file mode 100644 index d7400a55935d..000000000000 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datasource.proto +++ /dev/null @@ -1,542 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.cloud.bigquery.datatransfer.v1; - -import "google/api/annotations.proto"; -import "google/cloud/bigquery/datatransfer/v1/datatransfer.proto"; -import "google/cloud/bigquery/datatransfer/v1/transfer.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; -import "google/protobuf/timestamp.proto"; -import "google/protobuf/wrappers.proto"; -import "google/api/client.proto"; - -option csharp_namespace = "Google.Cloud.BigQuery.DataTransfer.V1"; -option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer"; -option java_multiple_files = true; -option java_outer_classname = "DataSourceProto"; -option java_package = "com.google.cloud.bigquery.datatransfer.v1"; -option php_namespace = "Google\\Cloud\\BigQuery\\DataTransfer\\V1"; - -// The Google BigQuery Data Transfer API allows BigQuery users to -// configure transfer of their data from other Google Products into BigQuery. -// This service exposes methods that should be used by data source backend. -service DataSourceService { - option (google.api.default_host) = "bigquerydatatransfer.googleapis.com"; - - // Update a transfer run. If successful, resets - // data_source.update_deadline_seconds timer. - rpc UpdateTransferRun(UpdateTransferRunRequest) returns (TransferRun) { - option (google.api.http) = { - patch: "/v1/{transfer_run.name=projects/*/locations/*/transferConfigs/*/runs/*}" - body: "transfer_run" - }; - } - - // Log messages for a transfer run. If successful (at least 1 message), resets - // data_source.update_deadline_seconds timer. 
- rpc LogTransferRunMessages(LogTransferRunMessagesRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}:logMessages" - body: "*" - }; - } - - // Notify the Data Transfer Service that data is ready for loading. - // The Data Transfer Service will start and monitor multiple BigQuery Load - // jobs for a transfer run. Monitored jobs will be automatically retried - // and produce log messages when starting and finishing a job. - // Can be called multiple times for the same transfer run. - rpc StartBigQueryJobs(StartBigQueryJobsRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}:startBigQueryJobs" - body: "*" - }; - } - - // Notify the Data Transfer Service that the data source is done processing - // the run. No more status updates or requests to start/monitor jobs will be - // accepted. The run will be finalized by the Data Transfer Service when all - // monitored jobs are completed. - // Does not need to be called if the run is set to FAILED. - rpc FinishRun(FinishRunRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}:finishRun" - body: "*" - }; - } - - // Creates a data source definition. Calling this method will automatically - // use your credentials to create the following Google Cloud resources in - // YOUR Google Cloud project. - // 1. OAuth client - // 2. Pub/Sub Topics and Subscriptions in each supported_location_ids. e.g., - // projects/{project_id}/{topics|subscriptions}/bigquerydatatransfer.{data_source_id}.{location_id}.run - // The field data_source.client_id should be left empty in the input request, - // as the API will create a new OAuth client on behalf of the caller. On the - // other hand data_source.scopes usually need to be set when there are OAuth - // scopes that need to be granted by end users. - // 3. We need a longer deadline due to the 60 seconds SLO from Pub/Sub admin - // Operations. This also applies to update and delete data source definition. - rpc CreateDataSourceDefinition(CreateDataSourceDefinitionRequest) returns (DataSourceDefinition) { - option (google.api.http) = { - post: "/v1/{parent=projects/*/locations/*}/dataSourceDefinitions" - body: "data_source_definition" - }; - } - - // Updates an existing data source definition. If changing - // supported_location_ids, triggers same effects as mentioned in "Create a - // data source definition." - rpc UpdateDataSourceDefinition(UpdateDataSourceDefinitionRequest) returns (DataSourceDefinition) { - option (google.api.http) = { - patch: "/v1/{data_source_definition.name=projects/*/locations/*/dataSourceDefinitions/*}" - body: "data_source_definition" - }; - } - - // Deletes a data source definition, all of the transfer configs associated - // with this data source definition (if any) must be deleted first by the user - // in ALL regions, in order to delete the data source definition. - // This method is primarily meant for deleting data sources created during - // testing stage. - // If the data source is referenced by transfer configs in the region - // specified in the request URL, the method will fail immediately. 
If in the - // current region (e.g., US) it's not used by any transfer configs, but in - // another region (e.g., EU) it is, then although the method will succeed in - // region US, but it will fail when the deletion operation is replicated to - // region EU. And eventually, the system will replicate the data source - // definition back from EU to US, in order to bring all regions to - // consistency. The final effect is that the data source appears to be - // 'undeleted' in the US region. - rpc DeleteDataSourceDefinition(DeleteDataSourceDefinitionRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v1/{name=projects/*/locations/*/dataSourceDefinitions/*}" - }; - } - - // Retrieves an existing data source definition. - rpc GetDataSourceDefinition(GetDataSourceDefinitionRequest) returns (DataSourceDefinition) { - option (google.api.http) = { - get: "/v1/{name=projects/*/locations/*/dataSourceDefinitions/*}" - }; - } - - // Lists supported data source definitions. - rpc ListDataSourceDefinitions(ListDataSourceDefinitionsRequest) returns (ListDataSourceDefinitionsResponse) { - option (google.api.http) = { - get: "/v1/{parent=projects/*/locations/*}/dataSourceDefinitions" - }; - } -} - -// Describes data which should be imported. -message ImportedDataInfo { - // Defines schema of a field in the imported data. - message FieldSchema { - // LINT.IfChange - // Field type. - enum Type { - // Illegal value. - TYPE_UNSPECIFIED = 0; - - // 64K, UTF8. - STRING = 1; - - // 64-bit signed. - INTEGER = 2; - - // 64-bit IEEE floating point. - FLOAT = 3; - - // Aggregate type. - RECORD = 4; - - // 64K, Binary. - BYTES = 5; - - // 2-valued. - BOOLEAN = 6; - - // 64-bit signed usec since UTC epoch. - TIMESTAMP = 7; - - // Civil date - Year, Month, Day. - DATE = 8; - - // Civil time - Hour, Minute, Second, Microseconds. - TIME = 9; - - // Combination of civil date and civil time. - DATETIME = 10; - - // Numeric type with 38 decimal digits of precision and 9 decimal digits - // of scale. - NUMERIC = 11; - - // Geography object (go/googlesql_geography). - GEOGRAPHY = 12; - } - - // Field name. Matches: [A-Za-z_][A-Za-z_0-9]{0,127} - string field_name = 1; - - // Field type - Type type = 2; - - // Is field repeated. - bool is_repeated = 3; - - // Description for this field. - string description = 4; - - // Present iff type == RECORD. - RecordSchema schema = 5; - } - - // Describes schema of the data to be ingested. - message RecordSchema { - // One field per column in the record. - repeated FieldSchema fields = 1; - } - - // External table definition. These tables can be referenced with 'name' - // in the query and can be read just like any other table. - message TableDefinition { - // CSV specific options. - message CsvOptions { - // The delimiter. We currently restrict this to U+0001 to U+00FF and - // apply additional constraints during validation. - google.protobuf.StringValue field_delimiter = 1; - - // Whether CSV files are allowed to have quoted newlines. If quoted - // newlines are allowed, we can't split CSV files. - google.protobuf.BoolValue allow_quoted_newlines = 2; - - // The quote character. We currently restrict this to U+0000 to U+00FF - // and apply additional constraints during validation. Set to '\0' to - // indicate no quote is used. - google.protobuf.StringValue quote_char = 3; - - // Number of leading rows to skip. - google.protobuf.Int64Value skip_leading_rows = 4; - - // Accept rows that are missing trailing optional columns. 
- google.protobuf.BoolValue allow_jagged_rows = 5; - } - - // BigQuery table_id (required). This will be used to reference this - // table in the query. - string table_id = 1; - - // URIs for the data to be imported. All URIs must be from the same storage - // system. - repeated string source_uris = 2; - - // Describes the format of the data in source_uri. - Format format = 3; - - // Specify the maximum number of bad records that can be ignored. - // If bad records exceed this threshold the query is aborted. - int32 max_bad_records = 4; - - // Character encoding of the input when applicable (CSV, JSON). - // Defaults to UTF8. - Encoding encoding = 5; - - // CSV specific options. - CsvOptions csv_options = 6; - - // Optional schema for the data. When not specified for JSON and CSV formats - // we will try to detect it automatically. - RecordSchema schema = 7; - - // Indicates if extra values that are not represented in the table schema is - // allowed. - google.protobuf.BoolValue ignore_unknown_values = 10; - } - - // Data format. - enum Format { - // Unspecified format. In this case, we have to infer the format from the - // data source. - FORMAT_UNSPECIFIED = 0; - - // CSV format. - CSV = 1; - - // Newline-delimited JSON. - JSON = 2; - - // Avro format. See http://avro.apache.org . - AVRO = 3; - - // RecordIO. - RECORDIO = 4; - - // ColumnIO. - COLUMNIO = 5; - - // Capacitor. - CAPACITOR = 6; - - // Parquet format. See https://parquet.apache.org . - PARQUET = 7; - - // ORC format. See https://orc.apache.org . - ORC = 8; - } - - // Encoding of input data in CSV/JSON format. - enum Encoding { - // Default encoding (UTF8). - ENCODING_UNSPECIFIED = 0; - - // ISO_8859_1 encoding. - ISO_8859_1 = 1; - - // UTF8 encoding. - UTF8 = 2; - } - - // SQL query to run. When empty, API checks that there is only one - // table_def specified and loads this table. Only Standard SQL queries - // are accepted. Legacy SQL is not allowed. - string sql = 1; - - // Table where results should be written. - string destination_table_id = 2; - - // The description of a destination table. This can be several sentences - // or paragraphs describing the table contents in detail. - string destination_table_description = 10; - - // When used WITHOUT the "sql" parameter, describes the schema of the - // destination table. - // When used WITH the "sql" parameter, describes tables with data stored - // outside of BigQuery. - repeated TableDefinition table_defs = 3; - - // Inline code for User-defined function resources. - // Ignored when "sql" parameter is empty. - repeated string user_defined_functions = 4; - - // Specifies the action if the destination table already exists. - WriteDisposition write_disposition = 6; -} - -// A request to update a transfer run. -message UpdateTransferRunRequest { - // Run name must be set and correspond to an already existing run. Only - // state, error_status, and data_version fields will be updated. All other - // fields will be ignored. - TransferRun transfer_run = 1; - - // Required list of fields to be updated in this request. - google.protobuf.FieldMask update_mask = 2; -} - -// A request to add transfer status messages to the run. -message LogTransferRunMessagesRequest { - // Name of the resource in the form: - // "projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}" - string name = 1; - - // Messages to append. - repeated TransferMessage transfer_messages = 2; -} - -// A request to start and monitor a BigQuery load job. 
-message StartBigQueryJobsRequest { - // Name of the resource in the form: - // "projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}" - string name = 1; - - // Import jobs which should be started and monitored. - repeated ImportedDataInfo imported_data = 2; - - // User credentials which should be used to start/monitor - // BigQuery jobs. If not specified, then jobs - // are started using data source service account credentials. - // This may be OAuth token or JWT token. - bytes user_credentials = 3; - - // The number of BQ Jobs that can run in parallel. - int32 max_parallelism = 8; -} - -// A request to finish a run. -message FinishRunRequest { - // Name of the resource in the form: - // "projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}" - string name = 1; -} - -// Represents the request of the CreateDataSourceDefinition method. -message CreateDataSourceDefinitionRequest { - // The BigQuery project id for which data source definition is associated. - // Must be in the form: `projects/{project_id}/locations/{location_id}` - string parent = 1; - - // Data source definition. - DataSourceDefinition data_source_definition = 2; -} - -// Represents the request of the UpdateDataSourceDefinition method. -message UpdateDataSourceDefinitionRequest { - // Data source definition. - DataSourceDefinition data_source_definition = 1; - - // Update field mask. - google.protobuf.FieldMask update_mask = 2; -} - -// Represents the request of the DeleteDataSourceDefinition method. All transfer -// configs associated with the data source must be deleted first, before the -// data source can be deleted. -message DeleteDataSourceDefinitionRequest { - // The field will contain name of the resource requested, for example: - // `projects/{project_id}/locations/{location_id}/dataSourceDefinitions/{data_source_id}` - string name = 1; -} - -// Represents the request of the GetDataSourceDefinition method. -message GetDataSourceDefinitionRequest { - // The field will contain name of the resource requested. - string name = 1; -} - -// Options for writing to the table. -// The WRITE_EMPTY option is intentionally excluded from the enum and is not -// supported by the data transfer service. -enum WriteDisposition { - // The default writeDispostion - WRITE_DISPOSITION_UNSPECIFIED = 0; - - // overwrites the table data. - WRITE_TRUNCATE = 1; - - // the data is appended to the table. - // Note duplication might happen if this mode is used. - WRITE_APPEND = 2; -} - -// Represents the request of the ListDataSourceDefinitions method. -message ListDataSourceDefinitionsRequest { - // The BigQuery project id for which data sources should be returned. - // Must be in the form: `projects/{project_id}/locations/{location_id}` - string parent = 1; - - // Pagination token, which can be used to request a specific page - // of `ListDataSourceDefinitionsRequest` list results. For multiple-page - // results, `ListDataSourceDefinitionsResponse` outputs a `next_page` token, - // which can be used as the `page_token` value to request the next page of - // the list results. - string page_token = 2; - - // Page size. The default page size is the maximum value of 1000 results. - int32 page_size = 3; -} - -// Returns a list of supported data source definitions. -message ListDataSourceDefinitionsResponse { - // List of supported data source definitions. - repeated DataSourceDefinition data_source_definitions = 1; - - // Output only. The next-pagination token. 
For multiple-page list results, - // this token can be used as the - // `ListDataSourceDefinitionsRequest.page_token` - // to request the next page of the list results. - string next_page_token = 2; -} - -// Represents the data source definition. -message DataSourceDefinition { - // The resource name of the data source definition. - // Data source definition names have the form - // `projects/{project_id}/locations/{location}/dataSourceDefinitions/{data_source_id}`. - string name = 21; - - // Data source metadata. - DataSource data_source = 1; - - // The Pub/Sub topic to be used for broadcasting a message when a transfer run - // is created. Both this topic and transfer_config_pubsub_topic can be - // set to a custom topic. By default, both topics are auto-generated if none - // of them is provided when creating the definition. However, if one topic is - // manually set, the other topic has to be manually set as well. The only - // difference is that transfer_run_pubsub_topic must be a non-empty Pub/Sub - // topic, but transfer_config_pubsub_topic can be set to empty. The comments - // about "{location}" for transfer_config_pubsub_topic apply here too. - string transfer_run_pubsub_topic = 13; - - // Duration which should be added to schedule_time to calculate - // run_time when job is scheduled. Only applicable for automatically - // scheduled transfer runs. Used to start a run early on a data source that - // supports continuous data refresh to compensate for unknown timezone - // offsets. Use a negative number to start a run late for data sources not - // supporting continuous data refresh. - google.protobuf.Duration run_time_offset = 16; - - // Support e-mail address of the OAuth client's Brand, which contains the - // consent screen data. - string support_email = 22; - - // When service account is specified, BigQuery will share created dataset - // with the given service account. Also, this service account will be - // eligible to perform status updates and message logging for data transfer - // runs for the corresponding data_source_id. - string service_account = 2; - - // Is data source disabled? If true, data_source is not visible. - // API will also stop returning any data transfer configs and/or runs - // associated with the data source. This setting has higher priority - // than whitelisted_project_ids. - bool disabled = 5; - - // The Pub/Sub topic to use for broadcasting a message for transfer config. If - // empty, a message will not be broadcasted. Both this topic and - // transfer_run_pubsub_topic are auto-generated if none of them is provided - // when creating the definition. It is recommended to provide - // transfer_config_pubsub_topic if a user-owned transfer_run_pubsub_topic is - // provided. Otherwise, it will be set to empty. If "{location}" is found in - // the value, then that means, data source wants to handle message separately - // for datasets in different regions. We will replace {location} with the - // actual dataset location, as the actual topic name. For example, - // projects/connector/topics/scheduler-{location} could become - // projects/connector/topics/scheduler-us. If "{location}" is not found, then - // we will use the input value as topic name. - string transfer_config_pubsub_topic = 12; - - // Supported location_ids used for deciding in which locations Pub/Sub topics - // need to be created. 
If custom Pub/Sub topics are used and they contains - // '{location}', the location_ids will be used for validating the topics by - // replacing the '{location}' with the individual location in the list. The - // valid values are the "location_id" field of the response of `GET - // https://bigquerydatatransfer.googleapis.com/v1/{name=projects/*}/locations` - // In addition, if the data source needs to support all available regions, - // supported_location_ids can be set to "global" (a single string element). - // When "global" is specified: - // 1) the data source implementation is supposed to stage the data in proper - // region of the destination dataset; - // 2) Data source developer should be aware of the implications (e.g., network - // traffic latency, potential charge associated with cross-region traffic, - // etc.) of supporting the "global" region; - repeated string supported_location_ids = 23; -} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datasource_pb2.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datasource_pb2.py deleted file mode 100644 index 82c7e654e364..000000000000 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datasource_pb2.py +++ /dev/null @@ -1,2221 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/bigquery/datatransfer_v1/proto/datasource.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.cloud.bigquery.datatransfer_v1.proto import ( - datatransfer_pb2 as google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2, -) -from google.cloud.bigquery.datatransfer_v1.proto import ( - transfer_pb2 as google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2, -) -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/bigquery/datatransfer_v1/proto/datasource.proto", - package="google.cloud.bigquery.datatransfer.v1", - syntax="proto3", - serialized_options=_b( - "\n)com.google.cloud.bigquery.datatransfer.v1B\017DataSourceProtoP\001ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\252\002%Google.Cloud.BigQuery.DataTransfer.V1\312\002%Google\\Cloud\\BigQuery\\DataTransfer\\V1" - ), - serialized_pb=_b( - 
'\ngoogle/cloud/bigquery/datatransfer_v1/proto/datatransfer.proto\x1a:google/cloud/bigquery/datatransfer_v1/proto/transfer.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x17google/api/client.proto"\x9e\x0e\n\x10ImportedDataInfo\x12\x0b\n\x03sql\x18\x01 \x01(\t\x12\x1c\n\x14\x64\x65stination_table_id\x18\x02 \x01(\t\x12%\n\x1d\x64\x65stination_table_description\x18\n \x01(\t\x12[\n\ntable_defs\x18\x03 \x03(\x0b\x32G.google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition\x12\x1e\n\x16user_defined_functions\x18\x04 \x03(\t\x12R\n\x11write_disposition\x18\x06 \x01(\x0e\x32\x37.google.cloud.bigquery.datatransfer.v1.WriteDisposition\x1a\xad\x03\n\x0b\x46ieldSchema\x12\x12\n\nfield_name\x18\x01 \x01(\t\x12V\n\x04type\x18\x02 \x01(\x0e\x32H.google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema.Type\x12\x13\n\x0bis_repeated\x18\x03 \x01(\x08\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12T\n\x06schema\x18\x05 \x01(\x0b\x32\x44.google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.RecordSchema"\xb1\x01\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\n\n\x06STRING\x10\x01\x12\x0b\n\x07INTEGER\x10\x02\x12\t\n\x05\x46LOAT\x10\x03\x12\n\n\x06RECORD\x10\x04\x12\t\n\x05\x42YTES\x10\x05\x12\x0b\n\x07\x42OOLEAN\x10\x06\x12\r\n\tTIMESTAMP\x10\x07\x12\x08\n\x04\x44\x41TE\x10\x08\x12\x08\n\x04TIME\x10\t\x12\x0c\n\x08\x44\x41TETIME\x10\n\x12\x0b\n\x07NUMERIC\x10\x0b\x12\r\n\tGEOGRAPHY\x10\x0c\x1a\x63\n\x0cRecordSchema\x12S\n\x06\x66ields\x18\x01 \x03(\x0b\x32\x43.google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema\x1a\x91\x06\n\x0fTableDefinition\x12\x10\n\x08table_id\x18\x01 \x01(\t\x12\x13\n\x0bsource_uris\x18\x02 \x03(\t\x12N\n\x06\x66ormat\x18\x03 \x01(\x0e\x32>.google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.Format\x12\x17\n\x0fmax_bad_records\x18\x04 \x01(\x05\x12R\n\x08\x65ncoding\x18\x05 \x01(\x0e\x32@.google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.Encoding\x12g\n\x0b\x63sv_options\x18\x06 \x01(\x0b\x32R.google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.CsvOptions\x12T\n\x06schema\x18\x07 \x01(\x0b\x32\x44.google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.RecordSchema\x12\x39\n\x15ignore_unknown_values\x18\n \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x1a\x9f\x02\n\nCsvOptions\x12\x35\n\x0f\x66ield_delimiter\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x39\n\x15\x61llow_quoted_newlines\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x30\n\nquote_char\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x36\n\x11skip_leading_rows\x18\x04 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x35\n\x11\x61llow_jagged_rows\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.BoolValue"~\n\x06\x46ormat\x12\x16\n\x12\x46ORMAT_UNSPECIFIED\x10\x00\x12\x07\n\x03\x43SV\x10\x01\x12\x08\n\x04JSON\x10\x02\x12\x08\n\x04\x41VRO\x10\x03\x12\x0c\n\x08RECORDIO\x10\x04\x12\x0c\n\x08\x43OLUMNIO\x10\x05\x12\r\n\tCAPACITOR\x10\x06\x12\x0b\n\x07PARQUET\x10\x07\x12\x07\n\x03ORC\x10\x08">\n\x08\x45ncoding\x12\x18\n\x14\x45NCODING_UNSPECIFIED\x10\x00\x12\x0e\n\nISO_8859_1\x10\x01\x12\x08\n\x04UTF8\x10\x02"\x95\x01\n\x18UpdateTransferRunRequest\x12H\n\x0ctransfer_run\x18\x01 \x01(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRun\x12/\n\x0bupdate_mask\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.FieldMask"\x80\x01\n\x1dLogTransferRunMessagesRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12Q\n\x11transfer_messages\x18\x02 \x03(\x0b\x32\x36.google.cloud.bigquery.datatransfer.v1.TransferMessage"\xab\x01\n\x18StartBigQueryJobsRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12N\n\rimported_data\x18\x02 \x03(\x0b\x32\x37.google.cloud.bigquery.datatransfer.v1.ImportedDataInfo\x12\x18\n\x10user_credentials\x18\x03 \x01(\x0c\x12\x17\n\x0fmax_parallelism\x18\x08 \x01(\x05" \n\x10\x46inishRunRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x90\x01\n!CreateDataSourceDefinitionRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12[\n\x16\x64\x61ta_source_definition\x18\x02 \x01(\x0b\x32;.google.cloud.bigquery.datatransfer.v1.DataSourceDefinition"\xb1\x01\n!UpdateDataSourceDefinitionRequest\x12[\n\x16\x64\x61ta_source_definition\x18\x01 \x01(\x0b\x32;.google.cloud.bigquery.datatransfer.v1.DataSourceDefinition\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"1\n!DeleteDataSourceDefinitionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t".\n\x1eGetDataSourceDefinitionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"Y\n ListDataSourceDefinitionsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"\x9a\x01\n!ListDataSourceDefinitionsResponse\x12\\\n\x17\x64\x61ta_source_definitions\x18\x01 \x03(\x0b\x32;.google.cloud.bigquery.datatransfer.v1.DataSourceDefinition\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\xcb\x02\n\x14\x44\x61taSourceDefinition\x12\x0c\n\x04name\x18\x15 \x01(\t\x12\x46\n\x0b\x64\x61ta_source\x18\x01 \x01(\x0b\x32\x31.google.cloud.bigquery.datatransfer.v1.DataSource\x12!\n\x19transfer_run_pubsub_topic\x18\r \x01(\t\x12\x32\n\x0frun_time_offset\x18\x10 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x15\n\rsupport_email\x18\x16 \x01(\t\x12\x17\n\x0fservice_account\x18\x02 \x01(\t\x12\x10\n\x08\x64isabled\x18\x05 \x01(\x08\x12$\n\x1ctransfer_config_pubsub_topic\x18\x0c \x01(\t\x12\x1e\n\x16supported_location_ids\x18\x17 
\x03(\t*[\n\x10WriteDisposition\x12!\n\x1dWRITE_DISPOSITION_UNSPECIFIED\x10\x00\x12\x12\n\x0eWRITE_TRUNCATE\x10\x01\x12\x10\n\x0cWRITE_APPEND\x10\x02\x32\x9d\x10\n\x11\x44\x61taSourceService\x12\xe7\x01\n\x11UpdateTransferRun\x12?.google.cloud.bigquery.datatransfer.v1.UpdateTransferRunRequest\x1a\x32.google.cloud.bigquery.datatransfer.v1.TransferRun"]\x82\xd3\xe4\x93\x02W2G/v1/{transfer_run.name=projects/*/locations/*/transferConfigs/*/runs/*}:\x0ctransfer_run\x12\xc9\x01\n\x16LogTransferRunMessages\x12\x44.google.cloud.bigquery.datatransfer.v1.LogTransferRunMessagesRequest\x1a\x16.google.protobuf.Empty"Q\x82\xd3\xe4\x93\x02K"F/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}:logMessages:\x01*\x12\xc5\x01\n\x11StartBigQueryJobs\x12?.google.cloud.bigquery.datatransfer.v1.StartBigQueryJobsRequest\x1a\x16.google.protobuf.Empty"W\x82\xd3\xe4\x93\x02Q"L/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}:startBigQueryJobs:\x01*\x12\xad\x01\n\tFinishRun\x12\x37.google.cloud.bigquery.datatransfer.v1.FinishRunRequest\x1a\x16.google.protobuf.Empty"O\x82\xd3\xe4\x93\x02I"D/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}:finishRun:\x01*\x12\xfe\x01\n\x1a\x43reateDataSourceDefinition\x12H.google.cloud.bigquery.datatransfer.v1.CreateDataSourceDefinitionRequest\x1a;.google.cloud.bigquery.datatransfer.v1.DataSourceDefinition"Y\x82\xd3\xe4\x93\x02S"9/v1/{parent=projects/*/locations/*}/dataSourceDefinitions:\x16\x64\x61ta_source_definition\x12\x95\x02\n\x1aUpdateDataSourceDefinition\x12H.google.cloud.bigquery.datatransfer.v1.UpdateDataSourceDefinitionRequest\x1a;.google.cloud.bigquery.datatransfer.v1.DataSourceDefinition"p\x82\xd3\xe4\x93\x02j2P/v1/{data_source_definition.name=projects/*/locations/*/dataSourceDefinitions/*}:\x16\x64\x61ta_source_definition\x12\xc1\x01\n\x1a\x44\x65leteDataSourceDefinition\x12H.google.cloud.bigquery.datatransfer.v1.DeleteDataSourceDefinitionRequest\x1a\x16.google.protobuf.Empty"A\x82\xd3\xe4\x93\x02;*9/v1/{name=projects/*/locations/*/dataSourceDefinitions/*}\x12\xe0\x01\n\x17GetDataSourceDefinition\x12\x45.google.cloud.bigquery.datatransfer.v1.GetDataSourceDefinitionRequest\x1a;.google.cloud.bigquery.datatransfer.v1.DataSourceDefinition"A\x82\xd3\xe4\x93\x02;\x12\x39/v1/{name=projects/*/locations/*/dataSourceDefinitions/*}\x12\xf1\x01\n\x19ListDataSourceDefinitions\x12G.google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsRequest\x1aH.google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsResponse"A\x82\xd3\xe4\x93\x02;\x12\x39/v1/{parent=projects/*/locations/*}/dataSourceDefinitions\x1a&\xca\x41#bigquerydatatransfer.googleapis.comB\xe1\x01\n)com.google.cloud.bigquery.datatransfer.v1B\x0f\x44\x61taSourceProtoP\x01ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\xaa\x02%Google.Cloud.BigQuery.DataTransfer.V1\xca\x02%Google\\Cloud\\BigQuery\\DataTransfer\\V1b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2.DESCRIPTOR, - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - ], -) - -_WRITEDISPOSITION = _descriptor.EnumDescriptor( - 
name="WriteDisposition", - full_name="google.cloud.bigquery.datatransfer.v1.WriteDisposition", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="WRITE_DISPOSITION_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="WRITE_TRUNCATE", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="WRITE_APPEND", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3766, - serialized_end=3857, -) -_sym_db.RegisterEnumDescriptor(_WRITEDISPOSITION) - -WriteDisposition = enum_type_wrapper.EnumTypeWrapper(_WRITEDISPOSITION) -WRITE_DISPOSITION_UNSPECIFIED = 0 -WRITE_TRUNCATE = 1 -WRITE_APPEND = 2 - - -_IMPORTEDDATAINFO_FIELDSCHEMA_TYPE = _descriptor.EnumDescriptor( - name="Type", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema.Type", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="STRING", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="INTEGER", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="FLOAT", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="RECORD", index=4, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="BYTES", index=5, number=5, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="BOOLEAN", index=6, number=6, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="TIMESTAMP", index=7, number=7, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="DATE", index=8, number=8, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="TIME", index=9, number=9, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="DATETIME", index=10, number=10, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="NUMERIC", index=11, number=11, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="GEOGRAPHY", index=12, number=12, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1007, - serialized_end=1184, -) -_sym_db.RegisterEnumDescriptor(_IMPORTEDDATAINFO_FIELDSCHEMA_TYPE) - -_IMPORTEDDATAINFO_FORMAT = _descriptor.EnumDescriptor( - name="Format", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.Format", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="FORMAT_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="CSV", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="JSON", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="AVRO", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="RECORDIO", index=4, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="COLUMNIO", index=5, number=5, 
serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="CAPACITOR", index=6, number=6, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="PARQUET", index=7, number=7, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ORC", index=8, number=8, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2075, - serialized_end=2201, -) -_sym_db.RegisterEnumDescriptor(_IMPORTEDDATAINFO_FORMAT) - -_IMPORTEDDATAINFO_ENCODING = _descriptor.EnumDescriptor( - name="Encoding", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.Encoding", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="ENCODING_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="ISO_8859_1", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="UTF8", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2203, - serialized_end=2265, -) -_sym_db.RegisterEnumDescriptor(_IMPORTEDDATAINFO_ENCODING) - - -_IMPORTEDDATAINFO_FIELDSCHEMA = _descriptor.Descriptor( - name="FieldSchema", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="field_name", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema.field_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="type", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema.type", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="is_repeated", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema.is_repeated", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema.description", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="schema", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema.schema", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_IMPORTEDDATAINFO_FIELDSCHEMA_TYPE], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=755, - serialized_end=1184, -) - -_IMPORTEDDATAINFO_RECORDSCHEMA = _descriptor.Descriptor( - name="RecordSchema", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.RecordSchema", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="fields", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.RecordSchema.fields", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1186, - serialized_end=1285, -) - -_IMPORTEDDATAINFO_TABLEDEFINITION_CSVOPTIONS = _descriptor.Descriptor( - name="CsvOptions", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.CsvOptions", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="field_delimiter", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.CsvOptions.field_delimiter", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="allow_quoted_newlines", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.CsvOptions.allow_quoted_newlines", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="quote_char", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.CsvOptions.quote_char", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="skip_leading_rows", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.CsvOptions.skip_leading_rows", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="allow_jagged_rows", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.CsvOptions.allow_jagged_rows", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1786, - serialized_end=2073, -) - -_IMPORTEDDATAINFO_TABLEDEFINITION = _descriptor.Descriptor( - name="TableDefinition", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="table_id", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.table_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="source_uris", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.source_uris", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="format", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.format", - index=2, - number=3, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="max_bad_records", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.max_bad_records", - index=3, - number=4, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="encoding", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.encoding", - index=4, - number=5, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="csv_options", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.csv_options", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="schema", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.schema", - index=6, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="ignore_unknown_values", - 
full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.ignore_unknown_values", - index=7, - number=10, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_IMPORTEDDATAINFO_TABLEDEFINITION_CSVOPTIONS], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1288, - serialized_end=2073, -) - -_IMPORTEDDATAINFO = _descriptor.Descriptor( - name="ImportedDataInfo", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="sql", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.sql", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="destination_table_id", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.destination_table_id", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="destination_table_description", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.destination_table_description", - index=2, - number=10, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="table_defs", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.table_defs", - index=3, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="user_defined_functions", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.user_defined_functions", - index=4, - number=4, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="write_disposition", - full_name="google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.write_disposition", - index=5, - number=6, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[ - _IMPORTEDDATAINFO_FIELDSCHEMA, - _IMPORTEDDATAINFO_RECORDSCHEMA, - _IMPORTEDDATAINFO_TABLEDEFINITION, - ], - 
enum_types=[_IMPORTEDDATAINFO_FORMAT, _IMPORTEDDATAINFO_ENCODING], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=443, - serialized_end=2265, -) - - -_UPDATETRANSFERRUNREQUEST = _descriptor.Descriptor( - name="UpdateTransferRunRequest", - full_name="google.cloud.bigquery.datatransfer.v1.UpdateTransferRunRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="transfer_run", - full_name="google.cloud.bigquery.datatransfer.v1.UpdateTransferRunRequest.transfer_run", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.cloud.bigquery.datatransfer.v1.UpdateTransferRunRequest.update_mask", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2268, - serialized_end=2417, -) - - -_LOGTRANSFERRUNMESSAGESREQUEST = _descriptor.Descriptor( - name="LogTransferRunMessagesRequest", - full_name="google.cloud.bigquery.datatransfer.v1.LogTransferRunMessagesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.LogTransferRunMessagesRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="transfer_messages", - full_name="google.cloud.bigquery.datatransfer.v1.LogTransferRunMessagesRequest.transfer_messages", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2420, - serialized_end=2548, -) - - -_STARTBIGQUERYJOBSREQUEST = _descriptor.Descriptor( - name="StartBigQueryJobsRequest", - full_name="google.cloud.bigquery.datatransfer.v1.StartBigQueryJobsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.StartBigQueryJobsRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="imported_data", - 
full_name="google.cloud.bigquery.datatransfer.v1.StartBigQueryJobsRequest.imported_data", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="user_credentials", - full_name="google.cloud.bigquery.datatransfer.v1.StartBigQueryJobsRequest.user_credentials", - index=2, - number=3, - type=12, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b(""), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="max_parallelism", - full_name="google.cloud.bigquery.datatransfer.v1.StartBigQueryJobsRequest.max_parallelism", - index=3, - number=8, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2551, - serialized_end=2722, -) - - -_FINISHRUNREQUEST = _descriptor.Descriptor( - name="FinishRunRequest", - full_name="google.cloud.bigquery.datatransfer.v1.FinishRunRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.FinishRunRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2724, - serialized_end=2756, -) - - -_CREATEDATASOURCEDEFINITIONREQUEST = _descriptor.Descriptor( - name="CreateDataSourceDefinitionRequest", - full_name="google.cloud.bigquery.datatransfer.v1.CreateDataSourceDefinitionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.bigquery.datatransfer.v1.CreateDataSourceDefinitionRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="data_source_definition", - full_name="google.cloud.bigquery.datatransfer.v1.CreateDataSourceDefinitionRequest.data_source_definition", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - 
serialized_start=2759, - serialized_end=2903, -) - - -_UPDATEDATASOURCEDEFINITIONREQUEST = _descriptor.Descriptor( - name="UpdateDataSourceDefinitionRequest", - full_name="google.cloud.bigquery.datatransfer.v1.UpdateDataSourceDefinitionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="data_source_definition", - full_name="google.cloud.bigquery.datatransfer.v1.UpdateDataSourceDefinitionRequest.data_source_definition", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.cloud.bigquery.datatransfer.v1.UpdateDataSourceDefinitionRequest.update_mask", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2906, - serialized_end=3083, -) - - -_DELETEDATASOURCEDEFINITIONREQUEST = _descriptor.Descriptor( - name="DeleteDataSourceDefinitionRequest", - full_name="google.cloud.bigquery.datatransfer.v1.DeleteDataSourceDefinitionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.DeleteDataSourceDefinitionRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3085, - serialized_end=3134, -) - - -_GETDATASOURCEDEFINITIONREQUEST = _descriptor.Descriptor( - name="GetDataSourceDefinitionRequest", - full_name="google.cloud.bigquery.datatransfer.v1.GetDataSourceDefinitionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.GetDataSourceDefinitionRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3136, - serialized_end=3182, -) - - -_LISTDATASOURCEDEFINITIONSREQUEST = _descriptor.Descriptor( - name="ListDataSourceDefinitionsRequest", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - 
full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsRequest.page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3184, - serialized_end=3273, -) - - -_LISTDATASOURCEDEFINITIONSRESPONSE = _descriptor.Descriptor( - name="ListDataSourceDefinitionsResponse", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="data_source_definitions", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsResponse.data_source_definitions", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3276, - serialized_end=3430, -) - - -_DATASOURCEDEFINITION = _descriptor.Descriptor( - name="DataSourceDefinition", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceDefinition", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceDefinition.name", - index=0, - number=21, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="data_source", - 
full_name="google.cloud.bigquery.datatransfer.v1.DataSourceDefinition.data_source", - index=1, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="transfer_run_pubsub_topic", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceDefinition.transfer_run_pubsub_topic", - index=2, - number=13, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="run_time_offset", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceDefinition.run_time_offset", - index=3, - number=16, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="support_email", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceDefinition.support_email", - index=4, - number=22, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="service_account", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceDefinition.service_account", - index=5, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="disabled", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceDefinition.disabled", - index=6, - number=5, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="transfer_config_pubsub_topic", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceDefinition.transfer_config_pubsub_topic", - index=7, - number=12, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="supported_location_ids", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceDefinition.supported_location_ids", - index=8, - number=23, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - 
serialized_start=3433, - serialized_end=3764, -) - -_IMPORTEDDATAINFO_FIELDSCHEMA.fields_by_name[ - "type" -].enum_type = _IMPORTEDDATAINFO_FIELDSCHEMA_TYPE -_IMPORTEDDATAINFO_FIELDSCHEMA.fields_by_name[ - "schema" -].message_type = _IMPORTEDDATAINFO_RECORDSCHEMA -_IMPORTEDDATAINFO_FIELDSCHEMA.containing_type = _IMPORTEDDATAINFO -_IMPORTEDDATAINFO_FIELDSCHEMA_TYPE.containing_type = _IMPORTEDDATAINFO_FIELDSCHEMA -_IMPORTEDDATAINFO_RECORDSCHEMA.fields_by_name[ - "fields" -].message_type = _IMPORTEDDATAINFO_FIELDSCHEMA -_IMPORTEDDATAINFO_RECORDSCHEMA.containing_type = _IMPORTEDDATAINFO -_IMPORTEDDATAINFO_TABLEDEFINITION_CSVOPTIONS.fields_by_name[ - "field_delimiter" -].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE -_IMPORTEDDATAINFO_TABLEDEFINITION_CSVOPTIONS.fields_by_name[ - "allow_quoted_newlines" -].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE -_IMPORTEDDATAINFO_TABLEDEFINITION_CSVOPTIONS.fields_by_name[ - "quote_char" -].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE -_IMPORTEDDATAINFO_TABLEDEFINITION_CSVOPTIONS.fields_by_name[ - "skip_leading_rows" -].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE -_IMPORTEDDATAINFO_TABLEDEFINITION_CSVOPTIONS.fields_by_name[ - "allow_jagged_rows" -].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE -_IMPORTEDDATAINFO_TABLEDEFINITION_CSVOPTIONS.containing_type = ( - _IMPORTEDDATAINFO_TABLEDEFINITION -) -_IMPORTEDDATAINFO_TABLEDEFINITION.fields_by_name[ - "format" -].enum_type = _IMPORTEDDATAINFO_FORMAT -_IMPORTEDDATAINFO_TABLEDEFINITION.fields_by_name[ - "encoding" -].enum_type = _IMPORTEDDATAINFO_ENCODING -_IMPORTEDDATAINFO_TABLEDEFINITION.fields_by_name[ - "csv_options" -].message_type = _IMPORTEDDATAINFO_TABLEDEFINITION_CSVOPTIONS -_IMPORTEDDATAINFO_TABLEDEFINITION.fields_by_name[ - "schema" -].message_type = _IMPORTEDDATAINFO_RECORDSCHEMA -_IMPORTEDDATAINFO_TABLEDEFINITION.fields_by_name[ - "ignore_unknown_values" -].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE -_IMPORTEDDATAINFO_TABLEDEFINITION.containing_type = _IMPORTEDDATAINFO -_IMPORTEDDATAINFO.fields_by_name[ - "table_defs" -].message_type = _IMPORTEDDATAINFO_TABLEDEFINITION -_IMPORTEDDATAINFO.fields_by_name["write_disposition"].enum_type = _WRITEDISPOSITION -_IMPORTEDDATAINFO_FORMAT.containing_type = _IMPORTEDDATAINFO -_IMPORTEDDATAINFO_ENCODING.containing_type = _IMPORTEDDATAINFO -_UPDATETRANSFERRUNREQUEST.fields_by_name[ - "transfer_run" -].message_type = ( - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERRUN -) -_UPDATETRANSFERRUNREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_LOGTRANSFERRUNMESSAGESREQUEST.fields_by_name[ - "transfer_messages" -].message_type = ( - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERMESSAGE -) -_STARTBIGQUERYJOBSREQUEST.fields_by_name[ - "imported_data" -].message_type = _IMPORTEDDATAINFO -_CREATEDATASOURCEDEFINITIONREQUEST.fields_by_name[ - "data_source_definition" -].message_type = _DATASOURCEDEFINITION -_UPDATEDATASOURCEDEFINITIONREQUEST.fields_by_name[ - "data_source_definition" -].message_type = _DATASOURCEDEFINITION -_UPDATEDATASOURCEDEFINITIONREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_LISTDATASOURCEDEFINITIONSRESPONSE.fields_by_name[ - "data_source_definitions" -].message_type = _DATASOURCEDEFINITION 
-_DATASOURCEDEFINITION.fields_by_name[ - "data_source" -].message_type = ( - google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datatransfer__pb2._DATASOURCE -) -_DATASOURCEDEFINITION.fields_by_name[ - "run_time_offset" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -DESCRIPTOR.message_types_by_name["ImportedDataInfo"] = _IMPORTEDDATAINFO -DESCRIPTOR.message_types_by_name["UpdateTransferRunRequest"] = _UPDATETRANSFERRUNREQUEST -DESCRIPTOR.message_types_by_name[ - "LogTransferRunMessagesRequest" -] = _LOGTRANSFERRUNMESSAGESREQUEST -DESCRIPTOR.message_types_by_name["StartBigQueryJobsRequest"] = _STARTBIGQUERYJOBSREQUEST -DESCRIPTOR.message_types_by_name["FinishRunRequest"] = _FINISHRUNREQUEST -DESCRIPTOR.message_types_by_name[ - "CreateDataSourceDefinitionRequest" -] = _CREATEDATASOURCEDEFINITIONREQUEST -DESCRIPTOR.message_types_by_name[ - "UpdateDataSourceDefinitionRequest" -] = _UPDATEDATASOURCEDEFINITIONREQUEST -DESCRIPTOR.message_types_by_name[ - "DeleteDataSourceDefinitionRequest" -] = _DELETEDATASOURCEDEFINITIONREQUEST -DESCRIPTOR.message_types_by_name[ - "GetDataSourceDefinitionRequest" -] = _GETDATASOURCEDEFINITIONREQUEST -DESCRIPTOR.message_types_by_name[ - "ListDataSourceDefinitionsRequest" -] = _LISTDATASOURCEDEFINITIONSREQUEST -DESCRIPTOR.message_types_by_name[ - "ListDataSourceDefinitionsResponse" -] = _LISTDATASOURCEDEFINITIONSRESPONSE -DESCRIPTOR.message_types_by_name["DataSourceDefinition"] = _DATASOURCEDEFINITION -DESCRIPTOR.enum_types_by_name["WriteDisposition"] = _WRITEDISPOSITION -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -ImportedDataInfo = _reflection.GeneratedProtocolMessageType( - "ImportedDataInfo", - (_message.Message,), - dict( - FieldSchema=_reflection.GeneratedProtocolMessageType( - "FieldSchema", - (_message.Message,), - dict( - DESCRIPTOR=_IMPORTEDDATAINFO_FIELDSCHEMA, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""Defines schema of a field in the imported data. - - - Attributes: - field_name: - Field name. Matches: [A-Za-z\_][A-Za-z\_0-9]{0,127} - type: - Field type - is_repeated: - Is field repeated. - description: - Description for this field. - schema: - Present iff type == RECORD. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema) - ), - ), - RecordSchema=_reflection.GeneratedProtocolMessageType( - "RecordSchema", - (_message.Message,), - dict( - DESCRIPTOR=_IMPORTEDDATAINFO_RECORDSCHEMA, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""Describes schema of the data to be ingested. - - - Attributes: - fields: - One field per column in the record. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.RecordSchema) - ), - ), - TableDefinition=_reflection.GeneratedProtocolMessageType( - "TableDefinition", - (_message.Message,), - dict( - CsvOptions=_reflection.GeneratedProtocolMessageType( - "CsvOptions", - (_message.Message,), - dict( - DESCRIPTOR=_IMPORTEDDATAINFO_TABLEDEFINITION_CSVOPTIONS, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""CSV specific options. - - - Attributes: - field_delimiter: - The delimiter. We currently restrict this to U+0001 to U+00FF - and apply additional constraints during validation. - allow_quoted_newlines: - Whether CSV files are allowed to have quoted newlines. If - quoted newlines are allowed, we can't split CSV files. - quote_char: - The quote character. 
We currently restrict this to U+0000 to - U+00FF and apply additional constraints during validation. Set - to ':raw-latex:`\0`' to indicate no quote is used. - skip_leading_rows: - Number of leading rows to skip. - allow_jagged_rows: - Accept rows that are missing trailing optional columns. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.CsvOptions) - ), - ), - DESCRIPTOR=_IMPORTEDDATAINFO_TABLEDEFINITION, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""External table definition. These tables can be referenced with 'name' in - the query and can be read just like any other table. - - - Attributes: - table_id: - BigQuery table\_id (required). This will be used to reference - this table in the query. - source_uris: - URIs for the data to be imported. All URIs must be from the - same storage system. - format: - Describes the format of the data in source\_uri. - max_bad_records: - Specify the maximum number of bad records that can be ignored. - If bad records exceed this threshold the query is aborted. - encoding: - Character encoding of the input when applicable (CSV, JSON). - Defaults to UTF8. - csv_options: - CSV specific options. - schema: - Optional schema for the data. When not specified for JSON and - CSV formats we will try to detect it automatically. - ignore_unknown_values: - Indicates if extra values that are not represented in the - table schema is allowed. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition) - ), - ), - DESCRIPTOR=_IMPORTEDDATAINFO, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""Describes data which should be imported. - - - Attributes: - sql: - SQL query to run. When empty, API checks that there is only - one table\_def specified and loads this table. Only Standard - SQL queries are accepted. Legacy SQL is not allowed. - destination_table_id: - Table where results should be written. - destination_table_description: - The description of a destination table. This can be several - sentences or paragraphs describing the table contents in - detail. - table_defs: - When used WITHOUT the "sql" parameter, describes the schema of - the destination table. When used WITH the "sql" parameter, - describes tables with data stored outside of BigQuery. - user_defined_functions: - Inline code for User-defined function resources. Ignored when - "sql" parameter is empty. - write_disposition: - Specifies the action if the destination table already exists. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ImportedDataInfo) - ), -) -_sym_db.RegisterMessage(ImportedDataInfo) -_sym_db.RegisterMessage(ImportedDataInfo.FieldSchema) -_sym_db.RegisterMessage(ImportedDataInfo.RecordSchema) -_sym_db.RegisterMessage(ImportedDataInfo.TableDefinition) -_sym_db.RegisterMessage(ImportedDataInfo.TableDefinition.CsvOptions) - -UpdateTransferRunRequest = _reflection.GeneratedProtocolMessageType( - "UpdateTransferRunRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATETRANSFERRUNREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""A request to update a transfer run. - - - Attributes: - transfer_run: - Run name must be set and correspond to an already existing - run. Only state, error\_status, and data\_version fields will - be updated. All other fields will be ignored. 
- update_mask: - Required list of fields to be updated in this request. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.UpdateTransferRunRequest) - ), -) -_sym_db.RegisterMessage(UpdateTransferRunRequest) - -LogTransferRunMessagesRequest = _reflection.GeneratedProtocolMessageType( - "LogTransferRunMessagesRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LOGTRANSFERRUNMESSAGESREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""A request to add transfer status messages to the run. - - - Attributes: - name: - Name of the resource in the form: "projects/{project\_id}/loca - tions/{location\_id}/transferConfigs/{config\_id}/runs/{run\_i - d}" - transfer_messages: - Messages to append. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.LogTransferRunMessagesRequest) - ), -) -_sym_db.RegisterMessage(LogTransferRunMessagesRequest) - -StartBigQueryJobsRequest = _reflection.GeneratedProtocolMessageType( - "StartBigQueryJobsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_STARTBIGQUERYJOBSREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""A request to start and monitor a BigQuery load job. - - - Attributes: - name: - Name of the resource in the form: "projects/{project\_id}/loca - tions/{location\_id}/transferConfigs/{config\_id}/runs/{run\_i - d}" - imported_data: - Import jobs which should be started and monitored. - user_credentials: - User credentials which should be used to start/monitor - BigQuery jobs. If not specified, then jobs are started using - data source service account credentials. This may be OAuth - token or JWT token. - max_parallelism: - The number of BQ Jobs that can run in parallel. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.StartBigQueryJobsRequest) - ), -) -_sym_db.RegisterMessage(StartBigQueryJobsRequest) - -FinishRunRequest = _reflection.GeneratedProtocolMessageType( - "FinishRunRequest", - (_message.Message,), - dict( - DESCRIPTOR=_FINISHRUNREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""A request to finish a run. - - - Attributes: - name: - Name of the resource in the form: "projects/{project\_id}/loca - tions/{location\_id}/transferConfigs/{config\_id}/runs/{run\_i - d}" - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.FinishRunRequest) - ), -) -_sym_db.RegisterMessage(FinishRunRequest) - -CreateDataSourceDefinitionRequest = _reflection.GeneratedProtocolMessageType( - "CreateDataSourceDefinitionRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATEDATASOURCEDEFINITIONREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""Represents the request of the CreateDataSourceDefinition method. - - - Attributes: - parent: - The BigQuery project id for which data source definition is - associated. Must be in the form: - ``projects/{project_id}/locations/{location_id}`` - data_source_definition: - Data source definition. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.CreateDataSourceDefinitionRequest) - ), -) -_sym_db.RegisterMessage(CreateDataSourceDefinitionRequest) - -UpdateDataSourceDefinitionRequest = _reflection.GeneratedProtocolMessageType( - "UpdateDataSourceDefinitionRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATEDATASOURCEDEFINITIONREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""Represents the request of the UpdateDataSourceDefinition method. - - - Attributes: - data_source_definition: - Data source definition. - update_mask: - Update field mask. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.UpdateDataSourceDefinitionRequest) - ), -) -_sym_db.RegisterMessage(UpdateDataSourceDefinitionRequest) - -DeleteDataSourceDefinitionRequest = _reflection.GeneratedProtocolMessageType( - "DeleteDataSourceDefinitionRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETEDATASOURCEDEFINITIONREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""Represents the request of the DeleteDataSourceDefinition method. All - transfer configs associated with the data source must be deleted first, - before the data source can be deleted. - - - Attributes: - name: - The field will contain name of the resource requested, for - example: ``projects/{project_id}/locations/{location_id}/dataS - ourceDefinitions/{data_source_id}`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.DeleteDataSourceDefinitionRequest) - ), -) -_sym_db.RegisterMessage(DeleteDataSourceDefinitionRequest) - -GetDataSourceDefinitionRequest = _reflection.GeneratedProtocolMessageType( - "GetDataSourceDefinitionRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETDATASOURCEDEFINITIONREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""Represents the request of the GetDataSourceDefinition method. - - - Attributes: - name: - The field will contain name of the resource requested. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.GetDataSourceDefinitionRequest) - ), -) -_sym_db.RegisterMessage(GetDataSourceDefinitionRequest) - -ListDataSourceDefinitionsRequest = _reflection.GeneratedProtocolMessageType( - "ListDataSourceDefinitionsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTDATASOURCEDEFINITIONSREQUEST, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""Represents the request of the ListDataSourceDefinitions method. - - - Attributes: - parent: - The BigQuery project id for which data sources should be - returned. Must be in the form: - ``projects/{project_id}/locations/{location_id}`` - page_token: - Pagination token, which can be used to request a specific page - of ``ListDataSourceDefinitionsRequest`` list results. For - multiple-page results, ``ListDataSourceDefinitionsResponse`` - outputs a ``next_page`` token, which can be used as the - ``page_token`` value to request the next page of the list - results. - page_size: - Page size. The default page size is the maximum value of 1000 - results. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsRequest) - ), -) -_sym_db.RegisterMessage(ListDataSourceDefinitionsRequest) - -ListDataSourceDefinitionsResponse = _reflection.GeneratedProtocolMessageType( - "ListDataSourceDefinitionsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTDATASOURCEDEFINITIONSRESPONSE, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""Returns a list of supported data source definitions. - - - Attributes: - data_source_definitions: - List of supported data source definitions. - next_page_token: - Output only. The next-pagination token. For multiple-page list - results, this token can be used as the - ``ListDataSourceDefinitionsRequest.page_token`` to request the - next page of the list results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsResponse) - ), -) -_sym_db.RegisterMessage(ListDataSourceDefinitionsResponse) - -DataSourceDefinition = _reflection.GeneratedProtocolMessageType( - "DataSourceDefinition", - (_message.Message,), - dict( - DESCRIPTOR=_DATASOURCEDEFINITION, - __module__="google.cloud.bigquery.datatransfer_v1.proto.datasource_pb2", - __doc__="""Represents the data source definition. - - - Attributes: - name: - The resource name of the data source definition. Data source - definition names have the form ``projects/{project_id}/locatio - ns/{location}/dataSourceDefinitions/{data_source_id}``. - data_source: - Data source metadata. - transfer_run_pubsub_topic: - The Pub/Sub topic to be used for broadcasting a message when a - transfer run is created. Both this topic and - transfer\_config\_pubsub\_topic can be set to a custom topic. - By default, both topics are auto-generated if none of them is - provided when creating the definition. However, if one topic - is manually set, the other topic has to be manually set as - well. The only difference is that transfer\_run\_pubsub\_topic - must be a non-empty Pub/Sub topic, but - transfer\_config\_pubsub\_topic can be set to empty. The - comments about "{location}" for - transfer\_config\_pubsub\_topic apply here too. - run_time_offset: - Duration which should be added to schedule\_time to calculate - run\_time when job is scheduled. Only applicable for - automatically scheduled transfer runs. Used to start a run - early on a data source that supports continuous data refresh - to compensate for unknown timezone offsets. Use a negative - number to start a run late for data sources not supporting - continuous data refresh. - support_email: - Support e-mail address of the OAuth client's Brand, which - contains the consent screen data. - service_account: - When service account is specified, BigQuery will share created - dataset with the given service account. Also, this service - account will be eligible to perform status updates and message - logging for data transfer runs for the corresponding - data\_source\_id. - disabled: - Is data source disabled? If true, data\_source is not visible. - API will also stop returning any data transfer configs and/or - runs associated with the data source. This setting has higher - priority than whitelisted\_project\_ids. - transfer_config_pubsub_topic: - The Pub/Sub topic to use for broadcasting a message for - transfer config. If empty, a message will not be broadcasted. - Both this topic and transfer\_run\_pubsub\_topic are auto- - generated if none of them is provided when creating the - definition. 
It is recommended to provide - transfer\_config\_pubsub\_topic if a user-owned - transfer\_run\_pubsub\_topic is provided. Otherwise, it will - be set to empty. If "{location}" is found in the value, then - that means, data source wants to handle message separately for - datasets in different regions. We will replace {location} with - the actual dataset location, as the actual topic name. For - example, projects/connector/topics/scheduler-{location} could - become projects/connector/topics/scheduler-us. If "{location}" - is not found, then we will use the input value as topic name. - supported_location_ids: - Supported location\_ids used for deciding in which locations - Pub/Sub topics need to be created. If custom Pub/Sub topics - are used and they contains '{location}', the location\_ids - will be used for validating the topics by replacing the - '{location}' with the individual location in the list. The - valid values are the "location\_id" field of the response of - ``GET https://bigquerydatatransfer.googleapis.com/v1/{name=pro - jects/*}/locations`` In addition, if the data source needs to - support all available regions, supported\_location\_ids can be - set to "global" (a single string element). When "global" is - specified: 1) the data source implementation is supposed to - stage the data in proper region of the destination dataset; 2) - Data source developer should be aware of the implications - (e.g., network traffic latency, potential charge associated - with cross-region traffic, etc.) of supporting the "global" - region; - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.DataSourceDefinition) - ), -) -_sym_db.RegisterMessage(DataSourceDefinition) - - -DESCRIPTOR._options = None - -_DATASOURCESERVICE = _descriptor.ServiceDescriptor( - name="DataSourceService", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceService", - file=DESCRIPTOR, - index=0, - serialized_options=_b("\312A#bigquerydatatransfer.googleapis.com"), - serialized_start=3860, - serialized_end=5937, - methods=[ - _descriptor.MethodDescriptor( - name="UpdateTransferRun", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceService.UpdateTransferRun", - index=0, - containing_service=None, - input_type=_UPDATETRANSFERRUNREQUEST, - output_type=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERRUN, - serialized_options=_b( - "\202\323\344\223\002W2G/v1/{transfer_run.name=projects/*/locations/*/transferConfigs/*/runs/*}:\014transfer_run" - ), - ), - _descriptor.MethodDescriptor( - name="LogTransferRunMessages", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceService.LogTransferRunMessages", - index=1, - containing_service=None, - input_type=_LOGTRANSFERRUNMESSAGESREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - '\202\323\344\223\002K"F/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}:logMessages:\001*' - ), - ), - _descriptor.MethodDescriptor( - name="StartBigQueryJobs", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceService.StartBigQueryJobs", - index=2, - containing_service=None, - input_type=_STARTBIGQUERYJOBSREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - '\202\323\344\223\002Q"L/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}:startBigQueryJobs:\001*' - ), - ), - _descriptor.MethodDescriptor( - name="FinishRun", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceService.FinishRun", 
- index=3, - containing_service=None, - input_type=_FINISHRUNREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - '\202\323\344\223\002I"D/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}:finishRun:\001*' - ), - ), - _descriptor.MethodDescriptor( - name="CreateDataSourceDefinition", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceService.CreateDataSourceDefinition", - index=4, - containing_service=None, - input_type=_CREATEDATASOURCEDEFINITIONREQUEST, - output_type=_DATASOURCEDEFINITION, - serialized_options=_b( - '\202\323\344\223\002S"9/v1/{parent=projects/*/locations/*}/dataSourceDefinitions:\026data_source_definition' - ), - ), - _descriptor.MethodDescriptor( - name="UpdateDataSourceDefinition", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceService.UpdateDataSourceDefinition", - index=5, - containing_service=None, - input_type=_UPDATEDATASOURCEDEFINITIONREQUEST, - output_type=_DATASOURCEDEFINITION, - serialized_options=_b( - "\202\323\344\223\002j2P/v1/{data_source_definition.name=projects/*/locations/*/dataSourceDefinitions/*}:\026data_source_definition" - ), - ), - _descriptor.MethodDescriptor( - name="DeleteDataSourceDefinition", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceService.DeleteDataSourceDefinition", - index=6, - containing_service=None, - input_type=_DELETEDATASOURCEDEFINITIONREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002;*9/v1/{name=projects/*/locations/*/dataSourceDefinitions/*}" - ), - ), - _descriptor.MethodDescriptor( - name="GetDataSourceDefinition", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceService.GetDataSourceDefinition", - index=7, - containing_service=None, - input_type=_GETDATASOURCEDEFINITIONREQUEST, - output_type=_DATASOURCEDEFINITION, - serialized_options=_b( - "\202\323\344\223\002;\0229/v1/{name=projects/*/locations/*/dataSourceDefinitions/*}" - ), - ), - _descriptor.MethodDescriptor( - name="ListDataSourceDefinitions", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceService.ListDataSourceDefinitions", - index=8, - containing_service=None, - input_type=_LISTDATASOURCEDEFINITIONSREQUEST, - output_type=_LISTDATASOURCEDEFINITIONSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002;\0229/v1/{parent=projects/*/locations/*}/dataSourceDefinitions" - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_DATASOURCESERVICE) - -DESCRIPTOR.services_by_name["DataSourceService"] = _DATASOURCESERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datasource_pb2_grpc.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datasource_pb2_grpc.py deleted file mode 100644 index 136c84c3623a..000000000000 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datasource_pb2_grpc.py +++ /dev/null @@ -1,229 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-import grpc - -from google.cloud.bigquery.datatransfer_v1.proto import ( - datasource_pb2 as google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2, -) -from google.cloud.bigquery.datatransfer_v1.proto import ( - transfer_pb2 as google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class DataSourceServiceStub(object): - """The Google BigQuery Data Transfer API allows BigQuery users to - configure transfer of their data from other Google Products into BigQuery. - This service exposes methods that should be used by data source backend. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.UpdateTransferRun = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataSourceService/UpdateTransferRun", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.UpdateTransferRunRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.TransferRun.FromString, - ) - self.LogTransferRunMessages = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataSourceService/LogTransferRunMessages", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.LogTransferRunMessagesRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.StartBigQueryJobs = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataSourceService/StartBigQueryJobs", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.StartBigQueryJobsRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.FinishRun = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataSourceService/FinishRun", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.FinishRunRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.CreateDataSourceDefinition = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataSourceService/CreateDataSourceDefinition", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.CreateDataSourceDefinitionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.DataSourceDefinition.FromString, - ) - self.UpdateDataSourceDefinition = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataSourceService/UpdateDataSourceDefinition", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.UpdateDataSourceDefinitionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.DataSourceDefinition.FromString, - ) - self.DeleteDataSourceDefinition = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataSourceService/DeleteDataSourceDefinition", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.DeleteDataSourceDefinitionRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.GetDataSourceDefinition = 
channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataSourceService/GetDataSourceDefinition", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.GetDataSourceDefinitionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.DataSourceDefinition.FromString, - ) - self.ListDataSourceDefinitions = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataSourceService/ListDataSourceDefinitions", - request_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.ListDataSourceDefinitionsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.ListDataSourceDefinitionsResponse.FromString, - ) - - -class DataSourceServiceServicer(object): - """The Google BigQuery Data Transfer API allows BigQuery users to - configure transfer of their data from other Google Products into BigQuery. - This service exposes methods that should be used by data source backend. - """ - - def UpdateTransferRun(self, request, context): - """Update a transfer run. If successful, resets - data_source.update_deadline_seconds timer. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def LogTransferRunMessages(self, request, context): - """Log messages for a transfer run. If successful (at least 1 message), resets - data_source.update_deadline_seconds timer. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def StartBigQueryJobs(self, request, context): - """Notify the Data Transfer Service that data is ready for loading. - The Data Transfer Service will start and monitor multiple BigQuery Load - jobs for a transfer run. Monitored jobs will be automatically retried - and produce log messages when starting and finishing a job. - Can be called multiple times for the same transfer run. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def FinishRun(self, request, context): - """Notify the Data Transfer Service that the data source is done processing - the run. No more status updates or requests to start/monitor jobs will be - accepted. The run will be finalized by the Data Transfer Service when all - monitored jobs are completed. - Does not need to be called if the run is set to FAILED. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateDataSourceDefinition(self, request, context): - """Creates a data source definition. Calling this method will automatically - use your credentials to create the following Google Cloud resources in - YOUR Google Cloud project. - 1. OAuth client - 2. Pub/Sub Topics and Subscriptions in each supported_location_ids. e.g., - projects/{project_id}/{topics|subscriptions}/bigquerydatatransfer.{data_source_id}.{location_id}.run - The field data_source.client_id should be left empty in the input request, - as the API will create a new OAuth client on behalf of the caller. 
On the - other hand data_source.scopes usually need to be set when there are OAuth - scopes that need to be granted by end users. - 3. We need a longer deadline due to the 60 seconds SLO from Pub/Sub admin - Operations. This also applies to update and delete data source definition. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateDataSourceDefinition(self, request, context): - """Updates an existing data source definition. If changing - supported_location_ids, triggers same effects as mentioned in "Create a - data source definition." - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteDataSourceDefinition(self, request, context): - """Deletes a data source definition, all of the transfer configs associated - with this data source definition (if any) must be deleted first by the user - in ALL regions, in order to delete the data source definition. - This method is primarily meant for deleting data sources created during - testing stage. - If the data source is referenced by transfer configs in the region - specified in the request URL, the method will fail immediately. If in the - current region (e.g., US) it's not used by any transfer configs, but in - another region (e.g., EU) it is, then although the method will succeed in - region US, but it will fail when the deletion operation is replicated to - region EU. And eventually, the system will replicate the data source - definition back from EU to US, in order to bring all regions to - consistency. The final effect is that the data source appears to be - 'undeleted' in the US region. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetDataSourceDefinition(self, request, context): - """Retrieves an existing data source definition. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListDataSourceDefinitions(self, request, context): - """Lists supported data source definitions. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_DataSourceServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "UpdateTransferRun": grpc.unary_unary_rpc_method_handler( - servicer.UpdateTransferRun, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.UpdateTransferRunRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.TransferRun.SerializeToString, - ), - "LogTransferRunMessages": grpc.unary_unary_rpc_method_handler( - servicer.LogTransferRunMessages, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.LogTransferRunMessagesRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "StartBigQueryJobs": grpc.unary_unary_rpc_method_handler( - servicer.StartBigQueryJobs, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.StartBigQueryJobsRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "FinishRun": grpc.unary_unary_rpc_method_handler( - servicer.FinishRun, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.FinishRunRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "CreateDataSourceDefinition": grpc.unary_unary_rpc_method_handler( - servicer.CreateDataSourceDefinition, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.CreateDataSourceDefinitionRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.DataSourceDefinition.SerializeToString, - ), - "UpdateDataSourceDefinition": grpc.unary_unary_rpc_method_handler( - servicer.UpdateDataSourceDefinition, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.UpdateDataSourceDefinitionRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.DataSourceDefinition.SerializeToString, - ), - "DeleteDataSourceDefinition": grpc.unary_unary_rpc_method_handler( - servicer.DeleteDataSourceDefinition, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.DeleteDataSourceDefinitionRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "GetDataSourceDefinition": grpc.unary_unary_rpc_method_handler( - servicer.GetDataSourceDefinition, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.GetDataSourceDefinitionRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.DataSourceDefinition.SerializeToString, - ), - "ListDataSourceDefinitions": grpc.unary_unary_rpc_method_handler( - servicer.ListDataSourceDefinitions, - request_deserializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.ListDataSourceDefinitionsRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_datasource__pb2.ListDataSourceDefinitionsResponse.SerializeToString, - ), - } - 
generic_handler = grpc.method_handlers_generic_handler( - "google.cloud.bigquery.datatransfer.v1.DataSourceService", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2.py deleted file mode 100644 index 089b914b8867..000000000000 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2.py +++ /dev/null @@ -1,3446 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.cloud.bigquery_datatransfer_v1.proto import ( - transfer_pb2 as google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2, -) -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto", - package="google.cloud.bigquery.datatransfer.v1", - syntax="proto3", - serialized_options=b"\n)com.google.cloud.bigquery.datatransfer.v1B\021DataTransferProtoP\001ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\252\002%Google.Cloud.BigQuery.DataTransfer.V1\312\002%Google\\Cloud\\BigQuery\\DataTransfer\\V1\352\002)Google::Cloud::Bigquery::DataTransfer::V1", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n>google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto\x12%google.cloud.bigquery.datatransfer.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a:google/cloud/bigquery_datatransfer_v1/proto/transfer.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto"\x85\x05\n\x13\x44\x61taSourceParameter\x12\x10\n\x08param_id\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12M\n\x04type\x18\x04 \x01(\x0e\x32?.google.cloud.bigquery.datatransfer.v1.DataSourceParameter.Type\x12\x10\n\x08required\x18\x05 \x01(\x08\x12\x10\n\x08repeated\x18\x06 \x01(\x08\x12\x18\n\x10validation_regex\x18\x07 \x01(\t\x12\x16\n\x0e\x61llowed_values\x18\x08 
\x03(\t\x12/\n\tmin_value\x18\t \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12/\n\tmax_value\x18\n \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12J\n\x06\x66ields\x18\x0b \x03(\x0b\x32:.google.cloud.bigquery.datatransfer.v1.DataSourceParameter\x12\x1e\n\x16validation_description\x18\x0c \x01(\t\x12\x1b\n\x13validation_help_url\x18\r \x01(\t\x12\x11\n\timmutable\x18\x0e \x01(\x08\x12\x0f\n\x07recurse\x18\x0f \x01(\x08\x12\x12\n\ndeprecated\x18\x14 \x01(\x08"i\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\n\n\x06STRING\x10\x01\x12\x0b\n\x07INTEGER\x10\x02\x12\n\n\x06\x44OUBLE\x10\x03\x12\x0b\n\x07\x42OOLEAN\x10\x04\x12\n\n\x06RECORD\x10\x05\x12\r\n\tPLUS_PAGE\x10\x06"\x9c\t\n\nDataSource\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x16\n\x0e\x64\x61ta_source_id\x18\x02 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x11\n\tclient_id\x18\x05 \x01(\t\x12\x0e\n\x06scopes\x18\x06 \x03(\t\x12N\n\rtransfer_type\x18\x07 \x01(\x0e\x32\x33.google.cloud.bigquery.datatransfer.v1.TransferTypeB\x02\x18\x01\x12\'\n\x1bsupports_multiple_transfers\x18\x08 \x01(\x08\x42\x02\x18\x01\x12\x1f\n\x17update_deadline_seconds\x18\t \x01(\x05\x12\x18\n\x10\x64\x65\x66\x61ult_schedule\x18\n \x01(\t\x12 \n\x18supports_custom_schedule\x18\x0b \x01(\x08\x12N\n\nparameters\x18\x0c \x03(\x0b\x32:.google.cloud.bigquery.datatransfer.v1.DataSourceParameter\x12\x10\n\x08help_url\x18\r \x01(\t\x12_\n\x12\x61uthorization_type\x18\x0e \x01(\x0e\x32\x43.google.cloud.bigquery.datatransfer.v1.DataSource.AuthorizationType\x12\\\n\x11\x64\x61ta_refresh_type\x18\x0f \x01(\x0e\x32\x41.google.cloud.bigquery.datatransfer.v1.DataSource.DataRefreshType\x12(\n default_data_refresh_window_days\x18\x10 \x01(\x05\x12\x1c\n\x14manual_runs_disabled\x18\x11 \x01(\x08\x12<\n\x19minimum_schedule_interval\x18\x12 \x01(\x0b\x32\x19.google.protobuf.Duration"\x8a\x01\n\x11\x41uthorizationType\x12"\n\x1e\x41UTHORIZATION_TYPE_UNSPECIFIED\x10\x00\x12\x16\n\x12\x41UTHORIZATION_CODE\x10\x01\x12"\n\x1eGOOGLE_PLUS_AUTHORIZATION_CODE\x10\x02\x12\x15\n\x11\x46IRST_PARTY_OAUTH\x10\x03"c\n\x0f\x44\x61taRefreshType\x12!\n\x1d\x44\x41TA_REFRESH_TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0eSLIDING_WINDOW\x10\x01\x12\x19\n\x15\x43USTOM_SLIDING_WINDOW\x10\x02:\xa5\x01\xea\x41\xa1\x01\n.bigquerydatatransfer.googleapis.com/DataSource\x12,projects/{project}/dataSources/{data_source}\x12\x41projects/{project}/locations/{location}/dataSources/{data_source}"\\\n\x14GetDataSourceRequest\x12\x44\n\x04name\x18\x01 \x01(\tB6\xe0\x41\x02\xfa\x41\x30\n.bigquerydatatransfer.googleapis.com/DataSource"\x87\x01\n\x16ListDataSourcesRequest\x12\x46\n\x06parent\x18\x01 \x01(\tB6\xe0\x41\x02\xfa\x41\x30\x12.bigquerydatatransfer.googleapis.com/DataSource\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05"\x80\x01\n\x17ListDataSourcesResponse\x12G\n\x0c\x64\x61ta_sources\x18\x01 \x03(\x0b\x32\x31.google.cloud.bigquery.datatransfer.v1.DataSource\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"\x8e\x02\n\x1b\x43reateTransferConfigRequest\x12J\n\x06parent\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\x12\x32\x62igquerydatatransfer.googleapis.com/TransferConfig\x12S\n\x0ftransfer_config\x18\x02 \x01(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfigB\x03\xe0\x41\x02\x12\x1a\n\x12\x61uthorization_code\x18\x03 \x01(\t\x12\x14\n\x0cversion_info\x18\x05 \x01(\t\x12\x1c\n\x14service_account_name\x18\x06 
\x01(\t"\xf8\x01\n\x1bUpdateTransferConfigRequest\x12S\n\x0ftransfer_config\x18\x01 \x01(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfigB\x03\xe0\x41\x02\x12\x1a\n\x12\x61uthorization_code\x18\x03 \x01(\t\x12\x34\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02\x12\x14\n\x0cversion_info\x18\x05 \x01(\t\x12\x1c\n\x14service_account_name\x18\x06 \x01(\t"d\n\x18GetTransferConfigRequest\x12H\n\x04name\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig"g\n\x1b\x44\x65leteTransferConfigRequest\x12H\n\x04name\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig"V\n\x15GetTransferRunRequest\x12=\n\x04name\x18\x01 \x01(\tB/\xe0\x41\x02\xfa\x41)\n\'bigquerydatatransfer.googleapis.com/Run"Y\n\x18\x44\x65leteTransferRunRequest\x12=\n\x04name\x18\x01 \x01(\tB/\xe0\x41\x02\xfa\x41)\n\'bigquerydatatransfer.googleapis.com/Run"\xa8\x01\n\x1aListTransferConfigsRequest\x12J\n\x06parent\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\x12\x32\x62igquerydatatransfer.googleapis.com/TransferConfig\x12\x17\n\x0f\x64\x61ta_source_ids\x18\x02 \x03(\t\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05"\x91\x01\n\x1bListTransferConfigsResponse\x12T\n\x10transfer_configs\x18\x01 \x03(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfigB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"\xde\x02\n\x17ListTransferRunsRequest\x12?\n\x06parent\x18\x01 \x01(\tB/\xe0\x41\x02\xfa\x41)\x12\'bigquerydatatransfer.googleapis.com/Run\x12\x44\n\x06states\x18\x02 \x03(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferState\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12^\n\x0brun_attempt\x18\x05 \x01(\x0e\x32I.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt"5\n\nRunAttempt\x12\x1b\n\x17RUN_ATTEMPT_UNSPECIFIED\x10\x00\x12\n\n\x06LATEST\x10\x01"\x88\x01\n\x18ListTransferRunsResponse\x12N\n\rtransfer_runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRunB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"\xe0\x01\n\x17ListTransferLogsRequest\x12?\n\x06parent\x18\x01 \x01(\tB/\xe0\x41\x02\xfa\x41)\n\'bigquerydatatransfer.googleapis.com/Run\x12\x12\n\npage_token\x18\x04 \x01(\t\x12\x11\n\tpage_size\x18\x05 \x01(\x05\x12]\n\rmessage_types\x18\x06 \x03(\x0e\x32\x46.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity"\x90\x01\n\x18ListTransferLogsResponse\x12V\n\x11transfer_messages\x18\x01 \x03(\x0b\x32\x36.google.cloud.bigquery.datatransfer.v1.TransferMessageB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"^\n\x16\x43heckValidCredsRequest\x12\x44\n\x04name\x18\x01 \x01(\tB6\xe0\x41\x02\xfa\x41\x30\n.bigquerydatatransfer.googleapis.com/DataSource"2\n\x17\x43heckValidCredsResponse\x12\x17\n\x0fhas_valid_creds\x18\x01 \x01(\x08"\xd1\x01\n\x1bScheduleTransferRunsRequest\x12J\n\x06parent\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12\x33\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02\x12\x31\n\x08\x65nd_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02"`\n\x1cScheduleTransferRunsResponse\x12@\n\x04runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRun"\x87\x03\n\x1eStartManualTransferRunsRequest\x12G\n\x06parent\x18\x01 
\x01(\tB7\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12o\n\x14requested_time_range\x18\x03 \x01(\x0b\x32O.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRangeH\x00\x12\x38\n\x12requested_run_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x1ai\n\tTimeRange\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x06\n\x04time"c\n\x1fStartManualTransferRunsResponse\x12@\n\x04runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRun2\xc3\x1f\n\x13\x44\x61taTransferService\x12\xe6\x01\n\rGetDataSource\x12;.google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest\x1a\x31.google.cloud.bigquery.datatransfer.v1.DataSource"e\x82\xd3\xe4\x93\x02X\x12//v1/{name=projects/*/locations/*/dataSources/*}Z%\x12#/v1/{name=projects/*/dataSources/*}\xda\x41\x04name\x12\xf9\x01\n\x0fListDataSources\x12=.google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest\x1a>.google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse"g\x82\xd3\xe4\x93\x02X\x12//v1/{parent=projects/*/locations/*}/dataSourcesZ%\x12#/v1/{parent=projects/*}/dataSources\xda\x41\x06parent\x12\xb6\x02\n\x14\x43reateTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig"\xa2\x01\x82\xd3\xe4\x93\x02\x82\x01"3/v1/{parent=projects/*/locations/*}/transferConfigs:\x0ftransfer_configZ:"\'/v1/{parent=projects/*}/transferConfigs:\x0ftransfer_config\xda\x41\x16parent,transfer_config\x12\xdb\x02\n\x14UpdateTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig"\xc7\x01\x82\xd3\xe4\x93\x02\xa2\x01\x32\x43/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}:\x0ftransfer_configZJ27/v1/{transfer_config.name=projects/*/transferConfigs/*}:\x0ftransfer_config\xda\x41\x1btransfer_config,update_mask\x12\xe1\x01\n\x14\x44\x65leteTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest\x1a\x16.google.protobuf.Empty"m\x82\xd3\xe4\x93\x02`*3/v1/{name=projects/*/locations/*/transferConfigs/*}Z)*\'/v1/{name=projects/*/transferConfigs/*}\xda\x41\x04name\x12\xfa\x01\n\x11GetTransferConfig\x12?.google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig"m\x82\xd3\xe4\x93\x02`\x12\x33/v1/{name=projects/*/locations/*/transferConfigs/*}Z)\x12\'/v1/{name=projects/*/transferConfigs/*}\xda\x41\x04name\x12\x8d\x02\n\x13ListTransferConfigs\x12\x41.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest\x1a\x42.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse"o\x82\xd3\xe4\x93\x02`\x12\x33/v1/{parent=projects/*/locations/*}/transferConfigsZ)\x12\'/v1/{parent=projects/*}/transferConfigs\xda\x41\x06parent\x12\xcd\x02\n\x14ScheduleTransferRuns\x12\x42.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest\x1a\x43.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse"\xab\x01\x88\x02\x01\x82\xd3\xe4\x93\x02\x84\x01"B/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns:\x01*Z;"6/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns:\x01*\xda\x41\x1aparent,start_time,end_time\x12\xbc\x02\n\x17StartManualTransferRuns\x12\x45.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest\x1a\x46.google.cloud.bigquery.datatransfer.v1.StartManualTra
nsferRunsResponse"\x91\x01\x82\xd3\xe4\x93\x02\x8a\x01"E/v1/{parent=projects/*/locations/*/transferConfigs/*}:startManualRuns:\x01*Z>"9/v1/{parent=projects/*/transferConfigs/*}:startManualRuns:\x01*\x12\xff\x01\n\x0eGetTransferRun\x12<.google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest\x1a\x32.google.cloud.bigquery.datatransfer.v1.TransferRun"{\x82\xd3\xe4\x93\x02n\x12:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0\x12./v1/{name=projects/*/transferConfigs/*/runs/*}\xda\x41\x04name\x12\xe9\x01\n\x11\x44\x65leteTransferRun\x12?.google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest\x1a\x16.google.protobuf.Empty"{\x82\xd3\xe4\x93\x02n*:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0*./v1/{name=projects/*/transferConfigs/*/runs/*}\xda\x41\x04name\x12\x92\x02\n\x10ListTransferRuns\x12>.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest\x1a?.google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse"}\x82\xd3\xe4\x93\x02n\x12:/v1/{parent=projects/*/locations/*/transferConfigs/*}/runsZ0\x12./v1/{parent=projects/*/transferConfigs/*}/runs\xda\x41\x06parent\x12\xb2\x02\n\x10ListTransferLogs\x12>.google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest\x1a?.google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse"\x9c\x01\x82\xd3\xe4\x93\x02\x8c\x01\x12I/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogsZ?\x12=/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs\xda\x41\x06parent\x12\x9e\x02\n\x0f\x43heckValidCreds\x12=.google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest\x1a>.google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse"\x8b\x01\x82\xd3\xe4\x93\x02~"?/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds:\x01*Z8"3/v1/{name=projects/*/dataSources/*}:checkValidCreds:\x01*\xda\x41\x04name\x1aW\xca\x41#bigquerydatatransfer.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\x8f\x02\n)com.google.cloud.bigquery.datatransfer.v1B\x11\x44\x61taTransferProtoP\x01ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\xaa\x02%Google.Cloud.BigQuery.DataTransfer.V1\xca\x02%Google\\Cloud\\BigQuery\\DataTransfer\\V1\xea\x02)Google::Cloud::Bigquery::DataTransfer::V1b\x06proto3', - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR, - ], -) - - -_DATASOURCEPARAMETER_TYPE = _descriptor.EnumDescriptor( - name="Type", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.Type", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="STRING", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="INTEGER", - index=2, - number=2, - serialized_options=None, - type=None, - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="DOUBLE", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="BOOLEAN", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="RECORD", - index=5, - number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PLUS_PAGE", - index=6, - number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=981, - serialized_end=1086, -) -_sym_db.RegisterEnumDescriptor(_DATASOURCEPARAMETER_TYPE) - -_DATASOURCE_AUTHORIZATIONTYPE = _descriptor.EnumDescriptor( - name="AuthorizationType", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.AuthorizationType", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="AUTHORIZATION_TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="AUTHORIZATION_CODE", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="GOOGLE_PLUS_AUTHORIZATION_CODE", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="FIRST_PARTY_OAUTH", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1862, - serialized_end=2000, -) -_sym_db.RegisterEnumDescriptor(_DATASOURCE_AUTHORIZATIONTYPE) - -_DATASOURCE_DATAREFRESHTYPE = _descriptor.EnumDescriptor( - name="DataRefreshType", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.DataRefreshType", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="DATA_REFRESH_TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="SLIDING_WINDOW", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CUSTOM_SLIDING_WINDOW", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2002, - serialized_end=2101, -) -_sym_db.RegisterEnumDescriptor(_DATASOURCE_DATAREFRESHTYPE) - -_LISTTRANSFERRUNSREQUEST_RUNATTEMPT = _descriptor.EnumDescriptor( - name="RunAttempt", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="RUN_ATTEMPT_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="LATEST", - 
index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=4161, - serialized_end=4214, -) -_sym_db.RegisterEnumDescriptor(_LISTTRANSFERRUNSREQUEST_RUNATTEMPT) - - -_DATASOURCEPARAMETER = _descriptor.Descriptor( - name="DataSourceParameter", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="param_id", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.param_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="display_name", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.display_name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.description", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="type", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.type", - index=3, - number=4, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="required", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.required", - index=4, - number=5, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="repeated", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.repeated", - index=5, - number=6, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="validation_regex", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.validation_regex", - index=6, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - 
default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="allowed_values", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.allowed_values", - index=7, - number=8, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="min_value", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.min_value", - index=8, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_value", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.max_value", - index=9, - number=10, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="fields", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.fields", - index=10, - number=11, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="validation_description", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.validation_description", - index=11, - number=12, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="validation_help_url", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.validation_help_url", - index=12, - number=13, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="immutable", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.immutable", - index=13, - number=14, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="recurse", - 
full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.recurse", - index=14, - number=15, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="deprecated", - full_name="google.cloud.bigquery.datatransfer.v1.DataSourceParameter.deprecated", - index=15, - number=20, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_DATASOURCEPARAMETER_TYPE,], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=441, - serialized_end=1086, -) - - -_DATASOURCE = _descriptor.Descriptor( - name="DataSource", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="data_source_id", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.data_source_id", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="display_name", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.display_name", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.description", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="client_id", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.client_id", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - 
file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="scopes", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.scopes", - index=5, - number=6, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="transfer_type", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.transfer_type", - index=6, - number=7, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\030\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="supports_multiple_transfers", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.supports_multiple_transfers", - index=7, - number=8, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\030\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="update_deadline_seconds", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.update_deadline_seconds", - index=8, - number=9, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="default_schedule", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.default_schedule", - index=9, - number=10, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="supports_custom_schedule", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.supports_custom_schedule", - index=10, - number=11, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="parameters", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.parameters", - index=11, - number=12, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="help_url", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.help_url", - index=12, - number=13, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="authorization_type", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.authorization_type", - index=13, - number=14, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="data_refresh_type", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.data_refresh_type", - index=14, - number=15, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="default_data_refresh_window_days", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.default_data_refresh_window_days", - index=15, - number=16, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="manual_runs_disabled", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.manual_runs_disabled", - index=16, - number=17, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="minimum_schedule_interval", - full_name="google.cloud.bigquery.datatransfer.v1.DataSource.minimum_schedule_interval", - index=17, - number=18, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_DATASOURCE_AUTHORIZATIONTYPE, _DATASOURCE_DATAREFRESHTYPE,], - serialized_options=b"\352A\241\001\n.bigquerydatatransfer.googleapis.com/DataSource\022,projects/{project}/dataSources/{data_source}\022Aprojects/{project}/locations/{location}/dataSources/{data_source}", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1089, - serialized_end=2269, -) - - -_GETDATASOURCEREQUEST = _descriptor.Descriptor( - name="GetDataSourceRequest", - full_name="google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A0\n.bigquerydatatransfer.googleapis.com/DataSource", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2271, - serialized_end=2363, -) - - -_LISTDATASOURCESREQUEST = _descriptor.Descriptor( - name="ListDataSourcesRequest", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A0\022.bigquerydatatransfer.googleapis.com/DataSource", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest.page_token", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest.page_size", - index=2, - number=4, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2366, - serialized_end=2501, -) - - -_LISTDATASOURCESRESPONSE = _descriptor.Descriptor( - name="ListDataSourcesResponse", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="data_sources", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse.data_sources", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - 
file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2504, - serialized_end=2632, -) - - -_CREATETRANSFERCONFIGREQUEST = _descriptor.Descriptor( - name="CreateTransferConfigRequest", - full_name="google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A4\0222bigquerydatatransfer.googleapis.com/TransferConfig", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="transfer_config", - full_name="google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest.transfer_config", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="authorization_code", - full_name="google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest.authorization_code", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="version_info", - full_name="google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest.version_info", - index=3, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="service_account_name", - full_name="google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest.service_account_name", - index=4, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2635, - serialized_end=2905, -) - - -_UPDATETRANSFERCONFIGREQUEST = _descriptor.Descriptor( - name="UpdateTransferConfigRequest", - full_name="google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - 
fields=[ - _descriptor.FieldDescriptor( - name="transfer_config", - full_name="google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest.transfer_config", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="authorization_code", - full_name="google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest.authorization_code", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest.update_mask", - index=2, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="version_info", - full_name="google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest.version_info", - index=3, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="service_account_name", - full_name="google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest.service_account_name", - index=4, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2908, - serialized_end=3156, -) - - -_GETTRANSFERCONFIGREQUEST = _descriptor.Descriptor( - name="GetTransferConfigRequest", - full_name="google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A4\n2bigquerydatatransfer.googleapis.com/TransferConfig", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - 
syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3158, - serialized_end=3258, -) - - -_DELETETRANSFERCONFIGREQUEST = _descriptor.Descriptor( - name="DeleteTransferConfigRequest", - full_name="google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A4\n2bigquerydatatransfer.googleapis.com/TransferConfig", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3260, - serialized_end=3363, -) - - -_GETTRANSFERRUNREQUEST = _descriptor.Descriptor( - name="GetTransferRunRequest", - full_name="google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A)\n'bigquerydatatransfer.googleapis.com/Run", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3365, - serialized_end=3451, -) - - -_DELETETRANSFERRUNREQUEST = _descriptor.Descriptor( - name="DeleteTransferRunRequest", - full_name="google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A)\n'bigquerydatatransfer.googleapis.com/Run", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3453, - serialized_end=3542, -) - - -_LISTTRANSFERCONFIGSREQUEST = _descriptor.Descriptor( - name="ListTransferConfigsRequest", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - 
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A4\0222bigquerydatatransfer.googleapis.com/TransferConfig", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="data_source_ids", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest.data_source_ids", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest.page_token", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest.page_size", - index=3, - number=4, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3545, - serialized_end=3713, -) - - -_LISTTRANSFERCONFIGSRESPONSE = _descriptor.Descriptor( - name="ListTransferConfigsResponse", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="transfer_configs", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse.transfer_configs", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3716, - 
serialized_end=3861, -) - - -_LISTTRANSFERRUNSREQUEST = _descriptor.Descriptor( - name="ListTransferRunsRequest", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A)\022'bigquerydatatransfer.googleapis.com/Run", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="states", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.states", - index=1, - number=2, - type=14, - cpp_type=8, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.page_token", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.page_size", - index=3, - number=4, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="run_attempt", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.run_attempt", - index=4, - number=5, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_LISTTRANSFERRUNSREQUEST_RUNATTEMPT,], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3864, - serialized_end=4214, -) - - -_LISTTRANSFERRUNSRESPONSE = _descriptor.Descriptor( - name="ListTransferRunsResponse", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="transfer_runs", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse.transfer_runs", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=b"\340A\003", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4217, - serialized_end=4353, -) - - -_LISTTRANSFERLOGSREQUEST = _descriptor.Descriptor( - name="ListTransferLogsRequest", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A)\n'bigquerydatatransfer.googleapis.com/Run", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest.page_token", - index=1, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest.page_size", - index=2, - number=5, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="message_types", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest.message_types", - index=3, - number=6, - type=14, - cpp_type=8, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4356, - serialized_end=4580, -) - - -_LISTTRANSFERLOGSRESPONSE = _descriptor.Descriptor( - name="ListTransferLogsResponse", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="transfer_messages", - 
full_name="google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse.transfer_messages", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4583, - serialized_end=4727, -) - - -_CHECKVALIDCREDSREQUEST = _descriptor.Descriptor( - name="CheckValidCredsRequest", - full_name="google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A0\n.bigquerydatatransfer.googleapis.com/DataSource", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4729, - serialized_end=4823, -) - - -_CHECKVALIDCREDSRESPONSE = _descriptor.Descriptor( - name="CheckValidCredsResponse", - full_name="google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="has_valid_creds", - full_name="google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse.has_valid_creds", - index=0, - number=1, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4825, - serialized_end=4875, -) - - -_SCHEDULETRANSFERRUNSREQUEST = _descriptor.Descriptor( - name="ScheduleTransferRunsRequest", - full_name="google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest.parent", - index=0, - number=1, - 
type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A4\n2bigquerydatatransfer.googleapis.com/TransferConfig", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="start_time", - full_name="google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest.start_time", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="end_time", - full_name="google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest.end_time", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4878, - serialized_end=5087, -) - - -_SCHEDULETRANSFERRUNSRESPONSE = _descriptor.Descriptor( - name="ScheduleTransferRunsResponse", - full_name="google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="runs", - full_name="google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse.runs", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5089, - serialized_end=5185, -) - - -_STARTMANUALTRANSFERRUNSREQUEST_TIMERANGE = _descriptor.Descriptor( - name="TimeRange", - full_name="google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRange", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="start_time", - full_name="google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRange.start_time", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="end_time", - full_name="google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRange.end_time", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5466, - serialized_end=5571, -) - -_STARTMANUALTRANSFERRUNSREQUEST = _descriptor.Descriptor( - name="StartManualTransferRunsRequest", - full_name="google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\372A4\n2bigquerydatatransfer.googleapis.com/TransferConfig", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="requested_time_range", - full_name="google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.requested_time_range", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="requested_run_time", - full_name="google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.requested_run_time", - index=2, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_STARTMANUALTRANSFERRUNSREQUEST_TIMERANGE,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="time", - full_name="google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.time", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=5188, - serialized_end=5579, -) - - -_STARTMANUALTRANSFERRUNSRESPONSE = _descriptor.Descriptor( - name="StartManualTransferRunsResponse", - full_name="google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="runs", - full_name="google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse.runs", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - 
syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5581, - serialized_end=5680, -) - -_DATASOURCEPARAMETER.fields_by_name["type"].enum_type = _DATASOURCEPARAMETER_TYPE -_DATASOURCEPARAMETER.fields_by_name[ - "min_value" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_DATASOURCEPARAMETER.fields_by_name[ - "max_value" -].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE -_DATASOURCEPARAMETER.fields_by_name["fields"].message_type = _DATASOURCEPARAMETER -_DATASOURCEPARAMETER_TYPE.containing_type = _DATASOURCEPARAMETER -_DATASOURCE.fields_by_name[ - "transfer_type" -].enum_type = ( - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERTYPE -) -_DATASOURCE.fields_by_name["parameters"].message_type = _DATASOURCEPARAMETER -_DATASOURCE.fields_by_name[ - "authorization_type" -].enum_type = _DATASOURCE_AUTHORIZATIONTYPE -_DATASOURCE.fields_by_name["data_refresh_type"].enum_type = _DATASOURCE_DATAREFRESHTYPE -_DATASOURCE.fields_by_name[ - "minimum_schedule_interval" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_DATASOURCE_AUTHORIZATIONTYPE.containing_type = _DATASOURCE -_DATASOURCE_DATAREFRESHTYPE.containing_type = _DATASOURCE -_LISTDATASOURCESRESPONSE.fields_by_name["data_sources"].message_type = _DATASOURCE -_CREATETRANSFERCONFIGREQUEST.fields_by_name[ - "transfer_config" -].message_type = ( - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG -) -_UPDATETRANSFERCONFIGREQUEST.fields_by_name[ - "transfer_config" -].message_type = ( - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG -) -_UPDATETRANSFERCONFIGREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_LISTTRANSFERCONFIGSRESPONSE.fields_by_name[ - "transfer_configs" -].message_type = ( - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG -) -_LISTTRANSFERRUNSREQUEST.fields_by_name[ - "states" -].enum_type = ( - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERSTATE -) -_LISTTRANSFERRUNSREQUEST.fields_by_name[ - "run_attempt" -].enum_type = _LISTTRANSFERRUNSREQUEST_RUNATTEMPT -_LISTTRANSFERRUNSREQUEST_RUNATTEMPT.containing_type = _LISTTRANSFERRUNSREQUEST -_LISTTRANSFERRUNSRESPONSE.fields_by_name[ - "transfer_runs" -].message_type = ( - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERRUN -) -_LISTTRANSFERLOGSREQUEST.fields_by_name[ - "message_types" -].enum_type = ( - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERMESSAGE_MESSAGESEVERITY -) -_LISTTRANSFERLOGSRESPONSE.fields_by_name[ - "transfer_messages" -].message_type = ( - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERMESSAGE -) -_SCHEDULETRANSFERRUNSREQUEST.fields_by_name[ - "start_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_SCHEDULETRANSFERRUNSREQUEST.fields_by_name[ - "end_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_SCHEDULETRANSFERRUNSRESPONSE.fields_by_name[ - "runs" -].message_type = ( - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERRUN -) -_STARTMANUALTRANSFERRUNSREQUEST_TIMERANGE.fields_by_name[ - "start_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_STARTMANUALTRANSFERRUNSREQUEST_TIMERANGE.fields_by_name[ - 
"end_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_STARTMANUALTRANSFERRUNSREQUEST_TIMERANGE.containing_type = ( - _STARTMANUALTRANSFERRUNSREQUEST -) -_STARTMANUALTRANSFERRUNSREQUEST.fields_by_name[ - "requested_time_range" -].message_type = _STARTMANUALTRANSFERRUNSREQUEST_TIMERANGE -_STARTMANUALTRANSFERRUNSREQUEST.fields_by_name[ - "requested_run_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_STARTMANUALTRANSFERRUNSREQUEST.oneofs_by_name["time"].fields.append( - _STARTMANUALTRANSFERRUNSREQUEST.fields_by_name["requested_time_range"] -) -_STARTMANUALTRANSFERRUNSREQUEST.fields_by_name[ - "requested_time_range" -].containing_oneof = _STARTMANUALTRANSFERRUNSREQUEST.oneofs_by_name["time"] -_STARTMANUALTRANSFERRUNSREQUEST.oneofs_by_name["time"].fields.append( - _STARTMANUALTRANSFERRUNSREQUEST.fields_by_name["requested_run_time"] -) -_STARTMANUALTRANSFERRUNSREQUEST.fields_by_name[ - "requested_run_time" -].containing_oneof = _STARTMANUALTRANSFERRUNSREQUEST.oneofs_by_name["time"] -_STARTMANUALTRANSFERRUNSRESPONSE.fields_by_name[ - "runs" -].message_type = ( - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERRUN -) -DESCRIPTOR.message_types_by_name["DataSourceParameter"] = _DATASOURCEPARAMETER -DESCRIPTOR.message_types_by_name["DataSource"] = _DATASOURCE -DESCRIPTOR.message_types_by_name["GetDataSourceRequest"] = _GETDATASOURCEREQUEST -DESCRIPTOR.message_types_by_name["ListDataSourcesRequest"] = _LISTDATASOURCESREQUEST -DESCRIPTOR.message_types_by_name["ListDataSourcesResponse"] = _LISTDATASOURCESRESPONSE -DESCRIPTOR.message_types_by_name[ - "CreateTransferConfigRequest" -] = _CREATETRANSFERCONFIGREQUEST -DESCRIPTOR.message_types_by_name[ - "UpdateTransferConfigRequest" -] = _UPDATETRANSFERCONFIGREQUEST -DESCRIPTOR.message_types_by_name["GetTransferConfigRequest"] = _GETTRANSFERCONFIGREQUEST -DESCRIPTOR.message_types_by_name[ - "DeleteTransferConfigRequest" -] = _DELETETRANSFERCONFIGREQUEST -DESCRIPTOR.message_types_by_name["GetTransferRunRequest"] = _GETTRANSFERRUNREQUEST -DESCRIPTOR.message_types_by_name["DeleteTransferRunRequest"] = _DELETETRANSFERRUNREQUEST -DESCRIPTOR.message_types_by_name[ - "ListTransferConfigsRequest" -] = _LISTTRANSFERCONFIGSREQUEST -DESCRIPTOR.message_types_by_name[ - "ListTransferConfigsResponse" -] = _LISTTRANSFERCONFIGSRESPONSE -DESCRIPTOR.message_types_by_name["ListTransferRunsRequest"] = _LISTTRANSFERRUNSREQUEST -DESCRIPTOR.message_types_by_name["ListTransferRunsResponse"] = _LISTTRANSFERRUNSRESPONSE -DESCRIPTOR.message_types_by_name["ListTransferLogsRequest"] = _LISTTRANSFERLOGSREQUEST -DESCRIPTOR.message_types_by_name["ListTransferLogsResponse"] = _LISTTRANSFERLOGSRESPONSE -DESCRIPTOR.message_types_by_name["CheckValidCredsRequest"] = _CHECKVALIDCREDSREQUEST -DESCRIPTOR.message_types_by_name["CheckValidCredsResponse"] = _CHECKVALIDCREDSRESPONSE -DESCRIPTOR.message_types_by_name[ - "ScheduleTransferRunsRequest" -] = _SCHEDULETRANSFERRUNSREQUEST -DESCRIPTOR.message_types_by_name[ - "ScheduleTransferRunsResponse" -] = _SCHEDULETRANSFERRUNSRESPONSE -DESCRIPTOR.message_types_by_name[ - "StartManualTransferRunsRequest" -] = _STARTMANUALTRANSFERRUNSREQUEST -DESCRIPTOR.message_types_by_name[ - "StartManualTransferRunsResponse" -] = _STARTMANUALTRANSFERRUNSRESPONSE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -DataSourceParameter = _reflection.GeneratedProtocolMessageType( - "DataSourceParameter", - (_message.Message,), - { - "DESCRIPTOR": _DATASOURCEPARAMETER, - "__module__": 
"google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", - "__doc__": """Represents a data source parameter with validation rules, so that - parameters can be rendered in the UI. These parameters are given to us - by supported data sources, and include all needed information for - rendering and validation. Thus, whoever uses this api can decide to - generate either generic ui, or custom data source specific forms. - - Attributes: - param_id: - Parameter identifier. - display_name: - Parameter display name in the user interface. - description: - Parameter description. - type: - Parameter type. - required: - Is parameter required. - repeated: - Deprecated. This field has no effect. - validation_regex: - Regular expression which can be used for parameter validation. - allowed_values: - All possible values for the parameter. - min_value: - For integer and double values specifies minimum allowed value. - max_value: - For integer and double values specifies maxminum allowed - value. - fields: - Deprecated. This field has no effect. - validation_description: - Description of the requirements for this field, in case the - user input does not fulfill the regex pattern or min/max - values. - validation_help_url: - URL to a help document to further explain the naming - requirements. - immutable: - Cannot be changed after initial creation. - recurse: - Deprecated. This field has no effect. - deprecated: - If true, it should not be used in new transfers, and it should - not be visible to users. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.DataSourceParameter) - }, -) -_sym_db.RegisterMessage(DataSourceParameter) - -DataSource = _reflection.GeneratedProtocolMessageType( - "DataSource", - (_message.Message,), - { - "DESCRIPTOR": _DATASOURCE, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", - "__doc__": """Represents data source metadata. Metadata is sufficient to render UI - and request proper OAuth tokens. - - Attributes: - name: - Output only. Data source resource name. - data_source_id: - Data source id. - display_name: - User friendly data source name. - description: - User friendly data source description string. - client_id: - Data source client id which should be used to receive refresh - token. - scopes: - Api auth scopes for which refresh token needs to be obtained. - These are scopes needed by a data source to prepare data and - ingest them into BigQuery, e.g., - https://www.googleapis.com/auth/bigquery - transfer_type: - Deprecated. This field has no effect. - supports_multiple_transfers: - Deprecated. This field has no effect. - update_deadline_seconds: - The number of seconds to wait for an update from the data - source before the Data Transfer Service marks the transfer as - FAILED. - default_schedule: - Default data transfer schedule. Examples of valid schedules - include: ``1st,3rd monday of month 15:30``, ``every wed,fri of - jan,jun 13:15``, and ``first sunday of quarter 00:00``. - supports_custom_schedule: - Specifies whether the data source supports a user defined - schedule, or operates on the default schedule. When set to - ``true``, user can override default schedule. - parameters: - Data source parameters. - help_url: - Url for the help document for this data source. - authorization_type: - Indicates the type of authorization. - data_refresh_type: - Specifies whether the data source supports automatic data - refresh for the past few days, and how it’s supported. 
For - some data sources, data might not be complete until a few days - later, so it’s useful to refresh data automatically. - default_data_refresh_window_days: - Default data refresh window on days. Only meaningful when - ``data_refresh_type`` = ``SLIDING_WINDOW``. - manual_runs_disabled: - Disables backfilling and manual run scheduling for the data - source. - minimum_schedule_interval: - The minimum interval for scheduler to schedule runs. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.DataSource) - }, -) -_sym_db.RegisterMessage(DataSource) - -GetDataSourceRequest = _reflection.GeneratedProtocolMessageType( - "GetDataSourceRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETDATASOURCEREQUEST, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", - "__doc__": """A request to get data source info. - - Attributes: - name: - Required. The field will contain name of the resource - requested, for example: - ``projects/{project_id}/dataSources/{data_source_id}`` or ``pr - ojects/{project_id}/locations/{location_id}/dataSources/{data_ - source_id}`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest) - }, -) -_sym_db.RegisterMessage(GetDataSourceRequest) - -ListDataSourcesRequest = _reflection.GeneratedProtocolMessageType( - "ListDataSourcesRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTDATASOURCESREQUEST, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", - "__doc__": """Request to list supported data sources and their data transfer - settings. - - Attributes: - parent: - Required. The BigQuery project id for which data sources - should be returned. Must be in the form: - ``projects/{project_id}`` or - \`projects/{project_id}/locations/{location_id} - page_token: - Pagination token, which can be used to request a specific page - of ``ListDataSourcesRequest`` list results. For multiple-page - results, ``ListDataSourcesResponse`` outputs a ``next_page`` - token, which can be used as the ``page_token`` value to - request the next page of list results. - page_size: - Page size. The default page size is the maximum value of 1000 - results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest) - }, -) -_sym_db.RegisterMessage(ListDataSourcesRequest) - -ListDataSourcesResponse = _reflection.GeneratedProtocolMessageType( - "ListDataSourcesResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTDATASOURCESRESPONSE, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", - "__doc__": """Returns list of supported data sources and their metadata. - - Attributes: - data_sources: - List of supported data sources and their transfer settings. - next_page_token: - Output only. The next-pagination token. For multiple-page list - results, this token can be used as the - ``ListDataSourcesRequest.page_token`` to request the next page - of list results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse) - }, -) -_sym_db.RegisterMessage(ListDataSourcesResponse) - -CreateTransferConfigRequest = _reflection.GeneratedProtocolMessageType( - "CreateTransferConfigRequest", - (_message.Message,), - { - "DESCRIPTOR": _CREATETRANSFERCONFIGREQUEST, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", - "__doc__": """A request to create a data transfer configuration. 
If new credentials - are needed for this transfer configuration, an authorization code must - be provided. If an authorization code is provided, the transfer - configuration will be associated with the user id corresponding to the - authorization code. Otherwise, the transfer configuration will be - associated with the calling user. - - Attributes: - parent: - Required. The BigQuery project id where the transfer - configuration should be created. Must be in the format - projects/{project_id}/locations/{location_id} or - projects/{project_id}. If specified location and location of - the destination bigquery dataset do not match - the request - will fail. - transfer_config: - Required. Data transfer configuration to create. - authorization_code: - Optional OAuth2 authorization code to use with this transfer - configuration. This is required if new credentials are needed, - as indicated by ``CheckValidCreds``. In order to obtain - authorization_code, please make a request to https://www.gstat - ic.com/bigquerydatatransfer/oauthz/auth?client_id=&scope=&redi - rect_uri= - client_id should be OAuth client_id of BigQuery - DTS API for the given data source returned by - ListDataSources method. - data_source_scopes are the scopes - returned by ListDataSources method. - redirect_uri is an - optional parameter. If not specified, then authorization - code is posted to the opener of authorization flow window. - Otherwise it will be sent to the redirect uri. A special - value of urn:ietf:wg:oauth:2.0:oob means that authorization - code should be returned in the title bar of the browser, - with the page text prompting the user to copy the code and - paste it in the application. - version_info: - Optional version info. If users want to find a very recent - access token, that is, immediately after approving access, - users have to set the version_info claim in the token request. - To obtain the version_info, users must use the “none+gsession” - response type. which be return a version_info back in the - authorization response which be be put in a JWT claim in the - token request. - service_account_name: - Optional service account name. If this field is set, transfer - config will be created with this service account credentials. - It requires that requesting user calling this API has - permissions to act as this service account. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest) - }, -) -_sym_db.RegisterMessage(CreateTransferConfigRequest) - -UpdateTransferConfigRequest = _reflection.GeneratedProtocolMessageType( - "UpdateTransferConfigRequest", - (_message.Message,), - { - "DESCRIPTOR": _UPDATETRANSFERCONFIGREQUEST, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", - "__doc__": """A request to update a transfer configuration. To update the user id of - the transfer configuration, an authorization code needs to be - provided. - - Attributes: - transfer_config: - Required. Data transfer configuration to create. - authorization_code: - Optional OAuth2 authorization code to use with this transfer - configuration. If it is provided, the transfer configuration - will be associated with the authorizing user. In order to - obtain authorization_code, please make a request to https://ww - w.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=&scop - e=&redirect_uri= - client_id should be OAuth client_id of - BigQuery DTS API for the given data source returned by - ListDataSources method. 
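A sketch of building the CreateTransferConfigRequest described above by hand with these generated messages. The `TransferConfig` field names are assumptions (that message lives in the companion `transfer_pb2` module, which is not shown in this hunk), and the project, location, dataset, and query values are placeholders:

```py
from google.cloud.bigquery_datatransfer_v1.proto import datatransfer_pb2, transfer_pb2

request = datatransfer_pb2.CreateTransferConfigRequest(
    parent="projects/my-project/locations/us",  # hypothetical project/location
    transfer_config=transfer_pb2.TransferConfig(
        display_name="nightly load",            # assumed TransferConfig fields
        data_source_id="scheduled_query",
        destination_dataset_id="my_dataset",
        schedule="every 24 hours",
    ),
)
# TransferConfig.params is a google.protobuf.Struct; update() fills it from a dict.
request.transfer_config.params.update({"query": "SELECT 1"})
# authorization_code is only needed when CheckValidCreds reports missing credentials.
```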
- data_source_scopes are the scopes - returned by ListDataSources method. - redirect_uri is an - optional parameter. If not specified, then authorization - code is posted to the opener of authorization flow window. - Otherwise it will be sent to the redirect uri. A special - value of urn:ietf:wg:oauth:2.0:oob means that authorization - code should be returned in the title bar of the browser, - with the page text prompting the user to copy the code and - paste it in the application. - update_mask: - Required. Required list of fields to be updated in this - request. - version_info: - Optional version info. If users want to find a very recent - access token, that is, immediately after approving access, - users have to set the version_info claim in the token request. - To obtain the version_info, users must use the “none+gsession” - response type. which be return a version_info back in the - authorization response which be be put in a JWT claim in the - token request. - service_account_name: - Optional service account name. If this field is set and - “service_account_name” is set in update_mask, transfer config - will be updated to use this service account credentials. It - requires that requesting user calling this API has permissions - to act as this service account. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest) - }, -) -_sym_db.RegisterMessage(UpdateTransferConfigRequest) - -GetTransferConfigRequest = _reflection.GeneratedProtocolMessageType( - "GetTransferConfigRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETTRANSFERCONFIGREQUEST, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", - "__doc__": """A request to get data transfer information. - - Attributes: - name: - Required. The field will contain name of the resource - requested, for example: - ``projects/{project_id}/transferConfigs/{config_id}`` or ``pro - jects/{project_id}/locations/{location_id}/transferConfigs/{co - nfig_id}`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest) - }, -) -_sym_db.RegisterMessage(GetTransferConfigRequest) - -DeleteTransferConfigRequest = _reflection.GeneratedProtocolMessageType( - "DeleteTransferConfigRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETETRANSFERCONFIGREQUEST, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", - "__doc__": """A request to delete data transfer information. All associated transfer - runs and log messages will be deleted as well. - - Attributes: - name: - Required. The field will contain name of the resource - requested, for example: - ``projects/{project_id}/transferConfigs/{config_id}`` or ``pro - jects/{project_id}/locations/{location_id}/transferConfigs/{co - nfig_id}`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest) - }, -) -_sym_db.RegisterMessage(DeleteTransferConfigRequest) - -GetTransferRunRequest = _reflection.GeneratedProtocolMessageType( - "GetTransferRunRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETTRANSFERRUNREQUEST, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", - "__doc__": """A request to get data transfer run information. - - Attributes: - name: - Required. 
The field will contain name of the resource - requested, for example: ``projects/{project_id}/transferConfig - s/{config_id}/runs/{run_id}`` or ``projects/{project_id}/locat - ions/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest) - }, -) -_sym_db.RegisterMessage(GetTransferRunRequest) - -DeleteTransferRunRequest = _reflection.GeneratedProtocolMessageType( - "DeleteTransferRunRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETETRANSFERRUNREQUEST, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", - "__doc__": """A request to delete data transfer run information. - - Attributes: - name: - Required. The field will contain name of the resource - requested, for example: ``projects/{project_id}/transferConfig - s/{config_id}/runs/{run_id}`` or ``projects/{project_id}/locat - ions/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest) - }, -) -_sym_db.RegisterMessage(DeleteTransferRunRequest) - -ListTransferConfigsRequest = _reflection.GeneratedProtocolMessageType( - "ListTransferConfigsRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTTRANSFERCONFIGSREQUEST, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", - "__doc__": """A request to list data transfers configured for a BigQuery project. - - Attributes: - parent: - Required. The BigQuery project id for which data sources - should be returned: ``projects/{project_id}`` or - ``projects/{project_id}/locations/{location_id}`` - data_source_ids: - When specified, only configurations of requested data sources - are returned. - page_token: - Pagination token, which can be used to request a specific page - of ``ListTransfersRequest`` list results. For multiple-page - results, ``ListTransfersResponse`` outputs a ``next_page`` - token, which can be used as the ``page_token`` value to - request the next page of list results. - page_size: - Page size. The default page size is the maximum value of 1000 - results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest) - }, -) -_sym_db.RegisterMessage(ListTransferConfigsRequest) - -ListTransferConfigsResponse = _reflection.GeneratedProtocolMessageType( - "ListTransferConfigsResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTTRANSFERCONFIGSRESPONSE, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", - "__doc__": """The returned list of pipelines in the project. - - Attributes: - transfer_configs: - Output only. The stored pipeline transfer configurations. - next_page_token: - Output only. The next-pagination token. For multiple-page list - results, this token can be used as the - ``ListTransferConfigsRequest.page_token`` to request the next - page of list results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse) - }, -) -_sym_db.RegisterMessage(ListTransferConfigsResponse) - -ListTransferRunsRequest = _reflection.GeneratedProtocolMessageType( - "ListTransferRunsRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTTRANSFERRUNSREQUEST, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", - "__doc__": """A request to list data transfer runs. UI can use this method to - show/filter specific data transfer runs. 
The data source can use this - method to request all scheduled transfer runs. - - Attributes: - parent: - Required. Name of transfer configuration for which transfer - runs should be retrieved. Format of transfer configuration - resource name is: - ``projects/{project_id}/transferConfigs/{config_id}`` or ``pro - jects/{project_id}/locations/{location_id}/transferConfigs/{co - nfig_id}``. - states: - When specified, only transfer runs with requested states are - returned. - page_token: - Pagination token, which can be used to request a specific page - of ``ListTransferRunsRequest`` list results. For multiple-page - results, ``ListTransferRunsResponse`` outputs a ``next_page`` - token, which can be used as the ``page_token`` value to - request the next page of list results. - page_size: - Page size. The default page size is the maximum value of 1000 - results. - run_attempt: - Indicates how run attempts are to be pulled. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest) - }, -) -_sym_db.RegisterMessage(ListTransferRunsRequest) - -ListTransferRunsResponse = _reflection.GeneratedProtocolMessageType( - "ListTransferRunsResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTTRANSFERRUNSRESPONSE, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", - "__doc__": """The returned list of pipelines in the project. - - Attributes: - transfer_runs: - Output only. The stored pipeline transfer runs. - next_page_token: - Output only. The next-pagination token. For multiple-page list - results, this token can be used as the - ``ListTransferRunsRequest.page_token`` to request the next - page of list results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse) - }, -) -_sym_db.RegisterMessage(ListTransferRunsResponse) - -ListTransferLogsRequest = _reflection.GeneratedProtocolMessageType( - "ListTransferLogsRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTTRANSFERLOGSREQUEST, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", - "__doc__": """A request to get user facing log messages associated with data - transfer run. - - Attributes: - parent: - Required. Transfer run name in the form: ``projects/{project_i - d}/transferConfigs/{config_id}/runs/{run_id}`` or ``projects/{ - project_id}/locations/{location_id}/transferConfigs/{config_id - }/runs/{run_id}`` - page_token: - Pagination token, which can be used to request a specific page - of ``ListTransferLogsRequest`` list results. For multiple-page - results, ``ListTransferLogsResponse`` outputs a ``next_page`` - token, which can be used as the ``page_token`` value to - request the next page of list results. - page_size: - Page size. The default page size is the maximum value of 1000 - results. - message_types: - Message types to return. If not populated - INFO, WARNING and - ERROR messages are returned. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest) - }, -) -_sym_db.RegisterMessage(ListTransferLogsRequest) - -ListTransferLogsResponse = _reflection.GeneratedProtocolMessageType( - "ListTransferLogsResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTTRANSFERLOGSRESPONSE, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", - "__doc__": """The returned list transfer run messages. - - Attributes: - transfer_messages: - Output only. The stored pipeline transfer messages. 
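The `page_token`/`next_page_token` contract spelled out in the list-request docstrings above can be driven with a small loop. A sketch, assuming `stub` is a `DataTransferServiceStub` (from the companion `datatransfer_pb2_grpc` module removed later in this change) bound to an authenticated channel:

```py
from google.cloud.bigquery_datatransfer_v1.proto import datatransfer_pb2


def iter_transfer_runs(stub, config_name, page_size=100):
    """Yield every TransferRun under one transfer config, page by page."""
    page_token = ""
    while True:
        response = stub.ListTransferRuns(
            datatransfer_pb2.ListTransferRunsRequest(
                parent=config_name, page_size=page_size, page_token=page_token
            )
        )
        for run in response.transfer_runs:
            yield run
        # An empty next_page_token means the last page has been reached.
        page_token = response.next_page_token
        if not page_token:
            return
```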
- next_page_token: - Output only. The next-pagination token. For multiple-page list - results, this token can be used as the - ``GetTransferRunLogRequest.page_token`` to request the next - page of list results. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse) - }, -) -_sym_db.RegisterMessage(ListTransferLogsResponse) - -CheckValidCredsRequest = _reflection.GeneratedProtocolMessageType( - "CheckValidCredsRequest", - (_message.Message,), - { - "DESCRIPTOR": _CHECKVALIDCREDSREQUEST, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", - "__doc__": """A request to determine whether the user has valid credentials. This - method is used to limit the number of OAuth popups in the user - interface. The user id is inferred from the API call context. If the - data source has the Google+ authorization type, this method returns - false, as it cannot be determined whether the credentials are already - valid merely based on the user id. - - Attributes: - name: - Required. The data source in the form: - ``projects/{project_id}/dataSources/{data_source_id}`` or ``pr - ojects/{project_id}/locations/{location_id}/dataSources/{data_ - source_id}``. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest) - }, -) -_sym_db.RegisterMessage(CheckValidCredsRequest) - -CheckValidCredsResponse = _reflection.GeneratedProtocolMessageType( - "CheckValidCredsResponse", - (_message.Message,), - { - "DESCRIPTOR": _CHECKVALIDCREDSRESPONSE, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", - "__doc__": """A response indicating whether the credentials exist and are valid. - - Attributes: - has_valid_creds: - If set to ``true``, the credentials exist and are valid. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse) - }, -) -_sym_db.RegisterMessage(CheckValidCredsResponse) - -ScheduleTransferRunsRequest = _reflection.GeneratedProtocolMessageType( - "ScheduleTransferRunsRequest", - (_message.Message,), - { - "DESCRIPTOR": _SCHEDULETRANSFERRUNSREQUEST, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", - "__doc__": """A request to schedule transfer runs for a time range. - - Attributes: - parent: - Required. Transfer configuration name in the form: - ``projects/{project_id}/transferConfigs/{config_id}`` or ``pro - jects/{project_id}/locations/{location_id}/transferConfigs/{co - nfig_id}``. - start_time: - Required. Start time of the range of transfer runs. For - example, ``"2017-05-25T00:00:00+00:00"``. - end_time: - Required. End time of the range of transfer runs. For example, - ``"2017-05-30T00:00:00+00:00"``. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest) - }, -) -_sym_db.RegisterMessage(ScheduleTransferRunsRequest) - -ScheduleTransferRunsResponse = _reflection.GeneratedProtocolMessageType( - "ScheduleTransferRunsResponse", - (_message.Message,), - { - "DESCRIPTOR": _SCHEDULETRANSFERRUNSRESPONSE, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", - "__doc__": """A response to schedule transfer runs for a time range. - - Attributes: - runs: - The transfer runs that were scheduled. 
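Per the CheckValidCredsRequest/CheckValidCredsResponse docstrings above, a UI would call this method before deciding whether to open an OAuth popup. A hedged sketch of that check, again assuming an authenticated `DataTransferServiceStub` and a placeholder data source name:

```py
from google.cloud.bigquery_datatransfer_v1.proto import datatransfer_pb2


def needs_oauth_popup(stub, data_source_name):
    """Return True if the calling user still has to authorize this data source."""
    response = stub.CheckValidCreds(
        datatransfer_pb2.CheckValidCredsRequest(name=data_source_name)
    )
    return not response.has_valid_creds
```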
- """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse) - }, -) -_sym_db.RegisterMessage(ScheduleTransferRunsResponse) - -StartManualTransferRunsRequest = _reflection.GeneratedProtocolMessageType( - "StartManualTransferRunsRequest", - (_message.Message,), - { - "TimeRange": _reflection.GeneratedProtocolMessageType( - "TimeRange", - (_message.Message,), - { - "DESCRIPTOR": _STARTMANUALTRANSFERRUNSREQUEST_TIMERANGE, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", - "__doc__": """A specification for a time range, this will request transfer runs with - run_time between start_time (inclusive) and end_time (exclusive). - - Attributes: - start_time: - Start time of the range of transfer runs. For example, - ``"2017-05-25T00:00:00+00:00"``. The start_time must be - strictly less than the end_time. Creates transfer runs where - run_time is in the range betwen start_time (inclusive) and - end_time (exlusive). - end_time: - End time of the range of transfer runs. For example, - ``"2017-05-30T00:00:00+00:00"``. The end_time must not be in - the future. Creates transfer runs where run_time is in the - range betwen start_time (inclusive) and end_time (exlusive). - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRange) - }, - ), - "DESCRIPTOR": _STARTMANUALTRANSFERRUNSREQUEST, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", - "__doc__": """A request to start manual transfer runs. - - Attributes: - parent: - Transfer configuration name in the form: - ``projects/{project_id}/transferConfigs/{config_id}`` or ``pro - jects/{project_id}/locations/{location_id}/transferConfigs/{co - nfig_id}``. - time: - The requested time specification - this can be a time range or - a specific run_time. - requested_time_range: - Time range for the transfer runs that should be started. - requested_run_time: - Specific run_time for a transfer run to be started. The - requested_run_time must not be in the future. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest) - }, -) -_sym_db.RegisterMessage(StartManualTransferRunsRequest) -_sym_db.RegisterMessage(StartManualTransferRunsRequest.TimeRange) - -StartManualTransferRunsResponse = _reflection.GeneratedProtocolMessageType( - "StartManualTransferRunsResponse", - (_message.Message,), - { - "DESCRIPTOR": _STARTMANUALTRANSFERRUNSRESPONSE, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.datatransfer_pb2", - "__doc__": """A response to start manual transfer runs. - - Attributes: - runs: - The transfer runs that were created. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse) - }, -) -_sym_db.RegisterMessage(StartManualTransferRunsResponse) - - -DESCRIPTOR._options = None -_DATASOURCE.fields_by_name["name"]._options = None -_DATASOURCE.fields_by_name["transfer_type"]._options = None -_DATASOURCE.fields_by_name["supports_multiple_transfers"]._options = None -_DATASOURCE._options = None -_GETDATASOURCEREQUEST.fields_by_name["name"]._options = None -_LISTDATASOURCESREQUEST.fields_by_name["parent"]._options = None -_LISTDATASOURCESRESPONSE.fields_by_name["next_page_token"]._options = None -_CREATETRANSFERCONFIGREQUEST.fields_by_name["parent"]._options = None -_CREATETRANSFERCONFIGREQUEST.fields_by_name["transfer_config"]._options = None -_UPDATETRANSFERCONFIGREQUEST.fields_by_name["transfer_config"]._options = None -_UPDATETRANSFERCONFIGREQUEST.fields_by_name["update_mask"]._options = None -_GETTRANSFERCONFIGREQUEST.fields_by_name["name"]._options = None -_DELETETRANSFERCONFIGREQUEST.fields_by_name["name"]._options = None -_GETTRANSFERRUNREQUEST.fields_by_name["name"]._options = None -_DELETETRANSFERRUNREQUEST.fields_by_name["name"]._options = None -_LISTTRANSFERCONFIGSREQUEST.fields_by_name["parent"]._options = None -_LISTTRANSFERCONFIGSRESPONSE.fields_by_name["transfer_configs"]._options = None -_LISTTRANSFERCONFIGSRESPONSE.fields_by_name["next_page_token"]._options = None -_LISTTRANSFERRUNSREQUEST.fields_by_name["parent"]._options = None -_LISTTRANSFERRUNSRESPONSE.fields_by_name["transfer_runs"]._options = None -_LISTTRANSFERRUNSRESPONSE.fields_by_name["next_page_token"]._options = None -_LISTTRANSFERLOGSREQUEST.fields_by_name["parent"]._options = None -_LISTTRANSFERLOGSRESPONSE.fields_by_name["transfer_messages"]._options = None -_LISTTRANSFERLOGSRESPONSE.fields_by_name["next_page_token"]._options = None -_CHECKVALIDCREDSREQUEST.fields_by_name["name"]._options = None -_SCHEDULETRANSFERRUNSREQUEST.fields_by_name["parent"]._options = None -_SCHEDULETRANSFERRUNSREQUEST.fields_by_name["start_time"]._options = None -_SCHEDULETRANSFERRUNSREQUEST.fields_by_name["end_time"]._options = None -_STARTMANUALTRANSFERRUNSREQUEST.fields_by_name["parent"]._options = None - -_DATATRANSFERSERVICE = _descriptor.ServiceDescriptor( - name="DataTransferService", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService", - file=DESCRIPTOR, - index=0, - serialized_options=b"\312A#bigquerydatatransfer.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform", - create_key=_descriptor._internal_create_key, - serialized_start=5683, - serialized_end=9718, - methods=[ - _descriptor.MethodDescriptor( - name="GetDataSource", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.GetDataSource", - index=0, - containing_service=None, - input_type=_GETDATASOURCEREQUEST, - output_type=_DATASOURCE, - serialized_options=b"\202\323\344\223\002X\022//v1/{name=projects/*/locations/*/dataSources/*}Z%\022#/v1/{name=projects/*/dataSources/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListDataSources", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.ListDataSources", - index=1, - containing_service=None, - input_type=_LISTDATASOURCESREQUEST, - output_type=_LISTDATASOURCESRESPONSE, - serialized_options=b"\202\323\344\223\002X\022//v1/{parent=projects/*/locations/*}/dataSourcesZ%\022#/v1/{parent=projects/*}/dataSources\332A\006parent", - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="CreateTransferConfig", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.CreateTransferConfig", - index=2, - containing_service=None, - input_type=_CREATETRANSFERCONFIGREQUEST, - output_type=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG, - serialized_options=b'\202\323\344\223\002\202\001"3/v1/{parent=projects/*/locations/*}/transferConfigs:\017transfer_configZ:"\'/v1/{parent=projects/*}/transferConfigs:\017transfer_config\332A\026parent,transfer_config', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="UpdateTransferConfig", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.UpdateTransferConfig", - index=3, - containing_service=None, - input_type=_UPDATETRANSFERCONFIGREQUEST, - output_type=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG, - serialized_options=b"\202\323\344\223\002\242\0012C/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}:\017transfer_configZJ27/v1/{transfer_config.name=projects/*/transferConfigs/*}:\017transfer_config\332A\033transfer_config,update_mask", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DeleteTransferConfig", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.DeleteTransferConfig", - index=4, - containing_service=None, - input_type=_DELETETRANSFERCONFIGREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b"\202\323\344\223\002`*3/v1/{name=projects/*/locations/*/transferConfigs/*}Z)*'/v1/{name=projects/*/transferConfigs/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetTransferConfig", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.GetTransferConfig", - index=5, - containing_service=None, - input_type=_GETTRANSFERCONFIGREQUEST, - output_type=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG, - serialized_options=b"\202\323\344\223\002`\0223/v1/{name=projects/*/locations/*/transferConfigs/*}Z)\022'/v1/{name=projects/*/transferConfigs/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListTransferConfigs", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.ListTransferConfigs", - index=6, - containing_service=None, - input_type=_LISTTRANSFERCONFIGSREQUEST, - output_type=_LISTTRANSFERCONFIGSRESPONSE, - serialized_options=b"\202\323\344\223\002`\0223/v1/{parent=projects/*/locations/*}/transferConfigsZ)\022'/v1/{parent=projects/*}/transferConfigs\332A\006parent", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ScheduleTransferRuns", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.ScheduleTransferRuns", - index=7, - containing_service=None, - input_type=_SCHEDULETRANSFERRUNSREQUEST, - output_type=_SCHEDULETRANSFERRUNSRESPONSE, - serialized_options=b'\210\002\001\202\323\344\223\002\204\001"B/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns:\001*Z;"6/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns:\001*\332A\032parent,start_time,end_time', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="StartManualTransferRuns", - 
full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.StartManualTransferRuns", - index=8, - containing_service=None, - input_type=_STARTMANUALTRANSFERRUNSREQUEST, - output_type=_STARTMANUALTRANSFERRUNSRESPONSE, - serialized_options=b'\202\323\344\223\002\212\001"E/v1/{parent=projects/*/locations/*/transferConfigs/*}:startManualRuns:\001*Z>"9/v1/{parent=projects/*/transferConfigs/*}:startManualRuns:\001*', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetTransferRun", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.GetTransferRun", - index=9, - containing_service=None, - input_type=_GETTRANSFERRUNREQUEST, - output_type=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERRUN, - serialized_options=b"\202\323\344\223\002n\022:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0\022./v1/{name=projects/*/transferConfigs/*/runs/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DeleteTransferRun", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.DeleteTransferRun", - index=10, - containing_service=None, - input_type=_DELETETRANSFERRUNREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b"\202\323\344\223\002n*:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0*./v1/{name=projects/*/transferConfigs/*/runs/*}\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListTransferRuns", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.ListTransferRuns", - index=11, - containing_service=None, - input_type=_LISTTRANSFERRUNSREQUEST, - output_type=_LISTTRANSFERRUNSRESPONSE, - serialized_options=b"\202\323\344\223\002n\022:/v1/{parent=projects/*/locations/*/transferConfigs/*}/runsZ0\022./v1/{parent=projects/*/transferConfigs/*}/runs\332A\006parent", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListTransferLogs", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.ListTransferLogs", - index=12, - containing_service=None, - input_type=_LISTTRANSFERLOGSREQUEST, - output_type=_LISTTRANSFERLOGSRESPONSE, - serialized_options=b"\202\323\344\223\002\214\001\022I/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogsZ?\022=/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs\332A\006parent", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="CheckValidCreds", - full_name="google.cloud.bigquery.datatransfer.v1.DataTransferService.CheckValidCreds", - index=13, - containing_service=None, - input_type=_CHECKVALIDCREDSREQUEST, - output_type=_CHECKVALIDCREDSRESPONSE, - serialized_options=b'\202\323\344\223\002~"?/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds:\001*Z8"3/v1/{name=projects/*/dataSources/*}:checkValidCreds:\001*\332A\004name', - create_key=_descriptor._internal_create_key, - ), - ], -) -_sym_db.RegisterServiceDescriptor(_DATATRANSFERSERVICE) - -DESCRIPTOR.services_by_name["DataTransferService"] = _DATATRANSFERSERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2_grpc.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2_grpc.py deleted file mode 100644 index 
f14711076cd4..000000000000 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2_grpc.py +++ /dev/null @@ -1,684 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc - -from google.cloud.bigquery_datatransfer_v1.proto import ( - datatransfer_pb2 as google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2, -) -from google.cloud.bigquery_datatransfer_v1.proto import ( - transfer_pb2 as google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class DataTransferServiceStub(object): - """The Google BigQuery Data Transfer Service API enables BigQuery users to - configure the transfer of their data from other Google Products into - BigQuery. This service contains methods that are end user exposed. It backs - up the frontend. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.GetDataSource = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetDataSource", - request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetDataSourceRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.DataSource.FromString, - ) - self.ListDataSources = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListDataSources", - request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListDataSourcesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListDataSourcesResponse.FromString, - ) - self.CreateTransferConfig = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/CreateTransferConfig", - request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.CreateTransferConfigRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.FromString, - ) - self.UpdateTransferConfig = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/UpdateTransferConfig", - request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.UpdateTransferConfigRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.FromString, - ) - self.DeleteTransferConfig = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferConfig", - request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.DeleteTransferConfigRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.GetTransferConfig = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetTransferConfig", - request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetTransferConfigRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.FromString, - ) - 
self.ListTransferConfigs = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferConfigs", - request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferConfigsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferConfigsResponse.FromString, - ) - self.ScheduleTransferRuns = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ScheduleTransferRuns", - request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ScheduleTransferRunsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ScheduleTransferRunsResponse.FromString, - ) - self.StartManualTransferRuns = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/StartManualTransferRuns", - request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.StartManualTransferRunsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.StartManualTransferRunsResponse.FromString, - ) - self.GetTransferRun = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetTransferRun", - request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetTransferRunRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2.TransferRun.FromString, - ) - self.DeleteTransferRun = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferRun", - request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.DeleteTransferRunRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.ListTransferRuns = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferRuns", - request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferRunsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferRunsResponse.FromString, - ) - self.ListTransferLogs = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferLogs", - request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferLogsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferLogsResponse.FromString, - ) - self.CheckValidCreds = channel.unary_unary( - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/CheckValidCreds", - request_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.CheckValidCredsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.CheckValidCredsResponse.FromString, - ) - - -class DataTransferServiceServicer(object): - """The Google BigQuery Data Transfer Service API enables BigQuery users to - configure the transfer of their data from other Google Products into - BigQuery. 
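A sketch of how this hand-written stub could be constructed over an authenticated channel. The host and OAuth scope are taken from the service options earlier in this file; the `google-auth` helper used here is an assumption about the caller's setup, not part of the generated module:

```py
import google.auth
import google.auth.transport.grpc
import google.auth.transport.requests

from google.cloud.bigquery_datatransfer_v1.proto import datatransfer_pb2_grpc

# Application Default Credentials with the cloud-platform scope declared
# on the DataTransferService descriptor.
credentials, _ = google.auth.default(
    scopes=["https://www.googleapis.com/auth/cloud-platform"]
)
channel = google.auth.transport.grpc.secure_authorized_channel(
    credentials,
    google.auth.transport.requests.Request(),
    "bigquerydatatransfer.googleapis.com:443",
)
stub = datatransfer_pb2_grpc.DataTransferServiceStub(channel)
```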
This service contains methods that are end user exposed. It backs - up the frontend. - """ - - def GetDataSource(self, request, context): - """Retrieves a supported data source and returns its settings, - which can be used for UI rendering. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListDataSources(self, request, context): - """Lists supported data sources and returns their settings, - which can be used for UI rendering. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateTransferConfig(self, request, context): - """Creates a new data transfer configuration. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateTransferConfig(self, request, context): - """Updates a data transfer configuration. - All fields must be set, even if they are not updated. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteTransferConfig(self, request, context): - """Deletes a data transfer configuration, - including any associated transfer runs and logs. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetTransferConfig(self, request, context): - """Returns information about a data transfer config. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListTransferConfigs(self, request, context): - """Returns information about all data transfers in the project. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ScheduleTransferRuns(self, request, context): - """Creates transfer runs for a time range [start_time, end_time]. - For each date - or whatever granularity the data source supports - in the - range, one transfer run is created. - Note that runs are created per UTC time in the time range. - DEPRECATED: use StartManualTransferRuns instead. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def StartManualTransferRuns(self, request, context): - """Start manual transfer runs to be executed now with schedule_time equal to - current time. The transfer runs can be created for a time range where the - run_time is between start_time (inclusive) and end_time (exclusive), or for - a specific run_time. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetTransferRun(self, request, context): - """Returns information about the particular transfer run. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteTransferRun(self, request, context): - """Deletes the specified transfer run. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListTransferRuns(self, request, context): - """Returns information about running and completed jobs. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListTransferLogs(self, request, context): - """Returns user facing log messages for the data transfer run. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CheckValidCreds(self, request, context): - """Returns true if valid credentials exist for the given data source and - requesting user. - Some data sources doesn't support service account, so we need to talk to - them on behalf of the end user. This API just checks whether we have OAuth - token for the particular user, which is a pre-requisite before user can - create a transfer config. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_DataTransferServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "GetDataSource": grpc.unary_unary_rpc_method_handler( - servicer.GetDataSource, - request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetDataSourceRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.DataSource.SerializeToString, - ), - "ListDataSources": grpc.unary_unary_rpc_method_handler( - servicer.ListDataSources, - request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListDataSourcesRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListDataSourcesResponse.SerializeToString, - ), - "CreateTransferConfig": grpc.unary_unary_rpc_method_handler( - servicer.CreateTransferConfig, - request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.CreateTransferConfigRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.SerializeToString, - ), - "UpdateTransferConfig": grpc.unary_unary_rpc_method_handler( - servicer.UpdateTransferConfig, - request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.UpdateTransferConfigRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.SerializeToString, - ), - "DeleteTransferConfig": grpc.unary_unary_rpc_method_handler( - servicer.DeleteTransferConfig, - request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.DeleteTransferConfigRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "GetTransferConfig": grpc.unary_unary_rpc_method_handler( - servicer.GetTransferConfig, - request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetTransferConfigRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.SerializeToString, - ), - 
"ListTransferConfigs": grpc.unary_unary_rpc_method_handler( - servicer.ListTransferConfigs, - request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferConfigsRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferConfigsResponse.SerializeToString, - ), - "ScheduleTransferRuns": grpc.unary_unary_rpc_method_handler( - servicer.ScheduleTransferRuns, - request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ScheduleTransferRunsRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ScheduleTransferRunsResponse.SerializeToString, - ), - "StartManualTransferRuns": grpc.unary_unary_rpc_method_handler( - servicer.StartManualTransferRuns, - request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.StartManualTransferRunsRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.StartManualTransferRunsResponse.SerializeToString, - ), - "GetTransferRun": grpc.unary_unary_rpc_method_handler( - servicer.GetTransferRun, - request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetTransferRunRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2.TransferRun.SerializeToString, - ), - "DeleteTransferRun": grpc.unary_unary_rpc_method_handler( - servicer.DeleteTransferRun, - request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.DeleteTransferRunRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "ListTransferRuns": grpc.unary_unary_rpc_method_handler( - servicer.ListTransferRuns, - request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferRunsRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferRunsResponse.SerializeToString, - ), - "ListTransferLogs": grpc.unary_unary_rpc_method_handler( - servicer.ListTransferLogs, - request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferLogsRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferLogsResponse.SerializeToString, - ), - "CheckValidCreds": grpc.unary_unary_rpc_method_handler( - servicer.CheckValidCreds, - request_deserializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.CheckValidCredsRequest.FromString, - response_serializer=google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.CheckValidCredsResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.cloud.bigquery.datatransfer.v1.DataTransferService", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) - - -# This class is part of an EXPERIMENTAL API. -class DataTransferService(object): - """The Google BigQuery Data Transfer Service API enables BigQuery users to - configure the transfer of their data from other Google Products into - BigQuery. This service contains methods that are end user exposed. It backs - up the frontend. 
- """ - - @staticmethod - def GetDataSource( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetDataSource", - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetDataSourceRequest.SerializeToString, - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.DataSource.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListDataSources( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListDataSources", - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListDataSourcesRequest.SerializeToString, - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListDataSourcesResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def CreateTransferConfig( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/CreateTransferConfig", - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.CreateTransferConfigRequest.SerializeToString, - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def UpdateTransferConfig( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/UpdateTransferConfig", - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.UpdateTransferConfigRequest.SerializeToString, - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DeleteTransferConfig( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferConfig", - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.DeleteTransferConfigRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetTransferConfig( - request, - target, - 
options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetTransferConfig", - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetTransferConfigRequest.SerializeToString, - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2.TransferConfig.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListTransferConfigs( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferConfigs", - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferConfigsRequest.SerializeToString, - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferConfigsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ScheduleTransferRuns( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ScheduleTransferRuns", - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ScheduleTransferRunsRequest.SerializeToString, - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ScheduleTransferRunsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def StartManualTransferRuns( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/StartManualTransferRuns", - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.StartManualTransferRunsRequest.SerializeToString, - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.StartManualTransferRunsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetTransferRun( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetTransferRun", - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.GetTransferRunRequest.SerializeToString, - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_transfer__pb2.TransferRun.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DeleteTransferRun( 
- request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferRun", - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.DeleteTransferRunRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListTransferRuns( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferRuns", - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferRunsRequest.SerializeToString, - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferRunsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListTransferLogs( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferLogs", - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferLogsRequest.SerializeToString, - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.ListTransferLogsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def CheckValidCreds( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.cloud.bigquery.datatransfer.v1.DataTransferService/CheckValidCreds", - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.CheckValidCredsRequest.SerializeToString, - google_dot_cloud_dot_bigquery__datatransfer__v1_dot_proto_dot_datatransfer__pb2.CheckValidCredsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2.py deleted file mode 100644 index 9f9c1ca2efd7..000000000000 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2.py +++ /dev/null @@ -1,1353 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
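The module deleted above also exposed an experimental per-RPC surface: each static method on `DataTransferService` forwards straight to `grpc.experimental.unary_unary` with the serializers shown. A hedged sketch of how one of those helpers could be invoked against the public endpoint (the endpoint, project ID, and credentials below are illustrative assumptions, not taken from the diff):

```py
# Hedged sketch (not part of the diff): calling one of the removed experimental
# static helpers. A real call would also need call credentials for auth.
import grpc
from google.cloud.bigquery_datatransfer_v1.proto import (
    datatransfer_pb2,
    datatransfer_pb2_grpc,
)

request = datatransfer_pb2.ListDataSourcesRequest(parent="projects/my-project")
response = datatransfer_pb2_grpc.DataTransferService.ListDataSources(
    request,
    "bigquerydatatransfer.googleapis.com:443",
    channel_credentials=grpc.ssl_channel_credentials(),
)
```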
-# source: google/cloud/bigquery_datatransfer_v1/proto/transfer.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/bigquery_datatransfer_v1/proto/transfer.proto", - package="google.cloud.bigquery.datatransfer.v1", - syntax="proto3", - serialized_options=b"\n)com.google.cloud.bigquery.datatransfer.v1B\rTransferProtoP\001ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\242\002\005GCBDT\252\002%Google.Cloud.BigQuery.DataTransfer.V1\312\002%Google\\Cloud\\BigQuery\\DataTransfer\\V1\352\002)Google::Cloud::Bigquery::DataTransfer::V1", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n:google/cloud/bigquery_datatransfer_v1/proto/transfer.proto\x12%google.cloud.bigquery.datatransfer.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"0\n\x10\x45mailPreferences\x12\x1c\n\x14\x65nable_failure_email\x18\x01 \x01(\x08"\x90\x01\n\x0fScheduleOptions\x12\x1f\n\x17\x64isable_auto_scheduling\x18\x03 \x01(\x08\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xd7\x06\n\x0eTransferConfig\x12\x0c\n\x04name\x18\x01 \x01(\t\x12 \n\x16\x64\x65stination_dataset_id\x18\x02 \x01(\tH\x00\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x16\n\x0e\x64\x61ta_source_id\x18\x05 \x01(\t\x12\'\n\x06params\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x10\n\x08schedule\x18\x07 \x01(\t\x12P\n\x10schedule_options\x18\x18 \x01(\x0b\x32\x36.google.cloud.bigquery.datatransfer.v1.ScheduleOptions\x12 \n\x18\x64\x61ta_refresh_window_days\x18\x0c \x01(\x05\x12\x10\n\x08\x64isabled\x18\r \x01(\x08\x12\x34\n\x0bupdate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x36\n\rnext_run_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12H\n\x05state\x18\n \x01(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferStateB\x03\xe0\x41\x03\x12\x0f\n\x07user_id\x18\x0b \x01(\x03\x12\x1b\n\x0e\x64\x61taset_region\x18\x0e \x01(\tB\x03\xe0\x41\x03\x12!\n\x19notification_pubsub_topic\x18\x0f \x01(\t\x12R\n\x11\x65mail_preferences\x18\x12 \x01(\x0b\x32\x37.google.cloud.bigquery.datatransfer.v1.EmailPreferences:\xb9\x01\xea\x41\xb5\x01\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12\x34projects/{project}/transferConfigs/{transfer_config}\x12Iprojects/{project}/locations/{location}/transferConfigs/{transfer_config}B\r\n\x0b\x64\x65stination"\xfa\x06\n\x0bTransferRun\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x31\n\rschedule_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08run_time\x18\n 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12(\n\x0c\x65rror_status\x18\x15 \x01(\x0b\x32\x12.google.rpc.Status\x12\x33\n\nstart_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x31\n\x08\x65nd_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x34\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12,\n\x06params\x18\t \x01(\x0b\x32\x17.google.protobuf.StructB\x03\xe0\x41\x03\x12%\n\x16\x64\x65stination_dataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x03H\x00\x12\x1b\n\x0e\x64\x61ta_source_id\x18\x07 \x01(\tB\x03\xe0\x41\x03\x12\x43\n\x05state\x18\x08 \x01(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferState\x12\x0f\n\x07user_id\x18\x0b \x01(\x03\x12\x15\n\x08schedule\x18\x0c \x01(\tB\x03\xe0\x41\x03\x12&\n\x19notification_pubsub_topic\x18\x17 \x01(\tB\x03\xe0\x41\x03\x12W\n\x11\x65mail_preferences\x18\x19 \x01(\x0b\x32\x37.google.cloud.bigquery.datatransfer.v1.EmailPreferencesB\x03\xe0\x41\x03:\xc4\x01\xea\x41\xc0\x01\n\'bigquerydatatransfer.googleapis.com/Run\x12?projects/{project}/transferConfigs/{transfer_config}/runs/{run}\x12Tprojects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}B\r\n\x0b\x64\x65stination"\x8a\x02\n\x0fTransferMessage\x12\x30\n\x0cmessage_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12X\n\x08severity\x18\x02 \x01(\x0e\x32\x46.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity\x12\x14\n\x0cmessage_text\x18\x03 \x01(\t"U\n\x0fMessageSeverity\x12 \n\x1cMESSAGE_SEVERITY_UNSPECIFIED\x10\x00\x12\x08\n\x04INFO\x10\x01\x12\x0b\n\x07WARNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03*K\n\x0cTransferType\x12\x1d\n\x19TRANSFER_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41TCH\x10\x01\x12\r\n\tSTREAMING\x10\x02\x1a\x02\x18\x01*s\n\rTransferState\x12\x1e\n\x1aTRANSFER_STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tSUCCEEDED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05\x12\r\n\tCANCELLED\x10\x06\x42\x93\x02\n)com.google.cloud.bigquery.datatransfer.v1B\rTransferProtoP\x01ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\xa2\x02\x05GCBDT\xaa\x02%Google.Cloud.BigQuery.DataTransfer.V1\xca\x02%Google\\Cloud\\BigQuery\\DataTransfer\\V1\xea\x02)Google::Cloud::Bigquery::DataTransfer::V1b\x06proto3', - dependencies=[ - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_protobuf_dot_struct__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_rpc_dot_status__pb2.DESCRIPTOR, - ], -) - -_TRANSFERTYPE = _descriptor.EnumDescriptor( - name="TransferType", - full_name="google.cloud.bigquery.datatransfer.v1.TransferType", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="TRANSFER_TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="BATCH", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="STREAMING", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=b"\030\001", - serialized_start=2466, - serialized_end=2541, -) -_sym_db.RegisterEnumDescriptor(_TRANSFERTYPE) - 
-TransferType = enum_type_wrapper.EnumTypeWrapper(_TRANSFERTYPE) -_TRANSFERSTATE = _descriptor.EnumDescriptor( - name="TransferState", - full_name="google.cloud.bigquery.datatransfer.v1.TransferState", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="TRANSFER_STATE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="PENDING", - index=1, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="RUNNING", - index=2, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="SUCCEEDED", - index=3, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="FAILED", - index=4, - number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="CANCELLED", - index=5, - number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2543, - serialized_end=2658, -) -_sym_db.RegisterEnumDescriptor(_TRANSFERSTATE) - -TransferState = enum_type_wrapper.EnumTypeWrapper(_TRANSFERSTATE) -TRANSFER_TYPE_UNSPECIFIED = 0 -BATCH = 1 -STREAMING = 2 -TRANSFER_STATE_UNSPECIFIED = 0 -PENDING = 2 -RUNNING = 3 -SUCCEEDED = 4 -FAILED = 5 -CANCELLED = 6 - - -_TRANSFERMESSAGE_MESSAGESEVERITY = _descriptor.EnumDescriptor( - name="MessageSeverity", - full_name="google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="MESSAGE_SEVERITY_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="INFO", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="WARNING", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ERROR", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2379, - serialized_end=2464, -) -_sym_db.RegisterEnumDescriptor(_TRANSFERMESSAGE_MESSAGESEVERITY) - - -_EMAILPREFERENCES = _descriptor.Descriptor( - name="EmailPreferences", - full_name="google.cloud.bigquery.datatransfer.v1.EmailPreferences", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="enable_failure_email", - full_name="google.cloud.bigquery.datatransfer.v1.EmailPreferences.enable_failure_email", - index=0, - number=1, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=249, - serialized_end=297, -) - - -_SCHEDULEOPTIONS = _descriptor.Descriptor( - name="ScheduleOptions", - full_name="google.cloud.bigquery.datatransfer.v1.ScheduleOptions", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="disable_auto_scheduling", - full_name="google.cloud.bigquery.datatransfer.v1.ScheduleOptions.disable_auto_scheduling", - index=0, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="start_time", - full_name="google.cloud.bigquery.datatransfer.v1.ScheduleOptions.start_time", - index=1, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="end_time", - full_name="google.cloud.bigquery.datatransfer.v1.ScheduleOptions.end_time", - index=2, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=300, - serialized_end=444, -) - - -_TRANSFERCONFIG = _descriptor.Descriptor( - name="TransferConfig", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="destination_dataset_id", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.destination_dataset_id", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="display_name", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.display_name", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="data_source_id", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.data_source_id", - index=3, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="params", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.params", - index=4, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="schedule", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.schedule", - index=5, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="schedule_options", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.schedule_options", - index=6, - number=24, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="data_refresh_window_days", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.data_refresh_window_days", - index=7, - number=12, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="disabled", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.disabled", - index=8, - number=13, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="update_time", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.update_time", - index=9, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_run_time", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.next_run_time", - index=10, - 
number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="state", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.state", - index=11, - number=10, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="user_id", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.user_id", - index=12, - number=11, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="dataset_region", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.dataset_region", - index=13, - number=14, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="notification_pubsub_topic", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.notification_pubsub_topic", - index=14, - number=15, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="email_preferences", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.email_preferences", - index=15, - number=18, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"\352A\265\001\n2bigquerydatatransfer.googleapis.com/TransferConfig\0224projects/{project}/transferConfigs/{transfer_config}\022Iprojects/{project}/locations/{location}/transferConfigs/{transfer_config}", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="destination", - full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.destination", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=447, - serialized_end=1302, -) - - -_TRANSFERRUN = _descriptor.Descriptor( - name="TransferRun", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( 
- name="name", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="schedule_time", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.schedule_time", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="run_time", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.run_time", - index=2, - number=10, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="error_status", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.error_status", - index=3, - number=21, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="start_time", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.start_time", - index=4, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="end_time", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.end_time", - index=5, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="update_time", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.update_time", - index=6, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="params", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.params", - index=7, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - 
_descriptor.FieldDescriptor( - name="destination_dataset_id", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.destination_dataset_id", - index=8, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="data_source_id", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.data_source_id", - index=9, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="state", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.state", - index=10, - number=8, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="user_id", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.user_id", - index=11, - number=11, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="schedule", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.schedule", - index=12, - number=12, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="notification_pubsub_topic", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.notification_pubsub_topic", - index=13, - number=23, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="email_preferences", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.email_preferences", - index=14, - number=25, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"\352A\300\001\n'bigquerydatatransfer.googleapis.com/Run\022?projects/{project}/transferConfigs/{transfer_config}/runs/{run}\022Tprojects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}", - 
is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="destination", - full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.destination", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=1305, - serialized_end=2195, -) - - -_TRANSFERMESSAGE = _descriptor.Descriptor( - name="TransferMessage", - full_name="google.cloud.bigquery.datatransfer.v1.TransferMessage", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="message_time", - full_name="google.cloud.bigquery.datatransfer.v1.TransferMessage.message_time", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="severity", - full_name="google.cloud.bigquery.datatransfer.v1.TransferMessage.severity", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="message_text", - full_name="google.cloud.bigquery.datatransfer.v1.TransferMessage.message_text", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_TRANSFERMESSAGE_MESSAGESEVERITY,], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2198, - serialized_end=2464, -) - -_SCHEDULEOPTIONS.fields_by_name[ - "start_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_SCHEDULEOPTIONS.fields_by_name[ - "end_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TRANSFERCONFIG.fields_by_name[ - "params" -].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT -_TRANSFERCONFIG.fields_by_name["schedule_options"].message_type = _SCHEDULEOPTIONS -_TRANSFERCONFIG.fields_by_name[ - "update_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TRANSFERCONFIG.fields_by_name[ - "next_run_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TRANSFERCONFIG.fields_by_name["state"].enum_type = _TRANSFERSTATE -_TRANSFERCONFIG.fields_by_name["email_preferences"].message_type = _EMAILPREFERENCES -_TRANSFERCONFIG.oneofs_by_name["destination"].fields.append( - _TRANSFERCONFIG.fields_by_name["destination_dataset_id"] -) -_TRANSFERCONFIG.fields_by_name[ - "destination_dataset_id" -].containing_oneof = _TRANSFERCONFIG.oneofs_by_name["destination"] -_TRANSFERRUN.fields_by_name[ - "schedule_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TRANSFERRUN.fields_by_name[ - "run_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TRANSFERRUN.fields_by_name[ - 
"error_status" -].message_type = google_dot_rpc_dot_status__pb2._STATUS -_TRANSFERRUN.fields_by_name[ - "start_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TRANSFERRUN.fields_by_name[ - "end_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TRANSFERRUN.fields_by_name[ - "update_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TRANSFERRUN.fields_by_name[ - "params" -].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT -_TRANSFERRUN.fields_by_name["state"].enum_type = _TRANSFERSTATE -_TRANSFERRUN.fields_by_name["email_preferences"].message_type = _EMAILPREFERENCES -_TRANSFERRUN.oneofs_by_name["destination"].fields.append( - _TRANSFERRUN.fields_by_name["destination_dataset_id"] -) -_TRANSFERRUN.fields_by_name[ - "destination_dataset_id" -].containing_oneof = _TRANSFERRUN.oneofs_by_name["destination"] -_TRANSFERMESSAGE.fields_by_name[ - "message_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TRANSFERMESSAGE.fields_by_name["severity"].enum_type = _TRANSFERMESSAGE_MESSAGESEVERITY -_TRANSFERMESSAGE_MESSAGESEVERITY.containing_type = _TRANSFERMESSAGE -DESCRIPTOR.message_types_by_name["EmailPreferences"] = _EMAILPREFERENCES -DESCRIPTOR.message_types_by_name["ScheduleOptions"] = _SCHEDULEOPTIONS -DESCRIPTOR.message_types_by_name["TransferConfig"] = _TRANSFERCONFIG -DESCRIPTOR.message_types_by_name["TransferRun"] = _TRANSFERRUN -DESCRIPTOR.message_types_by_name["TransferMessage"] = _TRANSFERMESSAGE -DESCRIPTOR.enum_types_by_name["TransferType"] = _TRANSFERTYPE -DESCRIPTOR.enum_types_by_name["TransferState"] = _TRANSFERSTATE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -EmailPreferences = _reflection.GeneratedProtocolMessageType( - "EmailPreferences", - (_message.Message,), - { - "DESCRIPTOR": _EMAILPREFERENCES, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.transfer_pb2", - "__doc__": """Represents preferences for sending email notifications for transfer - run events. - - Attributes: - enable_failure_email: - If true, email notifications will be sent on transfer run - failures. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.EmailPreferences) - }, -) -_sym_db.RegisterMessage(EmailPreferences) - -ScheduleOptions = _reflection.GeneratedProtocolMessageType( - "ScheduleOptions", - (_message.Message,), - { - "DESCRIPTOR": _SCHEDULEOPTIONS, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.transfer_pb2", - "__doc__": """Options customizing the data transfer schedule. - - Attributes: - disable_auto_scheduling: - If true, automatic scheduling of data transfer runs for this - configuration will be disabled. The runs can be started on ad- - hoc basis using StartManualTransferRuns API. When automatic - scheduling is disabled, the TransferConfig.schedule field will - be ignored. - start_time: - Specifies time to start scheduling transfer runs. The first - run will be scheduled at or after the start time according to - a recurrence pattern defined in the schedule string. The start - time can be changed at any moment. The time when a data - transfer can be trigerred manually is not limited by this - option. - end_time: - Defines time to stop scheduling transfer runs. A transfer run - cannot be scheduled at or after the end time. The end time can - be changed at any moment. The time when a data transfer can be - trigerred manually is not limited by this option. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ScheduleOptions) - }, -) -_sym_db.RegisterMessage(ScheduleOptions) - -TransferConfig = _reflection.GeneratedProtocolMessageType( - "TransferConfig", - (_message.Message,), - { - "DESCRIPTOR": _TRANSFERCONFIG, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.transfer_pb2", - "__doc__": """Represents a data transfer configuration. A transfer configuration - contains all metadata needed to perform a data transfer. For example, - ``destination_dataset_id`` specifies where data should be stored. When - a new transfer configuration is created, the specified - ``destination_dataset_id`` is created when needed and shared with the - appropriate data source service account. - - Attributes: - name: - The resource name of the transfer config. Transfer config - names have the form of ``projects/{project_id}/locations/{regi - on}/transferConfigs/{config_id}``. The name is automatically - generated based on the config_id specified in - CreateTransferConfigRequest along with project_id and region. - If config_id is not provided, usually a uuid, even though it - is not guaranteed or required, will be generated for - config_id. - destination: - The desination of the transfer config. - destination_dataset_id: - The BigQuery target dataset id. - display_name: - User specified display name for the data transfer. - data_source_id: - Data source id. Cannot be changed once data transfer is - created. - params: - Data transfer specific parameters. - schedule: - Data transfer schedule. If the data source does not support a - custom schedule, this should be empty. If it is empty, the - default value for the data source will be used. The specified - times are in UTC. Examples of valid format: ``1st,3rd monday - of month 15:30``, ``every wed,fri of jan,jun 13:15``, and - ``first sunday of quarter 00:00``. See more explanation about - the format here: https://cloud.google.com/appengine/docs/flexi - ble/python/scheduling-jobs-with-cron-yaml#the_schedule_format - NOTE: the granularity should be at least 8 hours, or less - frequent. - schedule_options: - Options customizing the data transfer schedule. - data_refresh_window_days: - The number of days to look back to automatically refresh the - data. For example, if ``data_refresh_window_days = 10``, then - every day BigQuery reingests data for [today-10, today-1], - rather than ingesting data for just [today-1]. Only valid if - the data source supports the feature. Set the value to 0 to - use the default value. - disabled: - Is this config disabled. When set to true, no runs are - scheduled for a given transfer. - update_time: - Output only. Data transfer modification time. Ignored by - server on input. - next_run_time: - Output only. Next time when data transfer will run. - state: - Output only. State of the most recently updated transfer run. - user_id: - Deprecated. Unique ID of the user on whose behalf transfer is - done. - dataset_region: - Output only. Region in which BigQuery dataset is located. - notification_pubsub_topic: - Pub/Sub topic where notifications will be sent after transfer - runs associated with this transfer config finish. - email_preferences: - Email notifications will be sent according to these - preferences to the email address of the user who owns this - transfer config. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.TransferConfig) - }, -) -_sym_db.RegisterMessage(TransferConfig) - -TransferRun = _reflection.GeneratedProtocolMessageType( - "TransferRun", - (_message.Message,), - { - "DESCRIPTOR": _TRANSFERRUN, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.transfer_pb2", - "__doc__": """Represents a data transfer run. - - Attributes: - name: - The resource name of the transfer run. Transfer run names have - the form ``projects/{project_id}/locations/{location}/transfer - Configs/{config_id}/runs/{run_id}``. The name is ignored when - creating a transfer run. - schedule_time: - Minimum time after which a transfer run can be started. - run_time: - For batch transfer runs, specifies the date and time of the - data should be ingested. - error_status: - Status of the transfer run. - start_time: - Output only. Time when transfer run was started. Parameter - ignored by server for input requests. - end_time: - Output only. Time when transfer run ended. Parameter ignored - by server for input requests. - update_time: - Output only. Last time the data transfer run state was - updated. - params: - Output only. Data transfer specific parameters. - destination: - Data transfer destination. - destination_dataset_id: - Output only. The BigQuery target dataset id. - data_source_id: - Output only. Data source id. - state: - Data transfer run state. Ignored for input requests. - user_id: - Deprecated. Unique ID of the user on whose behalf transfer is - done. - schedule: - Output only. Describes the schedule of this transfer run if it - was created as part of a regular schedule. For batch transfer - runs that are scheduled manually, this is empty. NOTE: the - system might choose to delay the schedule depending on the - current load, so ``schedule_time`` doesn’t always match this. - notification_pubsub_topic: - Output only. Pub/Sub topic where a notification will be sent - after this transfer run finishes - email_preferences: - Output only. Email notifications will be sent according to - these preferences to the email address of the user who owns - the transfer config this run was derived from. - """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.TransferRun) - }, -) -_sym_db.RegisterMessage(TransferRun) - -TransferMessage = _reflection.GeneratedProtocolMessageType( - "TransferMessage", - (_message.Message,), - { - "DESCRIPTOR": _TRANSFERMESSAGE, - "__module__": "google.cloud.bigquery_datatransfer_v1.proto.transfer_pb2", - "__doc__": """Represents a user facing message for a particular data transfer run. - - Attributes: - message_time: - Time when message was logged. - severity: - Message severity. - message_text: - Message text. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.TransferMessage) - }, -) -_sym_db.RegisterMessage(TransferMessage) - - -DESCRIPTOR._options = None -_TRANSFERTYPE._options = None -_TRANSFERCONFIG.fields_by_name["update_time"]._options = None -_TRANSFERCONFIG.fields_by_name["next_run_time"]._options = None -_TRANSFERCONFIG.fields_by_name["state"]._options = None -_TRANSFERCONFIG.fields_by_name["dataset_region"]._options = None -_TRANSFERCONFIG._options = None -_TRANSFERRUN.fields_by_name["start_time"]._options = None -_TRANSFERRUN.fields_by_name["end_time"]._options = None -_TRANSFERRUN.fields_by_name["update_time"]._options = None -_TRANSFERRUN.fields_by_name["params"]._options = None -_TRANSFERRUN.fields_by_name["destination_dataset_id"]._options = None -_TRANSFERRUN.fields_by_name["data_source_id"]._options = None -_TRANSFERRUN.fields_by_name["schedule"]._options = None -_TRANSFERRUN.fields_by_name["notification_pubsub_topic"]._options = None -_TRANSFERRUN.fields_by_name["email_preferences"]._options = None -_TRANSFERRUN._options = None -# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2_grpc.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2_grpc.py deleted file mode 100644 index 8a9393943bdf..000000000000 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types.py deleted file mode 100644 index 5a4afb7d3c39..000000000000 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/types.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -from __future__ import absolute_import -import sys - -from google.api_core.protobuf_helpers import get_messages - -from google.cloud.bigquery_datatransfer_v1.proto import datatransfer_pb2 -from google.cloud.bigquery_datatransfer_v1.proto import transfer_pb2 -from google.protobuf import any_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import struct_pb2 -from google.protobuf import timestamp_pb2 -from google.protobuf import wrappers_pb2 -from google.rpc import status_pb2 - - -_shared_modules = [ - any_pb2, - duration_pb2, - empty_pb2, - field_mask_pb2, - struct_pb2, - timestamp_pb2, - wrappers_pb2, - status_pb2, -] - -_local_modules = [ - datatransfer_pb2, - transfer_pb2, -] - -names = [] - -for module in _shared_modules: # pragma: NO COVER - for name, message in get_messages(module).items(): - setattr(sys.modules[__name__], name, message) - names.append(name) -for module in _local_modules: - for name, message in get_messages(module).items(): - message.__module__ = "google.cloud.bigquery_datatransfer_v1.types" - setattr(sys.modules[__name__], name, message) - names.append(name) - - -__all__ = tuple(sorted(names)) diff --git a/packages/google-cloud-bigquery-datatransfer/mypy.ini b/packages/google-cloud-bigquery-datatransfer/mypy.ini new file mode 100644 index 000000000000..4505b485436b --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.6 +namespace_packages = True diff --git a/packages/google-cloud-bigquery-datatransfer/noxfile.py b/packages/google-cloud-bigquery-datatransfer/noxfile.py index 402614d32fe7..ee87f536213f 100644 --- a/packages/google-cloud-bigquery-datatransfer/noxfile.py +++ b/packages/google-cloud-bigquery-datatransfer/noxfile.py @@ -27,8 +27,8 @@ BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.8"] -UNIT_TEST_PYTHON_VERSIONS = ["2.7", "3.5", "3.6", "3.7", "3.8"] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"] @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -70,6 +70,8 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. + session.install("asyncmock", "pytest-asyncio") + session.install("mock", "pytest", "pytest-cov") session.install("-e", ".") @@ -139,7 +141,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. 
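The `types.py` module removed above built its flat namespace by pulling every message class out of the listed `*_pb2` modules with `google.api_core.protobuf_helpers.get_messages`; in the 2.0 layout the equivalent classes are exposed under `datatransfer_v1.types` instead. A minimal sketch of that aggregation helper, using a shared protobuf module purely as an illustration:

```py
# Illustrative sketch only (not part of the diff): get_messages() returns a
# dict mapping message names to their generated classes for a *_pb2 module.
from google.api_core.protobuf_helpers import get_messages
from google.protobuf import timestamp_pb2

for name, message_class in get_messages(timestamp_pb2).items():
    print(name, message_class)  # e.g. "Timestamp" <class '...Timestamp'>
```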
""" session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=70") + session.run("coverage", "report", "--show-missing", "--fail-under=99") session.run("coverage", "erase") diff --git a/packages/google-cloud-bigquery-datatransfer/samples/create_scheduled_query.py b/packages/google-cloud-bigquery-datatransfer/samples/create_scheduled_query.py index 52c34e8eae78..a05cfd13291d 100644 --- a/packages/google-cloud-bigquery-datatransfer/samples/create_scheduled_query.py +++ b/packages/google-cloud-bigquery-datatransfer/samples/create_scheduled_query.py @@ -20,10 +20,9 @@ def sample_create_transfer_config(project_id, dataset_id, authorization_code=""): # [START bigquerydatatransfer_create_scheduled_query] - from google.cloud import bigquery_datatransfer_v1 - import google.protobuf.json_format + from google.cloud.bigquery import datatransfer_v1 - client = bigquery_datatransfer_v1.DataTransferServiceClient() + client = datatransfer_v1.DataTransferServiceClient() # TODO(developer): Set the project_id to the project that contains the # destination dataset. @@ -53,26 +52,27 @@ def sample_create_transfer_config(project_id, dataset_id, authorization_code="") 17 as some_integer """ - parent = client.project_path(project_id) - - transfer_config = google.protobuf.json_format.ParseDict( - { - "destination_dataset_id": dataset_id, - "display_name": "Your Scheduled Query Name", - "data_source_id": "scheduled_query", - "params": { - "query": query_string, - "destination_table_name_template": "your_table_{run_date}", - "write_disposition": "WRITE_TRUNCATE", - "partitioning_field": "", - }, - "schedule": "every 24 hours", + parent = f"projects/{project_id}" + + transfer_config = datatransfer_v1.types.TransferConfig( + destination_dataset_id=dataset_id, + display_name="Your Scheduled Query Name", + data_source_id="scheduled_query", + params={ + "query": query_string, + "destination_table_name_template": "your_table_{run_date}", + "write_disposition": "WRITE_TRUNCATE", + "partitioning_field": "", }, - bigquery_datatransfer_v1.types.TransferConfig(), + schedule="every 24 hours", ) response = client.create_transfer_config( - parent, transfer_config, authorization_code=authorization_code + request={ + "parent": parent, + "transfer_config": transfer_config, + "authorization_code": authorization_code, + } ) print("Created scheduled query '{}'".format(response.name)) diff --git a/packages/google-cloud-bigquery-datatransfer/samples/snippets/quickstart.py b/packages/google-cloud-bigquery-datatransfer/samples/snippets/quickstart.py index a16a0c9cb624..347ccdbf5f7c 100644 --- a/packages/google-cloud-bigquery-datatransfer/samples/snippets/quickstart.py +++ b/packages/google-cloud-bigquery-datatransfer/samples/snippets/quickstart.py @@ -15,21 +15,22 @@ # limitations under the License. -def run_quickstart(): +def run_quickstart(project="my-project"): # [START bigquerydatatransfer_quickstart] - from google.cloud import bigquery_datatransfer + from google.cloud.bigquery import datatransfer - client = bigquery_datatransfer.DataTransferServiceClient() + client = datatransfer.DataTransferServiceClient() - project = 'my-project' # TODO: Update to your project ID. + # TODO: Update to your project ID. + # project = "my-project" # Get the full path to your project. - parent = client.project_path(project) + parent = f"projects/{project}" print('Supported Data Sources:') # Iterate over all possible data sources. 
- for data_source in client.list_data_sources(parent): + for data_source in client.list_data_sources(parent=parent): print('{}:'.format(data_source.display_name)) print('\tID: {}'.format(data_source.data_source_id)) print('\tFull path: {}'.format(data_source.name)) diff --git a/packages/google-cloud-bigquery-datatransfer/samples/snippets/quickstart_test.py b/packages/google-cloud-bigquery-datatransfer/samples/snippets/quickstart_test.py index cd397b814871..387c2e8b6066 100644 --- a/packages/google-cloud-bigquery-datatransfer/samples/snippets/quickstart_test.py +++ b/packages/google-cloud-bigquery-datatransfer/samples/snippets/quickstart_test.py @@ -14,7 +14,6 @@ import os -import mock import pytest import quickstart @@ -24,18 +23,13 @@ @pytest.fixture -def mock_project_path(): +def mock_project_id(): """Mock out project and replace with project from environment.""" - project_patch = mock.patch( - 'google.cloud.bigquery_datatransfer.DataTransferServiceClient.' - 'project_path') - with project_patch as project_mock: - project_mock.return_value = 'projects/{}'.format(PROJECT) - yield project_mock + return PROJECT -def test_quickstart(capsys, mock_project_path): - quickstart.run_quickstart() +def test_quickstart(capsys, mock_project_id): + quickstart.run_quickstart(mock_project_id) out, _ = capsys.readouterr() assert 'Supported Data Sources:' in out diff --git a/packages/google-cloud-bigquery-datatransfer/samples/tests/conftest.py b/packages/google-cloud-bigquery-datatransfer/samples/tests/conftest.py index e9dec8dd9631..05e32436a3b0 100644 --- a/packages/google-cloud-bigquery-datatransfer/samples/tests/conftest.py +++ b/packages/google-cloud-bigquery-datatransfer/samples/tests/conftest.py @@ -39,9 +39,9 @@ def credentials(): @pytest.fixture(scope="module") def bqdts_client(credentials): - from google.cloud import bigquery_datatransfer_v1 + from google.cloud.bigquery import datatransfer_v1 - return bigquery_datatransfer_v1.DataTransferServiceClient(credentials=credentials) + return datatransfer_v1.DataTransferServiceClient(credentials=credentials) @pytest.fixture(scope="module") @@ -69,6 +69,6 @@ def to_delete(bqdts_client): for resource_name in doomed: try: - bqdts_client.delete_transfer_config(resource_name) + bqdts_client.delete_transfer_config(name=resource_name) except Exception: pass diff --git a/packages/google-cloud-bigquery-datatransfer/samples/update_transfer_config.py b/packages/google-cloud-bigquery-datatransfer/samples/update_transfer_config.py index 3811a8334560..c1f4190cd486 100644 --- a/packages/google-cloud-bigquery-datatransfer/samples/update_transfer_config.py +++ b/packages/google-cloud-bigquery-datatransfer/samples/update_transfer_config.py @@ -20,9 +20,9 @@ def sample_update_transfer_config(config_name, display_name): # [START bigquerydatatransfer_update_transfer_config] - from google.cloud import bigquery_datatransfer_v1 + from google.cloud.bigquery import datatransfer_v1 - client = bigquery_datatransfer_v1.DataTransferServiceClient() + client = datatransfer_v1.DataTransferServiceClient() # TODO(developer): Set the config_name which user wants to update. 
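The sample and fixture updates above consistently switch per-field arguments to keyword form. A short sketch of the same pattern (parent and config name are placeholders; the calls issue real RPCs and need valid credentials):

```py
from google.cloud.bigquery import datatransfer_v1

client = datatransfer_v1.DataTransferServiceClient()
parent = "projects/your-project-id"  # placeholder

# Keyword form, as in the updated quickstart.
for data_source in client.list_data_sources(parent=parent):
    print(data_source.data_source_id)

# Deletion likewise takes the resource name as a keyword argument,
# as in the updated conftest fixture.
client.delete_transfer_config(name=f"{parent}/transferConfigs/your-config-id")
```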
# config_name = "your-created-transfer-config-name" @@ -32,7 +32,9 @@ def sample_update_transfer_config(config_name, display_name): transfer_config = client.get_transfer_config(name=config_name) transfer_config.display_name = display_name field_mask = {"paths": ["display_name"]} - response = client.update_transfer_config(transfer_config, field_mask) + response = client.update_transfer_config( + transfer_config=transfer_config, update_mask=field_mask + ) print("Transfer config updated for '{}'".format(response.name)) # [END bigquerydatatransfer_update_transfer_config] diff --git a/packages/google-cloud-bigquery-datatransfer/scripts/fixup_datatransfer_v1_keywords.py b/packages/google-cloud-bigquery-datatransfer/scripts/fixup_datatransfer_v1_keywords.py new file mode 100644 index 000000000000..f5fcc272f044 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/scripts/fixup_datatransfer_v1_keywords.py @@ -0,0 +1,191 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class datatransferCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'check_valid_creds': ('name', ), + 'create_transfer_config': ('parent', 'transfer_config', 'authorization_code', 'version_info', 'service_account_name', ), + 'delete_transfer_config': ('name', ), + 'delete_transfer_run': ('name', ), + 'get_data_source': ('name', ), + 'get_transfer_config': ('name', ), + 'get_transfer_run': ('name', ), + 'list_data_sources': ('parent', 'page_token', 'page_size', ), + 'list_transfer_configs': ('parent', 'data_source_ids', 'page_token', 'page_size', ), + 'list_transfer_logs': ('parent', 'page_token', 'page_size', 'message_types', ), + 'list_transfer_runs': ('parent', 'states', 'page_token', 'page_size', 'run_attempt', ), + 'schedule_transfer_runs': ('parent', 'start_time', 'end_time', ), + 'start_manual_transfer_runs': ('parent', 'requested_time_range', 'requested_run_time', ), + 'update_transfer_config': ('transfer_config', 'update_mask', 'authorization_code', 'version_info', 'service_account_name', ), + + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. 
+ # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=datatransferCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the datatransfer client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-bigquery-datatransfer/setup.py b/packages/google-cloud-bigquery-datatransfer/setup.py index 633e3dd6676b..c44d66952733 100644 --- a/packages/google-cloud-bigquery-datatransfer/setup.py +++ b/packages/google-cloud-bigquery-datatransfer/setup.py @@ -22,13 +22,17 @@ name = "google-cloud-bigquery-datatransfer" description = "BigQuery Data Transfer API client library" -version = "1.1.1" +version = "2.0.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" -dependencies = ["google-api-core[grpc] >= 1.14.0, < 2.0.0dev"] +dependencies = ( + "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", + "proto-plus >= 1.4.0", + "libcst >= 0.2.5", +) extras = {} @@ -43,7 +47,9 @@ # Only include packages under the 'google' namespace. Do not include tests, # benchmarks, etc. packages = [ - package for package in setuptools.find_packages() if package.startswith("google") + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") ] # Determine which namespaces are needed. 
@@ -51,6 +57,8 @@ if "google.cloud" in packages: namespaces.append("google.cloud") +if "google.cloud.bigquery" in packages: + namespaces.append("google.cloud.bigquery") setuptools.setup( name=name, @@ -66,12 +74,10 @@ "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", "Operating System :: OS Independent", "Topic :: Internet", ], @@ -80,7 +86,8 @@ namespace_packages=namespaces, install_requires=dependencies, extras_require=extras, - python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*", + python_requires=">=3.6", + scripts=["scripts/fixup_datatransfer_v1_keywords.py"], include_package_data=True, zip_safe=False, ) diff --git a/packages/google-cloud-bigquery-datatransfer/synth.metadata b/packages/google-cloud-bigquery-datatransfer/synth.metadata index 900336ab4fe4..30c7dbc52ac0 100644 --- a/packages/google-cloud-bigquery-datatransfer/synth.metadata +++ b/packages/google-cloud-bigquery-datatransfer/synth.metadata @@ -4,29 +4,21 @@ "git": { "name": ".", "remote": "git@github.com:plamut/python-bigquery-datatransfer.git", - "sha": "b0741fedddc1eee65f6ca3558465b380951aa583" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "5e53d6b6dde0e72fa9510ec1d796176d128afa40", - "internalRef": "331912851" + "sha": "b1bdf7efd37c00871a8716340a45184a20acb4ad" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "fdd03c161003ab97657cc0218f25c82c89ddf4b6" + "sha": "dba48bb9bc6959c232bec9150ac6313b608fe7bd" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "fdd03c161003ab97657cc0218f25c82c89ddf4b6" + "sha": "dba48bb9bc6959c232bec9150ac6313b608fe7bd" } } ], diff --git a/packages/google-cloud-bigquery-datatransfer/synth.py b/packages/google-cloud-bigquery-datatransfer/synth.py index 8fa1e4de10df..ee143409444a 100644 --- a/packages/google-cloud-bigquery-datatransfer/synth.py +++ b/packages/google-cloud-bigquery-datatransfer/synth.py @@ -22,55 +22,29 @@ gapic = gcp.GAPICBazel() common = gcp.CommonTemplates() -version = "v1" +versions = ["v1"] # ---------------------------------------------------------------------------- # Generate bigquery_datatransfer GAPIC layer # ---------------------------------------------------------------------------- -library = gapic.py_library( - service="bigquery_datatransfer", - version=version, - bazel_target="//google/cloud/bigquery/datatransfer/v1:bigquery-datatransfer-v1-py", - include_protos=True, -) - -s.move( - library, - excludes=["docs/conf.py", "docs/index.rst", "README.rst", "nox.py", "setup.py"], -) - -s.replace( - [ - "google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2.py", - "google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2_grpc.py", - ], - "from google.cloud.bigquery.datatransfer_v1.proto", - "from google.cloud.bigquery_datatransfer_v1.proto", -) - -s.replace( - "google/cloud/bigquery_datatransfer_v1/gapic/" "data_transfer_service_client.py", - "import google.api_core.gapic_v1.method\n", - "\g<0>import google.api_core.path_template\n", -) +for version in versions: + library = gapic.py_library( + service="bigquery_datatransfer", + 
version=version, + bazel_target=( + f"//google/cloud/bigquery/datatransfer/{version}:" + "bigquery-datatransfer-v1-py" + ), + include_protos=True, + ) + s.move(library, excludes=["setup.py", "docs/index.rst"]) -# Fix docstring fromatting to avoid warnings when generating the docs. -s.replace( - "google/cloud/bigquery_datatransfer_v1/gapic/" "data_transfer_service_client.py", - r'"""Creates an instance of this client using the provided credentials\s+file.', - '"""Creates an instance of this client using the provided credentials file.', -) -s.replace( - "google/cloud/bigquery_datatransfer_v1/gapic/" "data_transfer_service_client.py", - r'should be set through client_options\.', - "\g<0>\n", # add missing newline -) # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library(cov_level=70, samples=True) -s.move(templated_files) +templated_files = common.py_library(microgenerator=True, samples=True, cov_level=99) +s.move(templated_files, excludes=[".coveragerc"]) # ---------------------------------------------------------------------------- # Samples templates @@ -78,6 +52,18 @@ python.py_samples(skip_readmes=True) +# Fix missing async client in datatransfer_v1 +s.replace( + "google/cloud/bigquery/datatransfer_v1/__init__.py", + r"from \.services\.data_transfer_service import DataTransferServiceClient", + "\g<0>\nfrom .services.data_transfer_service import DataTransferServiceAsyncClient", +) +s.replace( + "google/cloud/bigquery/datatransfer_v1/__init__.py", + r"'DataTransferServiceClient',", + '\g<0>\n "DataTransferServiceAsyncClient"', +) + # TODO(busunkim): Use latest sphinx after microgenerator transition s.replace("noxfile.py", "'sphinx'", '"sphinx<3.0.0"') diff --git a/packages/google-cloud-bigquery-datatransfer/tests/system/gapic/v1/test_system_data_transfer_service_v1.py b/packages/google-cloud-bigquery-datatransfer/tests/system/gapic/v1/test_system_data_transfer_service_v1.py deleted file mode 100644 index 039871d51212..000000000000 --- a/packages/google-cloud-bigquery-datatransfer/tests/system/gapic/v1/test_system_data_transfer_service_v1.py +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
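The s.replace calls above re-export the async client from the versioned package, so both client classes resolve from the same import path; a minimal check (sketch):

```py
from google.cloud.bigquery import datatransfer_v1

# Both symbols are importable once the synth replacement is applied.
print(datatransfer_v1.DataTransferServiceClient)
print(datatransfer_v1.DataTransferServiceAsyncClient)
```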
- -import os -import time - -from google.cloud import bigquery_datatransfer_v1 -from google.cloud.bigquery_datatransfer_v1.proto import datatransfer_pb2 - - -class TestSystemDataTransferService(object): - def test_list_data_sources(self): - project_id = os.environ["PROJECT_ID"] - - client = bigquery_datatransfer_v1.DataTransferServiceClient() - parent = client.project_path(project_id) - response = client.list_data_sources(parent) diff --git a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/datatransfer_v1/__init__.py b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/datatransfer_v1/__init__.py new file mode 100644 index 000000000000..8b137891791f --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/datatransfer_v1/__init__.py @@ -0,0 +1 @@ + diff --git a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/datatransfer_v1/test_data_transfer_service.py b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/datatransfer_v1/test_data_transfer_service.py new file mode 100644 index 000000000000..249f6f382434 --- /dev/null +++ b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/datatransfer_v1/test_data_transfer_service.py @@ -0,0 +1,4576 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + +from google import auth +from google.api_core import client_options +from google.api_core import exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.bigquery.datatransfer_v1.services.data_transfer_service import ( + DataTransferServiceAsyncClient, +) +from google.cloud.bigquery.datatransfer_v1.services.data_transfer_service import ( + DataTransferServiceClient, +) +from google.cloud.bigquery.datatransfer_v1.services.data_transfer_service import pagers +from google.cloud.bigquery.datatransfer_v1.services.data_transfer_service import ( + transports, +) +from google.cloud.bigquery.datatransfer_v1.types import datatransfer +from google.cloud.bigquery.datatransfer_v1.types import transfer +from google.oauth2 import service_account +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import struct_pb2 as struct # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.rpc import status_pb2 as status # type: ignore + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DataTransferServiceClient._get_default_mtls_endpoint(None) is None + assert ( + DataTransferServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + DataTransferServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + DataTransferServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DataTransferServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DataTransferServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class", [DataTransferServiceClient, DataTransferServiceAsyncClient] +) +def test_data_transfer_service_client_from_service_account_file(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client._transport._credentials == creds + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client._transport._credentials == creds + + assert client._transport._host == "bigquerydatatransfer.googleapis.com:443" + + +def test_data_transfer_service_client_get_transport_class(): + transport = DataTransferServiceClient.get_transport_class() + assert transport == transports.DataTransferServiceGrpcTransport + + transport = DataTransferServiceClient.get_transport_class("grpc") + assert transport == transports.DataTransferServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + DataTransferServiceClient, + transports.DataTransferServiceGrpcTransport, + "grpc", + ), + ( + DataTransferServiceAsyncClient, + transports.DataTransferServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + DataTransferServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DataTransferServiceClient), +) +@mock.patch.object( + DataTransferServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DataTransferServiceAsyncClient), +) +def test_data_transfer_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DataTransferServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DataTransferServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
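The cases above encode the endpoint-selection rules: an explicit api_endpoint wins, otherwise GOOGLE_API_USE_MTLS_ENDPOINT ("never"/"always"/"auto") decides between the regular and mTLS endpoints. A small sketch of the client-side knobs being tested (endpoint and credentials are placeholders):

```py
import os

from google.api_core import client_options
from google.auth import credentials as ga_credentials
from google.cloud.bigquery import datatransfer_v1

# Force the regular endpoint regardless of any client certificate config.
os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"

# Or pin an explicit endpoint, which takes precedence over the env var.
options = client_options.ClientOptions(
    api_endpoint="bigquerydatatransfer.googleapis.com"
)
client = datatransfer_v1.DataTransferServiceClient(
    credentials=ga_credentials.AnonymousCredentials(),
    client_options=options,
)
```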
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + DataTransferServiceClient, + transports.DataTransferServiceGrpcTransport, + "grpc", + "true", + ), + ( + DataTransferServiceAsyncClient, + transports.DataTransferServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + DataTransferServiceClient, + transports.DataTransferServiceGrpcTransport, + "grpc", + "false", + ), + ( + DataTransferServiceAsyncClient, + transports.DataTransferServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + DataTransferServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DataTransferServiceClient), +) +@mock.patch.object( + DataTransferServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DataTransferServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_data_transfer_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + ssl_channel_creds = mock.Mock() + with mock.patch( + "grpc.ssl_channel_credentials", return_value=ssl_channel_creds + ): + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_ssl_channel_creds = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_ssl_channel_creds = ssl_channel_creds + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.ssl_credentials", + new_callable=mock.PropertyMock, + ) as ssl_credentials_mock: + if use_client_cert_env == "false": + is_mtls_mock.return_value = False + ssl_credentials_mock.return_value = None + expected_host = client.DEFAULT_ENDPOINT + expected_ssl_channel_creds = None + else: + is_mtls_mock.return_value = True + ssl_credentials_mock.return_value = mock.Mock() + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_ssl_channel_creds = ( + ssl_credentials_mock.return_value + ) + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + is_mtls_mock.return_value = False + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + DataTransferServiceClient, + transports.DataTransferServiceGrpcTransport, + "grpc", + ), + ( + DataTransferServiceAsyncClient, + transports.DataTransferServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_data_transfer_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + DataTransferServiceClient, + transports.DataTransferServiceGrpcTransport, + "grpc", + ), + ( + DataTransferServiceAsyncClient, + transports.DataTransferServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_data_transfer_service_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_data_transfer_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.bigquery.datatransfer_v1.services.data_transfer_service.transports.DataTransferServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = DataTransferServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_get_data_source( + transport: str = "grpc", request_type=datatransfer.GetDataSourceRequest +): + client = DataTransferServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.DataSource( + name="name_value", + data_source_id="data_source_id_value", + display_name="display_name_value", + description="description_value", + client_id="client_id_value", + scopes=["scopes_value"], + transfer_type=transfer.TransferType.BATCH, + supports_multiple_transfers=True, + update_deadline_seconds=2406, + default_schedule="default_schedule_value", + supports_custom_schedule=True, + help_url="help_url_value", + authorization_type=datatransfer.DataSource.AuthorizationType.AUTHORIZATION_CODE, + data_refresh_type=datatransfer.DataSource.DataRefreshType.SLIDING_WINDOW, + default_data_refresh_window_days=3379, + manual_runs_disabled=True, + ) + + response = client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == datatransfer.GetDataSourceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, datatransfer.DataSource) + + assert response.name == "name_value" + + assert response.data_source_id == "data_source_id_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.client_id == "client_id_value" + + assert response.scopes == ["scopes_value"] + + assert response.transfer_type == transfer.TransferType.BATCH + + assert response.supports_multiple_transfers is True + + assert response.update_deadline_seconds == 2406 + + assert response.default_schedule == "default_schedule_value" + + assert response.supports_custom_schedule is True + + assert response.help_url == "help_url_value" + + assert ( + response.authorization_type + == datatransfer.DataSource.AuthorizationType.AUTHORIZATION_CODE + ) + + assert ( + response.data_refresh_type + == datatransfer.DataSource.DataRefreshType.SLIDING_WINDOW + ) + + assert response.default_data_refresh_window_days == 3379 + + assert response.manual_runs_disabled is True + + +def test_get_data_source_from_dict(): + test_get_data_source(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_data_source_async(transport: str = "grpc_asyncio"): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = datatransfer.GetDataSourceRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.DataSource( + name="name_value", + data_source_id="data_source_id_value", + display_name="display_name_value", + description="description_value", + client_id="client_id_value", + scopes=["scopes_value"], + transfer_type=transfer.TransferType.BATCH, + supports_multiple_transfers=True, + update_deadline_seconds=2406, + default_schedule="default_schedule_value", + supports_custom_schedule=True, + help_url="help_url_value", + authorization_type=datatransfer.DataSource.AuthorizationType.AUTHORIZATION_CODE, + data_refresh_type=datatransfer.DataSource.DataRefreshType.SLIDING_WINDOW, + default_data_refresh_window_days=3379, + manual_runs_disabled=True, + ) + ) + + response = await client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, datatransfer.DataSource) + + assert response.name == "name_value" + + assert response.data_source_id == "data_source_id_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.client_id == "client_id_value" + + assert response.scopes == ["scopes_value"] + + assert response.transfer_type == transfer.TransferType.BATCH + + assert response.supports_multiple_transfers is True + + assert response.update_deadline_seconds == 2406 + + assert response.default_schedule == "default_schedule_value" + + assert response.supports_custom_schedule is True + + assert response.help_url == "help_url_value" + + assert ( + response.authorization_type + == datatransfer.DataSource.AuthorizationType.AUTHORIZATION_CODE + ) + + assert ( + response.data_refresh_type + == datatransfer.DataSource.DataRefreshType.SLIDING_WINDOW + ) + + assert response.default_data_refresh_window_days == 3379 + + assert response.manual_runs_disabled is True + + +def test_get_data_source_field_headers(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.GetDataSourceRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_data_source), "__call__") as call: + call.return_value = datatransfer.DataSource() + + client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_data_source_field_headers_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.GetDataSourceRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_data_source), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.DataSource() + ) + + await client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_data_source_flattened(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.DataSource() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.get_data_source(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_data_source_flattened_error(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_source( + datatransfer.GetDataSourceRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_data_source_flattened_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.DataSource() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.DataSource() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_data_source(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_data_source_flattened_error_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_data_source( + datatransfer.GetDataSourceRequest(), name="name_value", + ) + + +def test_list_data_sources( + transport: str = "grpc", request_type=datatransfer.ListDataSourcesRequest +): + client = DataTransferServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.ListDataSourcesResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == datatransfer.ListDataSourcesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataSourcesPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_data_sources_from_dict(): + test_list_data_sources(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_data_sources_async(transport: str = "grpc_asyncio"): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
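As the flattened and flattened_error tests above show, each method accepts either a request object or the flattened per-field keywords, but not both at once. Sketch (resource name is a placeholder; the first two calls issue real RPCs and need valid credentials):

```py
from google.cloud.bigquery import datatransfer_v1

client = datatransfer_v1.DataTransferServiceClient()
name = "projects/your-project-id/dataSources/scheduled_query"

client.get_data_source(name=name)               # flattened keyword
client.get_data_source(request={"name": name})  # request object

# Mixing the two is rejected client-side before any RPC is made.
try:
    client.get_data_source(
        request=datatransfer_v1.types.GetDataSourceRequest(name=name),
        name=name,
    )
except ValueError:
    pass
```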
+ request = datatransfer.ListDataSourcesRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.ListDataSourcesResponse( + next_page_token="next_page_token_value", + ) + ) + + response = await client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataSourcesAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_data_sources_field_headers(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.ListDataSourcesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_data_sources), "__call__" + ) as call: + call.return_value = datatransfer.ListDataSourcesResponse() + + client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_data_sources_field_headers_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.ListDataSourcesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_data_sources), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.ListDataSourcesResponse() + ) + + await client.list_data_sources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_data_sources_flattened(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.ListDataSourcesResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_data_sources(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +def test_list_data_sources_flattened_error(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_data_sources( + datatransfer.ListDataSourcesRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_data_sources_flattened_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.ListDataSourcesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.ListDataSourcesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_data_sources(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_data_sources_flattened_error_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_data_sources( + datatransfer.ListDataSourcesRequest(), parent="parent_value", + ) + + +def test_list_data_sources_pager(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_data_sources), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + datatransfer.ListDataSourcesResponse( + data_sources=[ + datatransfer.DataSource(), + datatransfer.DataSource(), + datatransfer.DataSource(), + ], + next_page_token="abc", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[], next_page_token="def", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[datatransfer.DataSource(),], next_page_token="ghi", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[datatransfer.DataSource(), datatransfer.DataSource(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_data_sources(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, datatransfer.DataSource) for i in results) + + +def test_list_data_sources_pages(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_data_sources), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + datatransfer.ListDataSourcesResponse( + data_sources=[ + datatransfer.DataSource(), + datatransfer.DataSource(), + datatransfer.DataSource(), + ], + next_page_token="abc", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[], next_page_token="def", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[datatransfer.DataSource(),], next_page_token="ghi", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[datatransfer.DataSource(), datatransfer.DataSource(),], + ), + RuntimeError, + ) + pages = list(client.list_data_sources(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_data_sources_async_pager(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_data_sources), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + datatransfer.ListDataSourcesResponse( + data_sources=[ + datatransfer.DataSource(), + datatransfer.DataSource(), + datatransfer.DataSource(), + ], + next_page_token="abc", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[], next_page_token="def", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[datatransfer.DataSource(),], next_page_token="ghi", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[datatransfer.DataSource(), datatransfer.DataSource(),], + ), + RuntimeError, + ) + async_pager = await client.list_data_sources(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, datatransfer.DataSource) for i in responses) + + +@pytest.mark.asyncio +async def test_list_data_sources_async_pages(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_data_sources), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + datatransfer.ListDataSourcesResponse( + data_sources=[ + datatransfer.DataSource(), + datatransfer.DataSource(), + datatransfer.DataSource(), + ], + next_page_token="abc", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[], next_page_token="def", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[datatransfer.DataSource(),], next_page_token="ghi", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[datatransfer.DataSource(), datatransfer.DataSource(),], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_data_sources(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_create_transfer_config( + transport: str = "grpc", request_type=datatransfer.CreateTransferConfigRequest +): + client = DataTransferServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
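The pager tests above exercise the iteration surface; in application code the same pager can be consumed item-by-item or page-by-page (sketch, placeholder project, real credentials required):

```py
from google.cloud.bigquery import datatransfer_v1

client = datatransfer_v1.DataTransferServiceClient()
parent = "projects/your-project-id"

# Item-by-item; the pager fetches additional pages transparently.
for data_source in client.list_data_sources(parent=parent):
    print(data_source.display_name)

# Page-by-page, mirroring the `pages` property used in the tests.
for page in client.list_data_sources(parent=parent).pages:
    print(page.raw_page.next_page_token)
```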
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = transfer.TransferConfig( + name="name_value", + display_name="display_name_value", + data_source_id="data_source_id_value", + schedule="schedule_value", + data_refresh_window_days=2543, + disabled=True, + state=transfer.TransferState.PENDING, + user_id=747, + dataset_region="dataset_region_value", + notification_pubsub_topic="notification_pubsub_topic_value", + destination_dataset_id="destination_dataset_id_value", + ) + + response = client.create_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == datatransfer.CreateTransferConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, transfer.TransferConfig) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.data_source_id == "data_source_id_value" + + assert response.schedule == "schedule_value" + + assert response.data_refresh_window_days == 2543 + + assert response.disabled is True + + assert response.state == transfer.TransferState.PENDING + + assert response.user_id == 747 + + assert response.dataset_region == "dataset_region_value" + + assert response.notification_pubsub_topic == "notification_pubsub_topic_value" + + +def test_create_transfer_config_from_dict(): + test_create_transfer_config(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_transfer_config_async(transport: str = "grpc_asyncio"): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = datatransfer.CreateTransferConfigRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + transfer.TransferConfig( + name="name_value", + display_name="display_name_value", + data_source_id="data_source_id_value", + schedule="schedule_value", + data_refresh_window_days=2543, + disabled=True, + state=transfer.TransferState.PENDING, + user_id=747, + dataset_region="dataset_region_value", + notification_pubsub_topic="notification_pubsub_topic_value", + ) + ) + + response = await client.create_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, transfer.TransferConfig) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.data_source_id == "data_source_id_value" + + assert response.schedule == "schedule_value" + + assert response.data_refresh_window_days == 2543 + + assert response.disabled is True + + assert response.state == transfer.TransferState.PENDING + + assert response.user_id == 747 + + assert response.dataset_region == "dataset_region_value" + + assert response.notification_pubsub_topic == "notification_pubsub_topic_value" + + +def test_create_transfer_config_field_headers(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.CreateTransferConfigRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_transfer_config), "__call__" + ) as call: + call.return_value = transfer.TransferConfig() + + client.create_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_transfer_config_field_headers_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.CreateTransferConfigRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_transfer_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + transfer.TransferConfig() + ) + + await client.create_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_transfer_config_flattened(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = transfer.TransferConfig() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_transfer_config( + parent="parent_value", + transfer_config=transfer.TransferConfig(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
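+        # The flattened keyword arguments above are folded by the client into a
+        # single CreateTransferConfigRequest; the assertions below verify that
+        # each keyword landed on the matching request field.  Mixing the two
+        # calling styles is rejected, e.g. (sketch):
+        #
+        #   client.create_transfer_config(
+        #       datatransfer.CreateTransferConfigRequest(), parent="parent_value",
+        #   )  # raises ValueError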
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].transfer_config == transfer.TransferConfig(name="name_value") + + +def test_create_transfer_config_flattened_error(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_transfer_config( + datatransfer.CreateTransferConfigRequest(), + parent="parent_value", + transfer_config=transfer.TransferConfig(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_transfer_config_flattened_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = transfer.TransferConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + transfer.TransferConfig() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_transfer_config( + parent="parent_value", + transfer_config=transfer.TransferConfig(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].transfer_config == transfer.TransferConfig(name="name_value") + + +@pytest.mark.asyncio +async def test_create_transfer_config_flattened_error_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_transfer_config( + datatransfer.CreateTransferConfigRequest(), + parent="parent_value", + transfer_config=transfer.TransferConfig(name="name_value"), + ) + + +def test_update_transfer_config( + transport: str = "grpc", request_type=datatransfer.UpdateTransferConfigRequest +): + client = DataTransferServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.update_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = transfer.TransferConfig( + name="name_value", + display_name="display_name_value", + data_source_id="data_source_id_value", + schedule="schedule_value", + data_refresh_window_days=2543, + disabled=True, + state=transfer.TransferState.PENDING, + user_id=747, + dataset_region="dataset_region_value", + notification_pubsub_topic="notification_pubsub_topic_value", + destination_dataset_id="destination_dataset_id_value", + ) + + response = client.update_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == datatransfer.UpdateTransferConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, transfer.TransferConfig) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.data_source_id == "data_source_id_value" + + assert response.schedule == "schedule_value" + + assert response.data_refresh_window_days == 2543 + + assert response.disabled is True + + assert response.state == transfer.TransferState.PENDING + + assert response.user_id == 747 + + assert response.dataset_region == "dataset_region_value" + + assert response.notification_pubsub_topic == "notification_pubsub_topic_value" + + +def test_update_transfer_config_from_dict(): + test_update_transfer_config(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_transfer_config_async(transport: str = "grpc_asyncio"): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = datatransfer.UpdateTransferConfigRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + transfer.TransferConfig( + name="name_value", + display_name="display_name_value", + data_source_id="data_source_id_value", + schedule="schedule_value", + data_refresh_window_days=2543, + disabled=True, + state=transfer.TransferState.PENDING, + user_id=747, + dataset_region="dataset_region_value", + notification_pubsub_topic="notification_pubsub_topic_value", + ) + ) + + response = await client.update_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, transfer.TransferConfig) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.data_source_id == "data_source_id_value" + + assert response.schedule == "schedule_value" + + assert response.data_refresh_window_days == 2543 + + assert response.disabled is True + + assert response.state == transfer.TransferState.PENDING + + assert response.user_id == 747 + + assert response.dataset_region == "dataset_region_value" + + assert response.notification_pubsub_topic == "notification_pubsub_topic_value" + + +def test_update_transfer_config_field_headers(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.UpdateTransferConfigRequest() + request.transfer_config.name = "transfer_config.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
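+    # The GAPIC client mirrors URI-bound request fields into the
+    # "x-goog-request-params" metadata entry so the backend can route the call.
+    # For the request above the expected entry is:
+    #
+    #   ("x-goog-request-params",
+    #    "transfer_config.name=transfer_config.name/value")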
+ with mock.patch.object( + type(client._transport.update_transfer_config), "__call__" + ) as call: + call.return_value = transfer.TransferConfig() + + client.update_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "transfer_config.name=transfer_config.name/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_transfer_config_field_headers_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.UpdateTransferConfigRequest() + request.transfer_config.name = "transfer_config.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_transfer_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + transfer.TransferConfig() + ) + + await client.update_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "transfer_config.name=transfer_config.name/value", + ) in kw["metadata"] + + +def test_update_transfer_config_flattened(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.update_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = transfer.TransferConfig() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_transfer_config( + transfer_config=transfer.TransferConfig(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].transfer_config == transfer.TransferConfig(name="name_value") + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +def test_update_transfer_config_flattened_error(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_transfer_config( + datatransfer.UpdateTransferConfigRequest(), + transfer_config=transfer.TransferConfig(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_transfer_config_flattened_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
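+        # In real usage update_transfer_config takes the modified config plus a
+        # FieldMask naming the fields to change; a sketch with an illustrative
+        # resource name:
+        #
+        #   await client.update_transfer_config(
+        #       transfer_config=transfer.TransferConfig(
+        #           name="projects/my-project/transferConfigs/123",
+        #           display_name="renamed",
+        #       ),
+        #       update_mask=field_mask.FieldMask(paths=["display_name"]),
+        #   )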
+ call.return_value = transfer.TransferConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + transfer.TransferConfig() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_transfer_config( + transfer_config=transfer.TransferConfig(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].transfer_config == transfer.TransferConfig(name="name_value") + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +@pytest.mark.asyncio +async def test_update_transfer_config_flattened_error_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_transfer_config( + datatransfer.UpdateTransferConfigRequest(), + transfer_config=transfer.TransferConfig(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_transfer_config( + transport: str = "grpc", request_type=datatransfer.DeleteTransferConfigRequest +): + client = DataTransferServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == datatransfer.DeleteTransferConfigRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_transfer_config_from_dict(): + test_delete_transfer_config(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_transfer_config_async(transport: str = "grpc_asyncio"): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = datatransfer.DeleteTransferConfigRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
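+        # DeleteTransferConfig maps to google.protobuf.Empty on the wire, so the
+        # client returns None; in application code the call looks like (sketch,
+        # illustrative name):
+        #
+        #   await client.delete_transfer_config(
+        #       name="projects/my-project/transferConfigs/123",
+        #   )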
+ assert response is None + + +def test_delete_transfer_config_field_headers(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.DeleteTransferConfigRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_transfer_config), "__call__" + ) as call: + call.return_value = None + + client.delete_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_transfer_config_field_headers_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.DeleteTransferConfigRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_transfer_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_transfer_config_flattened(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_transfer_config(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_transfer_config_flattened_error(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_transfer_config( + datatransfer.DeleteTransferConfigRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_transfer_config_flattened_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_transfer_config(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_transfer_config_flattened_error_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_transfer_config( + datatransfer.DeleteTransferConfigRequest(), name="name_value", + ) + + +def test_get_transfer_config( + transport: str = "grpc", request_type=datatransfer.GetTransferConfigRequest +): + client = DataTransferServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = transfer.TransferConfig( + name="name_value", + display_name="display_name_value", + data_source_id="data_source_id_value", + schedule="schedule_value", + data_refresh_window_days=2543, + disabled=True, + state=transfer.TransferState.PENDING, + user_id=747, + dataset_region="dataset_region_value", + notification_pubsub_topic="notification_pubsub_topic_value", + destination_dataset_id="destination_dataset_id_value", + ) + + response = client.get_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == datatransfer.GetTransferConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, transfer.TransferConfig) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.data_source_id == "data_source_id_value" + + assert response.schedule == "schedule_value" + + assert response.data_refresh_window_days == 2543 + + assert response.disabled is True + + assert response.state == transfer.TransferState.PENDING + + assert response.user_id == 747 + + assert response.dataset_region == "dataset_region_value" + + assert response.notification_pubsub_topic == "notification_pubsub_topic_value" + + +def test_get_transfer_config_from_dict(): + test_get_transfer_config(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_transfer_config_async(transport: str = "grpc_asyncio"): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = datatransfer.GetTransferConfigRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
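+    # Outside of tests the config is fetched by its resource name, e.g.
+    # (illustrative):
+    #
+    #   config = await client.get_transfer_config(
+    #       name="projects/my-project/transferConfigs/123",
+    #   )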
+ with mock.patch.object( + type(client._client._transport.get_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + transfer.TransferConfig( + name="name_value", + display_name="display_name_value", + data_source_id="data_source_id_value", + schedule="schedule_value", + data_refresh_window_days=2543, + disabled=True, + state=transfer.TransferState.PENDING, + user_id=747, + dataset_region="dataset_region_value", + notification_pubsub_topic="notification_pubsub_topic_value", + ) + ) + + response = await client.get_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, transfer.TransferConfig) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.data_source_id == "data_source_id_value" + + assert response.schedule == "schedule_value" + + assert response.data_refresh_window_days == 2543 + + assert response.disabled is True + + assert response.state == transfer.TransferState.PENDING + + assert response.user_id == 747 + + assert response.dataset_region == "dataset_region_value" + + assert response.notification_pubsub_topic == "notification_pubsub_topic_value" + + +def test_get_transfer_config_field_headers(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.GetTransferConfigRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_transfer_config), "__call__" + ) as call: + call.return_value = transfer.TransferConfig() + + client.get_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_transfer_config_field_headers_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.GetTransferConfigRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_transfer_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + transfer.TransferConfig() + ) + + await client.get_transfer_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_transfer_config_flattened(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = transfer.TransferConfig() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_transfer_config(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_transfer_config_flattened_error(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_transfer_config( + datatransfer.GetTransferConfigRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_transfer_config_flattened_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_transfer_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = transfer.TransferConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + transfer.TransferConfig() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_transfer_config(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_transfer_config_flattened_error_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_transfer_config( + datatransfer.GetTransferConfigRequest(), name="name_value", + ) + + +def test_list_transfer_configs( + transport: str = "grpc", request_type=datatransfer.ListTransferConfigsRequest +): + client = DataTransferServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_transfer_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.ListTransferConfigsResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_transfer_configs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == datatransfer.ListTransferConfigsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTransferConfigsPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_transfer_configs_from_dict(): + test_list_transfer_configs(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_transfer_configs_async(transport: str = "grpc_asyncio"): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = datatransfer.ListTransferConfigsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_transfer_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.ListTransferConfigsResponse( + next_page_token="next_page_token_value", + ) + ) + + response = await client.list_transfer_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTransferConfigsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_transfer_configs_field_headers(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.ListTransferConfigsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_transfer_configs), "__call__" + ) as call: + call.return_value = datatransfer.ListTransferConfigsResponse() + + client.list_transfer_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_transfer_configs_field_headers_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.ListTransferConfigsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_transfer_configs), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.ListTransferConfigsResponse() + ) + + await client.list_transfer_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_transfer_configs_flattened(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_transfer_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.ListTransferConfigsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_transfer_configs(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +def test_list_transfer_configs_flattened_error(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_transfer_configs( + datatransfer.ListTransferConfigsRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_transfer_configs_flattened_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_transfer_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.ListTransferConfigsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.ListTransferConfigsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_transfer_configs(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_transfer_configs_flattened_error_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_transfer_configs( + datatransfer.ListTransferConfigsRequest(), parent="parent_value", + ) + + +def test_list_transfer_configs_pager(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_transfer_configs), "__call__" + ) as call: + # Set the response to a series of pages. 
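+        # Each ListTransferConfigsResponse below carries a next_page_token; the
+        # pager keeps requesting pages until it sees an empty token, so iterating
+        # it yields all six TransferConfig messages.  In application code
+        # (illustrative parent):
+        #
+        #   for config in client.list_transfer_configs(parent="projects/my-project"):
+        #       print(config.display_name)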
+ call.side_effect = ( + datatransfer.ListTransferConfigsResponse( + transfer_configs=[ + transfer.TransferConfig(), + transfer.TransferConfig(), + transfer.TransferConfig(), + ], + next_page_token="abc", + ), + datatransfer.ListTransferConfigsResponse( + transfer_configs=[], next_page_token="def", + ), + datatransfer.ListTransferConfigsResponse( + transfer_configs=[transfer.TransferConfig(),], next_page_token="ghi", + ), + datatransfer.ListTransferConfigsResponse( + transfer_configs=[ + transfer.TransferConfig(), + transfer.TransferConfig(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_transfer_configs(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, transfer.TransferConfig) for i in results) + + +def test_list_transfer_configs_pages(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_transfer_configs), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + datatransfer.ListTransferConfigsResponse( + transfer_configs=[ + transfer.TransferConfig(), + transfer.TransferConfig(), + transfer.TransferConfig(), + ], + next_page_token="abc", + ), + datatransfer.ListTransferConfigsResponse( + transfer_configs=[], next_page_token="def", + ), + datatransfer.ListTransferConfigsResponse( + transfer_configs=[transfer.TransferConfig(),], next_page_token="ghi", + ), + datatransfer.ListTransferConfigsResponse( + transfer_configs=[ + transfer.TransferConfig(), + transfer.TransferConfig(), + ], + ), + RuntimeError, + ) + pages = list(client.list_transfer_configs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_transfer_configs_async_pager(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_transfer_configs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
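+        # The async surface behaves the same way, but the pager itself is
+        # awaited and then iterated with "async for" (illustrative parent):
+        #
+        #   pager = await client.list_transfer_configs(parent="projects/my-project")
+        #   async for config in pager:
+        #       print(config.display_name)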
+ call.side_effect = ( + datatransfer.ListTransferConfigsResponse( + transfer_configs=[ + transfer.TransferConfig(), + transfer.TransferConfig(), + transfer.TransferConfig(), + ], + next_page_token="abc", + ), + datatransfer.ListTransferConfigsResponse( + transfer_configs=[], next_page_token="def", + ), + datatransfer.ListTransferConfigsResponse( + transfer_configs=[transfer.TransferConfig(),], next_page_token="ghi", + ), + datatransfer.ListTransferConfigsResponse( + transfer_configs=[ + transfer.TransferConfig(), + transfer.TransferConfig(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_transfer_configs(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, transfer.TransferConfig) for i in responses) + + +@pytest.mark.asyncio +async def test_list_transfer_configs_async_pages(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_transfer_configs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + datatransfer.ListTransferConfigsResponse( + transfer_configs=[ + transfer.TransferConfig(), + transfer.TransferConfig(), + transfer.TransferConfig(), + ], + next_page_token="abc", + ), + datatransfer.ListTransferConfigsResponse( + transfer_configs=[], next_page_token="def", + ), + datatransfer.ListTransferConfigsResponse( + transfer_configs=[transfer.TransferConfig(),], next_page_token="ghi", + ), + datatransfer.ListTransferConfigsResponse( + transfer_configs=[ + transfer.TransferConfig(), + transfer.TransferConfig(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_transfer_configs(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_schedule_transfer_runs( + transport: str = "grpc", request_type=datatransfer.ScheduleTransferRunsRequest +): + client = DataTransferServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.schedule_transfer_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.ScheduleTransferRunsResponse() + + response = client.schedule_transfer_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == datatransfer.ScheduleTransferRunsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, datatransfer.ScheduleTransferRunsResponse) + + +def test_schedule_transfer_runs_from_dict(): + test_schedule_transfer_runs(request_type=dict) + + +@pytest.mark.asyncio +async def test_schedule_transfer_runs_async(transport: str = "grpc_asyncio"): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = datatransfer.ScheduleTransferRunsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.schedule_transfer_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.ScheduleTransferRunsResponse() + ) + + response = await client.schedule_transfer_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, datatransfer.ScheduleTransferRunsResponse) + + +def test_schedule_transfer_runs_field_headers(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.ScheduleTransferRunsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.schedule_transfer_runs), "__call__" + ) as call: + call.return_value = datatransfer.ScheduleTransferRunsResponse() + + client.schedule_transfer_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_schedule_transfer_runs_field_headers_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.ScheduleTransferRunsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.schedule_transfer_runs), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.ScheduleTransferRunsResponse() + ) + + await client.schedule_transfer_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_schedule_transfer_runs_flattened(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
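+    # schedule_transfer_runs takes the config to schedule plus a time window
+    # expressed as protobuf Timestamps; a sketch with illustrative values:
+    #
+    #   client.schedule_transfer_runs(
+    #       parent="projects/my-project/transferConfigs/123",
+    #       start_time=timestamp.Timestamp(seconds=1546300800),
+    #       end_time=timestamp.Timestamp(seconds=1546387200),
+    #   )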
+ with mock.patch.object( + type(client._transport.schedule_transfer_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.ScheduleTransferRunsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.schedule_transfer_runs( + parent="parent_value", + start_time=timestamp.Timestamp(seconds=751), + end_time=timestamp.Timestamp(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert TimestampRule().to_proto(args[0].start_time) == timestamp.Timestamp( + seconds=751 + ) + + assert TimestampRule().to_proto(args[0].end_time) == timestamp.Timestamp( + seconds=751 + ) + + +def test_schedule_transfer_runs_flattened_error(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.schedule_transfer_runs( + datatransfer.ScheduleTransferRunsRequest(), + parent="parent_value", + start_time=timestamp.Timestamp(seconds=751), + end_time=timestamp.Timestamp(seconds=751), + ) + + +@pytest.mark.asyncio +async def test_schedule_transfer_runs_flattened_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.schedule_transfer_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.ScheduleTransferRunsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.ScheduleTransferRunsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.schedule_transfer_runs( + parent="parent_value", + start_time=timestamp.Timestamp(seconds=751), + end_time=timestamp.Timestamp(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert TimestampRule().to_proto(args[0].start_time) == timestamp.Timestamp( + seconds=751 + ) + + assert TimestampRule().to_proto(args[0].end_time) == timestamp.Timestamp( + seconds=751 + ) + + +@pytest.mark.asyncio +async def test_schedule_transfer_runs_flattened_error_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.schedule_transfer_runs( + datatransfer.ScheduleTransferRunsRequest(), + parent="parent_value", + start_time=timestamp.Timestamp(seconds=751), + end_time=timestamp.Timestamp(seconds=751), + ) + + +def test_start_manual_transfer_runs( + transport: str = "grpc", request_type=datatransfer.StartManualTransferRunsRequest +): + client = DataTransferServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.start_manual_transfer_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.StartManualTransferRunsResponse() + + response = client.start_manual_transfer_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == datatransfer.StartManualTransferRunsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datatransfer.StartManualTransferRunsResponse) + + +def test_start_manual_transfer_runs_from_dict(): + test_start_manual_transfer_runs(request_type=dict) + + +@pytest.mark.asyncio +async def test_start_manual_transfer_runs_async(transport: str = "grpc_asyncio"): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = datatransfer.StartManualTransferRunsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.start_manual_transfer_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.StartManualTransferRunsResponse() + ) + + response = await client.start_manual_transfer_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, datatransfer.StartManualTransferRunsResponse) + + +def test_start_manual_transfer_runs_field_headers(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.StartManualTransferRunsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.start_manual_transfer_runs), "__call__" + ) as call: + call.return_value = datatransfer.StartManualTransferRunsResponse() + + client.start_manual_transfer_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
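+        # For StartManualTransferRuns the routed "parent" is the transfer config
+        # itself rather than a project, e.g. (illustrative):
+        #
+        #   client.start_manual_transfer_runs(
+        #       request=datatransfer.StartManualTransferRunsRequest(
+        #           parent="projects/my-project/transferConfigs/123",
+        #       ),
+        #   )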
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_start_manual_transfer_runs_field_headers_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.StartManualTransferRunsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.start_manual_transfer_runs), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.StartManualTransferRunsResponse() + ) + + await client.start_manual_transfer_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_get_transfer_run( + transport: str = "grpc", request_type=datatransfer.GetTransferRunRequest +): + client = DataTransferServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_transfer_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = transfer.TransferRun( + name="name_value", + data_source_id="data_source_id_value", + state=transfer.TransferState.PENDING, + user_id=747, + schedule="schedule_value", + notification_pubsub_topic="notification_pubsub_topic_value", + destination_dataset_id="destination_dataset_id_value", + ) + + response = client.get_transfer_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == datatransfer.GetTransferRunRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, transfer.TransferRun) + + assert response.name == "name_value" + + assert response.data_source_id == "data_source_id_value" + + assert response.state == transfer.TransferState.PENDING + + assert response.user_id == 747 + + assert response.schedule == "schedule_value" + + assert response.notification_pubsub_topic == "notification_pubsub_topic_value" + + +def test_get_transfer_run_from_dict(): + test_get_transfer_run(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_transfer_run_async(transport: str = "grpc_asyncio"): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = datatransfer.GetTransferRunRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_transfer_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
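+        # A transfer run is addressed by its own resource name, one level below
+        # its config, e.g. (illustrative):
+        #
+        #   run = await client.get_transfer_run(
+        #       name="projects/my-project/transferConfigs/123/runs/456",
+        #   )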
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + transfer.TransferRun( + name="name_value", + data_source_id="data_source_id_value", + state=transfer.TransferState.PENDING, + user_id=747, + schedule="schedule_value", + notification_pubsub_topic="notification_pubsub_topic_value", + ) + ) + + response = await client.get_transfer_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, transfer.TransferRun) + + assert response.name == "name_value" + + assert response.data_source_id == "data_source_id_value" + + assert response.state == transfer.TransferState.PENDING + + assert response.user_id == 747 + + assert response.schedule == "schedule_value" + + assert response.notification_pubsub_topic == "notification_pubsub_topic_value" + + +def test_get_transfer_run_field_headers(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.GetTransferRunRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_transfer_run), "__call__" + ) as call: + call.return_value = transfer.TransferRun() + + client.get_transfer_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_transfer_run_field_headers_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.GetTransferRunRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_transfer_run), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + transfer.TransferRun() + ) + + await client.get_transfer_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_transfer_run_flattened(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_transfer_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = transfer.TransferRun() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_transfer_run(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_transfer_run_flattened_error(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_transfer_run( + datatransfer.GetTransferRunRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_transfer_run_flattened_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_transfer_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = transfer.TransferRun() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + transfer.TransferRun() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_transfer_run(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_transfer_run_flattened_error_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_transfer_run( + datatransfer.GetTransferRunRequest(), name="name_value", + ) + + +def test_delete_transfer_run( + transport: str = "grpc", request_type=datatransfer.DeleteTransferRunRequest +): + client = DataTransferServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_transfer_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_transfer_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == datatransfer.DeleteTransferRunRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_transfer_run_from_dict(): + test_delete_transfer_run(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_transfer_run_async(transport: str = "grpc_asyncio"): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = datatransfer.DeleteTransferRunRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_transfer_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_transfer_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_transfer_run_field_headers(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.DeleteTransferRunRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_transfer_run), "__call__" + ) as call: + call.return_value = None + + client.delete_transfer_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_transfer_run_field_headers_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.DeleteTransferRunRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_transfer_run), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_transfer_run(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_transfer_run_flattened(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_transfer_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_transfer_run(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_transfer_run_flattened_error(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_transfer_run( + datatransfer.DeleteTransferRunRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_transfer_run_flattened_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_transfer_run), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_transfer_run(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_transfer_run_flattened_error_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_transfer_run( + datatransfer.DeleteTransferRunRequest(), name="name_value", + ) + + +def test_list_transfer_runs( + transport: str = "grpc", request_type=datatransfer.ListTransferRunsRequest +): + client = DataTransferServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_transfer_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.ListTransferRunsResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_transfer_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == datatransfer.ListTransferRunsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTransferRunsPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_transfer_runs_from_dict(): + test_list_transfer_runs(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_transfer_runs_async(transport: str = "grpc_asyncio"): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = datatransfer.ListTransferRunsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_transfer_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.ListTransferRunsResponse( + next_page_token="next_page_token_value", + ) + ) + + response = await client.list_transfer_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTransferRunsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_transfer_runs_field_headers(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.ListTransferRunsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_transfer_runs), "__call__" + ) as call: + call.return_value = datatransfer.ListTransferRunsResponse() + + client.list_transfer_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_transfer_runs_field_headers_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.ListTransferRunsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_transfer_runs), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.ListTransferRunsResponse() + ) + + await client.list_transfer_runs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_transfer_runs_flattened(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_transfer_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.ListTransferRunsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_transfer_runs(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +def test_list_transfer_runs_flattened_error(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_transfer_runs( + datatransfer.ListTransferRunsRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_transfer_runs_flattened_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_transfer_runs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.ListTransferRunsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.ListTransferRunsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_transfer_runs(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_transfer_runs_flattened_error_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_transfer_runs( + datatransfer.ListTransferRunsRequest(), parent="parent_value", + ) + + +def test_list_transfer_runs_pager(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_transfer_runs), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + datatransfer.ListTransferRunsResponse( + transfer_runs=[ + transfer.TransferRun(), + transfer.TransferRun(), + transfer.TransferRun(), + ], + next_page_token="abc", + ), + datatransfer.ListTransferRunsResponse( + transfer_runs=[], next_page_token="def", + ), + datatransfer.ListTransferRunsResponse( + transfer_runs=[transfer.TransferRun(),], next_page_token="ghi", + ), + datatransfer.ListTransferRunsResponse( + transfer_runs=[transfer.TransferRun(), transfer.TransferRun(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_transfer_runs(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, transfer.TransferRun) for i in results) + + +def test_list_transfer_runs_pages(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_transfer_runs), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + datatransfer.ListTransferRunsResponse( + transfer_runs=[ + transfer.TransferRun(), + transfer.TransferRun(), + transfer.TransferRun(), + ], + next_page_token="abc", + ), + datatransfer.ListTransferRunsResponse( + transfer_runs=[], next_page_token="def", + ), + datatransfer.ListTransferRunsResponse( + transfer_runs=[transfer.TransferRun(),], next_page_token="ghi", + ), + datatransfer.ListTransferRunsResponse( + transfer_runs=[transfer.TransferRun(), transfer.TransferRun(),], + ), + RuntimeError, + ) + pages = list(client.list_transfer_runs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_transfer_runs_async_pager(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_transfer_runs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + datatransfer.ListTransferRunsResponse( + transfer_runs=[ + transfer.TransferRun(), + transfer.TransferRun(), + transfer.TransferRun(), + ], + next_page_token="abc", + ), + datatransfer.ListTransferRunsResponse( + transfer_runs=[], next_page_token="def", + ), + datatransfer.ListTransferRunsResponse( + transfer_runs=[transfer.TransferRun(),], next_page_token="ghi", + ), + datatransfer.ListTransferRunsResponse( + transfer_runs=[transfer.TransferRun(), transfer.TransferRun(),], + ), + RuntimeError, + ) + async_pager = await client.list_transfer_runs(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, transfer.TransferRun) for i in responses) + + +@pytest.mark.asyncio +async def test_list_transfer_runs_async_pages(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_transfer_runs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + datatransfer.ListTransferRunsResponse( + transfer_runs=[ + transfer.TransferRun(), + transfer.TransferRun(), + transfer.TransferRun(), + ], + next_page_token="abc", + ), + datatransfer.ListTransferRunsResponse( + transfer_runs=[], next_page_token="def", + ), + datatransfer.ListTransferRunsResponse( + transfer_runs=[transfer.TransferRun(),], next_page_token="ghi", + ), + datatransfer.ListTransferRunsResponse( + transfer_runs=[transfer.TransferRun(), transfer.TransferRun(),], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_transfer_runs(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_list_transfer_logs( + transport: str = "grpc", request_type=datatransfer.ListTransferLogsRequest +): + client = DataTransferServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_transfer_logs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.ListTransferLogsResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_transfer_logs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == datatransfer.ListTransferLogsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTransferLogsPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_transfer_logs_from_dict(): + test_list_transfer_logs(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_transfer_logs_async(transport: str = "grpc_asyncio"): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = datatransfer.ListTransferLogsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_transfer_logs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.ListTransferLogsResponse( + next_page_token="next_page_token_value", + ) + ) + + response = await client.list_transfer_logs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTransferLogsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_transfer_logs_field_headers(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.ListTransferLogsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_transfer_logs), "__call__" + ) as call: + call.return_value = datatransfer.ListTransferLogsResponse() + + client.list_transfer_logs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_transfer_logs_field_headers_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.ListTransferLogsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.list_transfer_logs), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.ListTransferLogsResponse() + ) + + await client.list_transfer_logs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_transfer_logs_flattened(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_transfer_logs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.ListTransferLogsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_transfer_logs(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +def test_list_transfer_logs_flattened_error(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_transfer_logs( + datatransfer.ListTransferLogsRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_transfer_logs_flattened_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_transfer_logs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.ListTransferLogsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.ListTransferLogsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_transfer_logs(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_transfer_logs_flattened_error_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_transfer_logs( + datatransfer.ListTransferLogsRequest(), parent="parent_value", + ) + + +def test_list_transfer_logs_pager(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_transfer_logs), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + datatransfer.ListTransferLogsResponse( + transfer_messages=[ + transfer.TransferMessage(), + transfer.TransferMessage(), + transfer.TransferMessage(), + ], + next_page_token="abc", + ), + datatransfer.ListTransferLogsResponse( + transfer_messages=[], next_page_token="def", + ), + datatransfer.ListTransferLogsResponse( + transfer_messages=[transfer.TransferMessage(),], next_page_token="ghi", + ), + datatransfer.ListTransferLogsResponse( + transfer_messages=[ + transfer.TransferMessage(), + transfer.TransferMessage(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_transfer_logs(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, transfer.TransferMessage) for i in results) + + +def test_list_transfer_logs_pages(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_transfer_logs), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + datatransfer.ListTransferLogsResponse( + transfer_messages=[ + transfer.TransferMessage(), + transfer.TransferMessage(), + transfer.TransferMessage(), + ], + next_page_token="abc", + ), + datatransfer.ListTransferLogsResponse( + transfer_messages=[], next_page_token="def", + ), + datatransfer.ListTransferLogsResponse( + transfer_messages=[transfer.TransferMessage(),], next_page_token="ghi", + ), + datatransfer.ListTransferLogsResponse( + transfer_messages=[ + transfer.TransferMessage(), + transfer.TransferMessage(), + ], + ), + RuntimeError, + ) + pages = list(client.list_transfer_logs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_transfer_logs_async_pager(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_transfer_logs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + datatransfer.ListTransferLogsResponse( + transfer_messages=[ + transfer.TransferMessage(), + transfer.TransferMessage(), + transfer.TransferMessage(), + ], + next_page_token="abc", + ), + datatransfer.ListTransferLogsResponse( + transfer_messages=[], next_page_token="def", + ), + datatransfer.ListTransferLogsResponse( + transfer_messages=[transfer.TransferMessage(),], next_page_token="ghi", + ), + datatransfer.ListTransferLogsResponse( + transfer_messages=[ + transfer.TransferMessage(), + transfer.TransferMessage(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_transfer_logs(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, transfer.TransferMessage) for i in responses) + + +@pytest.mark.asyncio +async def test_list_transfer_logs_async_pages(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.list_transfer_logs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + datatransfer.ListTransferLogsResponse( + transfer_messages=[ + transfer.TransferMessage(), + transfer.TransferMessage(), + transfer.TransferMessage(), + ], + next_page_token="abc", + ), + datatransfer.ListTransferLogsResponse( + transfer_messages=[], next_page_token="def", + ), + datatransfer.ListTransferLogsResponse( + transfer_messages=[transfer.TransferMessage(),], next_page_token="ghi", + ), + datatransfer.ListTransferLogsResponse( + transfer_messages=[ + transfer.TransferMessage(), + transfer.TransferMessage(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_transfer_logs(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_check_valid_creds( + transport: str = "grpc", request_type=datatransfer.CheckValidCredsRequest +): + client = DataTransferServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.check_valid_creds), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.CheckValidCredsResponse(has_valid_creds=True,) + + response = client.check_valid_creds(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == datatransfer.CheckValidCredsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, datatransfer.CheckValidCredsResponse) + + assert response.has_valid_creds is True + + +def test_check_valid_creds_from_dict(): + test_check_valid_creds(request_type=dict) + + +@pytest.mark.asyncio +async def test_check_valid_creds_async(transport: str = "grpc_asyncio"): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = datatransfer.CheckValidCredsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.check_valid_creds), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.CheckValidCredsResponse(has_valid_creds=True,) + ) + + response = await client.check_valid_creds(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, datatransfer.CheckValidCredsResponse) + + assert response.has_valid_creds is True + + +def test_check_valid_creds_field_headers(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.CheckValidCredsRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.check_valid_creds), "__call__" + ) as call: + call.return_value = datatransfer.CheckValidCredsResponse() + + client.check_valid_creds(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_check_valid_creds_field_headers_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datatransfer.CheckValidCredsRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.check_valid_creds), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.CheckValidCredsResponse() + ) + + await client.check_valid_creds(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_check_valid_creds_flattened(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.check_valid_creds), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.CheckValidCredsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.check_valid_creds(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_check_valid_creds_flattened_error(): + client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.check_valid_creds( + datatransfer.CheckValidCredsRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_check_valid_creds_flattened_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.check_valid_creds), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datatransfer.CheckValidCredsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datatransfer.CheckValidCredsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.check_valid_creds(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_check_valid_creds_flattened_error_async(): + client = DataTransferServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.check_valid_creds( + datatransfer.CheckValidCredsRequest(), name="name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DataTransferServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataTransferServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DataTransferServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataTransferServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DataTransferServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataTransferServiceClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataTransferServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + client = DataTransferServiceClient(transport=transport) + assert client._transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataTransferServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DataTransferServiceGrpcAsyncIOTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataTransferServiceGrpcTransport, + transports.DataTransferServiceGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = DataTransferServiceClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client._transport, transports.DataTransferServiceGrpcTransport,) + + +def test_data_transfer_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.DataTransferServiceTransport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_data_transfer_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.bigquery.datatransfer_v1.services.data_transfer_service.transports.DataTransferServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DataTransferServiceTransport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "get_data_source", + "list_data_sources", + "create_transfer_config", + "update_transfer_config", + "delete_transfer_config", + "get_transfer_config", + "list_transfer_configs", + "schedule_transfer_runs", + "start_manual_transfer_runs", + "get_transfer_run", + "delete_transfer_run", + "list_transfer_runs", + "list_transfer_logs", + "check_valid_creds", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +def test_data_transfer_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.cloud.bigquery.datatransfer_v1.services.data_transfer_service.transports.DataTransferServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.DataTransferServiceTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_data_transfer_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, "default") as adc, mock.patch( + "google.cloud.bigquery.datatransfer_v1.services.data_transfer_service.transports.DataTransferServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.DataTransferServiceTransport() + adc.assert_called_once() + + +def test_data_transfer_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + DataTransferServiceClient() + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +def test_data_transfer_service_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.DataTransferServiceGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_data_transfer_service_host_no_port(): + client = DataTransferServiceClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="bigquerydatatransfer.googleapis.com" + ), + ) + assert client._transport._host == "bigquerydatatransfer.googleapis.com:443" + + +def test_data_transfer_service_host_with_port(): + client = DataTransferServiceClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="bigquerydatatransfer.googleapis.com:8000" + ), + ) + assert client._transport._host == "bigquerydatatransfer.googleapis.com:8000" + + +def test_data_transfer_service_grpc_transport_channel(): + channel = grpc.insecure_channel("http://localhost/") + + # Check that channel is used if provided. + transport = transports.DataTransferServiceGrpcTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + + +def test_data_transfer_service_grpc_asyncio_transport_channel(): + channel = aio.insecure_channel("http://localhost/") + + # Check that channel is used if provided. + transport = transports.DataTransferServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataTransferServiceGrpcTransport, + transports.DataTransferServiceGrpcAsyncIOTransport, + ], +) +def test_data_transfer_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataTransferServiceGrpcTransport, + transports.DataTransferServiceGrpcAsyncIOTransport, + ], +) +def test_data_transfer_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + 
__init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_transfer_config_path(): + project = "squid" + transfer_config = "clam" + + expected = "projects/{project}/transferConfigs/{transfer_config}".format( + project=project, transfer_config=transfer_config, + ) + actual = DataTransferServiceClient.transfer_config_path(project, transfer_config) + assert expected == actual + + +def test_parse_transfer_config_path(): + expected = { + "project": "whelk", + "transfer_config": "octopus", + } + path = DataTransferServiceClient.transfer_config_path(**expected) + + # Check that the path construction is reversible. + actual = DataTransferServiceClient.parse_transfer_config_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.DataTransferServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = DataTransferServiceClient( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.DataTransferServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = DataTransferServiceClient.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/v1/test_data_transfer_service_client_v1.py b/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/v1/test_data_transfer_service_client_v1.py deleted file mode 100644 index a24f06476e97..000000000000 --- a/packages/google-cloud-bigquery-datatransfer/tests/unit/gapic/v1/test_data_transfer_service_client_v1.py +++ /dev/null @@ -1,731 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Unit tests.""" - -import mock -import pytest - -from google.cloud import bigquery_datatransfer_v1 -from google.cloud.bigquery_datatransfer_v1.proto import datatransfer_pb2 -from google.cloud.bigquery_datatransfer_v1.proto import transfer_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import timestamp_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestDataTransferServiceClient(object): - def test_get_data_source(self): - # Setup Expected Response - name_2 = "name2-1052831874" - data_source_id = "dataSourceId-1015796374" - display_name = "displayName1615086568" - description = "description-1724546052" - client_id = "clientId-1904089585" - supports_multiple_transfers = True - update_deadline_seconds = 991471694 - default_schedule = "defaultSchedule-800168235" - supports_custom_schedule = True - help_url = "helpUrl-789431439" - default_data_refresh_window_days = 1804935157 - manual_runs_disabled = True - expected_response = { - "name": name_2, - "data_source_id": data_source_id, - "display_name": display_name, - "description": description, - "client_id": client_id, - "supports_multiple_transfers": supports_multiple_transfers, - "update_deadline_seconds": update_deadline_seconds, - "default_schedule": default_schedule, - "supports_custom_schedule": supports_custom_schedule, - "help_url": help_url, - "default_data_refresh_window_days": default_data_refresh_window_days, - "manual_runs_disabled": manual_runs_disabled, - } - expected_response = datatransfer_pb2.DataSource(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - name = client.project_data_source_path("[PROJECT]", "[DATA_SOURCE]") - - response = client.get_data_source(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.GetDataSourceRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_data_source_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - name = client.project_data_source_path("[PROJECT]", "[DATA_SOURCE]") - - with pytest.raises(CustomException): - 
client.get_data_source(name) - - def test_list_data_sources(self): - # Setup Expected Response - next_page_token = "" - data_sources_element = {} - data_sources = [data_sources_element] - expected_response = { - "next_page_token": next_page_token, - "data_sources": data_sources, - } - expected_response = datatransfer_pb2.ListDataSourcesResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_data_sources(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.data_sources[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.ListDataSourcesRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_data_sources_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_data_sources(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_create_transfer_config(self): - # Setup Expected Response - name = "name3373707" - destination_dataset_id = "destinationDatasetId1541564179" - display_name = "displayName1615086568" - data_source_id = "dataSourceId-1015796374" - schedule = "schedule-697920873" - data_refresh_window_days = 327632845 - disabled = True - user_id = 147132913 - dataset_region = "datasetRegion959248539" - notification_pubsub_topic = "notificationPubsubTopic1794281191" - expected_response = { - "name": name, - "destination_dataset_id": destination_dataset_id, - "display_name": display_name, - "data_source_id": data_source_id, - "schedule": schedule, - "data_refresh_window_days": data_refresh_window_days, - "disabled": disabled, - "user_id": user_id, - "dataset_region": dataset_region, - "notification_pubsub_topic": notification_pubsub_topic, - } - expected_response = transfer_pb2.TransferConfig(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - parent = client.project_path("[PROJECT]") - transfer_config = {} - - response = client.create_transfer_config(parent, transfer_config) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.CreateTransferConfigRequest( - parent=parent, transfer_config=transfer_config - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_transfer_config_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = 
bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - parent = client.project_path("[PROJECT]") - transfer_config = {} - - with pytest.raises(CustomException): - client.create_transfer_config(parent, transfer_config) - - def test_update_transfer_config(self): - # Setup Expected Response - name = "name3373707" - destination_dataset_id = "destinationDatasetId1541564179" - display_name = "displayName1615086568" - data_source_id = "dataSourceId-1015796374" - schedule = "schedule-697920873" - data_refresh_window_days = 327632845 - disabled = True - user_id = 147132913 - dataset_region = "datasetRegion959248539" - notification_pubsub_topic = "notificationPubsubTopic1794281191" - expected_response = { - "name": name, - "destination_dataset_id": destination_dataset_id, - "display_name": display_name, - "data_source_id": data_source_id, - "schedule": schedule, - "data_refresh_window_days": data_refresh_window_days, - "disabled": disabled, - "user_id": user_id, - "dataset_region": dataset_region, - "notification_pubsub_topic": notification_pubsub_topic, - } - expected_response = transfer_pb2.TransferConfig(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - transfer_config = {} - update_mask = {} - - response = client.update_transfer_config(transfer_config, update_mask) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.UpdateTransferConfigRequest( - transfer_config=transfer_config, update_mask=update_mask - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_transfer_config_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - transfer_config = {} - update_mask = {} - - with pytest.raises(CustomException): - client.update_transfer_config(transfer_config, update_mask) - - def test_delete_transfer_config(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - name = client.project_transfer_config_path("[PROJECT]", "[TRANSFER_CONFIG]") - - client.delete_transfer_config(name) - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.DeleteTransferConfigRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_transfer_config_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - name = client.project_transfer_config_path("[PROJECT]", "[TRANSFER_CONFIG]") - - with pytest.raises(CustomException): - client.delete_transfer_config(name) - - def test_get_transfer_config(self): - # Setup 
Expected Response - name_2 = "name2-1052831874" - destination_dataset_id = "destinationDatasetId1541564179" - display_name = "displayName1615086568" - data_source_id = "dataSourceId-1015796374" - schedule = "schedule-697920873" - data_refresh_window_days = 327632845 - disabled = True - user_id = 147132913 - dataset_region = "datasetRegion959248539" - notification_pubsub_topic = "notificationPubsubTopic1794281191" - expected_response = { - "name": name_2, - "destination_dataset_id": destination_dataset_id, - "display_name": display_name, - "data_source_id": data_source_id, - "schedule": schedule, - "data_refresh_window_days": data_refresh_window_days, - "disabled": disabled, - "user_id": user_id, - "dataset_region": dataset_region, - "notification_pubsub_topic": notification_pubsub_topic, - } - expected_response = transfer_pb2.TransferConfig(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - name = client.project_transfer_config_path("[PROJECT]", "[TRANSFER_CONFIG]") - - response = client.get_transfer_config(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.GetTransferConfigRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_transfer_config_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - name = client.project_transfer_config_path("[PROJECT]", "[TRANSFER_CONFIG]") - - with pytest.raises(CustomException): - client.get_transfer_config(name) - - def test_list_transfer_configs(self): - # Setup Expected Response - next_page_token = "" - transfer_configs_element = {} - transfer_configs = [transfer_configs_element] - expected_response = { - "next_page_token": next_page_token, - "transfer_configs": transfer_configs, - } - expected_response = datatransfer_pb2.ListTransferConfigsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_transfer_configs(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.transfer_configs[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.ListTransferConfigsRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_transfer_configs_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - parent = client.project_path("[PROJECT]") 
- - paged_list_response = client.list_transfer_configs(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_schedule_transfer_runs(self): - # Setup Expected Response - expected_response = {} - expected_response = datatransfer_pb2.ScheduleTransferRunsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - parent = client.project_transfer_config_path("[PROJECT]", "[TRANSFER_CONFIG]") - start_time = {} - end_time = {} - - response = client.schedule_transfer_runs(parent, start_time, end_time) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.ScheduleTransferRunsRequest( - parent=parent, start_time=start_time, end_time=end_time - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_schedule_transfer_runs_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - parent = client.project_transfer_config_path("[PROJECT]", "[TRANSFER_CONFIG]") - start_time = {} - end_time = {} - - with pytest.raises(CustomException): - client.schedule_transfer_runs(parent, start_time, end_time) - - def test_start_manual_transfer_runs(self): - # Setup Expected Response - expected_response = {} - expected_response = datatransfer_pb2.StartManualTransferRunsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - response = client.start_manual_transfer_runs() - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.StartManualTransferRunsRequest() - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_start_manual_transfer_runs_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - with pytest.raises(CustomException): - client.start_manual_transfer_runs() - - def test_get_transfer_run(self): - # Setup Expected Response - name_2 = "name2-1052831874" - destination_dataset_id = "destinationDatasetId1541564179" - data_source_id = "dataSourceId-1015796374" - user_id = 147132913 - schedule = "schedule-697920873" - notification_pubsub_topic = "notificationPubsubTopic1794281191" - expected_response = { - "name": name_2, - "destination_dataset_id": destination_dataset_id, - "data_source_id": data_source_id, - "user_id": user_id, - "schedule": schedule, - "notification_pubsub_topic": notification_pubsub_topic, - } - expected_response = transfer_pb2.TransferRun(**expected_response) - - # Mock the API response - channel = 
ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - name = client.project_run_path("[PROJECT]", "[TRANSFER_CONFIG]", "[RUN]") - - response = client.get_transfer_run(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.GetTransferRunRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_transfer_run_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - name = client.project_run_path("[PROJECT]", "[TRANSFER_CONFIG]", "[RUN]") - - with pytest.raises(CustomException): - client.get_transfer_run(name) - - def test_delete_transfer_run(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - name = client.project_run_path("[PROJECT]", "[TRANSFER_CONFIG]", "[RUN]") - - client.delete_transfer_run(name) - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.DeleteTransferRunRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_transfer_run_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - name = client.project_run_path("[PROJECT]", "[TRANSFER_CONFIG]", "[RUN]") - - with pytest.raises(CustomException): - client.delete_transfer_run(name) - - def test_list_transfer_runs(self): - # Setup Expected Response - next_page_token = "" - transfer_runs_element = {} - transfer_runs = [transfer_runs_element] - expected_response = { - "next_page_token": next_page_token, - "transfer_runs": transfer_runs, - } - expected_response = datatransfer_pb2.ListTransferRunsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - parent = client.project_transfer_config_path("[PROJECT]", "[TRANSFER_CONFIG]") - - paged_list_response = client.list_transfer_runs(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.transfer_runs[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.ListTransferRunsRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_transfer_runs_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as 
create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - parent = client.project_transfer_config_path("[PROJECT]", "[TRANSFER_CONFIG]") - - paged_list_response = client.list_transfer_runs(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_list_transfer_logs(self): - # Setup Expected Response - next_page_token = "" - transfer_messages_element = {} - transfer_messages = [transfer_messages_element] - expected_response = { - "next_page_token": next_page_token, - "transfer_messages": transfer_messages, - } - expected_response = datatransfer_pb2.ListTransferLogsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - parent = client.project_run_path("[PROJECT]", "[TRANSFER_CONFIG]", "[RUN]") - - paged_list_response = client.list_transfer_logs(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.transfer_messages[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.ListTransferLogsRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_transfer_logs_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - parent = client.project_run_path("[PROJECT]", "[TRANSFER_CONFIG]", "[RUN]") - - paged_list_response = client.list_transfer_logs(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_check_valid_creds(self): - # Setup Expected Response - has_valid_creds = False - expected_response = {"has_valid_creds": has_valid_creds} - expected_response = datatransfer_pb2.CheckValidCredsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup Request - name = client.project_data_source_path("[PROJECT]", "[DATA_SOURCE]") - - response = client.check_valid_creds(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = datatransfer_pb2.CheckValidCredsRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_check_valid_creds_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Setup request - name = client.project_data_source_path("[PROJECT]", "[DATA_SOURCE]") - - with pytest.raises(CustomException): - client.check_valid_creds(name) diff --git a/packages/google-cloud-bigquery-datatransfer/tests/unit/test_shim.py 
b/packages/google-cloud-bigquery-datatransfer/tests/unit/test_shim.py
index c27963bce4ab..eff54512c624 100644
--- a/packages/google-cloud-bigquery-datatransfer/tests/unit/test_shim.py
+++ b/packages/google-cloud-bigquery-datatransfer/tests/unit/test_shim.py
@@ -18,12 +18,12 @@
 
 
 def test_shim():
-    from google.cloud import bigquery_datatransfer
-    from google.cloud import bigquery_datatransfer_v1
+    from google.cloud.bigquery import datatransfer
+    from google.cloud.bigquery import datatransfer_v1
 
-    assert bigquery_datatransfer.__all__ == bigquery_datatransfer_v1.__all__
+    assert sorted(datatransfer.__all__) == sorted(datatransfer_v1.__all__)
 
-    for name in bigquery_datatransfer.__all__:
-        found = getattr(bigquery_datatransfer, name)
-        expected = getattr(bigquery_datatransfer_v1, name)
+    for name in datatransfer.__all__:
+        found = getattr(datatransfer, name)
+        expected = getattr(datatransfer_v1, name)
         assert found is expected