From 5137a6fce856b22be884aae19ec814458fc4ce97 Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Thu, 20 May 2021 17:18:42 -0600
Subject: [PATCH] chore: upgrade gapic-generator-python to 0.46.3 (#188)

feat: update the Dataproc V1 API client library

This includes the following:

1. The new start and stop cluster methods.
2. The ability to specify a metastore config in a cluster.
3. The ability to specify a (BETA) GKE cluster when creating a Dataproc cluster.
4. The ability to configure the behavior for private IPv6 cluster networking.
5. The ability to specify node affinity groups for clusters.
6. The ability to specify shielded VM configurations for clusters.
7. Support for service-account based secure multi-tenancy.
8. The ability to specify cluster labels for picking which cluster should run a job.
9. Components for DOCKER, DRUID, FLINK, HBASE, RANGER, and SOLR.
10. The ability to specify a DAG timeout for workflows.

feat: support self-signed JWT flow for service accounts

fix: add async client to %name_%version/init.py

chore: add autogenerated snippets
chore: remove auth, policy, and options from the reserved names list
chore: enable GAPIC metadata generation
chore: sort subpackages in %namespace/%name/init.py
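Of the features above, the start/stop cluster methods (item 1) are the most visible API change. A minimal sketch of their use against this release; the project, region, and cluster names are placeholders, not values taken from this patch:

    # Sketch: stopping and restarting an existing cluster with the new methods.
    from google.cloud import dataproc_v1

    client = dataproc_v1.ClusterControllerClient(
        client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
    )
    cluster = {
        "project_id": "my-project",
        "region": "us-central1",
        "cluster_name": "my-cluster",
    }

    # Both methods return long-running operations; result() blocks until done.
    client.stop_cluster(request=cluster).result()
    client.start_cluster(request=cluster).result()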
---
 dataproc-v1beta2-py.tar.gz                    |   0
 .../autoscaling_policy_service.rst            |   1 -
 docs/dataproc_v1/cluster_controller.rst       |   1 -
 docs/dataproc_v1/job_controller.rst           |   1 -
 .../dataproc_v1/workflow_template_service.rst |   1 -
 .../autoscaling_policy_service.rst            |   1 -
 docs/dataproc_v1beta2/cluster_controller.rst  |   1 -
 docs/dataproc_v1beta2/job_controller.rst      |   1 -
 .../workflow_template_service.rst             |   1 -
 google/cloud/dataproc/__init__.py             | 130 ++--
 google/cloud/dataproc_v1/__init__.py          |  27 +-
 google/cloud/dataproc_v1/gapic_metadata.json  | 335 +++++++++
 google/cloud/dataproc_v1/services/__init__.py |   1 -
 .../autoscaling_policy_service/__init__.py    |   2 -
 .../async_client.py                           |  43 +-
 .../autoscaling_policy_service/client.py      |  76 +-
 .../autoscaling_policy_service/pagers.py      |   4 +-
 .../transports/__init__.py                    |   2 -
 .../transports/base.py                        | 137 +++-
 .../transports/grpc.py                        |  30 +-
 .../transports/grpc_asyncio.py                |  30 +-
 .../services/cluster_controller/__init__.py   |   2 -
 .../cluster_controller/async_client.py        | 179 +++--
 .../services/cluster_controller/client.py     | 221 ++++--
 .../services/cluster_controller/pagers.py     |   4 +-
 .../cluster_controller/transports/__init__.py |   2 -
 .../cluster_controller/transports/base.py     | 179 +++--
 .../cluster_controller/transports/grpc.py     |  92 ++-
 .../transports/grpc_asyncio.py                |  95 ++-
 .../services/job_controller/__init__.py       |   2 -
 .../services/job_controller/async_client.py   |  71 +-
 .../services/job_controller/client.py         |  77 +-
 .../services/job_controller/pagers.py         |   4 +-
 .../job_controller/transports/__init__.py     |   2 -
 .../job_controller/transports/base.py         | 165 +++--
 .../job_controller/transports/grpc.py         |  34 +-
 .../job_controller/transports/grpc_asyncio.py |  37 +-
 .../workflow_template_service/__init__.py     |   2 -
 .../workflow_template_service/async_client.py |  85 +--
 .../workflow_template_service/client.py       | 125 ++--
 .../workflow_template_service/pagers.py       |   4 +-
 .../transports/__init__.py                    |   2 -
 .../transports/base.py                        | 170 +++--
 .../transports/grpc.py                        |  39 +-
 .../transports/grpc_asyncio.py                |  39 +-
 google/cloud/dataproc_v1/types/__init__.py    |  16 +-
 .../dataproc_v1/types/autoscaling_policies.py |  64 +-
 google/cloud/dataproc_v1/types/clusters.py    | 581 +++++++------
 google/cloud/dataproc_v1/types/jobs.py        | 322 +++-----
 google/cloud/dataproc_v1/types/operations.py  |  35 +-
 google/cloud/dataproc_v1/types/shared.py      |  12 +-
 .../dataproc_v1/types/workflow_templates.py   | 204 +++--
 google/cloud/dataproc_v1beta2/__init__.py     |  13 +-
 .../dataproc_v1beta2/gapic_metadata.json      | 315 ++++++++
 .../dataproc_v1beta2/services/__init__.py     |   1 -
 .../autoscaling_policy_service/__init__.py    |   2 -
 .../async_client.py                           |  43 +-
 .../autoscaling_policy_service/client.py      |  76 +-
 .../autoscaling_policy_service/pagers.py      |   4 +-
 .../transports/__init__.py                    |   2 -
 .../transports/base.py                        | 137 +++-
 .../transports/grpc.py                        |  30 +-
 .../transports/grpc_asyncio.py                |  30 +-
 .../services/cluster_controller/__init__.py   |   2 -
 .../cluster_controller/async_client.py        |  74 +-
 .../services/cluster_controller/client.py     |  89 +--
 .../services/cluster_controller/pagers.py     |   4 +-
 .../cluster_controller/transports/__init__.py |   2 -
 .../cluster_controller/transports/base.py     | 155 ++--
 .../cluster_controller/transports/grpc.py     |  40 +-
 .../transports/grpc_asyncio.py                |  43 +-
 .../services/job_controller/__init__.py       |   2 -
 .../services/job_controller/async_client.py   |  71 +-
 .../services/job_controller/client.py         |  77 +-
 .../services/job_controller/pagers.py         |   4 +-
 .../job_controller/transports/__init__.py     |   2 -
 .../job_controller/transports/base.py         | 165 +++--
 .../job_controller/transports/grpc.py         |  34 +-
 .../job_controller/transports/grpc_asyncio.py |  37 +-
 .../workflow_template_service/__init__.py     |   2 -
 .../workflow_template_service/async_client.py |  80 +-
 .../workflow_template_service/client.py       |  96 +--
 .../workflow_template_service/pagers.py       |   4 +-
 .../transports/__init__.py                    |   2 -
 .../transports/base.py                        | 170 +++--
 .../transports/grpc.py                        |  39 +-
 .../transports/grpc_asyncio.py                |  39 +-
 .../cloud/dataproc_v1beta2/types/__init__.py  |   2 -
 .../types/autoscaling_policies.py             |  64 +-
 .../cloud/dataproc_v1beta2/types/clusters.py  | 302 +++-----
 google/cloud/dataproc_v1beta2/types/jobs.py   | 297 +++-----
 .../dataproc_v1beta2/types/operations.py      |  35 +-
 google/cloud/dataproc_v1beta2/types/shared.py |   2 -
 .../types/workflow_templates.py               | 193 ++---
 noxfile.py                                    |   2 +-
 owlbot.py                                     |   1 +
 scripts/fixup_dataproc_v1_keywords.py         |  57 +-
 scripts/fixup_dataproc_v1beta2_keywords.py    |  55 +-
 setup.py                                      |   1 +
 testing/constraints-3.6.txt                   |   2 +
 tests/__init__.py                             |  15 +
 tests/unit/__init__.py                        |  15 +
 tests/unit/gapic/__init__.py                  |  15 +
 tests/unit/gapic/dataproc_v1/__init__.py      |   1 -
 .../test_autoscaling_policy_service.py        | 473 +++++++-----
 .../dataproc_v1/test_cluster_controller.py    | 696 +++++++++++++-----
 .../gapic/dataproc_v1/test_job_controller.py  | 530 +++++++------
 .../test_workflow_template_service.py         | 591 +++++++------
 tests/unit/gapic/dataproc_v1beta2/__init__.py |   1 -
 .../test_autoscaling_policy_service.py        | 473 +++++++-----
 .../test_cluster_controller.py                | 489 +++++++-----
 .../dataproc_v1beta2/test_job_controller.py   | 538 ++++++++------
 .../test_workflow_template_service.py         | 544 ++++++++------
 113 files changed, 6592 insertions(+), 4379 deletions(-)
 delete mode 100644 dataproc-v1beta2-py.tar.gz
 create mode 100644 google/cloud/dataproc_v1/gapic_metadata.json
 create mode 100644 google/cloud/dataproc_v1beta2/gapic_metadata.json
 create mode 100644 tests/__init__.py
 create mode 100644 tests/unit/__init__.py
 create mode 100644 tests/unit/gapic/__init__.py

diff --git a/dataproc-v1beta2-py.tar.gz b/dataproc-v1beta2-py.tar.gz
deleted file mode 100644
index e69de29b..00000000
diff --git a/docs/dataproc_v1/autoscaling_policy_service.rst b/docs/dataproc_v1/autoscaling_policy_service.rst
index cbb62baa..9b885c57 100644
--- a/docs/dataproc_v1/autoscaling_policy_service.rst
+++ b/docs/dataproc_v1/autoscaling_policy_service.rst
@@ -5,7 +5,6 @@ AutoscalingPolicyService
     :members:
     :inherited-members:
 
-
 .. automodule:: google.cloud.dataproc_v1.services.autoscaling_policy_service.pagers
     :members:
     :inherited-members:
diff --git a/docs/dataproc_v1/cluster_controller.rst b/docs/dataproc_v1/cluster_controller.rst
index 1c4e398b..d9b7f2ad 100644
--- a/docs/dataproc_v1/cluster_controller.rst
+++ b/docs/dataproc_v1/cluster_controller.rst
@@ -5,7 +5,6 @@ ClusterController
     :members:
     :inherited-members:
 
-
 .. automodule:: google.cloud.dataproc_v1.services.cluster_controller.pagers
     :members:
     :inherited-members:
diff --git a/docs/dataproc_v1/job_controller.rst b/docs/dataproc_v1/job_controller.rst
index e73db167..5f14863b 100644
--- a/docs/dataproc_v1/job_controller.rst
+++ b/docs/dataproc_v1/job_controller.rst
@@ -5,7 +5,6 @@ JobController
     :members:
     :inherited-members:
 
-
 .. automodule:: google.cloud.dataproc_v1.services.job_controller.pagers
     :members:
     :inherited-members:
diff --git a/docs/dataproc_v1/workflow_template_service.rst b/docs/dataproc_v1/workflow_template_service.rst
index 154de462..0f301cee 100644
--- a/docs/dataproc_v1/workflow_template_service.rst
+++ b/docs/dataproc_v1/workflow_template_service.rst
@@ -5,7 +5,6 @@ WorkflowTemplateService
     :members:
     :inherited-members:
 
-
 .. automodule:: google.cloud.dataproc_v1.services.workflow_template_service.pagers
     :members:
    :inherited-members:
diff --git a/docs/dataproc_v1beta2/autoscaling_policy_service.rst b/docs/dataproc_v1beta2/autoscaling_policy_service.rst
index 3a411371..cc81bb57 100644
--- a/docs/dataproc_v1beta2/autoscaling_policy_service.rst
+++ b/docs/dataproc_v1beta2/autoscaling_policy_service.rst
@@ -5,7 +5,6 @@ AutoscalingPolicyService
     :members:
     :inherited-members:
 
-
 .. automodule:: google.cloud.dataproc_v1beta2.services.autoscaling_policy_service.pagers
     :members:
     :inherited-members:
diff --git a/docs/dataproc_v1beta2/cluster_controller.rst b/docs/dataproc_v1beta2/cluster_controller.rst
index c10e78c7..3e375a37 100644
--- a/docs/dataproc_v1beta2/cluster_controller.rst
+++ b/docs/dataproc_v1beta2/cluster_controller.rst
@@ -5,7 +5,6 @@ ClusterController
     :members:
     :inherited-members:
 
-
 .. automodule:: google.cloud.dataproc_v1beta2.services.cluster_controller.pagers
     :members:
     :inherited-members:
diff --git a/docs/dataproc_v1beta2/job_controller.rst b/docs/dataproc_v1beta2/job_controller.rst
index 3f5d74e1..8ca76058 100644
--- a/docs/dataproc_v1beta2/job_controller.rst
+++ b/docs/dataproc_v1beta2/job_controller.rst
@@ -5,7 +5,6 @@ JobController
     :members:
     :inherited-members:
 
-
 .. automodule:: google.cloud.dataproc_v1beta2.services.job_controller.pagers
     :members:
     :inherited-members:
diff --git a/docs/dataproc_v1beta2/workflow_template_service.rst b/docs/dataproc_v1beta2/workflow_template_service.rst
index 10a2826c..d93e941b 100644
--- a/docs/dataproc_v1beta2/workflow_template_service.rst
+++ b/docs/dataproc_v1beta2/workflow_template_service.rst
@@ -5,7 +5,6 @@ WorkflowTemplateService
     :members:
     :inherited-members:
 
-
 .. automodule:: google.cloud.dataproc_v1beta2.services.workflow_template_service.pagers
     :members:
     :inherited-members:
diff --git a/google/cloud/dataproc/__init__.py b/google/cloud/dataproc/__init__.py
index 7ed0e897..90d84a64 100644
--- a/google/cloud/dataproc/__init__.py
+++ b/google/cloud/dataproc/__init__.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-
 # Copyright 2020 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,28 +14,29 @@
 # limitations under the License.
 #
 
-from google.cloud.dataproc_v1.services.autoscaling_policy_service.async_client import (
-    AutoscalingPolicyServiceAsyncClient,
-)
 from google.cloud.dataproc_v1.services.autoscaling_policy_service.client import (
     AutoscalingPolicyServiceClient,
 )
-from google.cloud.dataproc_v1.services.cluster_controller.async_client import (
-    ClusterControllerAsyncClient,
+from google.cloud.dataproc_v1.services.autoscaling_policy_service.async_client import (
+    AutoscalingPolicyServiceAsyncClient,
 )
 from google.cloud.dataproc_v1.services.cluster_controller.client import (
     ClusterControllerClient,
 )
-from google.cloud.dataproc_v1.services.job_controller.async_client import (
-    JobControllerAsyncClient,
+from google.cloud.dataproc_v1.services.cluster_controller.async_client import (
+    ClusterControllerAsyncClient,
 )
 from google.cloud.dataproc_v1.services.job_controller.client import JobControllerClient
-from google.cloud.dataproc_v1.services.workflow_template_service.async_client import (
-    WorkflowTemplateServiceAsyncClient,
+from google.cloud.dataproc_v1.services.job_controller.async_client import (
+    JobControllerAsyncClient,
 )
 from google.cloud.dataproc_v1.services.workflow_template_service.client import (
     WorkflowTemplateServiceClient,
 )
+from google.cloud.dataproc_v1.services.workflow_template_service.async_client import (
+    WorkflowTemplateServiceAsyncClient,
+)
+
 from google.cloud.dataproc_v1.types.autoscaling_policies import AutoscalingPolicy
 from google.cloud.dataproc_v1.types.autoscaling_policies import (
     BasicAutoscalingAlgorithm,
@@ -80,16 +80,23 @@
 from google.cloud.dataproc_v1.types.clusters import EndpointConfig
 from google.cloud.dataproc_v1.types.clusters import GceClusterConfig
 from google.cloud.dataproc_v1.types.clusters import GetClusterRequest
+from google.cloud.dataproc_v1.types.clusters import GkeClusterConfig
+from google.cloud.dataproc_v1.types.clusters import IdentityConfig
 from google.cloud.dataproc_v1.types.clusters import InstanceGroupConfig
 from google.cloud.dataproc_v1.types.clusters import KerberosConfig
 from google.cloud.dataproc_v1.types.clusters import LifecycleConfig
 from google.cloud.dataproc_v1.types.clusters import ListClustersRequest
 from google.cloud.dataproc_v1.types.clusters import ListClustersResponse
 from google.cloud.dataproc_v1.types.clusters import ManagedGroupConfig
+from google.cloud.dataproc_v1.types.clusters import MetastoreConfig
+from google.cloud.dataproc_v1.types.clusters import NodeGroupAffinity
 from google.cloud.dataproc_v1.types.clusters import NodeInitializationAction
 from google.cloud.dataproc_v1.types.clusters import ReservationAffinity
 from google.cloud.dataproc_v1.types.clusters import SecurityConfig
+from google.cloud.dataproc_v1.types.clusters import ShieldedInstanceConfig
 from google.cloud.dataproc_v1.types.clusters import SoftwareConfig
+from google.cloud.dataproc_v1.types.clusters import StartClusterRequest
+from google.cloud.dataproc_v1.types.clusters import StopClusterRequest
 from google.cloud.dataproc_v1.types.clusters import UpdateClusterRequest
 from google.cloud.dataproc_v1.types.jobs import CancelJobRequest
 from google.cloud.dataproc_v1.types.jobs import DeleteJobRequest
@@ -155,88 +162,98 @@ from google.cloud.dataproc_v1.types.workflow_templates import WorkflowTemplatePlacement
 
 __all__ = (
-    "AcceleratorConfig",
-    "AutoscalingConfig",
-    "AutoscalingPolicy",
-    "AutoscalingPolicyServiceAsyncClient",
     "AutoscalingPolicyServiceClient",
+    "AutoscalingPolicyServiceAsyncClient",
+    "ClusterControllerClient",
+    "ClusterControllerAsyncClient",
+    "JobControllerClient",
+    "JobControllerAsyncClient",
+    "WorkflowTemplateServiceClient",
+    "WorkflowTemplateServiceAsyncClient",
+    "AutoscalingPolicy",
     "BasicAutoscalingAlgorithm",
     "BasicYarnAutoscalingConfig",
-    "CancelJobRequest",
+    "CreateAutoscalingPolicyRequest",
+    "DeleteAutoscalingPolicyRequest",
+    "GetAutoscalingPolicyRequest",
+    "InstanceGroupAutoscalingPolicyConfig",
+    "ListAutoscalingPoliciesRequest",
+    "ListAutoscalingPoliciesResponse",
+    "UpdateAutoscalingPolicyRequest",
+    "AcceleratorConfig",
+    "AutoscalingConfig",
     "Cluster",
     "ClusterConfig",
-    "ClusterControllerAsyncClient",
-    "ClusterControllerClient",
     "ClusterMetrics",
-    "ClusterOperation",
-    "ClusterOperationMetadata",
-    "ClusterOperationStatus",
-    "ClusterSelector",
     "ClusterStatus",
-    "Component",
-    "CreateAutoscalingPolicyRequest",
     "CreateClusterRequest",
-    "CreateWorkflowTemplateRequest",
-    "DeleteAutoscalingPolicyRequest",
     "DeleteClusterRequest",
-    "DeleteJobRequest",
-    "DeleteWorkflowTemplateRequest",
     "DiagnoseClusterRequest",
     "DiagnoseClusterResults",
     "DiskConfig",
     "EncryptionConfig",
     "EndpointConfig",
     "GceClusterConfig",
-    "GetAutoscalingPolicyRequest",
     "GetClusterRequest",
+    "GkeClusterConfig",
+    "IdentityConfig",
+    "InstanceGroupConfig",
+    "KerberosConfig",
+    "LifecycleConfig",
+    "ListClustersRequest",
+    "ListClustersResponse",
+    "ManagedGroupConfig",
+    "MetastoreConfig",
+    "NodeGroupAffinity",
+    "NodeInitializationAction",
+    "ReservationAffinity",
+    "SecurityConfig",
+    "ShieldedInstanceConfig",
+    "SoftwareConfig",
+    "StartClusterRequest",
+    "StopClusterRequest",
+    "UpdateClusterRequest",
+    "CancelJobRequest",
+    "DeleteJobRequest",
     "GetJobRequest",
-    "GetWorkflowTemplateRequest",
     "HadoopJob",
     "HiveJob",
-    "InstanceGroupAutoscalingPolicyConfig",
-    "InstanceGroupConfig",
-    "InstantiateInlineWorkflowTemplateRequest",
-    "InstantiateWorkflowTemplateRequest",
     "Job",
-    "JobControllerAsyncClient",
-    "JobControllerClient",
     "JobMetadata",
     "JobPlacement",
     "JobReference",
     "JobScheduling",
     "JobStatus",
-    "KerberosConfig",
-    "LifecycleConfig",
-    "ListAutoscalingPoliciesRequest",
-    "ListAutoscalingPoliciesResponse",
-    "ListClustersRequest",
-    "ListClustersResponse",
     "ListJobsRequest",
     "ListJobsResponse",
-    "ListWorkflowTemplatesRequest",
-    "ListWorkflowTemplatesResponse",
     "LoggingConfig",
-    "ManagedCluster",
-    "ManagedGroupConfig",
-    "NodeInitializationAction",
-    "OrderedJob",
-    "ParameterValidation",
     "PigJob",
     "PrestoJob",
     "PySparkJob",
     "QueryList",
-    "RegexValidation",
-    "ReservationAffinity",
-    "SecurityConfig",
-    "SoftwareConfig",
     "SparkJob",
     "SparkRJob",
     "SparkSqlJob",
     "SubmitJobRequest",
-    "TemplateParameter",
-    "UpdateAutoscalingPolicyRequest",
-    "UpdateClusterRequest",
     "UpdateJobRequest",
+    "YarnApplication",
+    "ClusterOperationMetadata",
+    "ClusterOperationStatus",
+    "Component",
+    "ClusterOperation",
+    "ClusterSelector",
+    "CreateWorkflowTemplateRequest",
+    "DeleteWorkflowTemplateRequest",
+    "GetWorkflowTemplateRequest",
+    "InstantiateInlineWorkflowTemplateRequest",
+    "InstantiateWorkflowTemplateRequest",
+    "ListWorkflowTemplatesRequest",
+    "ListWorkflowTemplatesResponse",
+    "ManagedCluster",
+    "OrderedJob",
+    "ParameterValidation",
+    "RegexValidation",
+    "TemplateParameter",
     "UpdateWorkflowTemplateRequest",
     "ValueValidation",
     "WorkflowGraph",
@@ -244,7 +261,4 @@
     "WorkflowMetadata",
     "WorkflowNode",
     "WorkflowTemplate",
     "WorkflowTemplatePlacement",
-    "WorkflowTemplateServiceAsyncClient",
-    "WorkflowTemplateServiceClient",
-    "YarnApplication",
 )
"ShieldedInstanceConfig", "SoftwareConfig", "SparkJob", "SparkRJob", "SparkSqlJob", + "StartClusterRequest", + "StopClusterRequest", "SubmitJobRequest", "TemplateParameter", "UpdateAutoscalingPolicyRequest", @@ -192,6 +213,6 @@ "WorkflowNode", "WorkflowTemplate", "WorkflowTemplatePlacement", - "YarnApplication", "WorkflowTemplateServiceClient", + "YarnApplication", ) diff --git a/google/cloud/dataproc_v1/gapic_metadata.json b/google/cloud/dataproc_v1/gapic_metadata.json new file mode 100644 index 00000000..2d068a45 --- /dev/null +++ b/google/cloud/dataproc_v1/gapic_metadata.json @@ -0,0 +1,335 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.dataproc_v1", + "protoPackage": "google.cloud.dataproc.v1", + "schema": "1.0", + "services": { + "AutoscalingPolicyService": { + "clients": { + "grpc": { + "libraryClient": "AutoscalingPolicyServiceClient", + "rpcs": { + "CreateAutoscalingPolicy": { + "methods": [ + "create_autoscaling_policy" + ] + }, + "DeleteAutoscalingPolicy": { + "methods": [ + "delete_autoscaling_policy" + ] + }, + "GetAutoscalingPolicy": { + "methods": [ + "get_autoscaling_policy" + ] + }, + "ListAutoscalingPolicies": { + "methods": [ + "list_autoscaling_policies" + ] + }, + "UpdateAutoscalingPolicy": { + "methods": [ + "update_autoscaling_policy" + ] + } + } + }, + "grpc-async": { + "libraryClient": "AutoscalingPolicyServiceAsyncClient", + "rpcs": { + "CreateAutoscalingPolicy": { + "methods": [ + "create_autoscaling_policy" + ] + }, + "DeleteAutoscalingPolicy": { + "methods": [ + "delete_autoscaling_policy" + ] + }, + "GetAutoscalingPolicy": { + "methods": [ + "get_autoscaling_policy" + ] + }, + "ListAutoscalingPolicies": { + "methods": [ + "list_autoscaling_policies" + ] + }, + "UpdateAutoscalingPolicy": { + "methods": [ + "update_autoscaling_policy" + ] + } + } + } + } + }, + "ClusterController": { + "clients": { + "grpc": { + "libraryClient": "ClusterControllerClient", + "rpcs": { + "CreateCluster": { + "methods": [ + "create_cluster" + ] + }, + "DeleteCluster": { + "methods": [ + "delete_cluster" + ] + }, + "DiagnoseCluster": { + "methods": [ + "diagnose_cluster" + ] + }, + "GetCluster": { + "methods": [ + "get_cluster" + ] + }, + "ListClusters": { + "methods": [ + "list_clusters" + ] + }, + "StartCluster": { + "methods": [ + "start_cluster" + ] + }, + "StopCluster": { + "methods": [ + "stop_cluster" + ] + }, + "UpdateCluster": { + "methods": [ + "update_cluster" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ClusterControllerAsyncClient", + "rpcs": { + "CreateCluster": { + "methods": [ + "create_cluster" + ] + }, + "DeleteCluster": { + "methods": [ + "delete_cluster" + ] + }, + "DiagnoseCluster": { + "methods": [ + "diagnose_cluster" + ] + }, + "GetCluster": { + "methods": [ + "get_cluster" + ] + }, + "ListClusters": { + "methods": [ + "list_clusters" + ] + }, + "StartCluster": { + "methods": [ + "start_cluster" + ] + }, + "StopCluster": { + "methods": [ + "stop_cluster" + ] + }, + "UpdateCluster": { + "methods": [ + "update_cluster" + ] + } + } + } + } + }, + "JobController": { + "clients": { + "grpc": { + "libraryClient": "JobControllerClient", + "rpcs": { + "CancelJob": { + "methods": [ + "cancel_job" + ] + }, + "DeleteJob": { + "methods": [ + "delete_job" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + }, + "SubmitJob": { + "methods": [ + "submit_job" + ] + }, + "SubmitJobAsOperation": { + 
"methods": [ + "submit_job_as_operation" + ] + }, + "UpdateJob": { + "methods": [ + "update_job" + ] + } + } + }, + "grpc-async": { + "libraryClient": "JobControllerAsyncClient", + "rpcs": { + "CancelJob": { + "methods": [ + "cancel_job" + ] + }, + "DeleteJob": { + "methods": [ + "delete_job" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + }, + "SubmitJob": { + "methods": [ + "submit_job" + ] + }, + "SubmitJobAsOperation": { + "methods": [ + "submit_job_as_operation" + ] + }, + "UpdateJob": { + "methods": [ + "update_job" + ] + } + } + } + } + }, + "WorkflowTemplateService": { + "clients": { + "grpc": { + "libraryClient": "WorkflowTemplateServiceClient", + "rpcs": { + "CreateWorkflowTemplate": { + "methods": [ + "create_workflow_template" + ] + }, + "DeleteWorkflowTemplate": { + "methods": [ + "delete_workflow_template" + ] + }, + "GetWorkflowTemplate": { + "methods": [ + "get_workflow_template" + ] + }, + "InstantiateInlineWorkflowTemplate": { + "methods": [ + "instantiate_inline_workflow_template" + ] + }, + "InstantiateWorkflowTemplate": { + "methods": [ + "instantiate_workflow_template" + ] + }, + "ListWorkflowTemplates": { + "methods": [ + "list_workflow_templates" + ] + }, + "UpdateWorkflowTemplate": { + "methods": [ + "update_workflow_template" + ] + } + } + }, + "grpc-async": { + "libraryClient": "WorkflowTemplateServiceAsyncClient", + "rpcs": { + "CreateWorkflowTemplate": { + "methods": [ + "create_workflow_template" + ] + }, + "DeleteWorkflowTemplate": { + "methods": [ + "delete_workflow_template" + ] + }, + "GetWorkflowTemplate": { + "methods": [ + "get_workflow_template" + ] + }, + "InstantiateInlineWorkflowTemplate": { + "methods": [ + "instantiate_inline_workflow_template" + ] + }, + "InstantiateWorkflowTemplate": { + "methods": [ + "instantiate_workflow_template" + ] + }, + "ListWorkflowTemplates": { + "methods": [ + "list_workflow_templates" + ] + }, + "UpdateWorkflowTemplate": { + "methods": [ + "update_workflow_template" + ] + } + } + } + } + } + } +} diff --git a/google/cloud/dataproc_v1/services/__init__.py b/google/cloud/dataproc_v1/services/__init__.py index 42ffdf2b..4de65971 100644 --- a/google/cloud/dataproc_v1/services/__init__.py +++ b/google/cloud/dataproc_v1/services/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/google/cloud/dataproc_v1/services/autoscaling_policy_service/__init__.py b/google/cloud/dataproc_v1/services/autoscaling_policy_service/__init__.py index e33cbc43..08e39679 100644 --- a/google/cloud/dataproc_v1/services/autoscaling_policy_service/__init__.py +++ b/google/cloud/dataproc_v1/services/autoscaling_policy_service/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
diff --git a/google/cloud/dataproc_v1/services/__init__.py b/google/cloud/dataproc_v1/services/__init__.py
index 42ffdf2b..4de65971 100644
--- a/google/cloud/dataproc_v1/services/__init__.py
+++ b/google/cloud/dataproc_v1/services/__init__.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-
 # Copyright 2020 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
diff --git a/google/cloud/dataproc_v1/services/autoscaling_policy_service/__init__.py b/google/cloud/dataproc_v1/services/autoscaling_policy_service/__init__.py
index e33cbc43..08e39679 100644
--- a/google/cloud/dataproc_v1/services/autoscaling_policy_service/__init__.py
+++ b/google/cloud/dataproc_v1/services/autoscaling_policy_service/__init__.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-
 # Copyright 2020 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
 from .client import AutoscalingPolicyServiceClient
 from .async_client import AutoscalingPolicyServiceAsyncClient
diff --git a/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py b/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py
index 52d43220..f2c5718c 100644
--- a/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py
+++ b/google/cloud/dataproc_v1/services/autoscaling_policy_service/async_client.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-
 # Copyright 2020 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
 from collections import OrderedDict
 import functools
 import re
@@ -22,15 +20,14 @@
 import pkg_resources
 
 import google.api_core.client_options as ClientOptions  # type: ignore
-from google.api_core import exceptions  # type: ignore
+from google.api_core import exceptions as core_exceptions  # type: ignore
 from google.api_core import gapic_v1  # type: ignore
 from google.api_core import retry as retries  # type: ignore
-from google.auth import credentials  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
 from google.oauth2 import service_account  # type: ignore
 
 from google.cloud.dataproc_v1.services.autoscaling_policy_service import pagers
 from google.cloud.dataproc_v1.types import autoscaling_policies
-
 from .transports.base import AutoscalingPolicyServiceTransport, DEFAULT_CLIENT_INFO
 from .transports.grpc_asyncio import AutoscalingPolicyServiceGrpcAsyncIOTransport
 from .client import AutoscalingPolicyServiceClient
@@ -52,33 +49,28 @@ class AutoscalingPolicyServiceAsyncClient:
     parse_autoscaling_policy_path = staticmethod(
         AutoscalingPolicyServiceClient.parse_autoscaling_policy_path
     )
-
     common_billing_account_path = staticmethod(
         AutoscalingPolicyServiceClient.common_billing_account_path
     )
     parse_common_billing_account_path = staticmethod(
         AutoscalingPolicyServiceClient.parse_common_billing_account_path
     )
-
     common_folder_path = staticmethod(AutoscalingPolicyServiceClient.common_folder_path)
     parse_common_folder_path = staticmethod(
         AutoscalingPolicyServiceClient.parse_common_folder_path
     )
-
     common_organization_path = staticmethod(
         AutoscalingPolicyServiceClient.common_organization_path
     )
     parse_common_organization_path = staticmethod(
         AutoscalingPolicyServiceClient.parse_common_organization_path
     )
-
     common_project_path = staticmethod(
         AutoscalingPolicyServiceClient.common_project_path
     )
     parse_common_project_path = staticmethod(
         AutoscalingPolicyServiceClient.parse_common_project_path
     )
-
     common_location_path = staticmethod(
         AutoscalingPolicyServiceClient.common_location_path
     )
@@ -88,7 +80,8 @@ class AutoscalingPolicyServiceAsyncClient:
 
     @classmethod
     def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials info.
+        """Creates an instance of this client using the provided credentials
+            info.
 
         Args:
             info (dict): The service account private key info.
@@ -103,7 +96,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs):
     @classmethod
     def from_service_account_file(cls, filename: str, *args, **kwargs):
         """Creates an instance of this client using the provided credentials
-        file.
+            file.
 
         Args:
             filename (str): The path to the service account private key json
@@ -120,7 +113,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
 
     @property
     def transport(self) -> AutoscalingPolicyServiceTransport:
-        """Return the transport used by the client instance.
+        """Returns the transport used by the client instance.
 
         Returns:
             AutoscalingPolicyServiceTransport: The transport used by the client instance.
@@ -135,12 +128,12 @@ def transport(self) -> AutoscalingPolicyServiceTransport:
     def __init__(
         self,
         *,
-        credentials: credentials.Credentials = None,
+        credentials: ga_credentials.Credentials = None,
        transport: Union[str, AutoscalingPolicyServiceTransport] = "grpc_asyncio",
         client_options: ClientOptions = None,
         client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
     ) -> None:
-        """Instantiate the autoscaling policy service client.
+        """Instantiates the autoscaling policy service client.
 
         Args:
             credentials (Optional[google.auth.credentials.Credentials]): The
@@ -172,7 +165,6 @@ def __init__(
             google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
                 creation failed for any reason.
         """
-
         self._client = AutoscalingPolicyServiceClient(
             credentials=credentials,
             transport=transport,
@@ -221,7 +213,6 @@ async def create_autoscaling_policy(
                 This corresponds to the ``policy`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -248,7 +239,6 @@
 
         # If we have keyword arguments corresponding to fields on the
         # request, apply these.
-
         if parent is not None:
             request.parent = parent
         if policy is not None:
@@ -299,7 +289,6 @@ async def update_autoscaling_policy(
                 This corresponds to the ``policy`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -326,7 +315,6 @@
 
         # If we have keyword arguments corresponding to fields on the
         # request, apply these.
-
         if policy is not None:
             request.policy = policy
 
@@ -339,7 +327,8 @@
                 maximum=60.0,
                 multiplier=1.3,
                 predicate=retries.if_exception_type(
-                    exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.ServiceUnavailable,
                 ),
                 deadline=600.0,
             ),
@@ -393,7 +382,6 @@ async def get_autoscaling_policy(
                 This corresponds to the ``name`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -420,7 +408,6 @@
 
         # If we have keyword arguments corresponding to fields on the
         # request, apply these.
-
         if name is not None:
             request.name = name
 
@@ -433,7 +420,8 @@
                 maximum=60.0,
                 multiplier=1.3,
                 predicate=retries.if_exception_type(
-                    exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.ServiceUnavailable,
                 ),
                 deadline=600.0,
             ),
@@ -485,7 +473,6 @@ async def list_autoscaling_policies(
                 This corresponds to the ``parent`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -515,7 +502,6 @@
 
         # If we have keyword arguments corresponding to fields on the
         # request, apply these.
-
         if parent is not None:
             request.parent = parent
 
@@ -528,7 +514,8 @@
                 maximum=60.0,
                 multiplier=1.3,
                 predicate=retries.if_exception_type(
-                    exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.ServiceUnavailable,
                 ),
                 deadline=600.0,
             ),
@@ -592,7 +579,6 @@ async def delete_autoscaling_policy(
                 This corresponds to the ``name`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
-
             retry (google.api_core.retry.Retry): Designation of what errors, if any,
                 should be retried.
             timeout (float): The timeout for this request.
@@ -613,7 +599,6 @@
 
         # If we have keyword arguments corresponding to fields on the
         # request, apply these.
-
         if name is not None:
             request.name = name
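The async client rewritten above mirrors the sync surface with awaitable methods and async pagers. A minimal sketch, with placeholder project and location:

    # Sketch: listing autoscaling policies through the async client.
    import asyncio
    from google.cloud import dataproc_v1

    async def main():
        client = dataproc_v1.AutoscalingPolicyServiceAsyncClient()
        pager = await client.list_autoscaling_policies(
            parent="projects/my-project/locations/us-central1"
        )
        async for policy in pager:  # the pager fetches further pages on demand
            print(policy.name)

    asyncio.run(main())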
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -116,7 +114,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -133,7 +132,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -152,10 +151,11 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> AutoscalingPolicyServiceTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - AutoscalingPolicyServiceTransport: The transport used by the client instance. + AutoscalingPolicyServiceTransport: The transport used by the client + instance. """ return self._transport @@ -163,14 +163,14 @@ def transport(self) -> AutoscalingPolicyServiceTransport: def autoscaling_policy_path( project: str, location: str, autoscaling_policy: str, ) -> str: - """Return a fully-qualified autoscaling_policy string.""" + """Returns a fully-qualified autoscaling_policy string.""" return "projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}".format( project=project, location=location, autoscaling_policy=autoscaling_policy, ) @staticmethod def parse_autoscaling_policy_path(path: str) -> Dict[str, str]: - """Parse a autoscaling_policy path into its component segments.""" + """Parses a autoscaling_policy path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/autoscalingPolicies/(?P.+?)$", path, @@ -179,7 +179,7 @@ def parse_autoscaling_policy_path(path: str) -> Dict[str, str]: @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -192,7 +192,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -203,7 +203,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -214,7 +214,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -225,7 +225,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - 
"""Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -239,12 +239,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, AutoscalingPolicyServiceTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the autoscaling policy service client. + """Instantiates the autoscaling policy service client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -299,9 +299,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -313,12 +314,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -333,8 +336,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -390,7 +393,6 @@ def create_autoscaling_policy( This corresponds to the ``policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -419,10 +421,8 @@ def create_autoscaling_policy( # there are no flattened fields. if not isinstance(request, autoscaling_policies.CreateAutoscalingPolicyRequest): request = autoscaling_policies.CreateAutoscalingPolicyRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if policy is not None: @@ -471,7 +471,6 @@ def update_autoscaling_policy( This corresponds to the ``policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -500,10 +499,8 @@ def update_autoscaling_policy( # there are no flattened fields. if not isinstance(request, autoscaling_policies.UpdateAutoscalingPolicyRequest): request = autoscaling_policies.UpdateAutoscalingPolicyRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if policy is not None: request.policy = policy @@ -559,7 +556,6 @@ def get_autoscaling_policy( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -588,10 +584,8 @@ def get_autoscaling_policy( # there are no flattened fields. if not isinstance(request, autoscaling_policies.GetAutoscalingPolicyRequest): request = autoscaling_policies.GetAutoscalingPolicyRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -643,7 +637,6 @@ def list_autoscaling_policies( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -675,10 +668,8 @@ def list_autoscaling_policies( # there are no flattened fields. if not isinstance(request, autoscaling_policies.ListAutoscalingPoliciesRequest): request = autoscaling_policies.ListAutoscalingPoliciesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -744,7 +735,6 @@ def delete_autoscaling_policy( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -767,10 +757,8 @@ def delete_autoscaling_policy( # there are no flattened fields. if not isinstance(request, autoscaling_policies.DeleteAutoscalingPolicyRequest): request = autoscaling_policies.DeleteAutoscalingPolicyRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name diff --git a/google/cloud/dataproc_v1/services/autoscaling_policy_service/pagers.py b/google/cloud/dataproc_v1/services/autoscaling_policy_service/pagers.py index 85deb317..9a6b36e1 100644 --- a/google/cloud/dataproc_v1/services/autoscaling_policy_service/pagers.py +++ b/google/cloud/dataproc_v1/services/autoscaling_policy_service/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -119,7 +117,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. 
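The pagers touched above are what the sync list methods return; iteration transparently issues follow-up page requests. A minimal sync counterpart to the async sketch earlier, again with placeholder names:

    # Sketch: the sync list call returns a lazy pager.
    from google.cloud import dataproc_v1

    client = dataproc_v1.AutoscalingPolicyServiceClient()
    for policy in client.list_autoscaling_policies(
        parent="projects/my-project/locations/us-central1"
    ):
        print(policy.name)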
diff --git a/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/__init__.py b/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/__init__.py
index 48eb9a18..3e31e172 100644
--- a/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/__init__.py
+++ b/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/__init__.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-
 # Copyright 2020 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
 from collections import OrderedDict
 from typing import Dict, Type
diff --git a/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py b/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py
index 6fcb1442..7ba0fcb4 100644
--- a/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py
+++ b/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/base.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-
 # Copyright 2020 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,20 +13,20 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
 import abc
-import typing
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
+import packaging.version
 import pkg_resources
 
-from google import auth  # type: ignore
-from google.api_core import exceptions  # type: ignore
+import google.auth  # type: ignore
+import google.api_core  # type: ignore
+from google.api_core import exceptions as core_exceptions  # type: ignore
 from google.api_core import gapic_v1  # type: ignore
 from google.api_core import retry as retries  # type: ignore
-from google.auth import credentials  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
 
 from google.cloud.dataproc_v1.types import autoscaling_policies
-from google.protobuf import empty_pb2 as empty  # type: ignore
-
+from google.protobuf import empty_pb2  # type: ignore
 
 try:
     DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
@@ -36,27 +35,41 @@
 except pkg_resources.DistributionNotFound:
     DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
 
+try:
+    # google.auth.__version__ was added in 1.26.0
+    _GOOGLE_AUTH_VERSION = google.auth.__version__
+except AttributeError:
+    try:  # try pkg_resources if it is available
+        _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version
+    except pkg_resources.DistributionNotFound:  # pragma: NO COVER
+        _GOOGLE_AUTH_VERSION = None
+
+_API_CORE_VERSION = google.api_core.__version__
+
 
 class AutoscalingPolicyServiceTransport(abc.ABC):
     """Abstract transport class for AutoscalingPolicyService."""
 
     AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
 
+    DEFAULT_HOST: str = "dataproc.googleapis.com"
+
     def __init__(
         self,
         *,
-        host: str = "dataproc.googleapis.com",
-        credentials: credentials.Credentials = None,
-        credentials_file: typing.Optional[str] = None,
-        scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
-        quota_project_id: typing.Optional[str] = None,
+        host: str = DEFAULT_HOST,
+        credentials: ga_credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
         client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
         **kwargs,
     ) -> None:
         """Instantiate the transport.
 
         Args:
-            host (Optional[str]): The hostname to connect to.
+            host (Optional[str]):
+                The hostname to connect to.
             credentials (Optional[google.auth.credentials.Credentials]): The
                 authorization credentials to attach to requests. These
                 credentials identify the application to the service; if none
@@ -65,7 +78,7 @@ def __init__(
             credentials_file (Optional[str]): A file with credentials that can
                 be loaded with :func:`google.auth.load_credentials_from_file`.
                 This argument is mutually exclusive with credentials.
-            scope (Optional[Sequence[str]]): A list of scopes.
+            scopes (Optional[Sequence[str]]): A list of scopes.
             quota_project_id (Optional[str]): An optional project to use for billing
                 and quota.
             client_info (google.api_core.gapic_v1.client_info.ClientInfo):
@@ -79,29 +92,76 @@ def __init__(
             host += ":443"
         self._host = host
 
+        scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)
+
         # Save the scopes.
         self._scopes = scopes or self.AUTH_SCOPES
 
         # If no credentials are provided, then determine the appropriate
         # defaults.
         if credentials and credentials_file:
-            raise exceptions.DuplicateCredentialArgs(
+            raise core_exceptions.DuplicateCredentialArgs(
                 "'credentials_file' and 'credentials' are mutually exclusive"
             )
 
         if credentials_file is not None:
-            credentials, _ = auth.load_credentials_from_file(
-                credentials_file, scopes=self._scopes, quota_project_id=quota_project_id
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
             )
 
         elif credentials is None:
-            credentials, _ = auth.default(
-                scopes=self._scopes, quota_project_id=quota_project_id
+            credentials, _ = google.auth.default(
+                **scopes_kwargs, quota_project_id=quota_project_id
             )
 
         # Save the credentials.
         self._credentials = credentials
 
+        # TODO(busunkim): These two class methods are in the base transport
+        # to avoid duplicating code across the transport classes. These functions
+        # should be deleted once the minimum required versions of google-api-core
+        # and google-auth are increased.
+
+    # TODO: Remove this function once google-auth >= 1.25.0 is required
+    @classmethod
+    def _get_scopes_kwargs(
+        cls, host: str, scopes: Optional[Sequence[str]]
+    ) -> Dict[str, Optional[Sequence[str]]]:
+        """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version"""
+
+        scopes_kwargs = {}
+
+        if _GOOGLE_AUTH_VERSION and (
+            packaging.version.parse(_GOOGLE_AUTH_VERSION)
+            >= packaging.version.parse("1.25.0")
+        ):
+            scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES}
+        else:
+            scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES}
+
+        return scopes_kwargs
+
+    # TODO: Remove this function once google-api-core >= 1.26.0 is required
+    @classmethod
+    def _get_self_signed_jwt_kwargs(
+        cls, host: str, scopes: Optional[Sequence[str]]
+    ) -> Dict[str, Union[Optional[Sequence[str]], str]]:
+        """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version"""
+
+        self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {}
+
+        if _API_CORE_VERSION and (
+            packaging.version.parse(_API_CORE_VERSION)
+            >= packaging.version.parse("1.26.0")
+        ):
+            self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES
+            self_signed_jwt_kwargs["scopes"] = scopes
+            self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST
+        else:
+            self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES
+
+        return self_signed_jwt_kwargs
+
     def _prep_wrapped_messages(self, client_info):
         # Precompute the wrapped methods.
         self._wrapped_methods = {
@@ -117,7 +177,8 @@ def _prep_wrapped_messages(self, client_info):
                     maximum=60.0,
                     multiplier=1.3,
                     predicate=retries.if_exception_type(
-                        exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
                     ),
                     deadline=600.0,
                 ),
@@ -131,7 +192,8 @@
                     maximum=60.0,
                     multiplier=1.3,
                     predicate=retries.if_exception_type(
-                        exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
                    ),
                     deadline=600.0,
                 ),
@@ -145,7 +207,8 @@
                     maximum=60.0,
                     multiplier=1.3,
                     predicate=retries.if_exception_type(
-                        exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
                     ),
                     deadline=600.0,
                 ),
@@ -162,11 +225,11 @@ def _prep_wrapped_messages(self, client_info):
     @property
     def create_autoscaling_policy(
         self,
-    ) -> typing.Callable[
+    ) -> Callable[
         [autoscaling_policies.CreateAutoscalingPolicyRequest],
-        typing.Union[
+        Union[
             autoscaling_policies.AutoscalingPolicy,
-            typing.Awaitable[autoscaling_policies.AutoscalingPolicy],
+            Awaitable[autoscaling_policies.AutoscalingPolicy],
         ],
     ]:
         raise NotImplementedError()
@@ -174,11 +237,11 @@ def create_autoscaling_policy(
     @property
     def update_autoscaling_policy(
         self,
-    ) -> typing.Callable[
+    ) -> Callable[
        [autoscaling_policies.UpdateAutoscalingPolicyRequest],
-        typing.Union[
+        Union[
             autoscaling_policies.AutoscalingPolicy,
-            typing.Awaitable[autoscaling_policies.AutoscalingPolicy],
+            Awaitable[autoscaling_policies.AutoscalingPolicy],
         ],
     ]:
         raise NotImplementedError()
@@ -186,11 +249,11 @@ def update_autoscaling_policy(
     @property
     def get_autoscaling_policy(
         self,
-    ) -> typing.Callable[
+    ) -> Callable[
        [autoscaling_policies.GetAutoscalingPolicyRequest],
-        typing.Union[
+        Union[
             autoscaling_policies.AutoscalingPolicy,
-            typing.Awaitable[autoscaling_policies.AutoscalingPolicy],
+            Awaitable[autoscaling_policies.AutoscalingPolicy],
         ],
     ]:
         raise NotImplementedError()
@@ -198,11 +261,11 @@ def get_autoscaling_policy(
     @property
     def list_autoscaling_policies(
         self,
-    ) -> typing.Callable[
+    ) -> Callable[
         [autoscaling_policies.ListAutoscalingPoliciesRequest],
-        typing.Union[
+        Union[
             autoscaling_policies.ListAutoscalingPoliciesResponse,
-            typing.Awaitable[autoscaling_policies.ListAutoscalingPoliciesResponse],
+            Awaitable[autoscaling_policies.ListAutoscalingPoliciesResponse],
         ],
     ]:
         raise NotImplementedError()
@@ -210,9 +273,9 @@ def list_autoscaling_policies(
     @property
     def delete_autoscaling_policy(
         self,
-    ) -> typing.Callable[
+    ) -> Callable[
         [autoscaling_policies.DeleteAutoscalingPolicyRequest],
-        typing.Union[empty.Empty, typing.Awaitable[empty.Empty]],
+        Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
     ]:
         raise NotImplementedError()
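The two TODO helpers above gate keyword arguments on the installed google-auth and google-api-core versions. The pattern, reduced to a self-contained sketch (the thresholds are the ones named in the diff):

    # Sketch of the version-gating pattern behind _get_scopes_kwargs.
    import packaging.version

    def scopes_kwargs(scopes, default_scopes, google_auth_version):
        # google-auth >= 1.25.0 understands default_scopes; older versions do not.
        if packaging.version.parse(google_auth_version) >= packaging.version.parse("1.25.0"):
            return {"scopes": scopes, "default_scopes": default_scopes}
        return {"scopes": scopes or default_scopes}

    print(scopes_kwargs(None, ("https://www.googleapis.com/auth/cloud-platform",), "1.30.0"))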
Awaitable[autoscaling_policies.AutoscalingPolicy], ], ]: raise NotImplementedError() @@ -198,11 +261,11 @@ def get_autoscaling_policy( @property def list_autoscaling_policies( self, - ) -> typing.Callable[ + ) -> Callable[ [autoscaling_policies.ListAutoscalingPoliciesRequest], - typing.Union[ + Union[ autoscaling_policies.ListAutoscalingPoliciesResponse, - typing.Awaitable[autoscaling_policies.ListAutoscalingPoliciesResponse], + Awaitable[autoscaling_policies.ListAutoscalingPoliciesResponse], ], ]: raise NotImplementedError() @@ -210,9 +273,9 @@ def list_autoscaling_policies( @property def delete_autoscaling_policy( self, - ) -> typing.Callable[ + ) -> Callable[ [autoscaling_policies.DeleteAutoscalingPolicyRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() diff --git a/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py b/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py index e1df740b..dc44bb26 100644 --- a/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py +++ b/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,21 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.dataproc_v1.types import autoscaling_policies -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import AutoscalingPolicyServiceTransport, DEFAULT_CLIENT_INFO @@ -52,7 +49,7 @@ def __init__( self, *, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -66,7 +63,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -176,7 +174,7 @@ def __init__( def create_channel( cls, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -207,13 +205,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -345,7 +345,9 @@ def list_autoscaling_policies( @property def delete_autoscaling_policy( self, - ) -> Callable[[autoscaling_policies.DeleteAutoscalingPolicyRequest], empty.Empty]: + ) -> Callable[ + [autoscaling_policies.DeleteAutoscalingPolicyRequest], empty_pb2.Empty + ]: r"""Return a callable for the delete autoscaling policy method over gRPC. Deletes an autoscaling policy. It is an error to @@ -366,7 +368,7 @@ def delete_autoscaling_policy( self._stubs["delete_autoscaling_policy"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1.AutoscalingPolicyService/DeleteAutoscalingPolicy", request_serializer=autoscaling_policies.DeleteAutoscalingPolicyRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_autoscaling_policy"] diff --git a/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py b/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py index f098fe12..5f9a1017 100644 --- a/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py +++ b/google/cloud/dataproc_v1/services/autoscaling_policy_service/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.dataproc_v1.types import autoscaling_policies -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import AutoscalingPolicyServiceTransport, DEFAULT_CLIENT_INFO from .grpc import AutoscalingPolicyServiceGrpcTransport @@ -55,7 +52,7 @@ class AutoscalingPolicyServiceGrpcAsyncIOTransport(AutoscalingPolicyServiceTrans def create_channel( cls, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -82,13 +79,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -96,7 +95,7 @@ def __init__( self, *, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -110,7 +109,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -168,7 +168,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -350,7 +349,8 @@ def list_autoscaling_policies( def delete_autoscaling_policy( self, ) -> Callable[ - [autoscaling_policies.DeleteAutoscalingPolicyRequest], Awaitable[empty.Empty] + [autoscaling_policies.DeleteAutoscalingPolicyRequest], + Awaitable[empty_pb2.Empty], ]: r"""Return a callable for the delete autoscaling policy method over gRPC. @@ -372,7 +372,7 @@ def delete_autoscaling_policy( self._stubs["delete_autoscaling_policy"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1.AutoscalingPolicyService/DeleteAutoscalingPolicy", request_serializer=autoscaling_policies.DeleteAutoscalingPolicyRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_autoscaling_policy"] diff --git a/google/cloud/dataproc_v1/services/cluster_controller/__init__.py b/google/cloud/dataproc_v1/services/cluster_controller/__init__.py index 99ce2997..62affd76 100644 --- a/google/cloud/dataproc_v1/services/cluster_controller/__init__.py +++ b/google/cloud/dataproc_v1/services/cluster_controller/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .client import ClusterControllerClient from .async_client import ClusterControllerAsyncClient diff --git a/google/cloud/dataproc_v1/services/cluster_controller/async_client.py b/google/cloud/dataproc_v1/services/cluster_controller/async_client.py index 924c35f0..67dc8a57 100644 --- a/google/cloud/dataproc_v1/services/cluster_controller/async_client.py +++ b/google/cloud/dataproc_v1/services/cluster_controller/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict import functools import re @@ -22,10 +20,10 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.api_core import operation # type: ignore @@ -33,9 +31,8 @@ from google.cloud.dataproc_v1.services.cluster_controller import pagers from google.cloud.dataproc_v1.types import clusters from google.cloud.dataproc_v1.types import operations -from google.protobuf import empty_pb2 as empty # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore - +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore from .transports.base import ClusterControllerTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import ClusterControllerGrpcAsyncIOTransport from .client import ClusterControllerClient @@ -51,30 +48,30 @@ class ClusterControllerAsyncClient: DEFAULT_ENDPOINT = ClusterControllerClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = ClusterControllerClient.DEFAULT_MTLS_ENDPOINT + cluster_path = staticmethod(ClusterControllerClient.cluster_path) + parse_cluster_path = staticmethod(ClusterControllerClient.parse_cluster_path) + service_path = staticmethod(ClusterControllerClient.service_path) + parse_service_path = staticmethod(ClusterControllerClient.parse_service_path) common_billing_account_path = staticmethod( ClusterControllerClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( ClusterControllerClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(ClusterControllerClient.common_folder_path) parse_common_folder_path = staticmethod( ClusterControllerClient.parse_common_folder_path ) - common_organization_path = staticmethod( ClusterControllerClient.common_organization_path ) parse_common_organization_path = staticmethod( ClusterControllerClient.parse_common_organization_path ) - common_project_path = staticmethod(ClusterControllerClient.common_project_path) parse_common_project_path = staticmethod( ClusterControllerClient.parse_common_project_path ) - common_location_path = staticmethod(ClusterControllerClient.common_location_path) parse_common_location_path = staticmethod( ClusterControllerClient.parse_common_location_path @@ -82,7 +79,8 @@ class ClusterControllerAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -97,7 +95,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. 
Args: filename (str): The path to the service account private key json @@ -114,7 +112,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> ClusterControllerTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: ClusterControllerTransport: The transport used by the client instance. @@ -128,12 +126,12 @@ def transport(self) -> ClusterControllerTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, ClusterControllerTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the cluster controller client. + """Instantiates the cluster controller client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -165,7 +163,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = ClusterControllerClient( credentials=credentials, transport=transport, @@ -212,7 +209,6 @@ async def create_cluster( This corresponds to the ``cluster`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -241,7 +237,6 @@ async def create_cluster( # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -257,7 +252,9 @@ async def create_cluster( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=300.0, ), default_timeout=300.0, @@ -286,7 +283,7 @@ async def update_cluster( region: str = None, cluster_name: str = None, cluster: clusters.Cluster = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -390,7 +387,6 @@ async def update_cluster( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -421,7 +417,6 @@ async def update_cluster( # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -441,7 +436,9 @@ async def update_cluster( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=300.0, ), default_timeout=300.0, @@ -462,6 +459,110 @@ async def update_cluster( # Done; return the response. return response + async def stop_cluster( + self, + request: clusters.StopClusterRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Stops a cluster in a project. 
+ + Args: + request (:class:`google.cloud.dataproc_v1.types.StopClusterRequest`): + The request object. A request to stop a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataproc_v1.types.Cluster` Describes the identifying information, config, and status of + a cluster of Compute Engine instances. + + """ + # Create or coerce a protobuf request object. + request = clusters.StopClusterRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.stop_cluster, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + clusters.Cluster, + metadata_type=operations.ClusterOperationMetadata, + ) + + # Done; return the response. + return response + + async def start_cluster( + self, + request: clusters.StartClusterRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Starts a cluster in a project. + + Args: + request (:class:`google.cloud.dataproc_v1.types.StartClusterRequest`): + The request object. A request to start a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataproc_v1.types.Cluster` Describes the identifying information, config, and status of + a cluster of Compute Engine instances. + + """ + # Create or coerce a protobuf request object. + request = clusters.StartClusterRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.start_cluster, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + clusters.Cluster, + metadata_type=operations.ClusterOperationMetadata, + ) + + # Done; return the response. + return response + async def delete_cluster( self, request: clusters.DeleteClusterRequest = None, @@ -501,7 +602,6 @@ async def delete_cluster( This corresponds to the ``cluster_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
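The stop_cluster and start_cluster methods added above accept only a request object (they expose no flattened fields) and return a long-running operation that resolves to a Cluster. A minimal usage sketch for the async surface, assuming placeholder project/region/cluster names and the regional-endpoint convention used in Dataproc samples:

    import asyncio

    from google.cloud import dataproc_v1

    async def stop_then_start() -> None:
        # Cluster control RPCs are served from regional endpoints; the
        # endpoint and resource names here are placeholders.
        client = dataproc_v1.ClusterControllerAsyncClient(
            client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
        )

        operation = await client.stop_cluster(
            request=dataproc_v1.StopClusterRequest(
                project_id="my-project",
                region="us-central1",
                cluster_name="my-cluster",
            )
        )
        cluster = await operation.result()  # AsyncOperation resolves to a Cluster
        print(cluster.status.state)  # STOPPED once the LRO completes

        # start_cluster mirrors stop_cluster and also resolves to a Cluster.
        operation = await client.start_cluster(
            request=dataproc_v1.StartClusterRequest(
                project_id="my-project",
                region="us-central1",
                cluster_name="my-cluster",
            )
        )
        await operation.result()

    asyncio.run(stop_then_start())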
@@ -541,7 +641,6 @@ async def delete_cluster( # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -557,7 +656,9 @@ async def delete_cluster( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=300.0, ), default_timeout=300.0, @@ -571,7 +672,7 @@ async def delete_cluster( response = operation_async.from_gapic( response, self._client._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=operations.ClusterOperationMetadata, ) @@ -616,7 +717,6 @@ async def get_cluster( This corresponds to the ``cluster_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -644,7 +744,6 @@ async def get_cluster( # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -661,9 +760,9 @@ async def get_cluster( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -738,7 +837,6 @@ async def list_clusters( This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -768,7 +866,6 @@ async def list_clusters( # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -785,9 +882,9 @@ async def list_clusters( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -851,7 +948,6 @@ async def diagnose_cluster( This corresponds to the ``cluster_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -881,7 +977,6 @@ async def diagnose_cluster( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if project_id is not None: request.project_id = project_id if region is not None: @@ -897,7 +992,9 @@ async def diagnose_cluster( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=300.0, ), default_timeout=300.0, diff --git a/google/cloud/dataproc_v1/services/cluster_controller/client.py b/google/cloud/dataproc_v1/services/cluster_controller/client.py index daef141a..e6186f8c 100644 --- a/google/cloud/dataproc_v1/services/cluster_controller/client.py +++ b/google/cloud/dataproc_v1/services/cluster_controller/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from distutils import util import os @@ -23,10 +21,10 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -37,9 +35,8 @@ from google.cloud.dataproc_v1.services.cluster_controller import pagers from google.cloud.dataproc_v1.types import clusters from google.cloud.dataproc_v1.types import operations -from google.protobuf import empty_pb2 as empty # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore - +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore from .transports.base import ClusterControllerTransport, DEFAULT_CLIENT_INFO from .transports.grpc import ClusterControllerGrpcTransport from .transports.grpc_asyncio import ClusterControllerGrpcAsyncIOTransport @@ -62,7 +59,7 @@ class ClusterControllerClientMeta(type): def get_transport_class( cls, label: str = None, ) -> Type[ClusterControllerTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -87,7 +84,8 @@ class ClusterControllerClient(metaclass=ClusterControllerClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -121,7 +119,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. 
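The hunks around this point update the from_service_account_info and from_service_account_file constructors; combined with the version-gated _get_self_signed_jwt_kwargs helper added to the transport base classes above, service-account credentials can use the self-signed JWT flow (no OAuth token exchange) when google-api-core >= 1.26.0 is installed. An illustrative sketch, with a placeholder key path:

    from google.cloud import dataproc_v1

    # Placeholder path to a service-account key file. With google-api-core
    # >= 1.26.0 the transport forwards default_scopes and default_host to
    # create_channel, which lets google-auth mint self-signed JWTs for
    # these credentials instead of exchanging an OAuth access token.
    client = dataproc_v1.ClusterControllerClient.from_service_account_file(
        "/path/to/service-account-key.json"
    )

    # The transport property shown in this diff exposes the underlying
    # gRPC transport (ClusterControllerGrpcTransport by default).
    print(type(client.transport).__name__)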
@@ -138,7 +137,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -157,16 +156,49 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> ClusterControllerTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - ClusterControllerTransport: The transport used by the client instance. + ClusterControllerTransport: The transport used by the client + instance. """ return self._transport + @staticmethod + def cluster_path(project: str, location: str, cluster: str,) -> str: + """Returns a fully-qualified cluster string.""" + return "projects/{project}/locations/{location}/clusters/{cluster}".format( + project=project, location=location, cluster=cluster, + ) + + @staticmethod + def parse_cluster_path(path: str) -> Dict[str, str]: + """Parses a cluster path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/clusters/(?P<cluster>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def service_path(project: str, location: str, service: str,) -> str: + """Returns a fully-qualified service string.""" + return "projects/{project}/locations/{location}/services/{service}".format( + project=project, location=location, service=service, + ) + + @staticmethod + def parse_service_path(path: str) -> Dict[str, str]: + """Parses a service path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/services/(?P<service>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -179,7 +211,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -190,7 +222,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -201,7 +233,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -212,7 +244,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -226,12 +258,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials:
Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, ClusterControllerTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the cluster controller client. + """Instantiates the cluster controller client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -286,9 +318,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -300,12 +333,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -320,8 +355,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -375,7 +410,6 @@ def create_cluster( This corresponds to the ``cluster`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -406,10 +440,8 @@ def create_cluster( # there are no flattened fields. if not isinstance(request, clusters.CreateClusterRequest): request = clusters.CreateClusterRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -443,7 +475,7 @@ def update_cluster( region: str = None, cluster_name: str = None, cluster: clusters.Cluster = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -547,7 +579,6 @@ def update_cluster( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -580,10 +611,8 @@ def update_cluster( # there are no flattened fields. if not isinstance(request, clusters.UpdateClusterRequest): request = clusters.UpdateClusterRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -613,6 +642,112 @@ def update_cluster( # Done; return the response. 
return response + def stop_cluster( + self, + request: clusters.StopClusterRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Stops a cluster in a project. + + Args: + request (google.cloud.dataproc_v1.types.StopClusterRequest): + The request object. A request to stop a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataproc_v1.types.Cluster` Describes the identifying information, config, and status of + a cluster of Compute Engine instances. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clusters.StopClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clusters.StopClusterRequest): + request = clusters.StopClusterRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.stop_cluster] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clusters.Cluster, + metadata_type=operations.ClusterOperationMetadata, + ) + + # Done; return the response. + return response + + def start_cluster( + self, + request: clusters.StartClusterRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Starts a cluster in a project. + + Args: + request (google.cloud.dataproc_v1.types.StartClusterRequest): + The request object. A request to start a cluster. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataproc_v1.types.Cluster` Describes the identifying information, config, and status of + a cluster of Compute Engine instances. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clusters.StartClusterRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clusters.StartClusterRequest): + request = clusters.StartClusterRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.start_cluster] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + clusters.Cluster, + metadata_type=operations.ClusterOperationMetadata, + ) + + # Done; return the response. + return response + def delete_cluster( self, request: clusters.DeleteClusterRequest = None, @@ -652,7 +787,6 @@ def delete_cluster( This corresponds to the ``cluster_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -694,10 +828,8 @@ def delete_cluster( # there are no flattened fields. if not isinstance(request, clusters.DeleteClusterRequest): request = clusters.DeleteClusterRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -716,7 +848,7 @@ def delete_cluster( response = operation.from_gapic( response, self._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=operations.ClusterOperationMetadata, ) @@ -761,7 +893,6 @@ def get_cluster( This corresponds to the ``cluster_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -791,10 +922,8 @@ def get_cluster( # there are no flattened fields. if not isinstance(request, clusters.GetClusterRequest): request = clusters.GetClusterRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -873,7 +1002,6 @@ def list_clusters( This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -905,10 +1033,8 @@ def list_clusters( # there are no flattened fields. if not isinstance(request, clusters.ListClustersRequest): request = clusters.ListClustersRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -976,7 +1102,6 @@ def diagnose_cluster( This corresponds to the ``cluster_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1008,10 +1133,8 @@ def diagnose_cluster( # there are no flattened fields. if not isinstance(request, clusters.DiagnoseClusterRequest): request = clusters.DiagnoseClusterRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if project_id is not None: request.project_id = project_id if region is not None: diff --git a/google/cloud/dataproc_v1/services/cluster_controller/pagers.py b/google/cloud/dataproc_v1/services/cluster_controller/pagers.py index 418c92b1..ead31b4b 100644 --- a/google/cloud/dataproc_v1/services/cluster_controller/pagers.py +++ b/google/cloud/dataproc_v1/services/cluster_controller/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -117,7 +115,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/dataproc_v1/services/cluster_controller/transports/__init__.py b/google/cloud/dataproc_v1/services/cluster_controller/transports/__init__.py index a487348a..df574dd0 100644 --- a/google/cloud/dataproc_v1/services/cluster_controller/transports/__init__.py +++ b/google/cloud/dataproc_v1/services/cluster_controller/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/dataproc_v1/services/cluster_controller/transports/base.py b/google/cloud/dataproc_v1/services/cluster_controller/transports/base.py index 3583488a..53e0db16 100644 --- a/google/cloud/dataproc_v1/services/cluster_controller/transports/base.py +++ b/google/cloud/dataproc_v1/services/cluster_controller/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,21 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.dataproc_v1.types import clusters -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -37,27 +36,41 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class ClusterControllerTransport(abc.ABC): """Abstract transport class for ClusterController.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + DEFAULT_HOST: str = "dataproc.googleapis.com" + def __init__( self, *, - host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -66,7 +79,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -80,29 +93,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -112,7 +172,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=300.0, ), default_timeout=300.0, @@ -124,19 +186,29 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=300.0, ), default_timeout=300.0, client_info=client_info, ), + self.stop_cluster: gapic_v1.method.wrap_method( + self.stop_cluster, default_timeout=None, client_info=client_info, + ), + self.start_cluster: gapic_v1.method.wrap_method( + self.start_cluster, default_timeout=None, client_info=client_info, + ), self.delete_cluster: gapic_v1.method.wrap_method( self.delete_cluster, default_retry=retries.Retry( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=300.0, ), default_timeout=300.0, @@ -149,9 +221,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -165,9 +237,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -180,7 +252,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=300.0, ), default_timeout=300.0, @@ -196,57 +270,72 @@ def operations_client(self) -> operations_v1.OperationsClient: @property def create_cluster( self, - ) -> typing.Callable[ + ) -> Callable[ [clusters.CreateClusterRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def update_cluster( self, - ) -> typing.Callable[ + ) -> Callable[ [clusters.UpdateClusterRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def stop_cluster( + self, + ) -> Callable[ + [clusters.StopClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def start_cluster( + self, + ) -> Callable[ + [clusters.StartClusterRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def delete_cluster( self, - ) -> typing.Callable[ + ) -> Callable[ [clusters.DeleteClusterRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + 
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def get_cluster( self, - ) -> typing.Callable[ + ) -> Callable[ [clusters.GetClusterRequest], - typing.Union[clusters.Cluster, typing.Awaitable[clusters.Cluster]], + Union[clusters.Cluster, Awaitable[clusters.Cluster]], ]: raise NotImplementedError() @property def list_clusters( self, - ) -> typing.Callable[ + ) -> Callable[ [clusters.ListClustersRequest], - typing.Union[ - clusters.ListClustersResponse, - typing.Awaitable[clusters.ListClustersResponse], - ], + Union[clusters.ListClustersResponse, Awaitable[clusters.ListClustersResponse]], ]: raise NotImplementedError() @property def diagnose_cluster( self, - ) -> typing.Callable[ + ) -> Callable[ [clusters.DiagnoseClusterRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() diff --git a/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py b/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py index f8c3e60a..5936c539 100644 --- a/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py +++ b/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import operations_v1 # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.dataproc_v1.types import clusters -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore from .base import ClusterControllerTransport, DEFAULT_CLIENT_INFO @@ -53,7 +50,7 @@ def __init__( self, *, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -67,7 +64,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -178,7 +176,7 @@ def __init__( def create_channel( cls, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -209,13 +207,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -242,7 +242,7 @@ def operations_client(self) -> operations_v1.OperationsClient: @property def create_cluster( self, - ) -> Callable[[clusters.CreateClusterRequest], operations.Operation]: + ) -> Callable[[clusters.CreateClusterRequest], operations_pb2.Operation]: r"""Return a callable for the create cluster method over gRPC. Creates a cluster in a project. The returned @@ -264,14 +264,14 @@ def create_cluster( self._stubs["create_cluster"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1.ClusterController/CreateCluster", request_serializer=clusters.CreateClusterRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_cluster"] @property def update_cluster( self, - ) -> Callable[[clusters.UpdateClusterRequest], operations.Operation]: + ) -> Callable[[clusters.UpdateClusterRequest], operations_pb2.Operation]: r"""Return a callable for the update cluster method over gRPC. Updates a cluster in a project. The returned @@ -293,14 +293,66 @@ def update_cluster( self._stubs["update_cluster"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1.ClusterController/UpdateCluster", request_serializer=clusters.UpdateClusterRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_cluster"] + @property + def stop_cluster( + self, + ) -> Callable[[clusters.StopClusterRequest], operations_pb2.Operation]: + r"""Return a callable for the stop cluster method over gRPC. + + Stops a cluster in a project. + + Returns: + Callable[[~.StopClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "stop_cluster" not in self._stubs: + self._stubs["stop_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.ClusterController/StopCluster", + request_serializer=clusters.StopClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["stop_cluster"] + + @property + def start_cluster( + self, + ) -> Callable[[clusters.StartClusterRequest], operations_pb2.Operation]: + r"""Return a callable for the start cluster method over gRPC. + + Starts a cluster in a project. + + Returns: + Callable[[~.StartClusterRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "start_cluster" not in self._stubs: + self._stubs["start_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.ClusterController/StartCluster", + request_serializer=clusters.StartClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["start_cluster"] + @property def delete_cluster( self, - ) -> Callable[[clusters.DeleteClusterRequest], operations.Operation]: + ) -> Callable[[clusters.DeleteClusterRequest], operations_pb2.Operation]: r"""Return a callable for the delete cluster method over gRPC. Deletes a cluster in a project. The returned @@ -322,7 +374,7 @@ def delete_cluster( self._stubs["delete_cluster"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1.ClusterController/DeleteCluster", request_serializer=clusters.DeleteClusterRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["delete_cluster"] @@ -381,7 +433,7 @@ def list_clusters( @property def diagnose_cluster( self, - ) -> Callable[[clusters.DiagnoseClusterRequest], operations.Operation]: + ) -> Callable[[clusters.DiagnoseClusterRequest], operations_pb2.Operation]: r"""Return a callable for the diagnose cluster method over gRPC. Gets cluster diagnostic information. The returned @@ -407,7 +459,7 @@ def diagnose_cluster( self._stubs["diagnose_cluster"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1.ClusterController/DiagnoseCluster", request_serializer=clusters.DiagnoseClusterRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["diagnose_cluster"] diff --git a/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py b/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py index e27a348b..2e58151f 100644 --- a/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py +++ b/google/cloud/dataproc_v1/services/cluster_controller/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,23 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore from google.api_core import operations_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.dataproc_v1.types import clusters -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore from .base import ClusterControllerTransport, DEFAULT_CLIENT_INFO from .grpc import ClusterControllerGrpcTransport @@ -56,7 +53,7 @@ class ClusterControllerGrpcAsyncIOTransport(ClusterControllerTransport): def create_channel( cls, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -83,13 +80,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -97,7 +96,7 @@ def __init__( self, *, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -111,7 +110,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -170,7 +170,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -248,7 +247,7 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: @property def create_cluster( self, - ) -> Callable[[clusters.CreateClusterRequest], Awaitable[operations.Operation]]: + ) -> Callable[[clusters.CreateClusterRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the create cluster method over gRPC. Creates a cluster in a project. 
The returned @@ -270,14 +269,14 @@ def create_cluster( self._stubs["create_cluster"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1.ClusterController/CreateCluster", request_serializer=clusters.CreateClusterRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_cluster"] @property def update_cluster( self, - ) -> Callable[[clusters.UpdateClusterRequest], Awaitable[operations.Operation]]: + ) -> Callable[[clusters.UpdateClusterRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the update cluster method over gRPC. Updates a cluster in a project. The returned @@ -299,14 +298,66 @@ def update_cluster( self._stubs["update_cluster"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1.ClusterController/UpdateCluster", request_serializer=clusters.UpdateClusterRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_cluster"] + @property + def stop_cluster( + self, + ) -> Callable[[clusters.StopClusterRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the stop cluster method over gRPC. + + Stops a cluster in a project. + + Returns: + Callable[[~.StopClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "stop_cluster" not in self._stubs: + self._stubs["stop_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.ClusterController/StopCluster", + request_serializer=clusters.StopClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["stop_cluster"] + + @property + def start_cluster( + self, + ) -> Callable[[clusters.StartClusterRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the start cluster method over gRPC. + + Starts a cluster in a project. + + Returns: + Callable[[~.StartClusterRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "start_cluster" not in self._stubs: + self._stubs["start_cluster"] = self.grpc_channel.unary_unary( + "/google.cloud.dataproc.v1.ClusterController/StartCluster", + request_serializer=clusters.StartClusterRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["start_cluster"] + @property def delete_cluster( self, - ) -> Callable[[clusters.DeleteClusterRequest], Awaitable[operations.Operation]]: + ) -> Callable[[clusters.DeleteClusterRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the delete cluster method over gRPC. Deletes a cluster in a project. 
The returned @@ -328,7 +379,7 @@ def delete_cluster( self._stubs["delete_cluster"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1.ClusterController/DeleteCluster", request_serializer=clusters.DeleteClusterRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["delete_cluster"] @@ -391,7 +442,9 @@ def list_clusters( @property def diagnose_cluster( self, - ) -> Callable[[clusters.DiagnoseClusterRequest], Awaitable[operations.Operation]]: + ) -> Callable[ + [clusters.DiagnoseClusterRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the diagnose cluster method over gRPC. Gets cluster diagnostic information. The returned @@ -417,7 +470,7 @@ def diagnose_cluster( self._stubs["diagnose_cluster"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1.ClusterController/DiagnoseCluster", request_serializer=clusters.DiagnoseClusterRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["diagnose_cluster"] diff --git a/google/cloud/dataproc_v1/services/job_controller/__init__.py b/google/cloud/dataproc_v1/services/job_controller/__init__.py index 5bb83207..ca574e29 100644 --- a/google/cloud/dataproc_v1/services/job_controller/__init__.py +++ b/google/cloud/dataproc_v1/services/job_controller/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .client import JobControllerClient from .async_client import JobControllerAsyncClient diff --git a/google/cloud/dataproc_v1/services/job_controller/async_client.py b/google/cloud/dataproc_v1/services/job_controller/async_client.py index cc5d6522..809b1178 100644 --- a/google/cloud/dataproc_v1/services/job_controller/async_client.py +++ b/google/cloud/dataproc_v1/services/job_controller/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
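Before moving on to the job controller: the asyncio cluster transport above mirrors the sync stubs one-for-one, so the same stop/start flow works on the async client. A sketch, again with placeholder names:

```python
# Sketch: the same stop/start cycle on the asyncio surface. AsyncOperation's
# result() is itself a coroutine, so it is awaited as well.
import asyncio

from google.cloud import dataproc_v1


async def cycle_cluster() -> None:
    client = dataproc_v1.ClusterControllerAsyncClient(
        client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
    )
    request = {
        "project_id": "my-project",    # placeholder
        "region": "us-central1",
        "cluster_name": "my-cluster",  # placeholder
    }
    stop_op = await client.stop_cluster(request=request)
    await stop_op.result()
    start_op = await client.start_cluster(request=request)
    await start_op.result()


asyncio.run(cycle_cluster())
```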
# - from collections import OrderedDict import functools import re @@ -22,17 +20,16 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.dataproc_v1.services.job_controller import pagers from google.cloud.dataproc_v1.types import jobs - from .transports.base import JobControllerTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import JobControllerGrpcAsyncIOTransport from .client import JobControllerClient @@ -52,24 +49,20 @@ class JobControllerAsyncClient: parse_common_billing_account_path = staticmethod( JobControllerClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(JobControllerClient.common_folder_path) parse_common_folder_path = staticmethod( JobControllerClient.parse_common_folder_path ) - common_organization_path = staticmethod( JobControllerClient.common_organization_path ) parse_common_organization_path = staticmethod( JobControllerClient.parse_common_organization_path ) - common_project_path = staticmethod(JobControllerClient.common_project_path) parse_common_project_path = staticmethod( JobControllerClient.parse_common_project_path ) - common_location_path = staticmethod(JobControllerClient.common_location_path) parse_common_location_path = staticmethod( JobControllerClient.parse_common_location_path @@ -77,7 +70,8 @@ class JobControllerAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -92,7 +86,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -109,7 +103,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> JobControllerTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: JobControllerTransport: The transport used by the client instance. @@ -123,12 +117,12 @@ def transport(self) -> JobControllerTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, JobControllerTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the job controller client. + """Instantiates the job controller client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -160,7 +154,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. 
""" - self._client = JobControllerClient( credentials=credentials, transport=transport, @@ -204,7 +197,6 @@ async def submit_job( This corresponds to the ``job`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -229,7 +221,6 @@ async def submit_job( # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -245,7 +236,9 @@ async def submit_job( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=900.0, ), default_timeout=900.0, @@ -294,7 +287,6 @@ async def submit_job_as_operation( This corresponds to the ``job`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -324,7 +316,6 @@ async def submit_job_as_operation( # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -340,7 +331,9 @@ async def submit_job_as_operation( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=900.0, ), default_timeout=900.0, @@ -399,7 +392,6 @@ async def get_job( This corresponds to the ``job_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -424,7 +416,6 @@ async def get_job( # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -441,9 +432,9 @@ async def get_job( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=900.0, ), @@ -510,7 +501,6 @@ async def list_jobs( This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -539,7 +529,6 @@ async def list_jobs( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if project_id is not None: request.project_id = project_id if region is not None: @@ -556,9 +545,9 @@ async def list_jobs( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=900.0, ), @@ -591,7 +580,6 @@ async def update_job( Args: request (:class:`google.cloud.dataproc_v1.types.UpdateJobRequest`): The request object. A request to update a job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -603,7 +591,6 @@ async def update_job( A Dataproc job resource. """ # Create or coerce a protobuf request object. - request = jobs.UpdateJobRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -614,7 +601,9 @@ async def update_job( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=900.0, ), default_timeout=900.0, @@ -667,7 +656,6 @@ async def cancel_job( This corresponds to the ``job_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -692,7 +680,6 @@ async def cancel_job( # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -709,9 +696,9 @@ async def cancel_job( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=900.0, ), @@ -762,7 +749,6 @@ async def delete_job( This corresponds to the ``job_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -783,7 +769,6 @@ async def delete_job( # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -799,7 +784,9 @@ async def delete_job( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=900.0, ), default_timeout=900.0, diff --git a/google/cloud/dataproc_v1/services/job_controller/client.py b/google/cloud/dataproc_v1/services/job_controller/client.py index 92d3a67e..6e07eab7 100644 --- a/google/cloud/dataproc_v1/services/job_controller/client.py +++ b/google/cloud/dataproc_v1/services/job_controller/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
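All of the job methods in the async client above share the same default retry shape: exponential backoff from 0.1s with a 1.3 multiplier, capped at 60s per attempt under a 900s overall deadline, retrying ServiceUnavailable (plus DeadlineExceeded and InternalServerError for the read-only calls). Callers can override this per call; a sketch against the sync client, with placeholder identifiers:

```python
# Sketch: overriding the generated retry policy on a single call by widening
# the retryable-error predicate and tightening the deadline.
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.cloud import dataproc_v1

client = dataproc_v1.JobControllerClient()

custom_retry = retries.Retry(
    initial=0.5,
    maximum=30.0,
    multiplier=2.0,
    predicate=retries.if_exception_type(
        core_exceptions.ServiceUnavailable,
        core_exceptions.DeadlineExceeded,
    ),
    deadline=300.0,
)

job = client.get_job(
    project_id="my-project",   # placeholder
    region="us-central1",
    job_id="my-job-id",        # placeholder
    retry=custom_retry,
)
```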
# - from collections import OrderedDict from distutils import util import os @@ -23,10 +21,10 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -36,7 +34,6 @@ from google.api_core import operation_async # type: ignore from google.cloud.dataproc_v1.services.job_controller import pagers from google.cloud.dataproc_v1.types import jobs - from .transports.base import JobControllerTransport, DEFAULT_CLIENT_INFO from .transports.grpc import JobControllerGrpcTransport from .transports.grpc_asyncio import JobControllerGrpcAsyncIOTransport @@ -55,7 +52,7 @@ class JobControllerClientMeta(type): _transport_registry["grpc_asyncio"] = JobControllerGrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[JobControllerTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -78,7 +75,8 @@ class JobControllerClient(metaclass=JobControllerClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -112,7 +110,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -129,7 +128,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -148,16 +147,17 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> JobControllerTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - JobControllerTransport: The transport used by the client instance. + JobControllerTransport: The transport used by the client + instance. 
""" return self._transport @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -170,7 +170,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -181,7 +181,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -192,7 +192,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -203,7 +203,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -217,12 +217,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, JobControllerTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the job controller client. + """Instantiates the job controller client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -277,9 +277,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -291,12 +292,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -311,8 +314,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." 
) self._transport = transport else: @@ -363,7 +366,6 @@ def submit_job( This corresponds to the ``job`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -390,10 +392,8 @@ def submit_job( # there are no flattened fields. if not isinstance(request, jobs.SubmitJobRequest): request = jobs.SubmitJobRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -447,7 +447,6 @@ def submit_job_as_operation( This corresponds to the ``job`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -479,10 +478,8 @@ def submit_job_as_operation( # there are no flattened fields. if not isinstance(request, jobs.SubmitJobRequest): request = jobs.SubmitJobRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -546,7 +543,6 @@ def get_job( This corresponds to the ``job_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -573,10 +569,8 @@ def get_job( # there are no flattened fields. if not isinstance(request, jobs.GetJobRequest): request = jobs.GetJobRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -647,7 +641,6 @@ def list_jobs( This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -678,10 +671,8 @@ def list_jobs( # there are no flattened fields. if not isinstance(request, jobs.ListJobsRequest): request = jobs.ListJobsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -718,7 +709,6 @@ def update_job( Args: request (google.cloud.dataproc_v1.types.UpdateJobRequest): The request object. A request to update a job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -730,7 +720,6 @@ def update_job( A Dataproc job resource. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a jobs.UpdateJobRequest. # There's no risk of modifying the input as we've already verified @@ -788,7 +777,6 @@ def cancel_job( This corresponds to the ``job_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -815,10 +803,8 @@ def cancel_job( # there are no flattened fields. 
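Stepping back to the constructor logic above: the API endpoint is chosen from the GOOGLE_API_USE_MTLS_ENDPOINT environment variable together with whether a client certificate is available. A standalone sketch of that decision (the real code raises MutualTLSChannelError; ValueError is used here to keep the sketch dependency-free):

```python
# Sketch of the endpoint decision implemented in the client constructor,
# driven by GOOGLE_API_USE_MTLS_ENDPOINT (never | auto | always).
import os

DEFAULT_ENDPOINT = "dataproc.googleapis.com"
DEFAULT_MTLS_ENDPOINT = "dataproc.mtls.googleapis.com"


def pick_endpoint(has_client_cert: bool) -> str:
    use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
    if use_mtls_env == "never":
        return DEFAULT_ENDPOINT
    if use_mtls_env == "always":
        return DEFAULT_MTLS_ENDPOINT
    if use_mtls_env == "auto":
        return DEFAULT_MTLS_ENDPOINT if has_client_cert else DEFAULT_ENDPOINT
    raise ValueError(
        "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. "
        "Accepted values: never, auto, always"
    )
```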
if not isinstance(request, jobs.CancelJobRequest): request = jobs.CancelJobRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -873,7 +859,6 @@ def delete_job( This corresponds to the ``job_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -896,10 +881,8 @@ def delete_job( # there are no flattened fields. if not isinstance(request, jobs.DeleteJobRequest): request = jobs.DeleteJobRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: diff --git a/google/cloud/dataproc_v1/services/job_controller/pagers.py b/google/cloud/dataproc_v1/services/job_controller/pagers.py index 77ae8be8..da3c2c46 100644 --- a/google/cloud/dataproc_v1/services/job_controller/pagers.py +++ b/google/cloud/dataproc_v1/services/job_controller/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -117,7 +115,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/dataproc_v1/services/job_controller/transports/__init__.py b/google/cloud/dataproc_v1/services/job_controller/transports/__init__.py index d28c850a..f5701c97 100644 --- a/google/cloud/dataproc_v1/services/job_controller/transports/__init__.py +++ b/google/cloud/dataproc_v1/services/job_controller/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/dataproc_v1/services/job_controller/transports/base.py b/google/cloud/dataproc_v1/services/job_controller/transports/base.py index 15bf4766..60283c02 100644 --- a/google/cloud/dataproc_v1/services/job_controller/transports/base.py +++ b/google/cloud/dataproc_v1/services/job_controller/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,22 @@ # See the License for the specific language governing permissions and # limitations under the License. 
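The repeated `isinstance` checks in the client methods above implement a small contract: each method accepts either a fully-formed request message (passed through without copying) or flattened keyword fields (coerced into one), never both. A sketch with placeholder values:

```python
# Sketch: two equivalent ways to call submit_job. Mixing a request object
# with flattened fields raises ValueError in the generated client.
from google.cloud import dataproc_v1
from google.cloud.dataproc_v1.types import jobs

client = dataproc_v1.JobControllerClient()

job = {
    "placement": {"cluster_name": "my-cluster"},              # placeholder
    "pig_job": {"query_list": {"queries": ["sh echo hi"]}},   # placeholder
}

# 1. A typed request object, used as-is (the "minor optimization" above).
request = jobs.SubmitJobRequest(
    project_id="my-project", region="us-central1", job=job
)
submitted = client.submit_job(request=request)

# 2. Flattened fields, copied into a new SubmitJobRequest internally.
submitted = client.submit_job(
    project_id="my-project", region="us-central1", job=job
)
```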
# - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.dataproc_v1.types import jobs -from google.longrunning import operations_pb2 as operations # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -38,27 +37,41 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class JobControllerTransport(abc.ABC): """Abstract transport class for JobController.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + DEFAULT_HOST: str = "dataproc.googleapis.com" + def __init__( self, *, - host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -67,7 +80,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -81,29 +94,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
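The two `TODO` helpers above exist only to bridge older google-auth and google-api-core releases, and they key off parsed versions rather than string comparison. A condensed sketch of the gate, mirroring `_get_scopes_kwargs`:

```python
# Sketch: why packaging.version is used for the version gates above. Plain
# string comparison mis-orders versions ("1.9.0" > "1.25.0" lexicographically).
import packaging.version

AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)


def scopes_kwargs(google_auth_version: str, scopes=None) -> dict:
    if packaging.version.parse(google_auth_version) >= packaging.version.parse("1.25.0"):
        # Newer google-auth understands default_scopes alongside user scopes.
        return {"scopes": scopes, "default_scopes": AUTH_SCOPES}
    # Older google-auth: collapse to a single scopes argument.
    return {"scopes": scopes or AUTH_SCOPES}


assert scopes_kwargs("1.30.0") == {"scopes": None, "default_scopes": AUTH_SCOPES}
assert scopes_kwargs("1.9.0") == {"scopes": AUTH_SCOPES}
assert "1.9.0" > "1.25.0"  # the string comparison that would get this wrong
```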
self._wrapped_methods = { @@ -113,7 +173,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=900.0, ), default_timeout=900.0, @@ -125,7 +187,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=900.0, ), default_timeout=900.0, @@ -138,9 +202,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=900.0, ), @@ -154,9 +218,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=900.0, ), @@ -169,7 +233,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=900.0, ), default_timeout=900.0, @@ -182,9 +248,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=900.0, ), @@ -197,7 +263,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=900.0, ), default_timeout=900.0, @@ -213,59 +281,50 @@ def operations_client(self) -> operations_v1.OperationsClient: @property def submit_job( self, - ) -> typing.Callable[ - [jobs.SubmitJobRequest], typing.Union[jobs.Job, typing.Awaitable[jobs.Job]] - ]: + ) -> Callable[[jobs.SubmitJobRequest], Union[jobs.Job, Awaitable[jobs.Job]]]: raise NotImplementedError() @property def submit_job_as_operation( self, - ) -> typing.Callable[ + ) -> Callable[ [jobs.SubmitJobRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def get_job( self, - ) -> typing.Callable[ - [jobs.GetJobRequest], typing.Union[jobs.Job, typing.Awaitable[jobs.Job]] - ]: + ) -> Callable[[jobs.GetJobRequest], Union[jobs.Job, Awaitable[jobs.Job]]]: raise NotImplementedError() @property def list_jobs( self, - ) -> typing.Callable[ + ) -> Callable[ [jobs.ListJobsRequest], - typing.Union[jobs.ListJobsResponse, typing.Awaitable[jobs.ListJobsResponse]], + Union[jobs.ListJobsResponse, Awaitable[jobs.ListJobsResponse]], ]: raise NotImplementedError() @property def update_job( self, - ) -> typing.Callable[ - 
[jobs.UpdateJobRequest], typing.Union[jobs.Job, typing.Awaitable[jobs.Job]] - ]: + ) -> Callable[[jobs.UpdateJobRequest], Union[jobs.Job, Awaitable[jobs.Job]]]: raise NotImplementedError() @property def cancel_job( self, - ) -> typing.Callable[ - [jobs.CancelJobRequest], typing.Union[jobs.Job, typing.Awaitable[jobs.Job]] - ]: + ) -> Callable[[jobs.CancelJobRequest], Union[jobs.Job, Awaitable[jobs.Job]]]: raise NotImplementedError() @property def delete_job( self, - ) -> typing.Callable[ - [jobs.DeleteJobRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ) -> Callable[ + [jobs.DeleteJobRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] ]: raise NotImplementedError() diff --git a/google/cloud/dataproc_v1/services/job_controller/transports/grpc.py b/google/cloud/dataproc_v1/services/job_controller/transports/grpc.py index 9842af0e..611533af 100644 --- a/google/cloud/dataproc_v1/services/job_controller/transports/grpc.py +++ b/google/cloud/dataproc_v1/services/job_controller/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,23 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import operations_v1 # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.dataproc_v1.types import jobs -from google.longrunning import operations_pb2 as operations # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from .base import JobControllerTransport, DEFAULT_CLIENT_INFO @@ -53,7 +50,7 @@ def __init__( self, *, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -67,7 +64,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -178,7 +176,7 @@ def __init__( def create_channel( cls, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -209,13 +207,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -266,7 +266,7 @@ def submit_job(self) -> Callable[[jobs.SubmitJobRequest], jobs.Job]: @property def submit_job_as_operation( self, - ) -> Callable[[jobs.SubmitJobRequest], operations.Operation]: + ) -> Callable[[jobs.SubmitJobRequest], operations_pb2.Operation]: r"""Return a callable for the submit job as operation method over gRPC. Submits job to a cluster. @@ -285,7 +285,7 @@ def submit_job_as_operation( self._stubs["submit_job_as_operation"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1.JobController/SubmitJobAsOperation", request_serializer=jobs.SubmitJobRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["submit_job_as_operation"] @@ -391,7 +391,7 @@ def cancel_job(self) -> Callable[[jobs.CancelJobRequest], jobs.Job]: return self._stubs["cancel_job"] @property - def delete_job(self) -> Callable[[jobs.DeleteJobRequest], empty.Empty]: + def delete_job(self) -> Callable[[jobs.DeleteJobRequest], empty_pb2.Empty]: r"""Return a callable for the delete job method over gRPC. Deletes the job from the project. If the job is active, the @@ -411,7 +411,7 @@ def delete_job(self) -> Callable[[jobs.DeleteJobRequest], empty.Empty]: self._stubs["delete_job"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1.JobController/DeleteJob", request_serializer=jobs.DeleteJobRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_job"] diff --git a/google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py b/google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py index e9b2d197..f3cf0080 100644 --- a/google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py +++ b/google/cloud/dataproc_v1/services/job_controller/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,24 +13,22 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore from google.api_core import operations_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.dataproc_v1.types import jobs -from google.longrunning import operations_pb2 as operations # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from .base import JobControllerTransport, DEFAULT_CLIENT_INFO from .grpc import JobControllerGrpcTransport @@ -56,7 +53,7 @@ class JobControllerGrpcAsyncIOTransport(JobControllerTransport): def create_channel( cls, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -83,13 +80,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -97,7 +96,7 @@ def __init__( self, *, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -111,7 +110,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -170,7 +170,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -272,7 +271,7 @@ def submit_job(self) -> Callable[[jobs.SubmitJobRequest], Awaitable[jobs.Job]]: @property def submit_job_as_operation( self, - ) -> Callable[[jobs.SubmitJobRequest], Awaitable[operations.Operation]]: + ) -> Callable[[jobs.SubmitJobRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the submit job as operation method over gRPC. Submits job to a cluster. 
@@ -291,7 +290,7 @@ def submit_job_as_operation( self._stubs["submit_job_as_operation"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1.JobController/SubmitJobAsOperation", request_serializer=jobs.SubmitJobRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["submit_job_as_operation"] @@ -399,7 +398,9 @@ def cancel_job(self) -> Callable[[jobs.CancelJobRequest], Awaitable[jobs.Job]]: return self._stubs["cancel_job"] @property - def delete_job(self) -> Callable[[jobs.DeleteJobRequest], Awaitable[empty.Empty]]: + def delete_job( + self, + ) -> Callable[[jobs.DeleteJobRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete job method over gRPC. Deletes the job from the project. If the job is active, the @@ -419,7 +420,7 @@ def delete_job(self) -> Callable[[jobs.DeleteJobRequest], Awaitable[empty.Empty] self._stubs["delete_job"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1.JobController/DeleteJob", request_serializer=jobs.DeleteJobRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_job"] diff --git a/google/cloud/dataproc_v1/services/workflow_template_service/__init__.py b/google/cloud/dataproc_v1/services/workflow_template_service/__init__.py index 8e92d92d..c339809a 100644 --- a/google/cloud/dataproc_v1/services/workflow_template_service/__init__.py +++ b/google/cloud/dataproc_v1/services/workflow_template_service/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .client import WorkflowTemplateServiceClient from .async_client import WorkflowTemplateServiceAsyncClient diff --git a/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py b/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py index cbfafedc..6e895241 100644 --- a/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py +++ b/google/cloud/dataproc_v1/services/workflow_template_service/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
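In the job controller transports above, SubmitJobAsOperation wraps submission in a long-running operation whose result is the finished Job, while DeleteJob resolves to Empty (surfacing as None). A sketch tying the two together, with placeholder resources:

```python
# Sketch: submit a job as an LRO, wait for it, then delete the job record.
# Project, cluster, and GCS paths are placeholders.
from google.cloud import dataproc_v1

client = dataproc_v1.JobControllerClient()

operation = client.submit_job_as_operation(
    project_id="my-project",
    region="us-central1",
    job={
        "placement": {"cluster_name": "my-cluster"},
        "pyspark_job": {"main_python_file_uri": "gs://my-bucket/job.py"},
    },
)
job = operation.result()  # the completed Job message

# DeleteJob returns Empty, so the call simply returns None on success.
client.delete_job(
    project_id="my-project",
    region="us-central1",
    job_id=job.reference.job_id,
)
```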
# - from collections import OrderedDict import functools import re @@ -22,19 +20,19 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.dataproc_v1.services.workflow_template_service import pagers from google.cloud.dataproc_v1.types import workflow_templates -from google.protobuf import empty_pb2 as empty # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import WorkflowTemplateServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import WorkflowTemplateServiceGrpcAsyncIOTransport from .client import WorkflowTemplateServiceClient @@ -50,39 +48,38 @@ class WorkflowTemplateServiceAsyncClient: DEFAULT_ENDPOINT = WorkflowTemplateServiceClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = WorkflowTemplateServiceClient.DEFAULT_MTLS_ENDPOINT + cluster_path = staticmethod(WorkflowTemplateServiceClient.cluster_path) + parse_cluster_path = staticmethod(WorkflowTemplateServiceClient.parse_cluster_path) + service_path = staticmethod(WorkflowTemplateServiceClient.service_path) + parse_service_path = staticmethod(WorkflowTemplateServiceClient.parse_service_path) workflow_template_path = staticmethod( WorkflowTemplateServiceClient.workflow_template_path ) parse_workflow_template_path = staticmethod( WorkflowTemplateServiceClient.parse_workflow_template_path ) - common_billing_account_path = staticmethod( WorkflowTemplateServiceClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( WorkflowTemplateServiceClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(WorkflowTemplateServiceClient.common_folder_path) parse_common_folder_path = staticmethod( WorkflowTemplateServiceClient.parse_common_folder_path ) - common_organization_path = staticmethod( WorkflowTemplateServiceClient.common_organization_path ) parse_common_organization_path = staticmethod( WorkflowTemplateServiceClient.parse_common_organization_path ) - common_project_path = staticmethod( WorkflowTemplateServiceClient.common_project_path ) parse_common_project_path = staticmethod( WorkflowTemplateServiceClient.parse_common_project_path ) - common_location_path = staticmethod( WorkflowTemplateServiceClient.common_location_path ) @@ -92,7 +89,8 @@ class WorkflowTemplateServiceAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. 
@@ -107,7 +105,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -124,7 +122,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> WorkflowTemplateServiceTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: WorkflowTemplateServiceTransport: The transport used by the client instance. @@ -139,12 +137,12 @@ def transport(self) -> WorkflowTemplateServiceTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, WorkflowTemplateServiceTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the workflow template service client. + """Instantiates the workflow template service client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -176,7 +174,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = WorkflowTemplateServiceClient( credentials=credentials, transport=transport, @@ -224,7 +221,6 @@ async def create_workflow_template( This corresponds to the ``template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -251,7 +247,6 @@ async def create_workflow_template( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if template is not None: @@ -265,7 +260,9 @@ async def create_workflow_template( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=600.0, ), default_timeout=600.0, @@ -319,7 +316,6 @@ async def get_workflow_template( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -346,7 +342,6 @@ async def get_workflow_template( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -359,9 +354,9 @@ async def get_workflow_template( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=600.0, ), @@ -442,13 +437,12 @@ async def instantiate_workflow_template( parameters (:class:`Sequence[google.cloud.dataproc_v1.types.InstantiateWorkflowTemplateRequest.ParametersEntry]`): Optional. Map from parameter names to values that should be used for those - parameters. Values may not exceed 100 + parameters. Values may not exceed 1000 characters. 
This corresponds to the ``parameters`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -488,7 +482,6 @@ async def instantiate_workflow_template( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -503,7 +496,9 @@ async def instantiate_workflow_template( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=600.0, ), default_timeout=600.0, @@ -523,7 +518,7 @@ async def instantiate_workflow_template( response = operation_async.from_gapic( response, self._client._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=workflow_templates.WorkflowMetadata, ) @@ -597,7 +592,6 @@ async def instantiate_inline_workflow_template( This corresponds to the ``template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -637,7 +631,6 @@ async def instantiate_inline_workflow_template( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if template is not None: @@ -651,7 +644,9 @@ async def instantiate_inline_workflow_template( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=600.0, ), default_timeout=600.0, @@ -671,7 +666,7 @@ async def instantiate_inline_workflow_template( response = operation_async.from_gapic( response, self._client._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=workflow_templates.WorkflowMetadata, ) @@ -704,7 +699,6 @@ async def update_workflow_template( This corresponds to the ``template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -731,7 +725,6 @@ async def update_workflow_template( # If we have keyword arguments corresponding to fields on the # request, apply these. - if template is not None: request.template = template @@ -743,7 +736,9 @@ async def update_workflow_template( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=600.0, ), default_timeout=600.0, @@ -797,7 +792,6 @@ async def list_workflow_templates( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -827,7 +821,6 @@ async def list_workflow_templates( # If we have keyword arguments corresponding to fields on the # request, apply these. 
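The retry blocks threaded through these methods all share one shape: retry only on `ServiceUnavailable` (plus `DeadlineExceeded` and `InternalServerError` for the idempotent reads), with exponential backoff starting at 0.1s, a 1.3 multiplier, a 60s per-sleep cap, and a 600s overall deadline. Built stand-alone from public `google.api_core` types, a sketch of that default policy:

```python
# Stand-alone sketch of the default retry shown in the hunks above;
# this is not the generated code itself.
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries

default_retry = retries.Retry(
    initial=0.1,      # first sleep, in seconds
    maximum=60.0,     # per-sleep cap
    multiplier=1.3,   # exponential backoff factor
    predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
    deadline=600.0,   # total time budget across attempts
)
```

Passing `retry=default_retry` (or any other `Retry`) to an individual call overrides the wrapped default.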
- if parent is not None: request.parent = parent @@ -840,9 +833,9 @@ async def list_workflow_templates( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=600.0, ), @@ -904,7 +897,6 @@ async def delete_workflow_template( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -925,7 +917,6 @@ async def delete_workflow_template( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -937,7 +928,9 @@ async def delete_workflow_template( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=600.0, ), default_timeout=600.0, diff --git a/google/cloud/dataproc_v1/services/workflow_template_service/client.py b/google/cloud/dataproc_v1/services/workflow_template_service/client.py index bb0be312..d60fc6ed 100644 --- a/google/cloud/dataproc_v1/services/workflow_template_service/client.py +++ b/google/cloud/dataproc_v1/services/workflow_template_service/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from distutils import util import os @@ -23,10 +21,10 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -36,9 +34,9 @@ from google.api_core import operation_async # type: ignore from google.cloud.dataproc_v1.services.workflow_template_service import pagers from google.cloud.dataproc_v1.types import workflow_templates -from google.protobuf import empty_pb2 as empty # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import WorkflowTemplateServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc import WorkflowTemplateServiceGrpcTransport from .transports.grpc_asyncio import WorkflowTemplateServiceGrpcAsyncIOTransport @@ -61,7 +59,7 @@ class WorkflowTemplateServiceClientMeta(type): def get_transport_class( cls, label: str = None, ) -> Type[WorkflowTemplateServiceTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. 
Args: label: The name of the desired transport. If none is @@ -86,7 +84,8 @@ class WorkflowTemplateServiceClient(metaclass=WorkflowTemplateServiceClientMeta) @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -120,7 +119,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -137,7 +137,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -156,25 +156,58 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> WorkflowTemplateServiceTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - WorkflowTemplateServiceTransport: The transport used by the client instance. + WorkflowTemplateServiceTransport: The transport used by the client + instance. """ return self._transport + @staticmethod + def cluster_path(project: str, location: str, cluster: str,) -> str: + """Returns a fully-qualified cluster string.""" + return "projects/{project}/locations/{location}/clusters/{cluster}".format( + project=project, location=location, cluster=cluster, + ) + + @staticmethod + def parse_cluster_path(path: str) -> Dict[str, str]: + """Parses a cluster path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/clusters/(?P<cluster>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def service_path(project: str, location: str, service: str,) -> str: + """Returns a fully-qualified service string.""" + return "projects/{project}/locations/{location}/services/{service}".format( + project=project, location=location, service=service, + ) + + @staticmethod + def parse_service_path(path: str) -> Dict[str, str]: + """Parses a service path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/services/(?P<service>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def workflow_template_path( project: str, region: str, workflow_template: str, ) -> str: - """Return a fully-qualified workflow_template string.""" + """Returns a fully-qualified workflow_template string.""" return "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}".format( project=project, region=region, workflow_template=workflow_template, ) @staticmethod def parse_workflow_template_path(path: str) -> Dict[str, str]: - """Parse a workflow_template path into its component segments.""" + """Parses a workflow_template path into its component segments.""" m = re.match( r"^projects/(?P<project>.+?)/regions/(?P<region>.+?)/workflowTemplates/(?P<workflow_template>.+?)$", path, @@ -183,7 +216,7 @@ def parse_workflow_template_path(path: str) -> Dict[str, str]: @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account
string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -196,7 +229,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -207,7 +240,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -218,7 +251,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -229,7 +262,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -243,12 +276,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, WorkflowTemplateServiceTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the workflow template service client. + """Instantiates the workflow template service client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -303,9 +336,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -317,12 +351,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -337,8 +373,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." 
) self._transport = transport else: @@ -393,7 +429,6 @@ def create_workflow_template( This corresponds to the ``template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -422,10 +457,8 @@ def create_workflow_template( # there are no flattened fields. if not isinstance(request, workflow_templates.CreateWorkflowTemplateRequest): request = workflow_templates.CreateWorkflowTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if template is not None: @@ -482,7 +515,6 @@ def get_workflow_template( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -511,10 +543,8 @@ def get_workflow_template( # there are no flattened fields. if not isinstance(request, workflow_templates.GetWorkflowTemplateRequest): request = workflow_templates.GetWorkflowTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -595,13 +625,12 @@ def instantiate_workflow_template( parameters (Sequence[google.cloud.dataproc_v1.types.InstantiateWorkflowTemplateRequest.ParametersEntry]): Optional. Map from parameter names to values that should be used for those - parameters. Values may not exceed 100 + parameters. Values may not exceed 1000 characters. This corresponds to the ``parameters`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -645,10 +674,8 @@ def instantiate_workflow_template( request, workflow_templates.InstantiateWorkflowTemplateRequest ): request = workflow_templates.InstantiateWorkflowTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if parameters is not None: @@ -673,7 +700,7 @@ def instantiate_workflow_template( response = operation.from_gapic( response, self._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=workflow_templates.WorkflowMetadata, ) @@ -747,7 +774,6 @@ def instantiate_inline_workflow_template( This corresponds to the ``template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -793,10 +819,8 @@ def instantiate_inline_workflow_template( request = workflow_templates.InstantiateInlineWorkflowTemplateRequest( request ) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
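From the caller's side, the `operation.from_gapic` plumbing above means `instantiate_workflow_template` returns a long-running operation whose final result is `Empty` and whose progress arrives as `WorkflowMetadata`. A hedged usage sketch; the project, region, and template names are hypothetical, and a real deployment is needed to run it:

```python
# Usage sketch of the long-running flow wired above.
from google.cloud.dataproc_v1 import WorkflowTemplateServiceClient

client = WorkflowTemplateServiceClient()
operation = client.instantiate_workflow_template(
    name="projects/my-project/regions/us-central1/workflowTemplates/my-template",
    parameters={"CLUSTER_NAME": "demo"},  # values capped at 1000 characters
)
operation.result(timeout=600)  # blocks; resolves to Empty on success
```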
- if parent is not None: request.parent = parent if template is not None: @@ -821,7 +845,7 @@ def instantiate_inline_workflow_template( response = operation.from_gapic( response, self._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=workflow_templates.WorkflowMetadata, ) @@ -854,7 +878,6 @@ def update_workflow_template( This corresponds to the ``template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -883,10 +906,8 @@ def update_workflow_template( # there are no flattened fields. if not isinstance(request, workflow_templates.UpdateWorkflowTemplateRequest): request = workflow_templates.UpdateWorkflowTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if template is not None: request.template = template @@ -941,7 +962,6 @@ def list_workflow_templates( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -973,10 +993,8 @@ def list_workflow_templates( # there are no flattened fields. if not isinstance(request, workflow_templates.ListWorkflowTemplatesRequest): request = workflow_templates.ListWorkflowTemplatesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -1038,7 +1056,6 @@ def delete_workflow_template( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1061,10 +1078,8 @@ def delete_workflow_template( # there are no flattened fields. if not isinstance(request, workflow_templates.DeleteWorkflowTemplateRequest): request = workflow_templates.DeleteWorkflowTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name diff --git a/google/cloud/dataproc_v1/services/workflow_template_service/pagers.py b/google/cloud/dataproc_v1/services/workflow_template_service/pagers.py index 90fa03f2..33bbb914 100644 --- a/google/cloud/dataproc_v1/services/workflow_template_service/pagers.py +++ b/google/cloud/dataproc_v1/services/workflow_template_service/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -119,7 +117,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. 
Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/dataproc_v1/services/workflow_template_service/transports/__init__.py b/google/cloud/dataproc_v1/services/workflow_template_service/transports/__init__.py index ac44e1f0..c72ace70 100644 --- a/google/cloud/dataproc_v1/services/workflow_template_service/transports/__init__.py +++ b/google/cloud/dataproc_v1/services/workflow_template_service/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py b/google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py index bded001b..d5288980 100644 --- a/google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py +++ b/google/cloud/dataproc_v1/services/workflow_template_service/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,22 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.dataproc_v1.types import workflow_templates -from google.longrunning import operations_pb2 as operations # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -38,27 +37,41 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class WorkflowTemplateServiceTransport(abc.ABC): """Abstract transport class for WorkflowTemplateService.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + DEFAULT_HOST: str = "dataproc.googleapis.com" + def __init__( self, *, - host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: 
ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -67,7 +80,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -81,29 +94,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. 
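The credential resolution above is a three-step fallback: explicit `credentials` win, then a `credentials_file`, then Application Default Credentials. The last step reduces to a single call, sketched here; running it requires ADC to be configured locally:

```python
# Hedged sketch of the ADC fallback used by the transport above.
import google.auth

credentials, project_id = google.auth.default(
    scopes=["https://www.googleapis.com/auth/cloud-platform"],
)
```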
+ + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -113,7 +173,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=600.0, ), default_timeout=600.0, @@ -126,9 +188,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=600.0, ), @@ -141,7 +203,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=600.0, ), default_timeout=600.0, @@ -153,7 +217,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=600.0, ), default_timeout=600.0, @@ -165,7 +231,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=600.0, ), default_timeout=600.0, @@ -178,9 +246,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=600.0, ), @@ -193,7 +261,9 @@ def _prep_wrapped_messages(self, client_info): 
initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=600.0, ), default_timeout=600.0, @@ -209,11 +279,11 @@ def operations_client(self) -> operations_v1.OperationsClient: @property def create_workflow_template( self, - ) -> typing.Callable[ + ) -> Callable[ [workflow_templates.CreateWorkflowTemplateRequest], - typing.Union[ + Union[ workflow_templates.WorkflowTemplate, - typing.Awaitable[workflow_templates.WorkflowTemplate], + Awaitable[workflow_templates.WorkflowTemplate], ], ]: raise NotImplementedError() @@ -221,11 +291,11 @@ def create_workflow_template( @property def get_workflow_template( self, - ) -> typing.Callable[ + ) -> Callable[ [workflow_templates.GetWorkflowTemplateRequest], - typing.Union[ + Union[ workflow_templates.WorkflowTemplate, - typing.Awaitable[workflow_templates.WorkflowTemplate], + Awaitable[workflow_templates.WorkflowTemplate], ], ]: raise NotImplementedError() @@ -233,29 +303,29 @@ def get_workflow_template( @property def instantiate_workflow_template( self, - ) -> typing.Callable[ + ) -> Callable[ [workflow_templates.InstantiateWorkflowTemplateRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def instantiate_inline_workflow_template( self, - ) -> typing.Callable[ + ) -> Callable[ [workflow_templates.InstantiateInlineWorkflowTemplateRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def update_workflow_template( self, - ) -> typing.Callable[ + ) -> Callable[ [workflow_templates.UpdateWorkflowTemplateRequest], - typing.Union[ + Union[ workflow_templates.WorkflowTemplate, - typing.Awaitable[workflow_templates.WorkflowTemplate], + Awaitable[workflow_templates.WorkflowTemplate], ], ]: raise NotImplementedError() @@ -263,11 +333,11 @@ def update_workflow_template( @property def list_workflow_templates( self, - ) -> typing.Callable[ + ) -> Callable[ [workflow_templates.ListWorkflowTemplatesRequest], - typing.Union[ + Union[ workflow_templates.ListWorkflowTemplatesResponse, - typing.Awaitable[workflow_templates.ListWorkflowTemplatesResponse], + Awaitable[workflow_templates.ListWorkflowTemplatesResponse], ], ]: raise NotImplementedError() @@ -275,9 +345,9 @@ def list_workflow_templates( @property def delete_workflow_template( self, - ) -> typing.Callable[ + ) -> Callable[ [workflow_templates.DeleteWorkflowTemplateRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() diff --git a/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py b/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py index e2bbf535..d0ae8f96 100644 --- a/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py +++ b/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,23 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
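Before the gRPC transport below, the version gates added to `base.py` above deserve a concrete illustration: `_get_scopes_kwargs` forwards `default_scopes` only when google-auth is new enough to understand it. A self-contained sketch of that comparison; the function name here is illustrative, not part of the library:

```python
# Self-contained sketch of the version gate in _get_scopes_kwargs above.
import packaging.version

def pick_scopes_kwargs(auth_version, scopes, default_scopes):
    # google-auth >= 1.25.0 accepts default_scopes; older versions
    # only take a single scopes argument.
    if auth_version and packaging.version.parse(
        auth_version
    ) >= packaging.version.parse("1.25.0"):
        return {"scopes": scopes, "default_scopes": default_scopes}
    return {"scopes": scopes or default_scopes}

assert pick_scopes_kwargs("1.30.0", None, ("s",)) == {
    "scopes": None,
    "default_scopes": ("s",),
}
assert pick_scopes_kwargs("1.20.0", None, ("s",)) == {"scopes": ("s",)}
```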
# - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import operations_v1 # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.dataproc_v1.types import workflow_templates -from google.longrunning import operations_pb2 as operations # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from .base import WorkflowTemplateServiceTransport, DEFAULT_CLIENT_INFO @@ -54,7 +51,7 @@ def __init__( self, *, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -68,7 +65,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -179,7 +177,7 @@ def __init__( def create_channel( cls, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -210,13 +208,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -304,7 +304,8 @@ def get_workflow_template( def instantiate_workflow_template( self, ) -> Callable[ - [workflow_templates.InstantiateWorkflowTemplateRequest], operations.Operation + [workflow_templates.InstantiateWorkflowTemplateRequest], + operations_pb2.Operation, ]: r"""Return a callable for the instantiate workflow template method over gRPC. @@ -346,7 +347,7 @@ def instantiate_workflow_template( ] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1.WorkflowTemplateService/InstantiateWorkflowTemplate", request_serializer=workflow_templates.InstantiateWorkflowTemplateRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["instantiate_workflow_template"] @@ -355,7 +356,7 @@ def instantiate_inline_workflow_template( self, ) -> Callable[ [workflow_templates.InstantiateInlineWorkflowTemplateRequest], - operations.Operation, + operations_pb2.Operation, ]: r"""Return a callable for the instantiate inline workflow template method over gRPC. 
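Because `create_channel` above now derives its scope and self-signed-JWT kwargs from the base-class helper, callers can still build a channel themselves and hand it to the transport, which the client then adopts. A hedged sketch; channel creation resolves Application Default Credentials, so ADC must be configured to run it:

```python
# Sketch of injecting a pre-built channel, mirroring create_channel above.
from google.cloud.dataproc_v1 import WorkflowTemplateServiceClient
from google.cloud.dataproc_v1.services.workflow_template_service.transports import (
    WorkflowTemplateServiceGrpcTransport,
)

channel = WorkflowTemplateServiceGrpcTransport.create_channel(
    "dataproc.googleapis.com"
)
transport = WorkflowTemplateServiceGrpcTransport(channel=channel)
client = WorkflowTemplateServiceClient(transport=transport)
```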
@@ -403,7 +404,7 @@ def instantiate_inline_workflow_template( ] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1.WorkflowTemplateService/InstantiateInlineWorkflowTemplate", request_serializer=workflow_templates.InstantiateInlineWorkflowTemplateRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["instantiate_inline_workflow_template"] @@ -471,7 +472,7 @@ def list_workflow_templates( @property def delete_workflow_template( self, - ) -> Callable[[workflow_templates.DeleteWorkflowTemplateRequest], empty.Empty]: + ) -> Callable[[workflow_templates.DeleteWorkflowTemplateRequest], empty_pb2.Empty]: r"""Return a callable for the delete workflow template method over gRPC. Deletes a workflow template. It does not cancel in- @@ -491,7 +492,7 @@ def delete_workflow_template( self._stubs["delete_workflow_template"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1.WorkflowTemplateService/DeleteWorkflowTemplate", request_serializer=workflow_templates.DeleteWorkflowTemplateRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_workflow_template"] diff --git a/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py b/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py index 1f93da89..c70245f9 100644 --- a/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py +++ b/google/cloud/dataproc_v1/services/workflow_template_service/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,24 +13,22 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore from google.api_core import operations_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.dataproc_v1.types import workflow_templates -from google.longrunning import operations_pb2 as operations # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from .base import WorkflowTemplateServiceTransport, DEFAULT_CLIENT_INFO from .grpc import WorkflowTemplateServiceGrpcTransport @@ -57,7 +54,7 @@ class WorkflowTemplateServiceGrpcAsyncIOTransport(WorkflowTemplateServiceTranspo def create_channel( cls, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -84,13 +81,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -98,7 +97,7 @@ def __init__( self, *, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -112,7 +111,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -171,7 +171,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -311,7 +310,7 @@ def instantiate_workflow_template( self, ) -> Callable[ [workflow_templates.InstantiateWorkflowTemplateRequest], - Awaitable[operations.Operation], + Awaitable[operations_pb2.Operation], ]: r"""Return a callable for the instantiate workflow template method over gRPC. @@ -353,7 +352,7 @@ def instantiate_workflow_template( ] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1.WorkflowTemplateService/InstantiateWorkflowTemplate", request_serializer=workflow_templates.InstantiateWorkflowTemplateRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["instantiate_workflow_template"] @@ -362,7 +361,7 @@ def instantiate_inline_workflow_template( self, ) -> Callable[ [workflow_templates.InstantiateInlineWorkflowTemplateRequest], - Awaitable[operations.Operation], + Awaitable[operations_pb2.Operation], ]: r"""Return a callable for the instantiate inline workflow template method over gRPC. @@ -410,7 +409,7 @@ def instantiate_inline_workflow_template( ] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1.WorkflowTemplateService/InstantiateInlineWorkflowTemplate", request_serializer=workflow_templates.InstantiateInlineWorkflowTemplateRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["instantiate_inline_workflow_template"] @@ -479,7 +478,7 @@ def list_workflow_templates( def delete_workflow_template( self, ) -> Callable[ - [workflow_templates.DeleteWorkflowTemplateRequest], Awaitable[empty.Empty] + [workflow_templates.DeleteWorkflowTemplateRequest], Awaitable[empty_pb2.Empty] ]: r"""Return a callable for the delete workflow template method over gRPC. 
@@ -500,7 +499,7 @@ def delete_workflow_template( self._stubs["delete_workflow_template"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1.WorkflowTemplateService/DeleteWorkflowTemplate", request_serializer=workflow_templates.DeleteWorkflowTemplateRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_workflow_template"] diff --git a/google/cloud/dataproc_v1/types/__init__.py b/google/cloud/dataproc_v1/types/__init__.py index d79923a4..814a8c19 100644 --- a/google/cloud/dataproc_v1/types/__init__.py +++ b/google/cloud/dataproc_v1/types/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .autoscaling_policies import ( AutoscalingPolicy, BasicAutoscalingAlgorithm, @@ -43,16 +41,23 @@ EndpointConfig, GceClusterConfig, GetClusterRequest, + GkeClusterConfig, + IdentityConfig, InstanceGroupConfig, KerberosConfig, LifecycleConfig, ListClustersRequest, ListClustersResponse, ManagedGroupConfig, + MetastoreConfig, + NodeGroupAffinity, NodeInitializationAction, ReservationAffinity, SecurityConfig, + ShieldedInstanceConfig, SoftwareConfig, + StartClusterRequest, + StopClusterRequest, UpdateClusterRequest, ) from .jobs import ( @@ -135,16 +140,23 @@ "EndpointConfig", "GceClusterConfig", "GetClusterRequest", + "GkeClusterConfig", + "IdentityConfig", "InstanceGroupConfig", "KerberosConfig", "LifecycleConfig", "ListClustersRequest", "ListClustersResponse", "ManagedGroupConfig", + "MetastoreConfig", + "NodeGroupAffinity", "NodeInitializationAction", "ReservationAffinity", "SecurityConfig", + "ShieldedInstanceConfig", "SoftwareConfig", + "StartClusterRequest", + "StopClusterRequest", "UpdateClusterRequest", "CancelJobRequest", "DeleteJobRequest", diff --git a/google/cloud/dataproc_v1/types/autoscaling_policies.py b/google/cloud/dataproc_v1/types/autoscaling_policies.py index 7fa0779f..ae9afa52 100644 --- a/google/cloud/dataproc_v1/types/autoscaling_policies.py +++ b/google/cloud/dataproc_v1/types/autoscaling_policies.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,11 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import duration_pb2 # type: ignore __protobuf__ = proto.module( @@ -72,18 +69,14 @@ class AutoscalingPolicy(proto.Message): operate for secondary workers. """ - id = proto.Field(proto.STRING, number=1) - - name = proto.Field(proto.STRING, number=2) - + id = proto.Field(proto.STRING, number=1,) + name = proto.Field(proto.STRING, number=2,) basic_algorithm = proto.Field( proto.MESSAGE, number=3, oneof="algorithm", message="BasicAutoscalingAlgorithm", ) - worker_config = proto.Field( proto.MESSAGE, number=4, message="InstanceGroupAutoscalingPolicyConfig", ) - secondary_worker_config = proto.Field( proto.MESSAGE, number=5, message="InstanceGroupAutoscalingPolicyConfig", ) @@ -91,7 +84,6 @@ class AutoscalingPolicy(proto.Message): class BasicAutoscalingAlgorithm(proto.Message): r"""Basic algorithm for autoscaling. 
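The proto-plus field declarations above, together with the YARN config fields detailed in the hunks that follow, compose locally without any API call. A hedged construction sketch with illustrative values:

```python
# Local construction sketch of the autoscaling messages above; values
# are illustrative only.
from google.protobuf import duration_pb2

from google.cloud.dataproc_v1.types import autoscaling_policies as ap

policy = ap.AutoscalingPolicy(
    id="demo-policy",
    basic_algorithm=ap.BasicAutoscalingAlgorithm(
        yarn_config=ap.BasicYarnAutoscalingConfig(
            graceful_decommission_timeout=duration_pb2.Duration(seconds=3600),
            scale_up_factor=0.5,
            scale_down_factor=0.5,
        ),
        cooldown_period=duration_pb2.Duration(seconds=120),
    ),
    worker_config=ap.InstanceGroupAutoscalingPolicyConfig(
        min_instances=2, max_instances=10,
    ),
)
```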
- Attributes: yarn_config (google.cloud.dataproc_v1.types.BasicYarnAutoscalingConfig): Required. YARN autoscaling configuration. @@ -106,13 +98,13 @@ class BasicAutoscalingAlgorithm(proto.Message): yarn_config = proto.Field( proto.MESSAGE, number=1, message="BasicYarnAutoscalingConfig", ) - - cooldown_period = proto.Field(proto.MESSAGE, number=2, message=duration.Duration,) + cooldown_period = proto.Field( + proto.MESSAGE, number=2, message=duration_pb2.Duration, + ) class BasicYarnAutoscalingConfig(proto.Message): r"""Basic autoscaling configurations for YARN. - Attributes: graceful_decommission_timeout (google.protobuf.duration_pb2.Duration): Required. Timeout for YARN graceful decommissioning of Node @@ -167,16 +159,12 @@ class BasicYarnAutoscalingConfig(proto.Message): """ graceful_decommission_timeout = proto.Field( - proto.MESSAGE, number=5, message=duration.Duration, + proto.MESSAGE, number=5, message=duration_pb2.Duration, ) - - scale_up_factor = proto.Field(proto.DOUBLE, number=1) - - scale_down_factor = proto.Field(proto.DOUBLE, number=2) - - scale_up_min_worker_fraction = proto.Field(proto.DOUBLE, number=3) - - scale_down_min_worker_fraction = proto.Field(proto.DOUBLE, number=4) + scale_up_factor = proto.Field(proto.DOUBLE, number=1,) + scale_down_factor = proto.Field(proto.DOUBLE, number=2,) + scale_up_min_worker_fraction = proto.Field(proto.DOUBLE, number=3,) + scale_down_min_worker_fraction = proto.Field(proto.DOUBLE, number=4,) class InstanceGroupAutoscalingPolicyConfig(proto.Message): @@ -221,16 +209,13 @@ class InstanceGroupAutoscalingPolicyConfig(proto.Message): only and no secondary workers. """ - min_instances = proto.Field(proto.INT32, number=1) - - max_instances = proto.Field(proto.INT32, number=2) - - weight = proto.Field(proto.INT32, number=3) + min_instances = proto.Field(proto.INT32, number=1,) + max_instances = proto.Field(proto.INT32, number=2,) + weight = proto.Field(proto.INT32, number=3,) class CreateAutoscalingPolicyRequest(proto.Message): r"""A request to create an autoscaling policy. - Attributes: parent (str): Required. The "resource name" of the region or location, as @@ -248,14 +233,12 @@ class CreateAutoscalingPolicyRequest(proto.Message): Required. The autoscaling policy to create. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) policy = proto.Field(proto.MESSAGE, number=2, message="AutoscalingPolicy",) class GetAutoscalingPolicyRequest(proto.Message): r"""A request to fetch an autoscaling policy. - Attributes: name (str): Required. The "resource name" of the autoscaling policy, as @@ -271,12 +254,11 @@ class GetAutoscalingPolicyRequest(proto.Message): ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class UpdateAutoscalingPolicyRequest(proto.Message): r"""A request to update an autoscaling policy. - Attributes: policy (google.cloud.dataproc_v1.types.AutoscalingPolicy): Required. The updated autoscaling policy. @@ -305,12 +287,11 @@ class DeleteAutoscalingPolicyRequest(proto.Message): ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class ListAutoscalingPoliciesRequest(proto.Message): r"""A request to list autoscaling policies in a project. - Attributes: parent (str): Required. 
The "resource name" of the region or location, as @@ -334,11 +315,9 @@ class ListAutoscalingPoliciesRequest(proto.Message): results. """ - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) class ListAutoscalingPoliciesResponse(proto.Message): @@ -360,8 +339,7 @@ def raw_page(self): policies = proto.RepeatedField( proto.MESSAGE, number=1, message="AutoscalingPolicy", ) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/dataproc_v1/types/clusters.py b/google/cloud/dataproc_v1/types/clusters.py index 008b4866..f2837680 100644 --- a/google/cloud/dataproc_v1/types/clusters.py +++ b/google/cloud/dataproc_v1/types/clusters.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,14 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - from google.cloud.dataproc_v1.types import shared -from google.protobuf import duration_pb2 as duration # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -29,10 +26,13 @@ manifest={ "Cluster", "ClusterConfig", + "GkeClusterConfig", "EndpointConfig", "AutoscalingConfig", "EncryptionConfig", "GceClusterConfig", + "NodeGroupAffinity", + "ShieldedInstanceConfig", "InstanceGroupConfig", "ManagedGroupConfig", "AcceleratorConfig", @@ -41,11 +41,15 @@ "ClusterStatus", "SecurityConfig", "KerberosConfig", + "IdentityConfig", "SoftwareConfig", "LifecycleConfig", + "MetastoreConfig", "ClusterMetrics", "CreateClusterRequest", "UpdateClusterRequest", + "StopClusterRequest", + "StartClusterRequest", "DeleteClusterRequest", "GetClusterRequest", "ListClustersRequest", @@ -97,28 +101,20 @@ class Cluster(proto.Message): purposes only. It may be changed before final release. """ - project_id = proto.Field(proto.STRING, number=1) - - cluster_name = proto.Field(proto.STRING, number=2) - + project_id = proto.Field(proto.STRING, number=1,) + cluster_name = proto.Field(proto.STRING, number=2,) config = proto.Field(proto.MESSAGE, number=3, message="ClusterConfig",) - - labels = proto.MapField(proto.STRING, proto.STRING, number=8) - + labels = proto.MapField(proto.STRING, proto.STRING, number=8,) status = proto.Field(proto.MESSAGE, number=4, message="ClusterStatus",) - status_history = proto.RepeatedField( proto.MESSAGE, number=7, message="ClusterStatus", ) - - cluster_uuid = proto.Field(proto.STRING, number=6) - + cluster_uuid = proto.Field(proto.STRING, number=6,) metrics = proto.Field(proto.MESSAGE, number=9, message="ClusterMetrics",) class ClusterConfig(proto.Message): r"""The cluster config. - Attributes: config_bucket (str): Optional. A Cloud Storage bucket used to stage job @@ -130,18 +126,19 @@ class ClusterConfig(proto.Message): and manage this project-level, per-location bucket (see `Dataproc staging bucket `__). 
+ **This field requires a Cloud Storage bucket name, not a URI + to a Cloud Storage bucket.** temp_bucket (str): - Optional. A Cloud Storage bucket used to - store ephemeral cluster and jobs data, such as - Spark and MapReduce history files. If you do not - specify a temp bucket, - Dataproc will determine a Cloud Storage location - (US, ASIA, or EU) for your cluster's temp bucket - according to the Compute Engine zone where your - cluster is deployed, and then create and manage - this project-level, per-location bucket. The - default bucket has a TTL of 90 days, but you can - use any TTL (or none) if you specify a bucket. + Optional. A Cloud Storage bucket used to store ephemeral + cluster and jobs data, such as Spark and MapReduce history + files. If you do not specify a temp bucket, Dataproc will + determine a Cloud Storage location (US, ASIA, or EU) for + your cluster's temp bucket according to the Compute Engine + zone where your cluster is deployed, and then create and + manage this project-level, per-location bucket. The default + bucket has a TTL of 90 days, but you can use any TTL (or + none) if you specify a bucket. **This field requires a Cloud + Storage bucket name, not a URI to a Cloud Storage bucket.** gce_cluster_config (google.cloud.dataproc_v1.types.GceClusterConfig): Optional. The shared Compute Engine config settings for all instances in a cluster. @@ -187,50 +184,78 @@ class ClusterConfig(proto.Message): endpoint_config (google.cloud.dataproc_v1.types.EndpointConfig): Optional. Port/endpoint configuration for this cluster + metastore_config (google.cloud.dataproc_v1.types.MetastoreConfig): + Optional. Metastore configuration. + gke_cluster_config (google.cloud.dataproc_v1.types.GkeClusterConfig): + Optional. BETA. The Kubernetes Engine config for Dataproc + clusters deployed to Kubernetes. Setting this is considered + mutually exclusive with Compute Engine-based options such as + ``gce_cluster_config``, ``master_config``, + ``worker_config``, ``secondary_worker_config``, and + ``autoscaling_config``. """ - config_bucket = proto.Field(proto.STRING, number=1) - - temp_bucket = proto.Field(proto.STRING, number=2) - + config_bucket = proto.Field(proto.STRING, number=1,) + temp_bucket = proto.Field(proto.STRING, number=2,) gce_cluster_config = proto.Field( proto.MESSAGE, number=8, message="GceClusterConfig", ) - master_config = proto.Field(proto.MESSAGE, number=9, message="InstanceGroupConfig",) - worker_config = proto.Field( proto.MESSAGE, number=10, message="InstanceGroupConfig", ) - secondary_worker_config = proto.Field( proto.MESSAGE, number=12, message="InstanceGroupConfig", ) - software_config = proto.Field(proto.MESSAGE, number=13, message="SoftwareConfig",) - initialization_actions = proto.RepeatedField( proto.MESSAGE, number=11, message="NodeInitializationAction", ) - encryption_config = proto.Field( proto.MESSAGE, number=15, message="EncryptionConfig", ) - autoscaling_config = proto.Field( proto.MESSAGE, number=18, message="AutoscalingConfig", ) - security_config = proto.Field(proto.MESSAGE, number=16, message="SecurityConfig",) - lifecycle_config = proto.Field(proto.MESSAGE, number=17, message="LifecycleConfig",) - endpoint_config = proto.Field(proto.MESSAGE, number=19, message="EndpointConfig",) + metastore_config = proto.Field(proto.MESSAGE, number=20, message="MetastoreConfig",) + gke_cluster_config = proto.Field( + proto.MESSAGE, number=21, message="GkeClusterConfig", + ) + + +class GkeClusterConfig(proto.Message): + r"""The GKE config for this cluster. 
+ Attributes: + namespaced_gke_deployment_target (google.cloud.dataproc_v1.types.GkeClusterConfig.NamespacedGkeDeploymentTarget): + Optional. A target for the deployment. + """ + + class NamespacedGkeDeploymentTarget(proto.Message): + r"""A full, namespace-isolated deployment target for an existing + GKE cluster. + + Attributes: + target_gke_cluster (str): + Optional. The target GKE cluster to deploy to. Format: + 'projects/{project}/locations/{location}/clusters/{cluster_id}' + cluster_namespace (str): + Optional. A namespace within the GKE cluster + to deploy into. + """ + + target_gke_cluster = proto.Field(proto.STRING, number=1,) + cluster_namespace = proto.Field(proto.STRING, number=2,) + + namespaced_gke_deployment_target = proto.Field( + proto.MESSAGE, number=1, message=NamespacedGkeDeploymentTarget, + ) class EndpointConfig(proto.Message): r"""Endpoint config for this cluster - Attributes: http_ports (Sequence[google.cloud.dataproc_v1.types.EndpointConfig.HttpPortsEntry]): Output only. The map of port descriptions to URLs. Will only @@ -241,14 +266,12 @@ class EndpointConfig(proto.Message): sources. Defaults to false. """ - http_ports = proto.MapField(proto.STRING, proto.STRING, number=1) - - enable_http_port_access = proto.Field(proto.BOOL, number=2) + http_ports = proto.MapField(proto.STRING, proto.STRING, number=1,) + enable_http_port_access = proto.Field(proto.BOOL, number=2,) class AutoscalingConfig(proto.Message): r"""Autoscaling Policy config associated with the cluster. - Attributes: policy_uri (str): Optional. The autoscaling policy used by the cluster. @@ -263,12 +286,11 @@ class AutoscalingConfig(proto.Message): Dataproc region. """ - policy_uri = proto.Field(proto.STRING, number=1) + policy_uri = proto.Field(proto.STRING, number=1,) class EncryptionConfig(proto.Message): r"""Encryption settings for the cluster. - Attributes: gce_pd_kms_key_name (str): Optional. The Cloud KMS key name to use for @@ -276,7 +298,7 @@ class EncryptionConfig(proto.Message): cluster. """ - gce_pd_kms_key_name = proto.Field(proto.STRING, number=1) + gce_pd_kms_key_name = proto.Field(proto.STRING, number=1,) class GceClusterConfig(proto.Message): @@ -329,6 +351,9 @@ class GceClusterConfig(proto.Message): subnetwork enabled networks, and all off-cluster dependencies must be configured to be accessible without external IP addresses. + private_ipv6_google_access (google.cloud.dataproc_v1.types.GceClusterConfig.PrivateIpv6GoogleAccess): + Optional. The type of IPv6 access for a + cluster. service_account (str): Optional. The `Dataproc service account `__ @@ -367,27 +392,89 @@ class GceClusterConfig(proto.Message): reservation_affinity (google.cloud.dataproc_v1.types.ReservationAffinity): Optional. Reservation Affinity for consuming Zonal reservation. + node_group_affinity (google.cloud.dataproc_v1.types.NodeGroupAffinity): + Optional. Node Group Affinity for sole-tenant + clusters. + shielded_instance_config (google.cloud.dataproc_v1.types.ShieldedInstanceConfig): + Optional. Shielded Instance Config for clusters using + `Compute Engine Shielded + VMs `__. """ - zone_uri = proto.Field(proto.STRING, number=1) + class PrivateIpv6GoogleAccess(proto.Enum): + r"""``PrivateIpv6GoogleAccess`` controls whether and how Dataproc + cluster nodes can communicate with Google Services through gRPC over + IPv6. These values are directly mapped to corresponding values in + the `Compute Engine Instance + fields `__. 
+ """ + PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED = 0 + INHERIT_FROM_SUBNETWORK = 1 + OUTBOUND = 2 + BIDIRECTIONAL = 3 + + zone_uri = proto.Field(proto.STRING, number=1,) + network_uri = proto.Field(proto.STRING, number=2,) + subnetwork_uri = proto.Field(proto.STRING, number=6,) + internal_ip_only = proto.Field(proto.BOOL, number=7,) + private_ipv6_google_access = proto.Field( + proto.ENUM, number=12, enum=PrivateIpv6GoogleAccess, + ) + service_account = proto.Field(proto.STRING, number=8,) + service_account_scopes = proto.RepeatedField(proto.STRING, number=3,) + tags = proto.RepeatedField(proto.STRING, number=4,) + metadata = proto.MapField(proto.STRING, proto.STRING, number=5,) + reservation_affinity = proto.Field( + proto.MESSAGE, number=11, message="ReservationAffinity", + ) + node_group_affinity = proto.Field( + proto.MESSAGE, number=13, message="NodeGroupAffinity", + ) + shielded_instance_config = proto.Field( + proto.MESSAGE, number=14, message="ShieldedInstanceConfig", + ) - network_uri = proto.Field(proto.STRING, number=2) - subnetwork_uri = proto.Field(proto.STRING, number=6) +class NodeGroupAffinity(proto.Message): + r"""Node Group Affinity for clusters using sole-tenant node + groups. - internal_ip_only = proto.Field(proto.BOOL, number=7) + Attributes: + node_group_uri (str): + Required. The URI of a sole-tenant `node group + resource `__ + that the cluster will be created on. - service_account = proto.Field(proto.STRING, number=8) + A full URL, partial URI, or node group name are valid. + Examples: - service_account_scopes = proto.RepeatedField(proto.STRING, number=3) + - ``https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-central1-a/nodeGroups/node-group-1`` + - ``projects/[project_id]/zones/us-central1-a/nodeGroups/node-group-1`` + - ``node-group-1`` + """ - tags = proto.RepeatedField(proto.STRING, number=4) + node_group_uri = proto.Field(proto.STRING, number=1,) - metadata = proto.MapField(proto.STRING, proto.STRING, number=5) - reservation_affinity = proto.Field( - proto.MESSAGE, number=11, message="ReservationAffinity", - ) +class ShieldedInstanceConfig(proto.Message): + r"""Shielded Instance Config for clusters using `Compute Engine Shielded + VMs `__. + + Attributes: + enable_secure_boot (bool): + Optional. Defines whether instances have + Secure Boot enabled. + enable_vtpm (bool): + Optional. Defines whether instances have the + vTPM enabled. + enable_integrity_monitoring (bool): + Optional. Defines whether instances have + integrity monitoring enabled. + """ + + enable_secure_boot = proto.Field(proto.BOOL, number=1,) + enable_vtpm = proto.Field(proto.BOOL, number=2,) + enable_integrity_monitoring = proto.Field(proto.BOOL, number=3,) class InstanceGroupConfig(proto.Message): @@ -396,9 +483,13 @@ class InstanceGroupConfig(proto.Message): Attributes: num_instances (int): - Optional. The number of VM instances in the - instance group. For master instance groups, must - be set to 1. + Optional. The number of VM instances in the instance group. + For `HA + cluster `__ + `master_config <#FIELDS.master_config>`__ groups, **must be + set to 3**. For standard cluster + `master_config <#FIELDS.master_config>`__ groups, **must be + set to 1**. instance_names (Sequence[str]): Output only. The list of instance names. 
Dataproc derives the names from ``cluster_name``, ``num_instances``, and the @@ -475,29 +566,20 @@ class Preemptibility(proto.Enum): NON_PREEMPTIBLE = 1 PREEMPTIBLE = 2 - num_instances = proto.Field(proto.INT32, number=1) - - instance_names = proto.RepeatedField(proto.STRING, number=2) - - image_uri = proto.Field(proto.STRING, number=3) - - machine_type_uri = proto.Field(proto.STRING, number=4) - + num_instances = proto.Field(proto.INT32, number=1,) + instance_names = proto.RepeatedField(proto.STRING, number=2,) + image_uri = proto.Field(proto.STRING, number=3,) + machine_type_uri = proto.Field(proto.STRING, number=4,) disk_config = proto.Field(proto.MESSAGE, number=5, message="DiskConfig",) - - is_preemptible = proto.Field(proto.BOOL, number=6) - + is_preemptible = proto.Field(proto.BOOL, number=6,) preemptibility = proto.Field(proto.ENUM, number=10, enum=Preemptibility,) - managed_group_config = proto.Field( proto.MESSAGE, number=7, message="ManagedGroupConfig", ) - accelerators = proto.RepeatedField( proto.MESSAGE, number=8, message="AcceleratorConfig", ) - - min_cpu_platform = proto.Field(proto.STRING, number=9) + min_cpu_platform = proto.Field(proto.STRING, number=9,) class ManagedGroupConfig(proto.Message): @@ -513,9 +595,8 @@ class ManagedGroupConfig(proto.Message): Manager for this group. """ - instance_template_name = proto.Field(proto.STRING, number=1) - - instance_group_manager_name = proto.Field(proto.STRING, number=2) + instance_template_name = proto.Field(proto.STRING, number=1,) + instance_group_manager_name = proto.Field(proto.STRING, number=2,) class AcceleratorConfig(proto.Message): @@ -545,9 +626,8 @@ class AcceleratorConfig(proto.Message): type exposed to this instance. """ - accelerator_type_uri = proto.Field(proto.STRING, number=1) - - accelerator_count = proto.Field(proto.INT32, number=2) + accelerator_type_uri = proto.Field(proto.STRING, number=1,) + accelerator_count = proto.Field(proto.INT32, number=2,) class DiskConfig(proto.Message): @@ -556,10 +636,12 @@ class DiskConfig(proto.Message): Attributes: boot_disk_type (str): - Optional. Type of the boot disk (default is - "pd-standard"). Valid values: "pd-ssd" - (Persistent Disk Solid State Drive) or "pd- - standard" (Persistent Disk Hard Disk Drive). + Optional. Type of the boot disk (default is "pd-standard"). + Valid values: "pd-balanced" (Persistent Disk Balanced Solid + State Drive), "pd-ssd" (Persistent Disk Solid State Drive), + or "pd-standard" (Persistent Disk Hard Disk Drive). See + `Disk + types `__. boot_disk_size_gb (int): Optional. Size in GB of the boot disk (default is 500GB). @@ -573,11 +655,9 @@ class DiskConfig(proto.Message): basic config and installed binaries. """ - boot_disk_type = proto.Field(proto.STRING, number=3) - - boot_disk_size_gb = proto.Field(proto.INT32, number=1) - - num_local_ssds = proto.Field(proto.INT32, number=2) + boot_disk_type = proto.Field(proto.STRING, number=3,) + boot_disk_size_gb = proto.Field(proto.INT32, number=1,) + num_local_ssds = proto.Field(proto.INT32, number=2,) class NodeInitializationAction(proto.Message): @@ -599,14 +679,14 @@ class NodeInitializationAction(proto.Message): at end of the timeout period. 
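Editor's note: two smaller changes nearby are the newly documented ``pd-balanced`` boot disk type and the switch to importing ``duration_pb2`` directly for ``execution_timeout``. A hedged sketch (bucket and script URIs are made up):

    from google.cloud import dataproc_v1
    from google.protobuf import duration_pb2

    disk = dataproc_v1.DiskConfig(
        boot_disk_type="pd-balanced",  # newly documented valid value
        boot_disk_size_gb=500,         # the documented default size
    )
    init_action = dataproc_v1.NodeInitializationAction(
        executable_file="gs://my-bucket/startup.sh",  # hypothetical URI
        # Fail cluster creation if the script runs longer than 10 minutes.
        execution_timeout=duration_pb2.Duration(seconds=600),
    )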
""" - executable_file = proto.Field(proto.STRING, number=1) - - execution_timeout = proto.Field(proto.MESSAGE, number=2, message=duration.Duration,) + executable_file = proto.Field(proto.STRING, number=1,) + execution_timeout = proto.Field( + proto.MESSAGE, number=2, message=duration_pb2.Duration, + ) class ClusterStatus(proto.Message): r"""The status of a cluster and its instances. - Attributes: state (google.cloud.dataproc_v1.types.ClusterStatus.State): Output only. The cluster's state. @@ -630,6 +710,9 @@ class State(proto.Enum): ERROR = 3 DELETING = 4 UPDATING = 5 + STOPPING = 6 + STOPPED = 7 + STARTING = 8 class Substate(proto.Enum): r"""The cluster substate.""" @@ -638,41 +721,43 @@ class Substate(proto.Enum): STALE_STATUS = 2 state = proto.Field(proto.ENUM, number=1, enum=State,) - - detail = proto.Field(proto.STRING, number=2) - + detail = proto.Field(proto.STRING, number=2,) state_start_time = proto.Field( - proto.MESSAGE, number=3, message=timestamp.Timestamp, + proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp, ) - substate = proto.Field(proto.ENUM, number=4, enum=Substate,) class SecurityConfig(proto.Message): - r"""Security related configuration, including Kerberos. + r"""Security related configuration, including encryption, + Kerberos, etc. Attributes: kerberos_config (google.cloud.dataproc_v1.types.KerberosConfig): - Kerberos related configuration. + Optional. Kerberos related configuration. + identity_config (google.cloud.dataproc_v1.types.IdentityConfig): + Optional. Identity related configuration, + including service account based secure multi- + tenancy user mappings. """ kerberos_config = proto.Field(proto.MESSAGE, number=1, message="KerberosConfig",) + identity_config = proto.Field(proto.MESSAGE, number=2, message="IdentityConfig",) class KerberosConfig(proto.Message): r"""Specifies Kerberos related configuration. - Attributes: enable_kerberos (bool): Optional. Flag to indicate whether to Kerberize the cluster (default: false). Set this field to true to enable Kerberos on a cluster. root_principal_password_uri (str): - Required. The Cloud Storage URI of a KMS + Optional. The Cloud Storage URI of a KMS encrypted file containing the root principal password. kms_key_uri (str): - Required. The uri of the KMS key used to + Optional. The uri of the KMS key used to encrypt various sensitive files. keystore_uri (str): Optional. The Cloud Storage URI of the @@ -734,35 +819,33 @@ class KerberosConfig(proto.Message): of hostnames will be the realm. 
""" - enable_kerberos = proto.Field(proto.BOOL, number=1) - - root_principal_password_uri = proto.Field(proto.STRING, number=2) - - kms_key_uri = proto.Field(proto.STRING, number=3) - - keystore_uri = proto.Field(proto.STRING, number=4) - - truststore_uri = proto.Field(proto.STRING, number=5) - - keystore_password_uri = proto.Field(proto.STRING, number=6) + enable_kerberos = proto.Field(proto.BOOL, number=1,) + root_principal_password_uri = proto.Field(proto.STRING, number=2,) + kms_key_uri = proto.Field(proto.STRING, number=3,) + keystore_uri = proto.Field(proto.STRING, number=4,) + truststore_uri = proto.Field(proto.STRING, number=5,) + keystore_password_uri = proto.Field(proto.STRING, number=6,) + key_password_uri = proto.Field(proto.STRING, number=7,) + truststore_password_uri = proto.Field(proto.STRING, number=8,) + cross_realm_trust_realm = proto.Field(proto.STRING, number=9,) + cross_realm_trust_kdc = proto.Field(proto.STRING, number=10,) + cross_realm_trust_admin_server = proto.Field(proto.STRING, number=11,) + cross_realm_trust_shared_password_uri = proto.Field(proto.STRING, number=12,) + kdc_db_key_uri = proto.Field(proto.STRING, number=13,) + tgt_lifetime_hours = proto.Field(proto.INT32, number=14,) + realm = proto.Field(proto.STRING, number=15,) + + +class IdentityConfig(proto.Message): + r"""Identity related configuration, including service account + based secure multi-tenancy user mappings. - key_password_uri = proto.Field(proto.STRING, number=7) - - truststore_password_uri = proto.Field(proto.STRING, number=8) - - cross_realm_trust_realm = proto.Field(proto.STRING, number=9) - - cross_realm_trust_kdc = proto.Field(proto.STRING, number=10) - - cross_realm_trust_admin_server = proto.Field(proto.STRING, number=11) - - cross_realm_trust_shared_password_uri = proto.Field(proto.STRING, number=12) - - kdc_db_key_uri = proto.Field(proto.STRING, number=13) - - tgt_lifetime_hours = proto.Field(proto.INT32, number=14) + Attributes: + user_service_account_mapping (Sequence[google.cloud.dataproc_v1.types.IdentityConfig.UserServiceAccountMappingEntry]): + Required. Map of user to service account. + """ - realm = proto.Field(proto.STRING, number=15) + user_service_account_mapping = proto.MapField(proto.STRING, proto.STRING, number=1,) class SoftwareConfig(proto.Message): @@ -802,10 +885,8 @@ class SoftwareConfig(proto.Message): on the cluster. """ - image_version = proto.Field(proto.STRING, number=1) - - properties = proto.MapField(proto.STRING, proto.STRING, number=2) - + image_version = proto.Field(proto.STRING, number=1,) + properties = proto.MapField(proto.STRING, proto.STRING, number=2,) optional_components = proto.RepeatedField( proto.ENUM, number=3, enum=shared.Component, ) @@ -813,15 +894,14 @@ class SoftwareConfig(proto.Message): class LifecycleConfig(proto.Message): r"""Specifies the cluster auto-delete schedule configuration. - Attributes: idle_delete_ttl (google.protobuf.duration_pb2.Duration): Optional. The duration to keep the cluster alive while idling (when no jobs are running). Passing this threshold - will cause the cluster to be deleted. Minimum value is 10 + will cause the cluster to be deleted. Minimum value is 5 minutes; maximum value is 14 days (see JSON representation of - `Duration `__. + `Duration `__). auto_delete_time (google.protobuf.timestamp_pb2.Timestamp): Optional. The time when cluster will be auto-deleted (see JSON representation of @@ -839,17 +919,33 @@ class LifecycleConfig(proto.Message): `Timestamp `__). 
""" - idle_delete_ttl = proto.Field(proto.MESSAGE, number=1, message=duration.Duration,) - + idle_delete_ttl = proto.Field( + proto.MESSAGE, number=1, message=duration_pb2.Duration, + ) auto_delete_time = proto.Field( - proto.MESSAGE, number=2, oneof="ttl", message=timestamp.Timestamp, + proto.MESSAGE, number=2, oneof="ttl", message=timestamp_pb2.Timestamp, ) - auto_delete_ttl = proto.Field( - proto.MESSAGE, number=3, oneof="ttl", message=duration.Duration, + proto.MESSAGE, number=3, oneof="ttl", message=duration_pb2.Duration, + ) + idle_start_time = proto.Field( + proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp, ) - idle_start_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) + +class MetastoreConfig(proto.Message): + r"""Specifies a Metastore configuration. + Attributes: + dataproc_metastore_service (str): + Required. Resource name of an existing Dataproc Metastore + service. + + Example: + + - ``projects/[project_id]/locations/[dataproc_region]/services/[service-name]`` + """ + + dataproc_metastore_service = proto.Field(proto.STRING, number=1,) class ClusterMetrics(proto.Message): @@ -865,14 +961,12 @@ class ClusterMetrics(proto.Message): The YARN metrics. """ - hdfs_metrics = proto.MapField(proto.STRING, proto.INT64, number=1) - - yarn_metrics = proto.MapField(proto.STRING, proto.INT64, number=2) + hdfs_metrics = proto.MapField(proto.STRING, proto.INT64, number=1,) + yarn_metrics = proto.MapField(proto.STRING, proto.INT64, number=2,) class CreateClusterRequest(proto.Message): r"""A request to create a cluster. - Attributes: project_id (str): Required. The ID of the Google Cloud Platform @@ -885,9 +979,9 @@ class CreateClusterRequest(proto.Message): request_id (str): Optional. A unique id used to identify the request. If the server receives two - [CreateClusterRequest][google.cloud.dataproc.v1.CreateClusterRequest] - requests with the same id, then the second request will be - ignored and the first + `CreateClusterRequest `__\ s + with the same id, then the second request will be ignored + and the first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend is returned. @@ -899,18 +993,14 @@ class CreateClusterRequest(proto.Message): characters. """ - project_id = proto.Field(proto.STRING, number=1) - - region = proto.Field(proto.STRING, number=3) - + project_id = proto.Field(proto.STRING, number=1,) + region = proto.Field(proto.STRING, number=3,) cluster = proto.Field(proto.MESSAGE, number=2, message="Cluster",) - - request_id = proto.Field(proto.STRING, number=4) + request_id = proto.Field(proto.STRING, number=4,) class UpdateClusterRequest(proto.Message): r"""A request to update a cluster. - Attributes: project_id (str): Required. The ID of the Google Cloud Platform @@ -998,9 +1088,9 @@ class UpdateClusterRequest(proto.Message): request_id (str): Optional. A unique id used to identify the request. If the server receives two - [UpdateClusterRequest][google.cloud.dataproc.v1.UpdateClusterRequest] - requests with the same id, then the second request will be - ignored and the first + `UpdateClusterRequest `__\ s + with the same id, then the second request will be ignored + and the first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend is returned. @@ -1012,26 +1102,99 @@ class UpdateClusterRequest(proto.Message): characters. 
""" - project_id = proto.Field(proto.STRING, number=1) + project_id = proto.Field(proto.STRING, number=1,) + region = proto.Field(proto.STRING, number=5,) + cluster_name = proto.Field(proto.STRING, number=2,) + cluster = proto.Field(proto.MESSAGE, number=3, message="Cluster",) + graceful_decommission_timeout = proto.Field( + proto.MESSAGE, number=6, message=duration_pb2.Duration, + ) + update_mask = proto.Field( + proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask, + ) + request_id = proto.Field(proto.STRING, number=7,) - region = proto.Field(proto.STRING, number=5) - cluster_name = proto.Field(proto.STRING, number=2) +class StopClusterRequest(proto.Message): + r"""A request to stop a cluster. + Attributes: + project_id (str): + Required. The ID of the Google Cloud Platform + project the cluster belongs to. + region (str): + Required. The Dataproc region in which to + handle the request. + cluster_name (str): + Required. The cluster name. + cluster_uuid (str): + Optional. Specifying the ``cluster_uuid`` means the RPC will + fail (with error NOT_FOUND) if a cluster with the specified + UUID does not exist. + request_id (str): + Optional. A unique id used to identify the request. If the + server receives two + `StopClusterRequest `__\ s + with the same id, then the second request will be ignored + and the first + [google.longrunning.Operation][google.longrunning.Operation] + created and stored in the backend is returned. - cluster = proto.Field(proto.MESSAGE, number=3, message="Cluster",) + Recommendation: Set this value to a + `UUID `__. - graceful_decommission_timeout = proto.Field( - proto.MESSAGE, number=6, message=duration.Duration, - ) + The id must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. + """ - update_mask = proto.Field(proto.MESSAGE, number=4, message=field_mask.FieldMask,) + project_id = proto.Field(proto.STRING, number=1,) + region = proto.Field(proto.STRING, number=2,) + cluster_name = proto.Field(proto.STRING, number=3,) + cluster_uuid = proto.Field(proto.STRING, number=4,) + request_id = proto.Field(proto.STRING, number=5,) - request_id = proto.Field(proto.STRING, number=7) + +class StartClusterRequest(proto.Message): + r"""A request to start a cluster. + Attributes: + project_id (str): + Required. The ID of the Google Cloud Platform + project the cluster belongs to. + region (str): + Required. The Dataproc region in which to + handle the request. + cluster_name (str): + Required. The cluster name. + cluster_uuid (str): + Optional. Specifying the ``cluster_uuid`` means the RPC will + fail (with error NOT_FOUND) if a cluster with the specified + UUID does not exist. + request_id (str): + Optional. A unique id used to identify the request. If the + server receives two + `StartClusterRequest `__\ s + with the same id, then the second request will be ignored + and the first + [google.longrunning.Operation][google.longrunning.Operation] + created and stored in the backend is returned. + + Recommendation: Set this value to a + `UUID `__. + + The id must contain only letters (a-z, A-Z), numbers (0-9), + underscores (_), and hyphens (-). The maximum length is 40 + characters. 
+ """ + + project_id = proto.Field(proto.STRING, number=1,) + region = proto.Field(proto.STRING, number=2,) + cluster_name = proto.Field(proto.STRING, number=3,) + cluster_uuid = proto.Field(proto.STRING, number=4,) + request_id = proto.Field(proto.STRING, number=5,) class DeleteClusterRequest(proto.Message): r"""A request to delete a cluster. - Attributes: project_id (str): Required. The ID of the Google Cloud Platform @@ -1048,9 +1211,9 @@ class DeleteClusterRequest(proto.Message): request_id (str): Optional. A unique id used to identify the request. If the server receives two - [DeleteClusterRequest][google.cloud.dataproc.v1.DeleteClusterRequest] - requests with the same id, then the second request will be - ignored and the first + `DeleteClusterRequest `__\ s + with the same id, then the second request will be ignored + and the first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend is returned. @@ -1062,15 +1225,11 @@ class DeleteClusterRequest(proto.Message): characters. """ - project_id = proto.Field(proto.STRING, number=1) - - region = proto.Field(proto.STRING, number=3) - - cluster_name = proto.Field(proto.STRING, number=2) - - cluster_uuid = proto.Field(proto.STRING, number=4) - - request_id = proto.Field(proto.STRING, number=5) + project_id = proto.Field(proto.STRING, number=1,) + region = proto.Field(proto.STRING, number=3,) + cluster_name = proto.Field(proto.STRING, number=2,) + cluster_uuid = proto.Field(proto.STRING, number=4,) + request_id = proto.Field(proto.STRING, number=5,) class GetClusterRequest(proto.Message): @@ -1088,16 +1247,13 @@ class GetClusterRequest(proto.Message): Required. The cluster name. """ - project_id = proto.Field(proto.STRING, number=1) - - region = proto.Field(proto.STRING, number=3) - - cluster_name = proto.Field(proto.STRING, number=2) + project_id = proto.Field(proto.STRING, number=1,) + region = proto.Field(proto.STRING, number=3,) + cluster_name = proto.Field(proto.STRING, number=2,) class ListClustersRequest(proto.Message): r"""A request to list the clusters in a project. - Attributes: project_id (str): Required. The ID of the Google Cloud Platform @@ -1133,20 +1289,15 @@ class ListClustersRequest(proto.Message): Optional. The standard List page token. """ - project_id = proto.Field(proto.STRING, number=1) - - region = proto.Field(proto.STRING, number=4) - - filter = proto.Field(proto.STRING, number=5) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) + project_id = proto.Field(proto.STRING, number=1,) + region = proto.Field(proto.STRING, number=4,) + filter = proto.Field(proto.STRING, number=5,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) class ListClustersResponse(proto.Message): r"""The list of all clusters in a project. - Attributes: clusters (Sequence[google.cloud.dataproc_v1.types.Cluster]): Output only. The clusters in the project. @@ -1162,13 +1313,11 @@ def raw_page(self): return self clusters = proto.RepeatedField(proto.MESSAGE, number=1, message="Cluster",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class DiagnoseClusterRequest(proto.Message): r"""A request to collect cluster diagnostic information. - Attributes: project_id (str): Required. The ID of the Google Cloud Platform @@ -1180,16 +1329,13 @@ class DiagnoseClusterRequest(proto.Message): Required. The cluster name. 
""" - project_id = proto.Field(proto.STRING, number=1) - - region = proto.Field(proto.STRING, number=3) - - cluster_name = proto.Field(proto.STRING, number=2) + project_id = proto.Field(proto.STRING, number=1,) + region = proto.Field(proto.STRING, number=3,) + cluster_name = proto.Field(proto.STRING, number=2,) class DiagnoseClusterResults(proto.Message): r"""The location of diagnostic output. - Attributes: output_uri (str): Output only. The Cloud Storage URI of the @@ -1198,12 +1344,11 @@ class DiagnoseClusterResults(proto.Message): diagnostics. """ - output_uri = proto.Field(proto.STRING, number=1) + output_uri = proto.Field(proto.STRING, number=1,) class ReservationAffinity(proto.Message): r"""Reservation Affinity for consuming Zonal reservation. - Attributes: consume_reservation_type (google.cloud.dataproc_v1.types.ReservationAffinity.Type): Optional. Type of reservation to consume @@ -1225,10 +1370,8 @@ class Type(proto.Enum): SPECIFIC_RESERVATION = 3 consume_reservation_type = proto.Field(proto.ENUM, number=1, enum=Type,) - - key = proto.Field(proto.STRING, number=2) - - values = proto.RepeatedField(proto.STRING, number=3) + key = proto.Field(proto.STRING, number=2,) + values = proto.RepeatedField(proto.STRING, number=3,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/dataproc_v1/types/jobs.py b/google/cloud/dataproc_v1/types/jobs.py index cfb19555..87fa758e 100644 --- a/google/cloud/dataproc_v1/types/jobs.py +++ b/google/cloud/dataproc_v1/types/jobs.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,12 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -55,7 +52,6 @@ class LoggingConfig(proto.Message): r"""The runtime logging config of the job. - Attributes: driver_log_levels (Sequence[google.cloud.dataproc_v1.types.LoggingConfig.DriverLogLevelsEntry]): The per-package log levels for the driver. @@ -131,20 +127,13 @@ class HadoopJob(proto.Message): execution. """ - main_jar_file_uri = proto.Field(proto.STRING, number=1, oneof="driver") - - main_class = proto.Field(proto.STRING, number=2, oneof="driver") - - args = proto.RepeatedField(proto.STRING, number=3) - - jar_file_uris = proto.RepeatedField(proto.STRING, number=4) - - file_uris = proto.RepeatedField(proto.STRING, number=5) - - archive_uris = proto.RepeatedField(proto.STRING, number=6) - - properties = proto.MapField(proto.STRING, proto.STRING, number=7) - + main_jar_file_uri = proto.Field(proto.STRING, number=1, oneof="driver",) + main_class = proto.Field(proto.STRING, number=2, oneof="driver",) + args = proto.RepeatedField(proto.STRING, number=3,) + jar_file_uris = proto.RepeatedField(proto.STRING, number=4,) + file_uris = proto.RepeatedField(proto.STRING, number=5,) + archive_uris = proto.RepeatedField(proto.STRING, number=6,) + properties = proto.MapField(proto.STRING, proto.STRING, number=7,) logging_config = proto.Field(proto.MESSAGE, number=8, message="LoggingConfig",) @@ -189,20 +178,13 @@ class SparkJob(proto.Message): execution. 
""" - main_jar_file_uri = proto.Field(proto.STRING, number=1, oneof="driver") - - main_class = proto.Field(proto.STRING, number=2, oneof="driver") - - args = proto.RepeatedField(proto.STRING, number=3) - - jar_file_uris = proto.RepeatedField(proto.STRING, number=4) - - file_uris = proto.RepeatedField(proto.STRING, number=5) - - archive_uris = proto.RepeatedField(proto.STRING, number=6) - - properties = proto.MapField(proto.STRING, proto.STRING, number=7) - + main_jar_file_uri = proto.Field(proto.STRING, number=1, oneof="driver",) + main_class = proto.Field(proto.STRING, number=2, oneof="driver",) + args = proto.RepeatedField(proto.STRING, number=3,) + jar_file_uris = proto.RepeatedField(proto.STRING, number=4,) + file_uris = proto.RepeatedField(proto.STRING, number=5,) + archive_uris = proto.RepeatedField(proto.STRING, number=6,) + properties = proto.MapField(proto.STRING, proto.STRING, number=7,) logging_config = proto.Field(proto.MESSAGE, number=8, message="LoggingConfig",) @@ -249,33 +231,25 @@ class PySparkJob(proto.Message): execution. """ - main_python_file_uri = proto.Field(proto.STRING, number=1) - - args = proto.RepeatedField(proto.STRING, number=2) - - python_file_uris = proto.RepeatedField(proto.STRING, number=3) - - jar_file_uris = proto.RepeatedField(proto.STRING, number=4) - - file_uris = proto.RepeatedField(proto.STRING, number=5) - - archive_uris = proto.RepeatedField(proto.STRING, number=6) - - properties = proto.MapField(proto.STRING, proto.STRING, number=7) - + main_python_file_uri = proto.Field(proto.STRING, number=1,) + args = proto.RepeatedField(proto.STRING, number=2,) + python_file_uris = proto.RepeatedField(proto.STRING, number=3,) + jar_file_uris = proto.RepeatedField(proto.STRING, number=4,) + file_uris = proto.RepeatedField(proto.STRING, number=5,) + archive_uris = proto.RepeatedField(proto.STRING, number=6,) + properties = proto.MapField(proto.STRING, proto.STRING, number=7,) logging_config = proto.Field(proto.MESSAGE, number=8, message="LoggingConfig",) class QueryList(proto.Message): r"""A list of queries to run on a cluster. - Attributes: queries (Sequence[str]): - Required. The queries to execute. You do not need to - terminate a query with a semicolon. Multiple queries can be + Required. The queries to execute. You do not need to end a + query expression with a semicolon. Multiple queries can be specified in one string by separating each with a semicolon. - Here is an example of an Cloud Dataproc API snippet that - uses a QueryList to specify a HiveJob: + Here is an example of a Dataproc API snippet that uses a + QueryList to specify a HiveJob: :: @@ -290,7 +264,7 @@ class QueryList(proto.Message): } """ - queries = proto.RepeatedField(proto.STRING, number=1) + queries = proto.RepeatedField(proto.STRING, number=1,) class HiveJob(proto.Message): @@ -323,19 +297,14 @@ class HiveJob(proto.Message): and UDFs. 
""" - query_file_uri = proto.Field(proto.STRING, number=1, oneof="queries") - + query_file_uri = proto.Field(proto.STRING, number=1, oneof="queries",) query_list = proto.Field( proto.MESSAGE, number=2, oneof="queries", message="QueryList", ) - - continue_on_failure = proto.Field(proto.BOOL, number=3) - - script_variables = proto.MapField(proto.STRING, proto.STRING, number=4) - - properties = proto.MapField(proto.STRING, proto.STRING, number=5) - - jar_file_uris = proto.RepeatedField(proto.STRING, number=6) + continue_on_failure = proto.Field(proto.BOOL, number=3,) + script_variables = proto.MapField(proto.STRING, proto.STRING, number=4,) + properties = proto.MapField(proto.STRING, proto.STRING, number=5,) + jar_file_uris = proto.RepeatedField(proto.STRING, number=6,) class SparkSqlJob(proto.Message): @@ -365,18 +334,13 @@ class SparkSqlJob(proto.Message): execution. """ - query_file_uri = proto.Field(proto.STRING, number=1, oneof="queries") - + query_file_uri = proto.Field(proto.STRING, number=1, oneof="queries",) query_list = proto.Field( proto.MESSAGE, number=2, oneof="queries", message="QueryList", ) - - script_variables = proto.MapField(proto.STRING, proto.STRING, number=3) - - properties = proto.MapField(proto.STRING, proto.STRING, number=4) - - jar_file_uris = proto.RepeatedField(proto.STRING, number=56) - + script_variables = proto.MapField(proto.STRING, proto.STRING, number=3,) + properties = proto.MapField(proto.STRING, proto.STRING, number=4,) + jar_file_uris = proto.RepeatedField(proto.STRING, number=56,) logging_config = proto.Field(proto.MESSAGE, number=6, message="LoggingConfig",) @@ -412,20 +376,14 @@ class PigJob(proto.Message): execution. """ - query_file_uri = proto.Field(proto.STRING, number=1, oneof="queries") - + query_file_uri = proto.Field(proto.STRING, number=1, oneof="queries",) query_list = proto.Field( proto.MESSAGE, number=2, oneof="queries", message="QueryList", ) - - continue_on_failure = proto.Field(proto.BOOL, number=3) - - script_variables = proto.MapField(proto.STRING, proto.STRING, number=4) - - properties = proto.MapField(proto.STRING, proto.STRING, number=5) - - jar_file_uris = proto.RepeatedField(proto.STRING, number=6) - + continue_on_failure = proto.Field(proto.BOOL, number=3,) + script_variables = proto.MapField(proto.STRING, proto.STRING, number=4,) + properties = proto.MapField(proto.STRING, proto.STRING, number=5,) + jar_file_uris = proto.RepeatedField(proto.STRING, number=6,) logging_config = proto.Field(proto.MESSAGE, number=7, message="LoggingConfig",) @@ -465,16 +423,11 @@ class SparkRJob(proto.Message): execution. """ - main_r_file_uri = proto.Field(proto.STRING, number=1) - - args = proto.RepeatedField(proto.STRING, number=2) - - file_uris = proto.RepeatedField(proto.STRING, number=3) - - archive_uris = proto.RepeatedField(proto.STRING, number=4) - - properties = proto.MapField(proto.STRING, proto.STRING, number=5) - + main_r_file_uri = proto.Field(proto.STRING, number=1,) + args = proto.RepeatedField(proto.STRING, number=2,) + file_uris = proto.RepeatedField(proto.STRING, number=3,) + archive_uris = proto.RepeatedField(proto.STRING, number=4,) + properties = proto.MapField(proto.STRING, proto.STRING, number=5,) logging_config = proto.Field(proto.MESSAGE, number=6, message="LoggingConfig",) @@ -512,26 +465,19 @@ class PrestoJob(proto.Message): execution. 
""" - query_file_uri = proto.Field(proto.STRING, number=1, oneof="queries") - + query_file_uri = proto.Field(proto.STRING, number=1, oneof="queries",) query_list = proto.Field( proto.MESSAGE, number=2, oneof="queries", message="QueryList", ) - - continue_on_failure = proto.Field(proto.BOOL, number=3) - - output_format = proto.Field(proto.STRING, number=4) - - client_tags = proto.RepeatedField(proto.STRING, number=5) - - properties = proto.MapField(proto.STRING, proto.STRING, number=6) - + continue_on_failure = proto.Field(proto.BOOL, number=3,) + output_format = proto.Field(proto.STRING, number=4,) + client_tags = proto.RepeatedField(proto.STRING, number=5,) + properties = proto.MapField(proto.STRING, proto.STRING, number=6,) logging_config = proto.Field(proto.MESSAGE, number=7, message="LoggingConfig",) class JobPlacement(proto.Message): r"""Dataproc job config. - Attributes: cluster_name (str): Required. The name of the cluster where the @@ -539,16 +485,18 @@ class JobPlacement(proto.Message): cluster_uuid (str): Output only. A cluster UUID generated by the Dataproc service when the job is submitted. + cluster_labels (Sequence[google.cloud.dataproc_v1.types.JobPlacement.ClusterLabelsEntry]): + Optional. Cluster labels to identify a + cluster where the job will be submitted. """ - cluster_name = proto.Field(proto.STRING, number=1) - - cluster_uuid = proto.Field(proto.STRING, number=2) + cluster_name = proto.Field(proto.STRING, number=1,) + cluster_uuid = proto.Field(proto.STRING, number=2,) + cluster_labels = proto.MapField(proto.STRING, proto.STRING, number=3,) class JobStatus(proto.Message): r"""Dataproc job status. - Attributes: state (google.cloud.dataproc_v1.types.JobStatus.State): Output only. A state message specifying the @@ -586,19 +534,15 @@ class Substate(proto.Enum): STALE_STATUS = 3 state = proto.Field(proto.ENUM, number=1, enum=State,) - - details = proto.Field(proto.STRING, number=2) - + details = proto.Field(proto.STRING, number=2,) state_start_time = proto.Field( - proto.MESSAGE, number=6, message=timestamp.Timestamp, + proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp, ) - substate = proto.Field(proto.ENUM, number=7, enum=Substate,) class JobReference(proto.Message): r"""Encapsulates the full scoping used to reference a job. - Attributes: project_id (str): Optional. The ID of the Google Cloud Platform @@ -616,9 +560,8 @@ class JobReference(proto.Message): by the server. """ - project_id = proto.Field(proto.STRING, number=1) - - job_id = proto.Field(proto.STRING, number=2) + project_id = proto.Field(proto.STRING, number=1,) + job_id = proto.Field(proto.STRING, number=2,) class YarnApplication(proto.Message): @@ -660,18 +603,14 @@ class State(proto.Enum): FAILED = 7 KILLED = 8 - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) state = proto.Field(proto.ENUM, number=2, enum=State,) - - progress = proto.Field(proto.FLOAT, number=3) - - tracking_url = proto.Field(proto.STRING, number=4) + progress = proto.Field(proto.FLOAT, number=3,) + tracking_url = proto.Field(proto.STRING, number=4,) class Job(proto.Message): r"""A Dataproc job resource. - Attributes: reference (google.cloud.dataproc_v1.types.JobReference): Optional. 
The fully qualified reference to the job, which @@ -739,68 +678,49 @@ class Job(proto.Message): """ reference = proto.Field(proto.MESSAGE, number=1, message="JobReference",) - placement = proto.Field(proto.MESSAGE, number=2, message="JobPlacement",) - hadoop_job = proto.Field( proto.MESSAGE, number=3, oneof="type_job", message="HadoopJob", ) - spark_job = proto.Field( proto.MESSAGE, number=4, oneof="type_job", message="SparkJob", ) - pyspark_job = proto.Field( proto.MESSAGE, number=5, oneof="type_job", message="PySparkJob", ) - hive_job = proto.Field( proto.MESSAGE, number=6, oneof="type_job", message="HiveJob", ) - pig_job = proto.Field(proto.MESSAGE, number=7, oneof="type_job", message="PigJob",) - spark_r_job = proto.Field( proto.MESSAGE, number=21, oneof="type_job", message="SparkRJob", ) - spark_sql_job = proto.Field( proto.MESSAGE, number=12, oneof="type_job", message="SparkSqlJob", ) - presto_job = proto.Field( proto.MESSAGE, number=23, oneof="type_job", message="PrestoJob", ) - status = proto.Field(proto.MESSAGE, number=8, message="JobStatus",) - status_history = proto.RepeatedField(proto.MESSAGE, number=13, message="JobStatus",) - yarn_applications = proto.RepeatedField( proto.MESSAGE, number=9, message="YarnApplication", ) - - driver_output_resource_uri = proto.Field(proto.STRING, number=17) - - driver_control_files_uri = proto.Field(proto.STRING, number=15) - - labels = proto.MapField(proto.STRING, proto.STRING, number=18) - + driver_output_resource_uri = proto.Field(proto.STRING, number=17,) + driver_control_files_uri = proto.Field(proto.STRING, number=15,) + labels = proto.MapField(proto.STRING, proto.STRING, number=18,) scheduling = proto.Field(proto.MESSAGE, number=20, message="JobScheduling",) - - job_uuid = proto.Field(proto.STRING, number=22) - - done = proto.Field(proto.BOOL, number=24) + job_uuid = proto.Field(proto.STRING, number=22,) + done = proto.Field(proto.BOOL, number=24,) class JobScheduling(proto.Message): r"""Job scheduling options. - Attributes: max_failures_per_hour (int): Optional. Maximum number of times per hour a driver may be restarted as a result of driver - terminating with non-zero code before job is + exiting with non-zero code before job is reported failed. A job may be reported as thrashing if driver @@ -808,14 +728,19 @@ class JobScheduling(proto.Message): minute window. Maximum value is 10. + max_failures_total (int): + Optional. Maximum number of times in total a + driver may be restarted as a result of driver + exiting with non-zero code before job is + reported failed. Maximum value is 240. """ - max_failures_per_hour = proto.Field(proto.INT32, number=1) + max_failures_per_hour = proto.Field(proto.INT32, number=1,) + max_failures_total = proto.Field(proto.INT32, number=2,) class SubmitJobRequest(proto.Message): r"""A request to submit a job. - Attributes: project_id (str): Required. The ID of the Google Cloud Platform @@ -828,10 +753,10 @@ class SubmitJobRequest(proto.Message): request_id (str): Optional. A unique id used to identify the request. If the server receives two - [SubmitJobRequest][google.cloud.dataproc.v1.SubmitJobRequest] - requests with the same id, then the second request will be - ignored and the first [Job][google.cloud.dataproc.v1.Job] - created and stored in the backend is returned. + `SubmitJobRequest `__\ s + with the same id, then the second request will be ignored + and the first [Job][google.cloud.dataproc.v1.Job] created + and stored in the backend is returned. 
It is recommended to always set this value to a `UUID `__. @@ -841,18 +766,14 @@ class SubmitJobRequest(proto.Message): characters. """ - project_id = proto.Field(proto.STRING, number=1) - - region = proto.Field(proto.STRING, number=3) - + project_id = proto.Field(proto.STRING, number=1,) + region = proto.Field(proto.STRING, number=3,) job = proto.Field(proto.MESSAGE, number=2, message="Job",) - - request_id = proto.Field(proto.STRING, number=4) + request_id = proto.Field(proto.STRING, number=4,) class JobMetadata(proto.Message): r"""Job Operation metadata. - Attributes: job_id (str): Output only. The job id. @@ -864,13 +785,10 @@ class JobMetadata(proto.Message): Output only. Job submission time. """ - job_id = proto.Field(proto.STRING, number=1) - + job_id = proto.Field(proto.STRING, number=1,) status = proto.Field(proto.MESSAGE, number=2, message="JobStatus",) - - operation_type = proto.Field(proto.STRING, number=3) - - start_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) + operation_type = proto.Field(proto.STRING, number=3,) + start_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) class GetJobRequest(proto.Message): @@ -888,16 +806,13 @@ class GetJobRequest(proto.Message): Required. The job ID. """ - project_id = proto.Field(proto.STRING, number=1) - - region = proto.Field(proto.STRING, number=3) - - job_id = proto.Field(proto.STRING, number=2) + project_id = proto.Field(proto.STRING, number=1,) + region = proto.Field(proto.STRING, number=3,) + job_id = proto.Field(proto.STRING, number=2,) class ListJobsRequest(proto.Message): r"""A request to list jobs in a project. - Attributes: project_id (str): Required. The ID of the Google Cloud Platform @@ -947,24 +862,17 @@ class JobStateMatcher(proto.Enum): ACTIVE = 1 NON_ACTIVE = 2 - project_id = proto.Field(proto.STRING, number=1) - - region = proto.Field(proto.STRING, number=6) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) - - cluster_name = proto.Field(proto.STRING, number=4) - + project_id = proto.Field(proto.STRING, number=1,) + region = proto.Field(proto.STRING, number=6,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + cluster_name = proto.Field(proto.STRING, number=4,) job_state_matcher = proto.Field(proto.ENUM, number=5, enum=JobStateMatcher,) - - filter = proto.Field(proto.STRING, number=7) + filter = proto.Field(proto.STRING, number=7,) class UpdateJobRequest(proto.Message): r"""A request to update a job. - Attributes: project_id (str): Required. The ID of the Google Cloud Platform @@ -984,20 +892,17 @@ class UpdateJobRequest(proto.Message): Currently, labels is the only field that can be updated. """ - project_id = proto.Field(proto.STRING, number=1) - - region = proto.Field(proto.STRING, number=2) - - job_id = proto.Field(proto.STRING, number=3) - + project_id = proto.Field(proto.STRING, number=1,) + region = proto.Field(proto.STRING, number=2,) + job_id = proto.Field(proto.STRING, number=3,) job = proto.Field(proto.MESSAGE, number=4, message="Job",) - - update_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=5, message=field_mask_pb2.FieldMask, + ) class ListJobsResponse(proto.Message): r"""A list of jobs in a project. - Attributes: jobs (Sequence[google.cloud.dataproc_v1.types.Job]): Output only. Jobs list. 
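Editor's note: pulling the job-side additions together (``JobPlacement.cluster_labels``, ``JobScheduling.max_failures_total``, and the recommended UUID ``request_id``), a hedged submission sketch in which the project, bucket, and label values are invented:

    import uuid

    from google.cloud import dataproc_v1

    request = dataproc_v1.SubmitJobRequest(
        project_id="my-project",
        region="us-central1",
        job=dataproc_v1.Job(
            placement=dataproc_v1.JobPlacement(
                # New: pick the target cluster by label instead of by name.
                cluster_labels={"env": "staging"},
            ),
            pyspark_job=dataproc_v1.PySparkJob(
                main_python_file_uri="gs://my-bucket/wordcount.py",
            ),
            scheduling=dataproc_v1.JobScheduling(
                max_failures_per_hour=2,
                max_failures_total=10,  # new overall bound (max 240)
            ),
        ),
        request_id=str(uuid.uuid4()),  # idempotency token, <= 40 chars
    )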
@@ -1013,13 +918,11 @@ def raw_page(self): return self jobs = proto.RepeatedField(proto.MESSAGE, number=1, message="Job",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class CancelJobRequest(proto.Message): r"""A request to cancel a job. - Attributes: project_id (str): Required. The ID of the Google Cloud Platform @@ -1031,16 +934,13 @@ class CancelJobRequest(proto.Message): Required. The job ID. """ - project_id = proto.Field(proto.STRING, number=1) - - region = proto.Field(proto.STRING, number=3) - - job_id = proto.Field(proto.STRING, number=2) + project_id = proto.Field(proto.STRING, number=1,) + region = proto.Field(proto.STRING, number=3,) + job_id = proto.Field(proto.STRING, number=2,) class DeleteJobRequest(proto.Message): r"""A request to delete a job. - Attributes: project_id (str): Required. The ID of the Google Cloud Platform @@ -1052,11 +952,9 @@ class DeleteJobRequest(proto.Message): Required. The job ID. """ - project_id = proto.Field(proto.STRING, number=1) - - region = proto.Field(proto.STRING, number=3) - - job_id = proto.Field(proto.STRING, number=2) + project_id = proto.Field(proto.STRING, number=1,) + region = proto.Field(proto.STRING, number=3,) + job_id = proto.Field(proto.STRING, number=2,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/dataproc_v1/types/operations.py b/google/cloud/dataproc_v1/types/operations.py index 4584b2ab..9448110a 100644 --- a/google/cloud/dataproc_v1/types/operations.py +++ b/google/cloud/dataproc_v1/types/operations.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,11 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -29,7 +26,6 @@ class ClusterOperationStatus(proto.Message): r"""The status of the operation. - Attributes: state (google.cloud.dataproc_v1.types.ClusterOperationStatus.State): Output only. A message containing the @@ -52,19 +48,15 @@ class State(proto.Enum): DONE = 3 state = proto.Field(proto.ENUM, number=1, enum=State,) - - inner_state = proto.Field(proto.STRING, number=2) - - details = proto.Field(proto.STRING, number=3) - + inner_state = proto.Field(proto.STRING, number=2,) + details = proto.Field(proto.STRING, number=3,) state_start_time = proto.Field( - proto.MESSAGE, number=4, message=timestamp.Timestamp, + proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp, ) class ClusterOperationMetadata(proto.Message): r"""Metadata describing the operation. - Attributes: cluster_name (str): Output only. Name of the cluster for the @@ -87,23 +79,16 @@ class ClusterOperationMetadata(proto.Message): operation execution. 
""" - cluster_name = proto.Field(proto.STRING, number=7) - - cluster_uuid = proto.Field(proto.STRING, number=8) - + cluster_name = proto.Field(proto.STRING, number=7,) + cluster_uuid = proto.Field(proto.STRING, number=8,) status = proto.Field(proto.MESSAGE, number=9, message="ClusterOperationStatus",) - status_history = proto.RepeatedField( proto.MESSAGE, number=10, message="ClusterOperationStatus", ) - - operation_type = proto.Field(proto.STRING, number=11) - - description = proto.Field(proto.STRING, number=12) - - labels = proto.MapField(proto.STRING, proto.STRING, number=13) - - warnings = proto.RepeatedField(proto.STRING, number=14) + operation_type = proto.Field(proto.STRING, number=11,) + description = proto.Field(proto.STRING, number=12,) + labels = proto.MapField(proto.STRING, proto.STRING, number=13,) + warnings = proto.RepeatedField(proto.STRING, number=14,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/dataproc_v1/types/shared.py b/google/cloud/dataproc_v1/types/shared.py index df28f11e..2e397004 100644 --- a/google/cloud/dataproc_v1/types/shared.py +++ b/google/cloud/dataproc_v1/types/shared.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore @@ -24,12 +22,20 @@ class Component(proto.Enum): - r"""Cluster components that can be activated.""" + r"""Cluster components that can be activated. + Next ID: 16. + """ COMPONENT_UNSPECIFIED = 0 ANACONDA = 5 + DOCKER = 13 + DRUID = 9 + FLINK = 14 + HBASE = 11 HIVE_WEBHCAT = 3 JUPYTER = 1 PRESTO = 6 + RANGER = 12 + SOLR = 10 ZEPPELIN = 4 ZOOKEEPER = 8 diff --git a/google/cloud/dataproc_v1/types/workflow_templates.py b/google/cloud/dataproc_v1/types/workflow_templates.py index 027fbc74..98987809 100644 --- a/google/cloud/dataproc_v1/types/workflow_templates.py +++ b/google/cloud/dataproc_v1/types/workflow_templates.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,13 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - from google.cloud.dataproc_v1.types import clusters from google.cloud.dataproc_v1.types import jobs as gcd_jobs -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -53,7 +51,6 @@ class WorkflowTemplate(proto.Message): r"""A Dataproc workflow template resource. - Attributes: id (str): @@ -111,29 +108,34 @@ class WorkflowTemplate(proto.Message): are substituted into the template. Values for parameters must be provided when the template is instantiated. + dag_timeout (google.protobuf.duration_pb2.Duration): + Optional. Timeout duration for the DAG of jobs, expressed in + seconds (see `JSON representation of + duration `__). + The timeout duration must be from 10 minutes ("600s") to 24 + hours ("86400s"). The timer begins when the first job is + submitted. If the workflow is running at the end of the + timeout period, any remaining jobs are cancelled, the + workflow is ended, and if the workflow was running on a + `managed + cluster `__, + the cluster is deleted. 
""" - id = proto.Field(proto.STRING, number=2) - - name = proto.Field(proto.STRING, number=1) - - version = proto.Field(proto.INT32, number=3) - - create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) - - labels = proto.MapField(proto.STRING, proto.STRING, number=6) - + id = proto.Field(proto.STRING, number=2,) + name = proto.Field(proto.STRING, number=1,) + version = proto.Field(proto.INT32, number=3,) + create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,) + labels = proto.MapField(proto.STRING, proto.STRING, number=6,) placement = proto.Field( proto.MESSAGE, number=7, message="WorkflowTemplatePlacement", ) - jobs = proto.RepeatedField(proto.MESSAGE, number=8, message="OrderedJob",) - parameters = proto.RepeatedField( proto.MESSAGE, number=9, message="TemplateParameter", ) + dag_timeout = proto.Field(proto.MESSAGE, number=10, message=duration_pb2.Duration,) class WorkflowTemplatePlacement(proto.Message): @@ -155,7 +157,6 @@ class WorkflowTemplatePlacement(proto.Message): managed_cluster = proto.Field( proto.MESSAGE, number=1, oneof="placement", message="ManagedCluster", ) - cluster_selector = proto.Field( proto.MESSAGE, number=2, oneof="placement", message="ClusterSelector", ) @@ -163,7 +164,6 @@ class WorkflowTemplatePlacement(proto.Message): class ManagedCluster(proto.Message): r"""Cluster that is managed by the workflow. - Attributes: cluster_name (str): Required. The cluster name prefix. A unique @@ -191,11 +191,9 @@ class ManagedCluster(proto.Message): cluster. """ - cluster_name = proto.Field(proto.STRING, number=2) - + cluster_name = proto.Field(proto.STRING, number=2,) config = proto.Field(proto.MESSAGE, number=3, message=clusters.ClusterConfig,) - - labels = proto.MapField(proto.STRING, proto.STRING, number=4) + labels = proto.MapField(proto.STRING, proto.STRING, number=4,) class ClusterSelector(proto.Message): @@ -214,14 +212,12 @@ class ClusterSelector(proto.Message): have all labels to match. """ - zone = proto.Field(proto.STRING, number=1) - - cluster_labels = proto.MapField(proto.STRING, proto.STRING, number=2) + zone = proto.Field(proto.STRING, number=1,) + cluster_labels = proto.MapField(proto.STRING, proto.STRING, number=2,) class OrderedJob(proto.Message): r"""A job executed by the workflow. - Attributes: step_id (str): Required. The step id. The id must be unique among all jobs @@ -272,45 +268,34 @@ class OrderedJob(proto.Message): workflow. 
""" - step_id = proto.Field(proto.STRING, number=1) - + step_id = proto.Field(proto.STRING, number=1,) hadoop_job = proto.Field( proto.MESSAGE, number=2, oneof="job_type", message=gcd_jobs.HadoopJob, ) - spark_job = proto.Field( proto.MESSAGE, number=3, oneof="job_type", message=gcd_jobs.SparkJob, ) - pyspark_job = proto.Field( proto.MESSAGE, number=4, oneof="job_type", message=gcd_jobs.PySparkJob, ) - hive_job = proto.Field( proto.MESSAGE, number=5, oneof="job_type", message=gcd_jobs.HiveJob, ) - pig_job = proto.Field( proto.MESSAGE, number=6, oneof="job_type", message=gcd_jobs.PigJob, ) - spark_r_job = proto.Field( proto.MESSAGE, number=11, oneof="job_type", message=gcd_jobs.SparkRJob, ) - spark_sql_job = proto.Field( proto.MESSAGE, number=7, oneof="job_type", message=gcd_jobs.SparkSqlJob, ) - presto_job = proto.Field( proto.MESSAGE, number=12, oneof="job_type", message=gcd_jobs.PrestoJob, ) - - labels = proto.MapField(proto.STRING, proto.STRING, number=8) - + labels = proto.MapField(proto.STRING, proto.STRING, number=8,) scheduling = proto.Field(proto.MESSAGE, number=9, message=gcd_jobs.JobScheduling,) - - prerequisite_step_ids = proto.RepeatedField(proto.STRING, number=10) + prerequisite_step_ids = proto.RepeatedField(proto.STRING, number=10,) class TemplateParameter(proto.Message): @@ -392,18 +377,14 @@ class TemplateParameter(proto.Message): this parameter's value. """ - name = proto.Field(proto.STRING, number=1) - - fields = proto.RepeatedField(proto.STRING, number=2) - - description = proto.Field(proto.STRING, number=3) - + name = proto.Field(proto.STRING, number=1,) + fields = proto.RepeatedField(proto.STRING, number=2,) + description = proto.Field(proto.STRING, number=3,) validation = proto.Field(proto.MESSAGE, number=4, message="ParameterValidation",) class ParameterValidation(proto.Message): r"""Configuration for parameter validation. - Attributes: regex (google.cloud.dataproc_v1.types.RegexValidation): Validation based on regular expressions. @@ -414,7 +395,6 @@ class ParameterValidation(proto.Message): regex = proto.Field( proto.MESSAGE, number=1, oneof="validation_type", message="RegexValidation", ) - values = proto.Field( proto.MESSAGE, number=2, oneof="validation_type", message="ValueValidation", ) @@ -422,7 +402,6 @@ class ParameterValidation(proto.Message): class RegexValidation(proto.Message): r"""Validation based on regular expressions. - Attributes: regexes (Sequence[str]): Required. RE2 regular expressions used to @@ -431,24 +410,22 @@ class RegexValidation(proto.Message): matches are not sufficient). """ - regexes = proto.RepeatedField(proto.STRING, number=1) + regexes = proto.RepeatedField(proto.STRING, number=1,) class ValueValidation(proto.Message): r"""Validation based on a list of allowed values. - Attributes: values (Sequence[str]): Required. List of allowed values for the parameter. """ - values = proto.RepeatedField(proto.STRING, number=1) + values = proto.RepeatedField(proto.STRING, number=1,) class WorkflowMetadata(proto.Message): r"""A Dataproc workflow template resource. - Attributes: template (str): Output only. The resource name of the workflow template as @@ -486,6 +463,18 @@ class WorkflowMetadata(proto.Message): Output only. Workflow end time. cluster_uuid (str): Output only. The UUID of target cluster. + dag_timeout (google.protobuf.duration_pb2.Duration): + Output only. The timeout duration for the DAG of jobs, + expressed in seconds (see `JSON representation of + duration `__). 
+ dag_start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. DAG start time, only set for workflows with + [dag_timeout][google.cloud.dataproc.v1.WorkflowMetadata.dag_timeout] + when DAG begins. + dag_end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. DAG end time, only set for workflows with + [dag_timeout][google.cloud.dataproc.v1.WorkflowMetadata.dag_timeout] + when DAG ends. """ class State(proto.Enum): @@ -495,32 +484,28 @@ class State(proto.Enum): RUNNING = 2 DONE = 3 - template = proto.Field(proto.STRING, number=1) - - version = proto.Field(proto.INT32, number=2) - + template = proto.Field(proto.STRING, number=1,) + version = proto.Field(proto.INT32, number=2,) create_cluster = proto.Field(proto.MESSAGE, number=3, message="ClusterOperation",) - graph = proto.Field(proto.MESSAGE, number=4, message="WorkflowGraph",) - delete_cluster = proto.Field(proto.MESSAGE, number=5, message="ClusterOperation",) - state = proto.Field(proto.ENUM, number=6, enum=State,) - - cluster_name = proto.Field(proto.STRING, number=7) - - parameters = proto.MapField(proto.STRING, proto.STRING, number=8) - - start_time = proto.Field(proto.MESSAGE, number=9, message=timestamp.Timestamp,) - - end_time = proto.Field(proto.MESSAGE, number=10, message=timestamp.Timestamp,) - - cluster_uuid = proto.Field(proto.STRING, number=11) + cluster_name = proto.Field(proto.STRING, number=7,) + parameters = proto.MapField(proto.STRING, proto.STRING, number=8,) + start_time = proto.Field(proto.MESSAGE, number=9, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=10, message=timestamp_pb2.Timestamp,) + cluster_uuid = proto.Field(proto.STRING, number=11,) + dag_timeout = proto.Field(proto.MESSAGE, number=12, message=duration_pb2.Duration,) + dag_start_time = proto.Field( + proto.MESSAGE, number=13, message=timestamp_pb2.Timestamp, + ) + dag_end_time = proto.Field( + proto.MESSAGE, number=14, message=timestamp_pb2.Timestamp, + ) class ClusterOperation(proto.Message): r"""The cluster operation triggered by a workflow. - Attributes: operation_id (str): Output only. The id of the cluster operation. @@ -530,16 +515,13 @@ class ClusterOperation(proto.Message): Output only. Indicates the operation is done. """ - operation_id = proto.Field(proto.STRING, number=1) - - error = proto.Field(proto.STRING, number=2) - - done = proto.Field(proto.BOOL, number=3) + operation_id = proto.Field(proto.STRING, number=1,) + error = proto.Field(proto.STRING, number=2,) + done = proto.Field(proto.BOOL, number=3,) class WorkflowGraph(proto.Message): r"""The workflow graph. - Attributes: nodes (Sequence[google.cloud.dataproc_v1.types.WorkflowNode]): Output only. The workflow nodes. @@ -550,7 +532,6 @@ class WorkflowGraph(proto.Message): class WorkflowNode(proto.Message): r"""The workflow node. - Attributes: step_id (str): Output only. The name of the node. 
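The new DAG timestamps surface on the instantiate operation's metadata; a sketch assuming an existing template (all resource names are placeholders):

    from google.cloud import dataproc_v1

    client = dataproc_v1.WorkflowTemplateServiceClient(
        client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
    )
    op = client.instantiate_workflow_template(
        name="projects/my-project/regions/us-central1/workflowTemplates/my-template"
    )
    op.result()  # blocks until the DAG finishes, fails, or hits dag_timeout
    md = op.metadata  # a WorkflowMetadata message
    print(md.state, md.dag_start_time, md.dag_end_time, md.cluster_uuid)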
@@ -574,20 +555,15 @@ class NodeState(proto.Enum): COMPLETED = 4 FAILED = 5 - step_id = proto.Field(proto.STRING, number=1) - - prerequisite_step_ids = proto.RepeatedField(proto.STRING, number=2) - - job_id = proto.Field(proto.STRING, number=3) - + step_id = proto.Field(proto.STRING, number=1,) + prerequisite_step_ids = proto.RepeatedField(proto.STRING, number=2,) + job_id = proto.Field(proto.STRING, number=3,) state = proto.Field(proto.ENUM, number=5, enum=NodeState,) - - error = proto.Field(proto.STRING, number=6) + error = proto.Field(proto.STRING, number=6,) class CreateWorkflowTemplateRequest(proto.Message): r"""A request to create a workflow template. - Attributes: parent (str): Required. The resource name of the region or location, as @@ -606,14 +582,12 @@ class CreateWorkflowTemplateRequest(proto.Message): create. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) template = proto.Field(proto.MESSAGE, number=2, message="WorkflowTemplate",) class GetWorkflowTemplateRequest(proto.Message): r"""A request to fetch a workflow template. - Attributes: name (str): Required. The resource name of the workflow template, as @@ -634,14 +608,12 @@ class GetWorkflowTemplateRequest(proto.Message): If unspecified, retrieves the current version. """ - name = proto.Field(proto.STRING, number=1) - - version = proto.Field(proto.INT32, number=2) + name = proto.Field(proto.STRING, number=1,) + version = proto.Field(proto.INT32, number=2,) class InstantiateWorkflowTemplateRequest(proto.Message): r"""A request to instantiate a workflow template. - Attributes: name (str): Required. The resource name of the workflow template, as @@ -678,21 +650,17 @@ class InstantiateWorkflowTemplateRequest(proto.Message): parameters (Sequence[google.cloud.dataproc_v1.types.InstantiateWorkflowTemplateRequest.ParametersEntry]): Optional. Map from parameter names to values that should be used for those parameters. Values - may not exceed 100 characters. + may not exceed 1000 characters. """ - name = proto.Field(proto.STRING, number=1) - - version = proto.Field(proto.INT32, number=2) - - request_id = proto.Field(proto.STRING, number=5) - - parameters = proto.MapField(proto.STRING, proto.STRING, number=6) + name = proto.Field(proto.STRING, number=1,) + version = proto.Field(proto.INT32, number=2,) + request_id = proto.Field(proto.STRING, number=5,) + parameters = proto.MapField(proto.STRING, proto.STRING, number=6,) class InstantiateInlineWorkflowTemplateRequest(proto.Message): r"""A request to instantiate an inline workflow template. - Attributes: parent (str): Required. The resource name of the region or location, as @@ -724,16 +692,13 @@ class InstantiateInlineWorkflowTemplateRequest(proto.Message): characters. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) template = proto.Field(proto.MESSAGE, number=2, message="WorkflowTemplate",) - - request_id = proto.Field(proto.STRING, number=3) + request_id = proto.Field(proto.STRING, number=3,) class UpdateWorkflowTemplateRequest(proto.Message): r"""A request to update a workflow template. - Attributes: template (google.cloud.dataproc_v1.types.WorkflowTemplate): Required. The updated workflow template. @@ -747,7 +712,6 @@ class UpdateWorkflowTemplateRequest(proto.Message): class ListWorkflowTemplatesRequest(proto.Message): r"""A request to list workflow templates in a project. - Attributes: parent (str): Required. 
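A sketch of the request form of instantiation, showing version pinning, a retry-safe ``request_id``, and parameter substitution; the values are placeholders:

    from google.cloud import dataproc_v1

    client = dataproc_v1.WorkflowTemplateServiceClient(
        client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
    )
    request = dataproc_v1.InstantiateWorkflowTemplateRequest(
        name="projects/my-project/regions/us-central1/workflowTemplates/my-template",
        version=2,  # must equal the template's current server-side version
        request_id="run-2021-05-20-001",  # lets the server dedupe retried requests
        parameters={"INPUT_URI": "gs://my-bucket/input"},  # values may not exceed 1000 chars
    )
    op = client.instantiate_workflow_template(request=request)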
The resource name of the region or location, as @@ -770,11 +734,9 @@ class ListWorkflowTemplatesRequest(proto.Message): results. """ - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) class ListWorkflowTemplatesResponse(proto.Message): @@ -798,8 +760,7 @@ def raw_page(self): templates = proto.RepeatedField( proto.MESSAGE, number=1, message="WorkflowTemplate", ) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class DeleteWorkflowTemplateRequest(proto.Message): @@ -827,9 +788,8 @@ class DeleteWorkflowTemplateRequest(proto.Message): specified version. """ - name = proto.Field(proto.STRING, number=1) - - version = proto.Field(proto.INT32, number=2) + name = proto.Field(proto.STRING, number=1,) + version = proto.Field(proto.INT32, number=2,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/dataproc_v1beta2/__init__.py b/google/cloud/dataproc_v1beta2/__init__.py index 1a0d3c1a..35b6c8d6 100644 --- a/google/cloud/dataproc_v1beta2/__init__.py +++ b/google/cloud/dataproc_v1beta2/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -16,9 +15,14 @@ # from .services.autoscaling_policy_service import AutoscalingPolicyServiceClient +from .services.autoscaling_policy_service import AutoscalingPolicyServiceAsyncClient from .services.cluster_controller import ClusterControllerClient +from .services.cluster_controller import ClusterControllerAsyncClient from .services.job_controller import JobControllerClient +from .services.job_controller import JobControllerAsyncClient from .services.workflow_template_service import WorkflowTemplateServiceClient +from .services.workflow_template_service import WorkflowTemplateServiceAsyncClient + from .types.autoscaling_policies import AutoscalingPolicy from .types.autoscaling_policies import BasicAutoscalingAlgorithm from .types.autoscaling_policies import BasicYarnAutoscalingConfig @@ -105,8 +109,11 @@ from .types.workflow_templates import WorkflowTemplate from .types.workflow_templates import WorkflowTemplatePlacement - __all__ = ( + "AutoscalingPolicyServiceAsyncClient", + "ClusterControllerAsyncClient", + "JobControllerAsyncClient", + "WorkflowTemplateServiceAsyncClient", "AcceleratorConfig", "AutoscalingConfig", "AutoscalingPolicy", @@ -194,6 +201,6 @@ "WorkflowNode", "WorkflowTemplate", "WorkflowTemplatePlacement", - "YarnApplication", "WorkflowTemplateServiceClient", + "YarnApplication", ) diff --git a/google/cloud/dataproc_v1beta2/gapic_metadata.json b/google/cloud/dataproc_v1beta2/gapic_metadata.json new file mode 100644 index 00000000..c20241a8 --- /dev/null +++ b/google/cloud/dataproc_v1beta2/gapic_metadata.json @@ -0,0 +1,315 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.dataproc_v1beta2", + "protoPackage": "google.cloud.dataproc.v1beta2", + "schema": "1.0", + "services": { + "AutoscalingPolicyService": { + "clients": { + "grpc": { + "libraryClient": "AutoscalingPolicyServiceClient", + "rpcs": { + "CreateAutoscalingPolicy": { + "methods": [ + "create_autoscaling_policy" + ] + }, + "DeleteAutoscalingPolicy": { + 
"methods": [ + "delete_autoscaling_policy" + ] + }, + "GetAutoscalingPolicy": { + "methods": [ + "get_autoscaling_policy" + ] + }, + "ListAutoscalingPolicies": { + "methods": [ + "list_autoscaling_policies" + ] + }, + "UpdateAutoscalingPolicy": { + "methods": [ + "update_autoscaling_policy" + ] + } + } + }, + "grpc-async": { + "libraryClient": "AutoscalingPolicyServiceAsyncClient", + "rpcs": { + "CreateAutoscalingPolicy": { + "methods": [ + "create_autoscaling_policy" + ] + }, + "DeleteAutoscalingPolicy": { + "methods": [ + "delete_autoscaling_policy" + ] + }, + "GetAutoscalingPolicy": { + "methods": [ + "get_autoscaling_policy" + ] + }, + "ListAutoscalingPolicies": { + "methods": [ + "list_autoscaling_policies" + ] + }, + "UpdateAutoscalingPolicy": { + "methods": [ + "update_autoscaling_policy" + ] + } + } + } + } + }, + "ClusterController": { + "clients": { + "grpc": { + "libraryClient": "ClusterControllerClient", + "rpcs": { + "CreateCluster": { + "methods": [ + "create_cluster" + ] + }, + "DeleteCluster": { + "methods": [ + "delete_cluster" + ] + }, + "DiagnoseCluster": { + "methods": [ + "diagnose_cluster" + ] + }, + "GetCluster": { + "methods": [ + "get_cluster" + ] + }, + "ListClusters": { + "methods": [ + "list_clusters" + ] + }, + "UpdateCluster": { + "methods": [ + "update_cluster" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ClusterControllerAsyncClient", + "rpcs": { + "CreateCluster": { + "methods": [ + "create_cluster" + ] + }, + "DeleteCluster": { + "methods": [ + "delete_cluster" + ] + }, + "DiagnoseCluster": { + "methods": [ + "diagnose_cluster" + ] + }, + "GetCluster": { + "methods": [ + "get_cluster" + ] + }, + "ListClusters": { + "methods": [ + "list_clusters" + ] + }, + "UpdateCluster": { + "methods": [ + "update_cluster" + ] + } + } + } + } + }, + "JobController": { + "clients": { + "grpc": { + "libraryClient": "JobControllerClient", + "rpcs": { + "CancelJob": { + "methods": [ + "cancel_job" + ] + }, + "DeleteJob": { + "methods": [ + "delete_job" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + }, + "SubmitJob": { + "methods": [ + "submit_job" + ] + }, + "SubmitJobAsOperation": { + "methods": [ + "submit_job_as_operation" + ] + }, + "UpdateJob": { + "methods": [ + "update_job" + ] + } + } + }, + "grpc-async": { + "libraryClient": "JobControllerAsyncClient", + "rpcs": { + "CancelJob": { + "methods": [ + "cancel_job" + ] + }, + "DeleteJob": { + "methods": [ + "delete_job" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + }, + "SubmitJob": { + "methods": [ + "submit_job" + ] + }, + "SubmitJobAsOperation": { + "methods": [ + "submit_job_as_operation" + ] + }, + "UpdateJob": { + "methods": [ + "update_job" + ] + } + } + } + } + }, + "WorkflowTemplateService": { + "clients": { + "grpc": { + "libraryClient": "WorkflowTemplateServiceClient", + "rpcs": { + "CreateWorkflowTemplate": { + "methods": [ + "create_workflow_template" + ] + }, + "DeleteWorkflowTemplate": { + "methods": [ + "delete_workflow_template" + ] + }, + "GetWorkflowTemplate": { + "methods": [ + "get_workflow_template" + ] + }, + "InstantiateInlineWorkflowTemplate": { + "methods": [ + "instantiate_inline_workflow_template" + ] + }, + "InstantiateWorkflowTemplate": { + "methods": [ + "instantiate_workflow_template" + ] + }, + "ListWorkflowTemplates": { + "methods": [ + "list_workflow_templates" + ] + }, + "UpdateWorkflowTemplate": { + "methods": [ + "update_workflow_template" + ] + } + 
} + }, + "grpc-async": { + "libraryClient": "WorkflowTemplateServiceAsyncClient", + "rpcs": { + "CreateWorkflowTemplate": { + "methods": [ + "create_workflow_template" + ] + }, + "DeleteWorkflowTemplate": { + "methods": [ + "delete_workflow_template" + ] + }, + "GetWorkflowTemplate": { + "methods": [ + "get_workflow_template" + ] + }, + "InstantiateInlineWorkflowTemplate": { + "methods": [ + "instantiate_inline_workflow_template" + ] + }, + "InstantiateWorkflowTemplate": { + "methods": [ + "instantiate_workflow_template" + ] + }, + "ListWorkflowTemplates": { + "methods": [ + "list_workflow_templates" + ] + }, + "UpdateWorkflowTemplate": { + "methods": [ + "update_workflow_template" + ] + } + } + } + } + } + } +} diff --git a/google/cloud/dataproc_v1beta2/services/__init__.py b/google/cloud/dataproc_v1beta2/services/__init__.py index 42ffdf2b..4de65971 100644 --- a/google/cloud/dataproc_v1beta2/services/__init__.py +++ b/google/cloud/dataproc_v1beta2/services/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/__init__.py b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/__init__.py index e33cbc43..08e39679 100644 --- a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/__init__.py +++ b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .client import AutoscalingPolicyServiceClient from .async_client import AutoscalingPolicyServiceAsyncClient diff --git a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/async_client.py b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/async_client.py index 5b254961..afcd8629 100644 --- a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/async_client.py +++ b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
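The new ``gapic_metadata.json`` ships inside the package and maps proto RPC names to generated client method names; a sketch that reads it from an installed v1beta2 package:

    import json
    import os

    import google.cloud.dataproc_v1beta2 as dataproc_v1beta2

    path = os.path.join(os.path.dirname(dataproc_v1beta2.__file__), "gapic_metadata.json")
    with open(path) as f:
        meta = json.load(f)
    rpcs = meta["services"]["WorkflowTemplateService"]["clients"]["grpc"]["rpcs"]
    print(rpcs["InstantiateWorkflowTemplate"]["methods"])  # ["instantiate_workflow_template"]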
# - from collections import OrderedDict import functools import re @@ -22,15 +20,14 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.cloud.dataproc_v1beta2.services.autoscaling_policy_service import pagers from google.cloud.dataproc_v1beta2.types import autoscaling_policies - from .transports.base import AutoscalingPolicyServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import AutoscalingPolicyServiceGrpcAsyncIOTransport from .client import AutoscalingPolicyServiceClient @@ -52,33 +49,28 @@ class AutoscalingPolicyServiceAsyncClient: parse_autoscaling_policy_path = staticmethod( AutoscalingPolicyServiceClient.parse_autoscaling_policy_path ) - common_billing_account_path = staticmethod( AutoscalingPolicyServiceClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( AutoscalingPolicyServiceClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(AutoscalingPolicyServiceClient.common_folder_path) parse_common_folder_path = staticmethod( AutoscalingPolicyServiceClient.parse_common_folder_path ) - common_organization_path = staticmethod( AutoscalingPolicyServiceClient.common_organization_path ) parse_common_organization_path = staticmethod( AutoscalingPolicyServiceClient.parse_common_organization_path ) - common_project_path = staticmethod( AutoscalingPolicyServiceClient.common_project_path ) parse_common_project_path = staticmethod( AutoscalingPolicyServiceClient.parse_common_project_path ) - common_location_path = staticmethod( AutoscalingPolicyServiceClient.common_location_path ) @@ -88,7 +80,8 @@ class AutoscalingPolicyServiceAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -103,7 +96,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -120,7 +113,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> AutoscalingPolicyServiceTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: AutoscalingPolicyServiceTransport: The transport used by the client instance. @@ -135,12 +128,12 @@ def transport(self) -> AutoscalingPolicyServiceTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, AutoscalingPolicyServiceTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the autoscaling policy service client. 
+ """Instantiates the autoscaling policy service client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -172,7 +165,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = AutoscalingPolicyServiceClient( credentials=credentials, transport=transport, @@ -220,7 +212,6 @@ async def create_autoscaling_policy( This corresponds to the ``policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -247,7 +238,6 @@ async def create_autoscaling_policy( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if policy is not None: @@ -298,7 +288,6 @@ async def update_autoscaling_policy( This corresponds to the ``policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -325,7 +314,6 @@ async def update_autoscaling_policy( # If we have keyword arguments corresponding to fields on the # request, apply these. - if policy is not None: request.policy = policy @@ -338,7 +326,8 @@ async def update_autoscaling_policy( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=600.0, ), @@ -392,7 +381,6 @@ async def get_autoscaling_policy( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -419,7 +407,6 @@ async def get_autoscaling_policy( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -432,7 +419,8 @@ async def get_autoscaling_policy( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=600.0, ), @@ -484,7 +472,6 @@ async def list_autoscaling_policies( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -514,7 +501,6 @@ async def list_autoscaling_policies( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -527,7 +513,8 @@ async def list_autoscaling_policies( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=600.0, ), @@ -591,7 +578,6 @@ async def delete_autoscaling_policy( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. @@ -612,7 +598,6 @@ async def delete_autoscaling_policy( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name diff --git a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/client.py b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/client.py index d7ac0e31..a6b9f83c 100644 --- a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/client.py +++ b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from distutils import util import os @@ -23,10 +21,10 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -34,7 +32,6 @@ from google.cloud.dataproc_v1beta2.services.autoscaling_policy_service import pagers from google.cloud.dataproc_v1beta2.types import autoscaling_policies - from .transports.base import AutoscalingPolicyServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc import AutoscalingPolicyServiceGrpcTransport from .transports.grpc_asyncio import AutoscalingPolicyServiceGrpcAsyncIOTransport @@ -57,7 +54,7 @@ class AutoscalingPolicyServiceClientMeta(type): def get_transport_class( cls, label: str = None, ) -> Type[AutoscalingPolicyServiceTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -82,7 +79,8 @@ class AutoscalingPolicyServiceClient(metaclass=AutoscalingPolicyServiceClientMet @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -116,7 +114,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -133,7 +132,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. 
Args: filename (str): The path to the service account private key json @@ -152,10 +151,11 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> AutoscalingPolicyServiceTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - AutoscalingPolicyServiceTransport: The transport used by the client instance. + AutoscalingPolicyServiceTransport: The transport used by the client + instance. """ return self._transport @@ -163,14 +163,14 @@ def transport(self) -> AutoscalingPolicyServiceTransport: def autoscaling_policy_path( project: str, location: str, autoscaling_policy: str, ) -> str: - """Return a fully-qualified autoscaling_policy string.""" + """Returns a fully-qualified autoscaling_policy string.""" return "projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}".format( project=project, location=location, autoscaling_policy=autoscaling_policy, ) @staticmethod def parse_autoscaling_policy_path(path: str) -> Dict[str, str]: - """Parse a autoscaling_policy path into its component segments.""" + """Parses a autoscaling_policy path into its component segments.""" m = re.match( r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/autoscalingPolicies/(?P<autoscaling_policy>.+?)$", path, @@ -179,7 +179,7 @@ def parse_autoscaling_policy_path(path: str) -> Dict[str, str]: @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -192,7 +192,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -203,7 +203,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -214,7 +214,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -225,7 +225,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -239,12 +239,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, AutoscalingPolicyServiceTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the autoscaling policy service client. + """Instantiates the autoscaling policy service client.
Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -299,9 +299,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -313,12 +314,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -333,8 +336,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -389,7 +392,6 @@ def create_autoscaling_policy( This corresponds to the ``policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -418,10 +420,8 @@ def create_autoscaling_policy( # there are no flattened fields. if not isinstance(request, autoscaling_policies.CreateAutoscalingPolicyRequest): request = autoscaling_policies.CreateAutoscalingPolicyRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if policy is not None: @@ -470,7 +470,6 @@ def update_autoscaling_policy( This corresponds to the ``policy`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -499,10 +498,8 @@ def update_autoscaling_policy( # there are no flattened fields. if not isinstance(request, autoscaling_policies.UpdateAutoscalingPolicyRequest): request = autoscaling_policies.UpdateAutoscalingPolicyRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if policy is not None: request.policy = policy @@ -558,7 +555,6 @@ def get_autoscaling_policy( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -587,10 +583,8 @@ def get_autoscaling_policy( # there are no flattened fields. if not isinstance(request, autoscaling_policies.GetAutoscalingPolicyRequest): request = autoscaling_policies.GetAutoscalingPolicyRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
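The endpoint selection above reduces to a small decision function over ``GOOGLE_API_USE_MTLS_ENDPOINT``; the same logic as a standalone sketch (not part of the generated code):

    import os

    def select_api_endpoint(default: str, mtls_default: str, have_client_cert: bool) -> str:
        # "never" -> plain endpoint, "always" -> mTLS endpoint,
        # "auto" (the default) -> mTLS only when a client cert is available.
        mode = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
        if mode == "never":
            return default
        if mode == "always":
            return mtls_default
        if mode == "auto":
            return mtls_default if have_client_cert else default
        raise ValueError("Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value: " + mode)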
- if name is not None: request.name = name @@ -642,7 +636,6 @@ def list_autoscaling_policies( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -674,10 +667,8 @@ def list_autoscaling_policies( # there are no flattened fields. if not isinstance(request, autoscaling_policies.ListAutoscalingPoliciesRequest): request = autoscaling_policies.ListAutoscalingPoliciesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -743,7 +734,6 @@ def delete_autoscaling_policy( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -766,10 +756,8 @@ def delete_autoscaling_policy( # there are no flattened fields. if not isinstance(request, autoscaling_policies.DeleteAutoscalingPolicyRequest): request = autoscaling_policies.DeleteAutoscalingPolicyRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name diff --git a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/pagers.py b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/pagers.py index b74bf96a..88bb92d0 100644 --- a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/pagers.py +++ b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -119,7 +117,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/__init__.py b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/__init__.py index 48eb9a18..3e31e172 100644 --- a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/__init__.py +++ b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
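The ``ListAutoscalingPoliciesPager`` instantiated above resolves ``next_page_token`` transparently; a sketch with placeholder resource names:

    from google.cloud import dataproc_v1beta2

    client = dataproc_v1beta2.AutoscalingPolicyServiceClient()
    # Iterating the pager fetches further pages behind the scenes.
    for policy in client.list_autoscaling_policies(
        parent="projects/my-project/locations/us-central1"
    ):
        print(policy.id)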
# - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/base.py b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/base.py index 64375cee..bdd6aec0 100644 --- a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/base.py +++ b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,20 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.dataproc_v1beta2.types import autoscaling_policies -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -36,27 +35,41 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class AutoscalingPolicyServiceTransport(abc.ABC): """Abstract transport class for AutoscalingPolicyService.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + DEFAULT_HOST: str = "dataproc.googleapis.com" + def __init__( self, *, - host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -65,7 +78,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. 
+ scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -79,29 +92,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
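Both helpers above gate keyword arguments on a parsed library version; the same pattern in isolation, as a sketch (the threshold mirrors the google-auth 1.25.0 check, where ``default_scopes`` was introduced):

    import packaging.version

    CLOUD_PLATFORM = ("https://www.googleapis.com/auth/cloud-platform",)

    def scopes_kwargs(auth_version: str, scopes=None) -> dict:
        # Newer google-auth accepts default_scopes; older versions only take scopes.
        if packaging.version.parse(auth_version) >= packaging.version.parse("1.25.0"):
            return {"scopes": scopes, "default_scopes": CLOUD_PLATFORM}
        return {"scopes": scopes or CLOUD_PLATFORM}

    print(scopes_kwargs("1.30.0"))  # {'scopes': None, 'default_scopes': (...)}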
self._wrapped_methods = { @@ -117,7 +177,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=600.0, ), @@ -131,7 +192,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=600.0, ), @@ -145,7 +207,8 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=600.0, ), @@ -162,11 +225,11 @@ def _prep_wrapped_messages(self, client_info): @property def create_autoscaling_policy( self, - ) -> typing.Callable[ + ) -> Callable[ [autoscaling_policies.CreateAutoscalingPolicyRequest], - typing.Union[ + Union[ autoscaling_policies.AutoscalingPolicy, - typing.Awaitable[autoscaling_policies.AutoscalingPolicy], + Awaitable[autoscaling_policies.AutoscalingPolicy], ], ]: raise NotImplementedError() @@ -174,11 +237,11 @@ def create_autoscaling_policy( @property def update_autoscaling_policy( self, - ) -> typing.Callable[ + ) -> Callable[ [autoscaling_policies.UpdateAutoscalingPolicyRequest], - typing.Union[ + Union[ autoscaling_policies.AutoscalingPolicy, - typing.Awaitable[autoscaling_policies.AutoscalingPolicy], + Awaitable[autoscaling_policies.AutoscalingPolicy], ], ]: raise NotImplementedError() @@ -186,11 +249,11 @@ def update_autoscaling_policy( @property def get_autoscaling_policy( self, - ) -> typing.Callable[ + ) -> Callable[ [autoscaling_policies.GetAutoscalingPolicyRequest], - typing.Union[ + Union[ autoscaling_policies.AutoscalingPolicy, - typing.Awaitable[autoscaling_policies.AutoscalingPolicy], + Awaitable[autoscaling_policies.AutoscalingPolicy], ], ]: raise NotImplementedError() @@ -198,11 +261,11 @@ def get_autoscaling_policy( @property def list_autoscaling_policies( self, - ) -> typing.Callable[ + ) -> Callable[ [autoscaling_policies.ListAutoscalingPoliciesRequest], - typing.Union[ + Union[ autoscaling_policies.ListAutoscalingPoliciesResponse, - typing.Awaitable[autoscaling_policies.ListAutoscalingPoliciesResponse], + Awaitable[autoscaling_policies.ListAutoscalingPoliciesResponse], ], ]: raise NotImplementedError() @@ -210,9 +273,9 @@ def list_autoscaling_policies( @property def delete_autoscaling_policy( self, - ) -> typing.Callable[ + ) -> Callable[ [autoscaling_policies.DeleteAutoscalingPolicyRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() diff --git a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/grpc.py b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/grpc.py index 28018b7c..ed8f6815 100644 --- a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/grpc.py +++ b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,21 +13,19 @@ # See the License for the specific language governing permissions and # 
limitations under the License. # - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.dataproc_v1beta2.types import autoscaling_policies -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import AutoscalingPolicyServiceTransport, DEFAULT_CLIENT_INFO @@ -52,7 +49,7 @@ def __init__( self, *, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -66,7 +63,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -176,7 +174,7 @@ def __init__( def create_channel( cls, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -207,13 +205,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -345,7 +345,9 @@ def list_autoscaling_policies( @property def delete_autoscaling_policy( self, - ) -> Callable[[autoscaling_policies.DeleteAutoscalingPolicyRequest], empty.Empty]: + ) -> Callable[ + [autoscaling_policies.DeleteAutoscalingPolicyRequest], empty_pb2.Empty + ]: r"""Return a callable for the delete autoscaling policy method over gRPC. Deletes an autoscaling policy. 
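A sketch of wiring a pre-built channel through the gRPC transport, which is where the ``create_channel`` classmethod above (now routing scopes through ``_get_self_signed_jwt_kwargs``) is typically exercised; the host shown is the service default:

    from google.cloud.dataproc_v1beta2.services.autoscaling_policy_service import (
        AutoscalingPolicyServiceClient,
    )
    from google.cloud.dataproc_v1beta2.services.autoscaling_policy_service.transports.grpc import (
        AutoscalingPolicyServiceGrpcTransport,
    )

    # Uses application default credentials unless credentials are passed explicitly.
    channel = AutoscalingPolicyServiceGrpcTransport.create_channel("dataproc.googleapis.com")
    transport = AutoscalingPolicyServiceGrpcTransport(channel=channel)
    client = AutoscalingPolicyServiceClient(transport=transport)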
It is an error to @@ -366,7 +368,7 @@ def delete_autoscaling_policy( self._stubs["delete_autoscaling_policy"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1beta2.AutoscalingPolicyService/DeleteAutoscalingPolicy", request_serializer=autoscaling_policies.DeleteAutoscalingPolicyRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_autoscaling_policy"] diff --git a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/grpc_asyncio.py b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/grpc_asyncio.py index 53d446d8..b3f6f6b3 100644 --- a/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/grpc_asyncio.py +++ b/google/cloud/dataproc_v1beta2/services/autoscaling_policy_service/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.dataproc_v1beta2.types import autoscaling_policies -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import AutoscalingPolicyServiceTransport, DEFAULT_CLIENT_INFO from .grpc import AutoscalingPolicyServiceGrpcTransport @@ -55,7 +52,7 @@ class AutoscalingPolicyServiceGrpcAsyncIOTransport(AutoscalingPolicyServiceTrans def create_channel( cls, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -82,13 +79,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -96,7 +95,7 @@ def __init__( self, *, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -110,7 +109,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none @@ -168,7 +168,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -350,7 +349,8 @@ def list_autoscaling_policies( def delete_autoscaling_policy( self, ) -> Callable[ - [autoscaling_policies.DeleteAutoscalingPolicyRequest], Awaitable[empty.Empty] + [autoscaling_policies.DeleteAutoscalingPolicyRequest], + Awaitable[empty_pb2.Empty], ]: r"""Return a callable for the delete autoscaling policy method over gRPC. @@ -372,7 +372,7 @@ def delete_autoscaling_policy( self._stubs["delete_autoscaling_policy"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1beta2.AutoscalingPolicyService/DeleteAutoscalingPolicy", request_serializer=autoscaling_policies.DeleteAutoscalingPolicyRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_autoscaling_policy"] diff --git a/google/cloud/dataproc_v1beta2/services/cluster_controller/__init__.py b/google/cloud/dataproc_v1beta2/services/cluster_controller/__init__.py index 99ce2997..62affd76 100644 --- a/google/cloud/dataproc_v1beta2/services/cluster_controller/__init__.py +++ b/google/cloud/dataproc_v1beta2/services/cluster_controller/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .client import ClusterControllerClient from .async_client import ClusterControllerAsyncClient diff --git a/google/cloud/dataproc_v1beta2/services/cluster_controller/async_client.py b/google/cloud/dataproc_v1beta2/services/cluster_controller/async_client.py index a9f5f7e7..29a79d13 100644 --- a/google/cloud/dataproc_v1beta2/services/cluster_controller/async_client.py +++ b/google/cloud/dataproc_v1beta2/services/cluster_controller/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict import functools import re @@ -22,10 +20,10 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.api_core import operation # type: ignore @@ -33,9 +31,8 @@ from google.cloud.dataproc_v1beta2.services.cluster_controller import pagers from google.cloud.dataproc_v1beta2.types import clusters from google.cloud.dataproc_v1beta2.types import operations -from google.protobuf import empty_pb2 as empty # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore - +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore from .transports.base import ClusterControllerTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import ClusterControllerGrpcAsyncIOTransport from .client import ClusterControllerClient @@ -53,31 +50,26 @@ class ClusterControllerAsyncClient: cluster_path = staticmethod(ClusterControllerClient.cluster_path) parse_cluster_path = staticmethod(ClusterControllerClient.parse_cluster_path) - common_billing_account_path = staticmethod( ClusterControllerClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( ClusterControllerClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(ClusterControllerClient.common_folder_path) parse_common_folder_path = staticmethod( ClusterControllerClient.parse_common_folder_path ) - common_organization_path = staticmethod( ClusterControllerClient.common_organization_path ) parse_common_organization_path = staticmethod( ClusterControllerClient.parse_common_organization_path ) - common_project_path = staticmethod(ClusterControllerClient.common_project_path) parse_common_project_path = staticmethod( ClusterControllerClient.parse_common_project_path ) - common_location_path = staticmethod(ClusterControllerClient.common_location_path) parse_common_location_path = staticmethod( ClusterControllerClient.parse_common_location_path @@ -85,7 +77,8 @@ class ClusterControllerAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -100,7 +93,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -117,7 +110,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> ClusterControllerTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: ClusterControllerTransport: The transport used by the client instance. 
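A usage sketch of the factory methods and transport property touched above; the key path, project, and region are placeholders, not values from this patch:

    from google.cloud import dataproc_v1beta2

    async def show_clusters():
        # from_service_account_file() loads credentials from a key file; the
        # async client defaults to the "grpc_asyncio" transport, per the
        # __init__ signature in the next hunk.
        client = dataproc_v1beta2.ClusterControllerAsyncClient.from_service_account_file(
            "service-account.json"  # placeholder path
        )
        pager = await client.list_clusters(
            project_id="my-project", region="us-central1"  # placeholders
        )
        async for cluster in pager:
            print(cluster.cluster_name)
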
@@ -131,12 +124,12 @@ def transport(self) -> ClusterControllerTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, ClusterControllerTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the cluster controller client. + """Instantiates the cluster controller client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -168,7 +161,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = ClusterControllerClient( credentials=credentials, transport=transport, @@ -215,7 +207,6 @@ async def create_cluster( This corresponds to the ``cluster`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -244,7 +235,6 @@ async def create_cluster( # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -260,7 +250,9 @@ async def create_cluster( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=300.0, ), default_timeout=300.0, @@ -289,7 +281,7 @@ async def update_cluster( region: str = None, cluster_name: str = None, cluster: clusters.Cluster = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -401,7 +393,6 @@ async def update_cluster( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -432,7 +423,6 @@ async def update_cluster( # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -452,7 +442,9 @@ async def update_cluster( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=300.0, ), default_timeout=300.0, @@ -512,7 +504,6 @@ async def delete_cluster( This corresponds to the ``cluster_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -552,7 +543,6 @@ async def delete_cluster( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if project_id is not None: request.project_id = project_id if region is not None: @@ -568,7 +558,9 @@ async def delete_cluster( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=300.0, ), default_timeout=300.0, @@ -582,7 +574,7 @@ async def delete_cluster( response = operation_async.from_gapic( response, self._client._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=operations.ClusterOperationMetadata, ) @@ -627,7 +619,6 @@ async def get_cluster( This corresponds to the ``cluster_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -655,7 +646,6 @@ async def get_cluster( # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -672,9 +662,9 @@ async def get_cluster( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -749,7 +739,6 @@ async def list_clusters( This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -779,7 +768,6 @@ async def list_clusters( # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -796,9 +784,9 @@ async def list_clusters( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -861,7 +849,6 @@ async def diagnose_cluster( This corresponds to the ``cluster_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -901,7 +888,6 @@ async def diagnose_cluster( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if project_id is not None: request.project_id = project_id if region is not None: @@ -917,7 +903,9 @@ async def diagnose_cluster( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=300.0, ), default_timeout=300.0, @@ -931,7 +919,7 @@ async def diagnose_cluster( response = operation_async.from_gapic( response, self._client._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=operations.ClusterOperationMetadata, ) diff --git a/google/cloud/dataproc_v1beta2/services/cluster_controller/client.py b/google/cloud/dataproc_v1beta2/services/cluster_controller/client.py index 167dfd57..69a09ce7 100644 --- a/google/cloud/dataproc_v1beta2/services/cluster_controller/client.py +++ b/google/cloud/dataproc_v1beta2/services/cluster_controller/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from distutils import util import os @@ -23,10 +21,10 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -37,9 +35,8 @@ from google.cloud.dataproc_v1beta2.services.cluster_controller import pagers from google.cloud.dataproc_v1beta2.types import clusters from google.cloud.dataproc_v1beta2.types import operations -from google.protobuf import empty_pb2 as empty # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore - +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore from .transports.base import ClusterControllerTransport, DEFAULT_CLIENT_INFO from .transports.grpc import ClusterControllerGrpcTransport from .transports.grpc_asyncio import ClusterControllerGrpcAsyncIOTransport @@ -62,7 +59,7 @@ class ClusterControllerClientMeta(type): def get_transport_class( cls, label: str = None, ) -> Type[ClusterControllerTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -87,7 +84,8 @@ class ClusterControllerClient(metaclass=ClusterControllerClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -121,7 +119,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. 
+ """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -138,7 +137,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -157,23 +156,24 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> ClusterControllerTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - ClusterControllerTransport: The transport used by the client instance. + ClusterControllerTransport: The transport used by the client + instance. """ return self._transport @staticmethod def cluster_path(project: str, location: str, cluster: str,) -> str: - """Return a fully-qualified cluster string.""" + """Returns a fully-qualified cluster string.""" return "projects/{project}/locations/{location}/clusters/{cluster}".format( project=project, location=location, cluster=cluster, ) @staticmethod def parse_cluster_path(path: str) -> Dict[str, str]: - """Parse a cluster path into its component segments.""" + """Parses a cluster path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/clusters/(?P.+?)$", path, @@ -182,7 +182,7 @@ def parse_cluster_path(path: str) -> Dict[str, str]: @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -195,7 +195,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -206,7 +206,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -217,7 +217,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -228,7 +228,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -242,12 +242,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, ClusterControllerTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = 
DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the cluster controller client. + """Instantiates the cluster controller client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -302,9 +302,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -316,12 +317,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -336,8 +339,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -391,7 +394,6 @@ def create_cluster( This corresponds to the ``cluster`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -422,10 +424,8 @@ def create_cluster( # there are no flattened fields. if not isinstance(request, clusters.CreateClusterRequest): request = clusters.CreateClusterRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -459,7 +459,7 @@ def update_cluster( region: str = None, cluster_name: str = None, cluster: clusters.Cluster = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -571,7 +571,6 @@ def update_cluster( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -604,10 +603,8 @@ def update_cluster( # there are no flattened fields. if not isinstance(request, clusters.UpdateClusterRequest): request = clusters.UpdateClusterRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -676,7 +673,6 @@ def delete_cluster( This corresponds to the ``cluster_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -718,10 +714,8 @@ def delete_cluster( # there are no flattened fields. 
if not isinstance(request, clusters.DeleteClusterRequest): request = clusters.DeleteClusterRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -740,7 +734,7 @@ def delete_cluster( response = operation.from_gapic( response, self._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=operations.ClusterOperationMetadata, ) @@ -785,7 +779,6 @@ def get_cluster( This corresponds to the ``cluster_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -815,10 +808,8 @@ def get_cluster( # there are no flattened fields. if not isinstance(request, clusters.GetClusterRequest): request = clusters.GetClusterRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -897,7 +888,6 @@ def list_clusters( This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -929,10 +919,8 @@ def list_clusters( # there are no flattened fields. if not isinstance(request, clusters.ListClustersRequest): request = clusters.ListClustersRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -999,7 +987,6 @@ def diagnose_cluster( This corresponds to the ``cluster_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1041,10 +1028,8 @@ def diagnose_cluster( # there are no flattened fields. if not isinstance(request, clusters.DiagnoseClusterRequest): request = clusters.DiagnoseClusterRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -1063,7 +1048,7 @@ def diagnose_cluster( response = operation.from_gapic( response, self._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=operations.ClusterOperationMetadata, ) diff --git a/google/cloud/dataproc_v1beta2/services/cluster_controller/pagers.py b/google/cloud/dataproc_v1beta2/services/cluster_controller/pagers.py index 84576e5f..042d4c0a 100644 --- a/google/cloud/dataproc_v1beta2/services/cluster_controller/pagers.py +++ b/google/cloud/dataproc_v1beta2/services/cluster_controller/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -117,7 +115,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. 
Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/__init__.py b/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/__init__.py index a487348a..df574dd0 100644 --- a/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/__init__.py +++ b/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/base.py b/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/base.py index 10250808..37779c85 100644 --- a/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/base.py +++ b/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,21 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.dataproc_v1beta2.types import clusters -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -37,27 +36,41 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class ClusterControllerTransport(abc.ABC): """Abstract transport class for ClusterController.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + DEFAULT_HOST: str = "dataproc.googleapis.com" + def __init__( self, *, - host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = 
None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -66,7 +79,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -80,29 +93,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. 
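# Sketch (not from this patch): the call the two helpers below ultimately
# produce on google-api-core >= 1.26.0, using the Dataproc transport defaults
# defined above. Passing default_scopes and default_host lets google-auth
# mint self-signed JWTs for service-account credentials when the caller
# supplies no explicit scopes.
from google.api_core import grpc_helpers

channel = grpc_helpers.create_channel(
    "dataproc.googleapis.com",
    credentials=None,  # resolved via google.auth.default()
    scopes=None,  # caller-supplied scopes, if any
    default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
    default_host="dataproc.googleapis.com",
)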
+ + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -112,7 +172,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=300.0, ), default_timeout=300.0, @@ -124,7 +186,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=300.0, ), default_timeout=300.0, @@ -136,7 +200,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=300.0, ), default_timeout=300.0, @@ -149,9 +215,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -165,9 +231,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), @@ -180,7 +246,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=300.0, ), default_timeout=300.0, @@ -196,57 +264,54 @@ def operations_client(self) -> 
operations_v1.OperationsClient: @property def create_cluster( self, - ) -> typing.Callable[ + ) -> Callable[ [clusters.CreateClusterRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def update_cluster( self, - ) -> typing.Callable[ + ) -> Callable[ [clusters.UpdateClusterRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def delete_cluster( self, - ) -> typing.Callable[ + ) -> Callable[ [clusters.DeleteClusterRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def get_cluster( self, - ) -> typing.Callable[ + ) -> Callable[ [clusters.GetClusterRequest], - typing.Union[clusters.Cluster, typing.Awaitable[clusters.Cluster]], + Union[clusters.Cluster, Awaitable[clusters.Cluster]], ]: raise NotImplementedError() @property def list_clusters( self, - ) -> typing.Callable[ + ) -> Callable[ [clusters.ListClustersRequest], - typing.Union[ - clusters.ListClustersResponse, - typing.Awaitable[clusters.ListClustersResponse], - ], + Union[clusters.ListClustersResponse, Awaitable[clusters.ListClustersResponse]], ]: raise NotImplementedError() @property def diagnose_cluster( self, - ) -> typing.Callable[ + ) -> Callable[ [clusters.DiagnoseClusterRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() diff --git a/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/grpc.py b/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/grpc.py index cb2b0558..21eb248e 100644 --- a/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/grpc.py +++ b/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import operations_v1 # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.dataproc_v1beta2.types import clusters -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore from .base import ClusterControllerTransport, DEFAULT_CLIENT_INFO @@ -53,7 +50,7 @@ def __init__( self, *, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -67,7 +64,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -178,7 +176,7 @@ def __init__( def create_channel( cls, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -209,13 +207,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -242,7 +242,7 @@ def operations_client(self) -> operations_v1.OperationsClient: @property def create_cluster( self, - ) -> Callable[[clusters.CreateClusterRequest], operations.Operation]: + ) -> Callable[[clusters.CreateClusterRequest], operations_pb2.Operation]: r"""Return a callable for the create cluster method over gRPC. Creates a cluster in a project. The returned @@ -264,14 +264,14 @@ def create_cluster( self._stubs["create_cluster"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1beta2.ClusterController/CreateCluster", request_serializer=clusters.CreateClusterRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_cluster"] @property def update_cluster( self, - ) -> Callable[[clusters.UpdateClusterRequest], operations.Operation]: + ) -> Callable[[clusters.UpdateClusterRequest], operations_pb2.Operation]: r"""Return a callable for the update cluster method over gRPC. Updates a cluster in a project. 
The returned @@ -293,14 +293,14 @@ def update_cluster( self._stubs["update_cluster"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1beta2.ClusterController/UpdateCluster", request_serializer=clusters.UpdateClusterRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_cluster"] @property def delete_cluster( self, - ) -> Callable[[clusters.DeleteClusterRequest], operations.Operation]: + ) -> Callable[[clusters.DeleteClusterRequest], operations_pb2.Operation]: r"""Return a callable for the delete cluster method over gRPC. Deletes a cluster in a project. The returned @@ -322,7 +322,7 @@ def delete_cluster( self._stubs["delete_cluster"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1beta2.ClusterController/DeleteCluster", request_serializer=clusters.DeleteClusterRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["delete_cluster"] @@ -381,7 +381,7 @@ def list_clusters( @property def diagnose_cluster( self, - ) -> Callable[[clusters.DiagnoseClusterRequest], operations.Operation]: + ) -> Callable[[clusters.DiagnoseClusterRequest], operations_pb2.Operation]: r"""Return a callable for the diagnose cluster method over gRPC. Gets cluster diagnostic information. The returned @@ -406,7 +406,7 @@ def diagnose_cluster( self._stubs["diagnose_cluster"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1beta2.ClusterController/DiagnoseCluster", request_serializer=clusters.DiagnoseClusterRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["diagnose_cluster"] diff --git a/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/grpc_asyncio.py b/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/grpc_asyncio.py index 80c4e84d..60650596 100644 --- a/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/grpc_asyncio.py +++ b/google/cloud/dataproc_v1beta2/services/cluster_controller/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,23 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore from google.api_core import operations_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.dataproc_v1beta2.types import clusters -from google.longrunning import operations_pb2 as operations # type: ignore - +from google.longrunning import operations_pb2 # type: ignore from .base import ClusterControllerTransport, DEFAULT_CLIENT_INFO from .grpc import ClusterControllerGrpcTransport @@ -56,7 +53,7 @@ class ClusterControllerGrpcAsyncIOTransport(ClusterControllerTransport): def create_channel( cls, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -83,13 +80,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -97,7 +96,7 @@ def __init__( self, *, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -111,7 +110,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -170,7 +170,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -248,7 +247,7 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: @property def create_cluster( self, - ) -> Callable[[clusters.CreateClusterRequest], Awaitable[operations.Operation]]: + ) -> Callable[[clusters.CreateClusterRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the create cluster method over gRPC. Creates a cluster in a project. 
The returned @@ -270,14 +269,14 @@ def create_cluster( self._stubs["create_cluster"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1beta2.ClusterController/CreateCluster", request_serializer=clusters.CreateClusterRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["create_cluster"] @property def update_cluster( self, - ) -> Callable[[clusters.UpdateClusterRequest], Awaitable[operations.Operation]]: + ) -> Callable[[clusters.UpdateClusterRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the update cluster method over gRPC. Updates a cluster in a project. The returned @@ -299,14 +298,14 @@ def update_cluster( self._stubs["update_cluster"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1beta2.ClusterController/UpdateCluster", request_serializer=clusters.UpdateClusterRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["update_cluster"] @property def delete_cluster( self, - ) -> Callable[[clusters.DeleteClusterRequest], Awaitable[operations.Operation]]: + ) -> Callable[[clusters.DeleteClusterRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the delete cluster method over gRPC. Deletes a cluster in a project. The returned @@ -328,7 +327,7 @@ def delete_cluster( self._stubs["delete_cluster"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1beta2.ClusterController/DeleteCluster", request_serializer=clusters.DeleteClusterRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["delete_cluster"] @@ -391,7 +390,9 @@ def list_clusters( @property def diagnose_cluster( self, - ) -> Callable[[clusters.DiagnoseClusterRequest], Awaitable[operations.Operation]]: + ) -> Callable[ + [clusters.DiagnoseClusterRequest], Awaitable[operations_pb2.Operation] + ]: r"""Return a callable for the diagnose cluster method over gRPC. Gets cluster diagnostic information. The returned @@ -416,7 +417,7 @@ def diagnose_cluster( self._stubs["diagnose_cluster"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1beta2.ClusterController/DiagnoseCluster", request_serializer=clusters.DiagnoseClusterRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["diagnose_cluster"] diff --git a/google/cloud/dataproc_v1beta2/services/job_controller/__init__.py b/google/cloud/dataproc_v1beta2/services/job_controller/__init__.py index 5bb83207..ca574e29 100644 --- a/google/cloud/dataproc_v1beta2/services/job_controller/__init__.py +++ b/google/cloud/dataproc_v1beta2/services/job_controller/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from .client import JobControllerClient from .async_client import JobControllerAsyncClient diff --git a/google/cloud/dataproc_v1beta2/services/job_controller/async_client.py b/google/cloud/dataproc_v1beta2/services/job_controller/async_client.py index 38af5a95..44a51fde 100644 --- a/google/cloud/dataproc_v1beta2/services/job_controller/async_client.py +++ b/google/cloud/dataproc_v1beta2/services/job_controller/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict import functools import re @@ -22,17 +20,16 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.dataproc_v1beta2.services.job_controller import pagers from google.cloud.dataproc_v1beta2.types import jobs - from .transports.base import JobControllerTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import JobControllerGrpcAsyncIOTransport from .client import JobControllerClient @@ -52,24 +49,20 @@ class JobControllerAsyncClient: parse_common_billing_account_path = staticmethod( JobControllerClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(JobControllerClient.common_folder_path) parse_common_folder_path = staticmethod( JobControllerClient.parse_common_folder_path ) - common_organization_path = staticmethod( JobControllerClient.common_organization_path ) parse_common_organization_path = staticmethod( JobControllerClient.parse_common_organization_path ) - common_project_path = staticmethod(JobControllerClient.common_project_path) parse_common_project_path = staticmethod( JobControllerClient.parse_common_project_path ) - common_location_path = staticmethod(JobControllerClient.common_location_path) parse_common_location_path = staticmethod( JobControllerClient.parse_common_location_path @@ -77,7 +70,8 @@ class JobControllerAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -92,7 +86,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -109,7 +103,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> JobControllerTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. 
Returns: JobControllerTransport: The transport used by the client instance. @@ -123,12 +117,12 @@ def transport(self) -> JobControllerTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, JobControllerTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the job controller client. + """Instantiates the job controller client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -160,7 +154,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = JobControllerClient( credentials=credentials, transport=transport, @@ -204,7 +197,6 @@ async def submit_job( This corresponds to the ``job`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -229,7 +221,6 @@ async def submit_job( # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -245,7 +236,9 @@ async def submit_job( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=900.0, ), default_timeout=900.0, @@ -294,7 +287,6 @@ async def submit_job_as_operation( This corresponds to the ``job`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -324,7 +316,6 @@ async def submit_job_as_operation( # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -340,7 +331,9 @@ async def submit_job_as_operation( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=900.0, ), default_timeout=900.0, @@ -399,7 +392,6 @@ async def get_job( This corresponds to the ``job_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -424,7 +416,6 @@ async def get_job( # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -441,9 +432,9 @@ async def get_job( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=900.0, ), @@ -510,7 +501,6 @@ async def list_jobs( This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -539,7 +529,6 @@ async def list_jobs( # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -556,9 +545,9 @@ async def list_jobs( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=900.0, ), @@ -591,7 +580,6 @@ async def update_job( Args: request (:class:`google.cloud.dataproc_v1beta2.types.UpdateJobRequest`): The request object. A request to update a job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -603,7 +591,6 @@ async def update_job( A Dataproc job resource. """ # Create or coerce a protobuf request object. - request = jobs.UpdateJobRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -614,7 +601,9 @@ async def update_job( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=900.0, ), default_timeout=900.0, @@ -667,7 +656,6 @@ async def cancel_job( This corresponds to the ``job_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -692,7 +680,6 @@ async def cancel_job( # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -709,9 +696,9 @@ async def cancel_job( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=900.0, ), @@ -762,7 +749,6 @@ async def delete_job( This corresponds to the ``job_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -783,7 +769,6 @@ async def delete_job( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if project_id is not None: request.project_id = project_id if region is not None: @@ -799,7 +784,9 @@ async def delete_job( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=900.0, ), default_timeout=900.0, diff --git a/google/cloud/dataproc_v1beta2/services/job_controller/client.py b/google/cloud/dataproc_v1beta2/services/job_controller/client.py index b487648e..8672ead9 100644 --- a/google/cloud/dataproc_v1beta2/services/job_controller/client.py +++ b/google/cloud/dataproc_v1beta2/services/job_controller/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from distutils import util import os @@ -23,10 +21,10 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -36,7 +34,6 @@ from google.api_core import operation_async # type: ignore from google.cloud.dataproc_v1beta2.services.job_controller import pagers from google.cloud.dataproc_v1beta2.types import jobs - from .transports.base import JobControllerTransport, DEFAULT_CLIENT_INFO from .transports.grpc import JobControllerGrpcTransport from .transports.grpc_asyncio import JobControllerGrpcAsyncIOTransport @@ -55,7 +52,7 @@ class JobControllerClientMeta(type): _transport_registry["grpc_asyncio"] = JobControllerGrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[JobControllerTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -78,7 +75,8 @@ class JobControllerClient(metaclass=JobControllerClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -112,7 +110,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -129,7 +128,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. 
Args: filename (str): The path to the service account private key json @@ -148,16 +147,17 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> JobControllerTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - JobControllerTransport: The transport used by the client instance. + JobControllerTransport: The transport used by the client + instance. """ return self._transport @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -170,7 +170,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -181,7 +181,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -192,7 +192,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -203,7 +203,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -217,12 +217,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, JobControllerTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the job controller client. + """Instantiates the job controller client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -277,9 +277,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -291,12 +292,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. 
Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -311,8 +314,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -363,7 +366,6 @@ def submit_job( This corresponds to the ``job`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -390,10 +392,8 @@ def submit_job( # there are no flattened fields. if not isinstance(request, jobs.SubmitJobRequest): request = jobs.SubmitJobRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -447,7 +447,6 @@ def submit_job_as_operation( This corresponds to the ``job`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -479,10 +478,8 @@ def submit_job_as_operation( # there are no flattened fields. if not isinstance(request, jobs.SubmitJobRequest): request = jobs.SubmitJobRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -546,7 +543,6 @@ def get_job( This corresponds to the ``job_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -573,10 +569,8 @@ def get_job( # there are no flattened fields. if not isinstance(request, jobs.GetJobRequest): request = jobs.GetJobRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -647,7 +641,6 @@ def list_jobs( This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -678,10 +671,8 @@ def list_jobs( # there are no flattened fields. if not isinstance(request, jobs.ListJobsRequest): request = jobs.ListJobsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -718,7 +709,6 @@ def update_job( Args: request (google.cloud.dataproc_v1beta2.types.UpdateJobRequest): The request object. A request to update a job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -730,7 +720,6 @@ def update_job( A Dataproc job resource. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a jobs.UpdateJobRequest. 
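# --- Editor's example: a standalone sketch of the endpoint selection that
# the client __init__ above performs. GOOGLE_API_USE_MTLS_ENDPOINT may be
# "never", "auto", or "always"; under "auto" the mTLS endpoint is used only
# when a default client certificate is available. Endpoint constants are
# copied from the generated client; the helper name is the editor's.
import os

from google.auth.exceptions import MutualTLSChannelError
from google.auth.transport import mtls

DEFAULT_ENDPOINT = "dataproc.googleapis.com"
DEFAULT_MTLS_ENDPOINT = "dataproc.mtls.googleapis.com"

def resolve_api_endpoint() -> str:
    use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
    if use_mtls_env == "never":
        return DEFAULT_ENDPOINT
    if use_mtls_env == "always":
        return DEFAULT_MTLS_ENDPOINT
    if use_mtls_env == "auto":
        if mtls.has_default_client_cert_source():
            return DEFAULT_MTLS_ENDPOINT
        return DEFAULT_ENDPOINT
    raise MutualTLSChannelError(
        "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
        "values: never, auto, always"
    )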
# There's no risk of modifying the input as we've already verified @@ -788,7 +777,6 @@ def cancel_job( This corresponds to the ``job_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -815,10 +803,8 @@ def cancel_job( # there are no flattened fields. if not isinstance(request, jobs.CancelJobRequest): request = jobs.CancelJobRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: @@ -873,7 +859,6 @@ def delete_job( This corresponds to the ``job_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -896,10 +881,8 @@ def delete_job( # there are no flattened fields. if not isinstance(request, jobs.DeleteJobRequest): request = jobs.DeleteJobRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project_id is not None: request.project_id = project_id if region is not None: diff --git a/google/cloud/dataproc_v1beta2/services/job_controller/pagers.py b/google/cloud/dataproc_v1beta2/services/job_controller/pagers.py index 1e104382..dcab801b 100644 --- a/google/cloud/dataproc_v1beta2/services/job_controller/pagers.py +++ b/google/cloud/dataproc_v1beta2/services/job_controller/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -117,7 +115,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/dataproc_v1beta2/services/job_controller/transports/__init__.py b/google/cloud/dataproc_v1beta2/services/job_controller/transports/__init__.py index d28c850a..f5701c97 100644 --- a/google/cloud/dataproc_v1beta2/services/job_controller/transports/__init__.py +++ b/google/cloud/dataproc_v1beta2/services/job_controller/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/dataproc_v1beta2/services/job_controller/transports/base.py b/google/cloud/dataproc_v1beta2/services/job_controller/transports/base.py index 7b47cb8f..450fb1f1 100644 --- a/google/cloud/dataproc_v1beta2/services/job_controller/transports/base.py +++ b/google/cloud/dataproc_v1beta2/services/job_controller/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,22 @@ # See the License for the specific language governing permissions and # limitations under the License. 
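# --- Editor's example: the "minor optimization" pattern repeated throughout
# the sync client above. Methods accept either a proto-plus message or a
# plain dict, coerce only when needed, then overlay any flattened keyword
# arguments onto the request. Sketch assumes google-cloud-dataproc
# (v1beta2 surface) is installed; the helper name is hypothetical.
from google.cloud.dataproc_v1beta2.types import jobs

def build_cancel_request(request=None, *, project_id=None, region=None, job_id=None):
    # Coercing only when the input is not already the right message type
    # avoids an extra copy on the common path.
    if not isinstance(request, jobs.CancelJobRequest):
        request = jobs.CancelJobRequest(request)
    if project_id is not None:
        request.project_id = project_id
    if region is not None:
        request.region = region
    if job_id is not None:
        request.job_id = job_id
    return request

# Both calls below build an equivalent request:
#   build_cancel_request({"project_id": "p", "region": "r", "job_id": "j"})
#   build_cancel_request(project_id="p", region="r", job_id="j")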
# - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.dataproc_v1beta2.types import jobs -from google.longrunning import operations_pb2 as operations # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -38,27 +37,41 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class JobControllerTransport(abc.ABC): """Abstract transport class for JobController.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + DEFAULT_HOST: str = "dataproc.googleapis.com" + def __init__( self, *, - host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -67,7 +80,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -81,29 +94,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. 
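# --- Editor's example: the version-probe pattern introduced above,
# generalized into a reusable sketch. google-auth only gained __version__
# in 1.26.0, so older installs fall back to distribution metadata, and a
# missing distribution degrades to None instead of failing at import time.
# The helper name is the editor's.
import pkg_resources

def probe_version(module, dist_name: str):
    try:
        return module.__version__
    except AttributeError:
        try:
            return pkg_resources.get_distribution(dist_name).version
        except pkg_resources.DistributionNotFound:
            return None

import google.auth
_GOOGLE_AUTH_VERSION = probe_version(google.auth, "google-auth")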
if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
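# --- Editor's example: how the two TODO-marked shims above decide which
# keyword arguments are safe to pass. packaging.version gives a total
# ordering over release strings, so the transport passes default_scopes
# only when google-auth >= 1.25.0 understands it. Same cutoff, distilled:
import packaging.version

AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)

def scopes_kwargs(installed_auth_version, scopes=None):
    if installed_auth_version and (
        packaging.version.parse(installed_auth_version)
        >= packaging.version.parse("1.25.0")
    ):
        # Newer google-auth: hand over user scopes and defaults separately.
        return {"scopes": scopes, "default_scopes": AUTH_SCOPES}
    # Older google-auth: collapse to a single scopes argument.
    return {"scopes": scopes or AUTH_SCOPES}

# scopes_kwargs("1.30.0") -> {"scopes": None, "default_scopes": AUTH_SCOPES}
# scopes_kwargs("1.20.0") -> {"scopes": AUTH_SCOPES}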
self._wrapped_methods = { @@ -113,7 +173,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=900.0, ), default_timeout=900.0, @@ -125,7 +187,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=900.0, ), default_timeout=900.0, @@ -138,9 +202,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=900.0, ), @@ -154,9 +218,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=900.0, ), @@ -169,7 +233,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=900.0, ), default_timeout=900.0, @@ -182,9 +248,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=900.0, ), @@ -197,7 +263,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=900.0, ), default_timeout=900.0, @@ -213,59 +281,50 @@ def operations_client(self) -> operations_v1.OperationsClient: @property def submit_job( self, - ) -> typing.Callable[ - [jobs.SubmitJobRequest], typing.Union[jobs.Job, typing.Awaitable[jobs.Job]] - ]: + ) -> Callable[[jobs.SubmitJobRequest], Union[jobs.Job, Awaitable[jobs.Job]]]: raise NotImplementedError() @property def submit_job_as_operation( self, - ) -> typing.Callable[ + ) -> Callable[ [jobs.SubmitJobRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def get_job( self, - ) -> typing.Callable[ - [jobs.GetJobRequest], typing.Union[jobs.Job, typing.Awaitable[jobs.Job]] - ]: + ) -> Callable[[jobs.GetJobRequest], Union[jobs.Job, Awaitable[jobs.Job]]]: raise NotImplementedError() @property def list_jobs( self, - ) -> typing.Callable[ + ) -> Callable[ [jobs.ListJobsRequest], - typing.Union[jobs.ListJobsResponse, typing.Awaitable[jobs.ListJobsResponse]], + Union[jobs.ListJobsResponse, Awaitable[jobs.ListJobsResponse]], ]: raise NotImplementedError() @property def update_job( self, - ) -> typing.Callable[ - 
[jobs.UpdateJobRequest], typing.Union[jobs.Job, typing.Awaitable[jobs.Job]] - ]: + ) -> Callable[[jobs.UpdateJobRequest], Union[jobs.Job, Awaitable[jobs.Job]]]: raise NotImplementedError() @property def cancel_job( self, - ) -> typing.Callable[ - [jobs.CancelJobRequest], typing.Union[jobs.Job, typing.Awaitable[jobs.Job]] - ]: + ) -> Callable[[jobs.CancelJobRequest], Union[jobs.Job, Awaitable[jobs.Job]]]: raise NotImplementedError() @property def delete_job( self, - ) -> typing.Callable[ - [jobs.DeleteJobRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ) -> Callable[ + [jobs.DeleteJobRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] ]: raise NotImplementedError() diff --git a/google/cloud/dataproc_v1beta2/services/job_controller/transports/grpc.py b/google/cloud/dataproc_v1beta2/services/job_controller/transports/grpc.py index 78a1139b..9b33f4a7 100644 --- a/google/cloud/dataproc_v1beta2/services/job_controller/transports/grpc.py +++ b/google/cloud/dataproc_v1beta2/services/job_controller/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,23 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import operations_v1 # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.dataproc_v1beta2.types import jobs -from google.longrunning import operations_pb2 as operations # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from .base import JobControllerTransport, DEFAULT_CLIENT_INFO @@ -53,7 +50,7 @@ def __init__( self, *, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -67,7 +64,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -178,7 +176,7 @@ def __init__( def create_channel( cls, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -209,13 +207,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -266,7 +266,7 @@ def submit_job(self) -> Callable[[jobs.SubmitJobRequest], jobs.Job]: @property def submit_job_as_operation( self, - ) -> Callable[[jobs.SubmitJobRequest], operations.Operation]: + ) -> Callable[[jobs.SubmitJobRequest], operations_pb2.Operation]: r"""Return a callable for the submit job as operation method over gRPC. Submits job to a cluster. @@ -285,7 +285,7 @@ def submit_job_as_operation( self._stubs["submit_job_as_operation"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1beta2.JobController/SubmitJobAsOperation", request_serializer=jobs.SubmitJobRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["submit_job_as_operation"] @@ -391,7 +391,7 @@ def cancel_job(self) -> Callable[[jobs.CancelJobRequest], jobs.Job]: return self._stubs["cancel_job"] @property - def delete_job(self) -> Callable[[jobs.DeleteJobRequest], empty.Empty]: + def delete_job(self) -> Callable[[jobs.DeleteJobRequest], empty_pb2.Empty]: r"""Return a callable for the delete job method over gRPC. Deletes the job from the project. If the job is active, the @@ -411,7 +411,7 @@ def delete_job(self) -> Callable[[jobs.DeleteJobRequest], empty.Empty]: self._stubs["delete_job"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1beta2.JobController/DeleteJob", request_serializer=jobs.DeleteJobRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_job"] diff --git a/google/cloud/dataproc_v1beta2/services/job_controller/transports/grpc_asyncio.py b/google/cloud/dataproc_v1beta2/services/job_controller/transports/grpc_asyncio.py index 28be8d15..11d89fdd 100644 --- a/google/cloud/dataproc_v1beta2/services/job_controller/transports/grpc_asyncio.py +++ b/google/cloud/dataproc_v1beta2/services/job_controller/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,24 +13,22 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore from google.api_core import operations_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.dataproc_v1beta2.types import jobs -from google.longrunning import operations_pb2 as operations # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from .base import JobControllerTransport, DEFAULT_CLIENT_INFO from .grpc import JobControllerGrpcTransport @@ -56,7 +53,7 @@ class JobControllerGrpcAsyncIOTransport(JobControllerTransport): def create_channel( cls, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -83,13 +80,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -97,7 +96,7 @@ def __init__( self, *, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -111,7 +110,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -170,7 +170,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -272,7 +271,7 @@ def submit_job(self) -> Callable[[jobs.SubmitJobRequest], Awaitable[jobs.Job]]: @property def submit_job_as_operation( self, - ) -> Callable[[jobs.SubmitJobRequest], Awaitable[operations.Operation]]: + ) -> Callable[[jobs.SubmitJobRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the submit job as operation method over gRPC. Submits job to a cluster. 
@@ -291,7 +290,7 @@ def submit_job_as_operation( self._stubs["submit_job_as_operation"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1beta2.JobController/SubmitJobAsOperation", request_serializer=jobs.SubmitJobRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["submit_job_as_operation"] @@ -399,7 +398,9 @@ def cancel_job(self) -> Callable[[jobs.CancelJobRequest], Awaitable[jobs.Job]]: return self._stubs["cancel_job"] @property - def delete_job(self) -> Callable[[jobs.DeleteJobRequest], Awaitable[empty.Empty]]: + def delete_job( + self, + ) -> Callable[[jobs.DeleteJobRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete job method over gRPC. Deletes the job from the project. If the job is active, the @@ -419,7 +420,7 @@ def delete_job(self) -> Callable[[jobs.DeleteJobRequest], Awaitable[empty.Empty] self._stubs["delete_job"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1beta2.JobController/DeleteJob", request_serializer=jobs.DeleteJobRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_job"] diff --git a/google/cloud/dataproc_v1beta2/services/workflow_template_service/__init__.py b/google/cloud/dataproc_v1beta2/services/workflow_template_service/__init__.py index 8e92d92d..c339809a 100644 --- a/google/cloud/dataproc_v1beta2/services/workflow_template_service/__init__.py +++ b/google/cloud/dataproc_v1beta2/services/workflow_template_service/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .client import WorkflowTemplateServiceClient from .async_client import WorkflowTemplateServiceAsyncClient diff --git a/google/cloud/dataproc_v1beta2/services/workflow_template_service/async_client.py b/google/cloud/dataproc_v1beta2/services/workflow_template_service/async_client.py index 5f2f82c1..14f65d20 100644 --- a/google/cloud/dataproc_v1beta2/services/workflow_template_service/async_client.py +++ b/google/cloud/dataproc_v1beta2/services/workflow_template_service/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
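# --- Editor's example: the async counterpart of the channel factory shown
# above. grpc_helpers_async returns a grpc.aio channel, and the version-gated
# self-signed-JWT kwargs are spread into the call exactly as in the sync
# transport. Hedged sketch: the literal kwargs below assume google-api-core
# >= 1.26.0 (where default_scopes/default_host are accepted); in the
# generated code they come from _get_self_signed_jwt_kwargs(host, scopes).
from google.api_core import grpc_helpers_async

def make_async_channel(host="dataproc.googleapis.com", scopes=None):
    self_signed_jwt_kwargs = {
        "default_scopes": ("https://www.googleapis.com/auth/cloud-platform",),
        "scopes": scopes,
        "default_host": "dataproc.googleapis.com",
    }
    return grpc_helpers_async.create_channel(host, **self_signed_jwt_kwargs)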
# - from collections import OrderedDict import functools import re @@ -22,20 +20,19 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.dataproc_v1beta2.services.workflow_template_service import pagers from google.cloud.dataproc_v1beta2.types import workflow_templates -from google.protobuf import duration_pb2 as duration # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import WorkflowTemplateServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import WorkflowTemplateServiceGrpcAsyncIOTransport from .client import WorkflowTemplateServiceClient @@ -59,33 +56,28 @@ class WorkflowTemplateServiceAsyncClient: parse_workflow_template_path = staticmethod( WorkflowTemplateServiceClient.parse_workflow_template_path ) - common_billing_account_path = staticmethod( WorkflowTemplateServiceClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( WorkflowTemplateServiceClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(WorkflowTemplateServiceClient.common_folder_path) parse_common_folder_path = staticmethod( WorkflowTemplateServiceClient.parse_common_folder_path ) - common_organization_path = staticmethod( WorkflowTemplateServiceClient.common_organization_path ) parse_common_organization_path = staticmethod( WorkflowTemplateServiceClient.parse_common_organization_path ) - common_project_path = staticmethod( WorkflowTemplateServiceClient.common_project_path ) parse_common_project_path = staticmethod( WorkflowTemplateServiceClient.parse_common_project_path ) - common_location_path = staticmethod( WorkflowTemplateServiceClient.common_location_path ) @@ -95,7 +87,8 @@ class WorkflowTemplateServiceAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -110,7 +103,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -127,7 +120,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> WorkflowTemplateServiceTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: WorkflowTemplateServiceTransport: The transport used by the client instance. 
@@ -142,12 +135,12 @@ def transport(self) -> WorkflowTemplateServiceTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, WorkflowTemplateServiceTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the workflow template service client. + """Instantiates the workflow template service client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -179,7 +172,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = WorkflowTemplateServiceClient( credentials=credentials, transport=transport, @@ -227,7 +219,6 @@ async def create_workflow_template( This corresponds to the ``template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -254,7 +245,6 @@ async def create_workflow_template( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if template is not None: @@ -268,7 +258,9 @@ async def create_workflow_template( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=600.0, ), default_timeout=600.0, @@ -322,7 +314,6 @@ async def get_workflow_template( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -349,7 +340,6 @@ async def get_workflow_template( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -362,9 +352,9 @@ async def get_workflow_template( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=600.0, ), @@ -451,7 +441,6 @@ async def instantiate_workflow_template( This corresponds to the ``parameters`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -491,7 +480,6 @@ async def instantiate_workflow_template( # If we have keyword arguments corresponding to fields on the # request, apply these. 
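# --- Editor's example: every flattened method in the async client above
# starts with the same mutual-exclusion guard: convenience arguments may not
# be combined with a full request object. Distilled sketch; the helper name
# is the editor's.
def guard_flattened(request, *flattened):
    has_flattened_params = any(p is not None for p in flattened)
    if request is not None and has_flattened_params:
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )

# guard_flattened(None, "my-parent", None)   -> ok (flattened form)
# guard_flattened(some_request)              -> ok (request form)
# guard_flattened(some_request, "my-parent") -> ValueError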
- if name is not None: request.name = name @@ -506,7 +494,9 @@ async def instantiate_workflow_template( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=600.0, ), default_timeout=600.0, @@ -526,7 +516,7 @@ async def instantiate_workflow_template( response = operation_async.from_gapic( response, self._client._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=workflow_templates.WorkflowMetadata, ) @@ -600,7 +590,6 @@ async def instantiate_inline_workflow_template( This corresponds to the ``template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -640,7 +629,6 @@ async def instantiate_inline_workflow_template( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if template is not None: @@ -654,7 +642,9 @@ async def instantiate_inline_workflow_template( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=600.0, ), default_timeout=600.0, @@ -674,7 +664,7 @@ async def instantiate_inline_workflow_template( response = operation_async.from_gapic( response, self._client._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=workflow_templates.WorkflowMetadata, ) @@ -707,7 +697,6 @@ async def update_workflow_template( This corresponds to the ``template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -734,7 +723,6 @@ async def update_workflow_template( # If we have keyword arguments corresponding to fields on the # request, apply these. - if template is not None: request.template = template @@ -746,7 +734,9 @@ async def update_workflow_template( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=600.0, ), default_timeout=600.0, @@ -800,7 +790,6 @@ async def list_workflow_templates( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -830,7 +819,6 @@ async def list_workflow_templates( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -843,9 +831,9 @@ async def list_workflow_templates( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=600.0, ), @@ -907,7 +895,6 @@ async def delete_workflow_template( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
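# --- Editor's example: what the operation_async.from_gapic wrapping above
# buys the caller. The raw google.longrunning Operation is exposed as an
# awaitable whose success payload is Empty and whose metadata deserializes
# to WorkflowMetadata. Hedged usage sketch; "client" stands in for an
# already-constructed WorkflowTemplateServiceAsyncClient.
async def run_template(client, name: str):
    operation = await client.instantiate_workflow_template(name=name)
    # Progress is surfaced through WorkflowMetadata on operation.metadata;
    # the workflow itself has no payload, so result() resolves to Empty.
    await operation.result()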
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -928,7 +915,6 @@ async def delete_workflow_template( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -940,7 +926,9 @@ async def delete_workflow_template( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=600.0, ), default_timeout=600.0, diff --git a/google/cloud/dataproc_v1beta2/services/workflow_template_service/client.py b/google/cloud/dataproc_v1beta2/services/workflow_template_service/client.py index 3c23cd89..55d1a26a 100644 --- a/google/cloud/dataproc_v1beta2/services/workflow_template_service/client.py +++ b/google/cloud/dataproc_v1beta2/services/workflow_template_service/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from distutils import util import os @@ -23,10 +21,10 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -36,10 +34,9 @@ from google.api_core import operation_async # type: ignore from google.cloud.dataproc_v1beta2.services.workflow_template_service import pagers from google.cloud.dataproc_v1beta2.types import workflow_templates -from google.protobuf import duration_pb2 as duration # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import WorkflowTemplateServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc import WorkflowTemplateServiceGrpcTransport from .transports.grpc_asyncio import WorkflowTemplateServiceGrpcAsyncIOTransport @@ -62,7 +59,7 @@ class WorkflowTemplateServiceClientMeta(type): def get_transport_class( cls, label: str = None, ) -> Type[WorkflowTemplateServiceTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -87,7 +84,8 @@ class WorkflowTemplateServiceClient(metaclass=WorkflowTemplateServiceClientMeta) @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. 
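# --- Editor's example: a simplified sketch of _get_default_mtls_endpoint
# above, which inserts ".mtls" ahead of the optional ".sandbox" label of a
# googleapis.com hostname. The generated code does this with one regular
# expression; this string-based version matches the docstring's examples.
def to_mtls(api_endpoint: str) -> str:
    if not api_endpoint:
        return api_endpoint
    for suffix in (".sandbox.googleapis.com", ".googleapis.com"):
        if api_endpoint.endswith(suffix) and ".mtls." not in api_endpoint:
            name = api_endpoint[: -len(suffix)]
            return name + ".mtls" + suffix
    return api_endpoint

assert to_mtls("dataproc.googleapis.com") == "dataproc.mtls.googleapis.com"
assert to_mtls("dataproc.sandbox.googleapis.com") == "dataproc.mtls.sandbox.googleapis.com"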
Args: @@ -121,7 +119,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -138,7 +137,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -157,23 +156,24 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> WorkflowTemplateServiceTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - WorkflowTemplateServiceTransport: The transport used by the client instance. + WorkflowTemplateServiceTransport: The transport used by the client + instance. """ return self._transport @staticmethod def cluster_path(project: str, location: str, cluster: str,) -> str: - """Return a fully-qualified cluster string.""" + """Returns a fully-qualified cluster string.""" return "projects/{project}/locations/{location}/clusters/{cluster}".format( project=project, location=location, cluster=cluster, ) @staticmethod def parse_cluster_path(path: str) -> Dict[str, str]: - """Parse a cluster path into its component segments.""" + """Parses a cluster path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/clusters/(?P.+?)$", path, @@ -184,14 +184,14 @@ def parse_cluster_path(path: str) -> Dict[str, str]: def workflow_template_path( project: str, region: str, workflow_template: str, ) -> str: - """Return a fully-qualified workflow_template string.""" + """Returns a fully-qualified workflow_template string.""" return "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}".format( project=project, region=region, workflow_template=workflow_template, ) @staticmethod def parse_workflow_template_path(path: str) -> Dict[str, str]: - """Parse a workflow_template path into its component segments.""" + """Parses a workflow_template path into its component segments.""" m = re.match( r"^projects/(?P.+?)/regions/(?P.+?)/workflowTemplates/(?P.+?)$", path, @@ -200,7 +200,7 @@ def parse_workflow_template_path(path: str) -> Dict[str, str]: @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -213,7 +213,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -224,7 +224,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -235,7 +235,7 @@ def parse_common_organization_path(path: str) -> 
Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -246,7 +246,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -260,12 +260,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, WorkflowTemplateServiceTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the workflow template service client. + """Instantiates the workflow template service client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -320,9 +320,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -334,12 +335,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -354,8 +357,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -410,7 +413,6 @@ def create_workflow_template( This corresponds to the ``template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -439,10 +441,8 @@ def create_workflow_template( # there are no flattened fields. if not isinstance(request, workflow_templates.CreateWorkflowTemplateRequest): request = workflow_templates.CreateWorkflowTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if template is not None: @@ -499,7 +499,6 @@ def get_workflow_template( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
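# --- Editor's example: the path helpers above come in format/parse pairs;
# parse uses a regex with named groups and returns {} rather than raising
# on a non-matching path. Round-trip sketch for the workflow_template
# resource name, mirroring the generated code.
import re

def workflow_template_path(project, region, workflow_template):
    return "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}".format(
        project=project, region=region, workflow_template=workflow_template,
    )

def parse_workflow_template_path(path):
    m = re.match(
        r"^projects/(?P<project>.+?)/regions/(?P<region>.+?)/workflowTemplates/(?P<workflow_template>.+?)$",
        path,
    )
    return m.groupdict() if m else {}

path = workflow_template_path("p", "us-central1", "wt")
assert parse_workflow_template_path(path) == {
    "project": "p", "region": "us-central1", "workflow_template": "wt",
}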
@@ -528,10 +527,8 @@ def get_workflow_template( # there are no flattened fields. if not isinstance(request, workflow_templates.GetWorkflowTemplateRequest): request = workflow_templates.GetWorkflowTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -618,7 +615,6 @@ def instantiate_workflow_template( This corresponds to the ``parameters`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -662,10 +658,8 @@ def instantiate_workflow_template( request, workflow_templates.InstantiateWorkflowTemplateRequest ): request = workflow_templates.InstantiateWorkflowTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if parameters is not None: @@ -690,7 +684,7 @@ def instantiate_workflow_template( response = operation.from_gapic( response, self._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=workflow_templates.WorkflowMetadata, ) @@ -764,7 +758,6 @@ def instantiate_inline_workflow_template( This corresponds to the ``template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -810,10 +803,8 @@ def instantiate_inline_workflow_template( request = workflow_templates.InstantiateInlineWorkflowTemplateRequest( request ) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if template is not None: @@ -838,7 +829,7 @@ def instantiate_inline_workflow_template( response = operation.from_gapic( response, self._transport.operations_client, - empty.Empty, + empty_pb2.Empty, metadata_type=workflow_templates.WorkflowMetadata, ) @@ -871,7 +862,6 @@ def update_workflow_template( This corresponds to the ``template`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -900,10 +890,8 @@ def update_workflow_template( # there are no flattened fields. if not isinstance(request, workflow_templates.UpdateWorkflowTemplateRequest): request = workflow_templates.UpdateWorkflowTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if template is not None: request.template = template @@ -958,7 +946,6 @@ def list_workflow_templates( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -990,10 +977,8 @@ def list_workflow_templates( # there are no flattened fields. if not isinstance(request, workflow_templates.ListWorkflowTemplatesRequest): request = workflow_templates.ListWorkflowTemplatesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: request.parent = parent @@ -1055,7 +1040,6 @@ def delete_workflow_template( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1078,10 +1062,8 @@ def delete_workflow_template( # there are no flattened fields. if not isinstance(request, workflow_templates.DeleteWorkflowTemplateRequest): request = workflow_templates.DeleteWorkflowTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name diff --git a/google/cloud/dataproc_v1beta2/services/workflow_template_service/pagers.py b/google/cloud/dataproc_v1beta2/services/workflow_template_service/pagers.py index 83bfd1f9..767df7a4 100644 --- a/google/cloud/dataproc_v1beta2/services/workflow_template_service/pagers.py +++ b/google/cloud/dataproc_v1beta2/services/workflow_template_service/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -119,7 +117,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/__init__.py b/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/__init__.py index ac44e1f0..c72ace70 100644 --- a/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/__init__.py +++ b/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/base.py b/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/base.py index 40ff6b61..ab850e66 100644 --- a/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/base.py +++ b/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,22 @@ # See the License for the specific language governing permissions and # limitations under the License. 
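# --- Editor's example: the pager touched above turns list_workflow_templates
# into a transparent iterator over every template, issuing further
# ListWorkflowTemplates RPCs as pages run out. Hedged usage sketch with an
# already-built sync client; "parent" is a region resource name such as
# "projects/my-project/regions/us-central1" (placeholder).
from google.cloud import dataproc_v1beta2

def list_all_templates(client: dataproc_v1beta2.WorkflowTemplateServiceClient, parent: str):
    pager = client.list_workflow_templates(parent=parent)
    # Item-wise iteration shown here; pager.pages yields whole responses.
    return [template.id for template in pager]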
# - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.dataproc_v1beta2.types import workflow_templates -from google.longrunning import operations_pb2 as operations # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -38,27 +37,41 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class WorkflowTemplateServiceTransport(abc.ABC): """Abstract transport class for WorkflowTemplateService.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + DEFAULT_HOST: str = "dataproc.googleapis.com" + def __init__( self, *, - host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -67,7 +80,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -81,29 +94,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. 
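The _GOOGLE_AUTH_VERSION probe introduced here prefers the module's own __version__ attribute (only present on newer google-auth releases, per the comment in the diff) and falls back to installed-distribution metadata, so older installs still yield a usable version string. Isolated, the pattern looks like this:

    import pkg_resources
    import google.auth

    try:
        _GOOGLE_AUTH_VERSION = google.auth.__version__   # attribute on newer releases only
    except AttributeError:
        try:  # fall back to the installed distribution's metadata
            _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version
        except pkg_resources.DistributionNotFound:
            _GOOGLE_AUTH_VERSION = None   # unknown; callers assume oldest behavior
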
if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
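Both helpers added above gate on a parsed version so that newer keyword arguments (default_scopes for google-auth, default_scopes/default_host for the self-signed JWT flow in google-api-core) only reach libraries that accept them. The comparison itself is plain packaging.version; a sketch mirroring the >= 1.25.0 gate in _get_scopes_kwargs:

    import packaging.version

    def _supports_default_scopes(auth_version):
        """Mirror of the >= 1.25.0 gate in _get_scopes_kwargs above.

        auth_version is the probed google-auth version string, or None when
        the probe failed (treated as too old).
        """
        return bool(auth_version) and (
            packaging.version.parse(auth_version) >= packaging.version.parse("1.25.0")
        )

    assert _supports_default_scopes("1.30.0") is True
    assert _supports_default_scopes("1.24.1") is False
    assert _supports_default_scopes(None) is False
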
self._wrapped_methods = { @@ -113,7 +173,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=600.0, ), default_timeout=600.0, @@ -126,9 +188,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=600.0, ), @@ -141,7 +203,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=600.0, ), default_timeout=600.0, @@ -153,7 +217,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=600.0, ), default_timeout=600.0, @@ -165,7 +231,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=600.0, ), default_timeout=600.0, @@ -178,9 +246,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=600.0, ), @@ -193,7 +261,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=600.0, ), default_timeout=600.0, @@ -209,11 +279,11 @@ def operations_client(self) -> operations_v1.OperationsClient: @property def create_workflow_template( self, - ) -> typing.Callable[ + ) -> Callable[ [workflow_templates.CreateWorkflowTemplateRequest], - typing.Union[ + Union[ workflow_templates.WorkflowTemplate, - typing.Awaitable[workflow_templates.WorkflowTemplate], + Awaitable[workflow_templates.WorkflowTemplate], ], ]: raise NotImplementedError() @@ -221,11 +291,11 @@ def create_workflow_template( @property def get_workflow_template( self, - ) -> typing.Callable[ + ) -> Callable[ [workflow_templates.GetWorkflowTemplateRequest], - typing.Union[ + Union[ workflow_templates.WorkflowTemplate, - typing.Awaitable[workflow_templates.WorkflowTemplate], + Awaitable[workflow_templates.WorkflowTemplate], ], ]: raise NotImplementedError() @@ -233,29 +303,29 @@ def get_workflow_template( @property def instantiate_workflow_template( self, - ) -> typing.Callable[ + ) -> Callable[ [workflow_templates.InstantiateWorkflowTemplateRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def instantiate_inline_workflow_template( self, - ) -> 
typing.Callable[ + ) -> Callable[ [workflow_templates.InstantiateInlineWorkflowTemplateRequest], - typing.Union[operations.Operation, typing.Awaitable[operations.Operation]], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], ]: raise NotImplementedError() @property def update_workflow_template( self, - ) -> typing.Callable[ + ) -> Callable[ [workflow_templates.UpdateWorkflowTemplateRequest], - typing.Union[ + Union[ workflow_templates.WorkflowTemplate, - typing.Awaitable[workflow_templates.WorkflowTemplate], + Awaitable[workflow_templates.WorkflowTemplate], ], ]: raise NotImplementedError() @@ -263,11 +333,11 @@ def update_workflow_template( @property def list_workflow_templates( self, - ) -> typing.Callable[ + ) -> Callable[ [workflow_templates.ListWorkflowTemplatesRequest], - typing.Union[ + Union[ workflow_templates.ListWorkflowTemplatesResponse, - typing.Awaitable[workflow_templates.ListWorkflowTemplatesResponse], + Awaitable[workflow_templates.ListWorkflowTemplatesResponse], ], ]: raise NotImplementedError() @@ -275,9 +345,9 @@ def list_workflow_templates( @property def delete_workflow_template( self, - ) -> typing.Callable[ + ) -> Callable[ [workflow_templates.DeleteWorkflowTemplateRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() diff --git a/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/grpc.py b/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/grpc.py index 4d514e2d..f08b97ec 100644 --- a/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/grpc.py +++ b/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,23 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import operations_v1 # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.dataproc_v1beta2.types import workflow_templates -from google.longrunning import operations_pb2 as operations # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from .base import WorkflowTemplateServiceTransport, DEFAULT_CLIENT_INFO @@ -54,7 +51,7 @@ def __init__( self, *, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -68,7 +65,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. 
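The retry policies wired into _prep_wrapped_messages above all share one shape: exponential backoff from 0.1 s up to 60 s with multiplier 1.3, a 600 s overall deadline, and a predicate naming the retryable error types. The equivalent policy can be built directly and passed per call; a sketch using the same parameters:

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    # Same parameters as the defaults configured above.
    IDEMPOTENT_RETRY = retries.Retry(
        initial=0.1,      # first backoff, in seconds
        maximum=60.0,     # backoff ceiling
        multiplier=1.3,   # exponential growth factor
        predicate=retries.if_exception_type(
            core_exceptions.DeadlineExceeded,
            core_exceptions.InternalServerError,
            core_exceptions.ServiceUnavailable,
        ),
        deadline=600.0,   # total budget across all attempts
    )

    # Per-call override on any wrapped method (client and name are placeholders):
    # client.get_workflow_template(name=name, retry=IDEMPOTENT_RETRY, timeout=600.0)
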
credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -179,7 +177,7 @@ def __init__( def create_channel( cls, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -210,13 +208,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -304,7 +304,8 @@ def get_workflow_template( def instantiate_workflow_template( self, ) -> Callable[ - [workflow_templates.InstantiateWorkflowTemplateRequest], operations.Operation + [workflow_templates.InstantiateWorkflowTemplateRequest], + operations_pb2.Operation, ]: r"""Return a callable for the instantiate workflow template method over gRPC. @@ -346,7 +347,7 @@ def instantiate_workflow_template( ] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/InstantiateWorkflowTemplate", request_serializer=workflow_templates.InstantiateWorkflowTemplateRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["instantiate_workflow_template"] @@ -355,7 +356,7 @@ def instantiate_inline_workflow_template( self, ) -> Callable[ [workflow_templates.InstantiateInlineWorkflowTemplateRequest], - operations.Operation, + operations_pb2.Operation, ]: r"""Return a callable for the instantiate inline workflow template method over gRPC. @@ -403,7 +404,7 @@ def instantiate_inline_workflow_template( ] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/InstantiateInlineWorkflowTemplate", request_serializer=workflow_templates.InstantiateInlineWorkflowTemplateRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["instantiate_inline_workflow_template"] @@ -471,7 +472,7 @@ def list_workflow_templates( @property def delete_workflow_template( self, - ) -> Callable[[workflow_templates.DeleteWorkflowTemplateRequest], empty.Empty]: + ) -> Callable[[workflow_templates.DeleteWorkflowTemplateRequest], empty_pb2.Empty]: r"""Return a callable for the delete workflow template method over gRPC. Deletes a workflow template. 
It does not cancel in- @@ -491,7 +492,7 @@ def delete_workflow_template( self._stubs["delete_workflow_template"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/DeleteWorkflowTemplate", request_serializer=workflow_templates.DeleteWorkflowTemplateRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_workflow_template"] diff --git a/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/grpc_asyncio.py b/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/grpc_asyncio.py index b02a4e15..9b75cd8e 100644 --- a/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/grpc_asyncio.py +++ b/google/cloud/dataproc_v1beta2/services/workflow_template_service/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,24 +13,22 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore from google.api_core import operations_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.dataproc_v1beta2.types import workflow_templates -from google.longrunning import operations_pb2 as operations # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from .base import WorkflowTemplateServiceTransport, DEFAULT_CLIENT_INFO from .grpc import WorkflowTemplateServiceGrpcTransport @@ -57,7 +54,7 @@ class WorkflowTemplateServiceGrpcAsyncIOTransport(WorkflowTemplateServiceTranspo def create_channel( cls, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -84,13 +81,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -98,7 +97,7 @@ def __init__( self, *, host: str = "dataproc.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -112,7 +111,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. 
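Each stub property in these transports is created lazily on first access and cached: grpc_channel.unary_unary binds the fully qualified RPC path to the proto-plus serializer on the way out and the protobuf FromString parser on the way back. A reduced sketch of the pattern, with a made-up service name:

    import grpc
    from google.protobuf import empty_pb2

    class _StubCachingTransport:
        """Illustrative only; mirrors the stub-caching shape in this diff."""

        def __init__(self, channel: grpc.Channel):
            self.grpc_channel = channel
            self._stubs = {}

        @property
        def delete_thing(self):
            if "delete_thing" not in self._stubs:
                self._stubs["delete_thing"] = self.grpc_channel.unary_unary(
                    "/example.v1.ThingService/DeleteThing",  # hypothetical RPC path
                    request_serializer=lambda msg: msg.SerializeToString(),
                    response_deserializer=empty_pb2.Empty.FromString,
                )
            return self._stubs["delete_thing"]
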
credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -171,7 +171,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -311,7 +310,7 @@ def instantiate_workflow_template( self, ) -> Callable[ [workflow_templates.InstantiateWorkflowTemplateRequest], - Awaitable[operations.Operation], + Awaitable[operations_pb2.Operation], ]: r"""Return a callable for the instantiate workflow template method over gRPC. @@ -353,7 +352,7 @@ def instantiate_workflow_template( ] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/InstantiateWorkflowTemplate", request_serializer=workflow_templates.InstantiateWorkflowTemplateRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["instantiate_workflow_template"] @@ -362,7 +361,7 @@ def instantiate_inline_workflow_template( self, ) -> Callable[ [workflow_templates.InstantiateInlineWorkflowTemplateRequest], - Awaitable[operations.Operation], + Awaitable[operations_pb2.Operation], ]: r"""Return a callable for the instantiate inline workflow template method over gRPC. @@ -410,7 +409,7 @@ def instantiate_inline_workflow_template( ] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/InstantiateInlineWorkflowTemplate", request_serializer=workflow_templates.InstantiateInlineWorkflowTemplateRequest.serialize, - response_deserializer=operations.Operation.FromString, + response_deserializer=operations_pb2.Operation.FromString, ) return self._stubs["instantiate_inline_workflow_template"] @@ -479,7 +478,7 @@ def list_workflow_templates( def delete_workflow_template( self, ) -> Callable[ - [workflow_templates.DeleteWorkflowTemplateRequest], Awaitable[empty.Empty] + [workflow_templates.DeleteWorkflowTemplateRequest], Awaitable[empty_pb2.Empty] ]: r"""Return a callable for the delete workflow template method over gRPC. @@ -500,7 +499,7 @@ def delete_workflow_template( self._stubs["delete_workflow_template"] = self.grpc_channel.unary_unary( "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/DeleteWorkflowTemplate", request_serializer=workflow_templates.DeleteWorkflowTemplateRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_workflow_template"] diff --git a/google/cloud/dataproc_v1beta2/types/__init__.py b/google/cloud/dataproc_v1beta2/types/__init__.py index c640c043..911ee8dd 100644 --- a/google/cloud/dataproc_v1beta2/types/__init__.py +++ b/google/cloud/dataproc_v1beta2/types/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
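The asyncio transport is structurally identical; callables simply resolve through grpc.experimental.aio and satisfy the Awaitable halves of the Union types in base.py. From application code the difference is only async/await; a sketch with placeholder names:

    import asyncio
    from google.cloud import dataproc_v1beta2

    async def main():
        client = dataproc_v1beta2.WorkflowTemplateServiceAsyncClient()
        name = (
            "projects/my-project/regions/us-central1/"
            "workflowTemplates/my-template"  # placeholder
        )
        # The RPC response type is empty_pb2.Empty; the client method
        # completes with no return value on success.
        await client.delete_workflow_template(name=name)

    asyncio.run(main())
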
# - from .autoscaling_policies import ( AutoscalingPolicy, BasicAutoscalingAlgorithm, diff --git a/google/cloud/dataproc_v1beta2/types/autoscaling_policies.py b/google/cloud/dataproc_v1beta2/types/autoscaling_policies.py index ccc13b13..f0ab76f0 100644 --- a/google/cloud/dataproc_v1beta2/types/autoscaling_policies.py +++ b/google/cloud/dataproc_v1beta2/types/autoscaling_policies.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,11 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import duration_pb2 # type: ignore __protobuf__ = proto.module( @@ -72,18 +69,14 @@ class AutoscalingPolicy(proto.Message): operate for secondary workers. """ - id = proto.Field(proto.STRING, number=1) - - name = proto.Field(proto.STRING, number=2) - + id = proto.Field(proto.STRING, number=1,) + name = proto.Field(proto.STRING, number=2,) basic_algorithm = proto.Field( proto.MESSAGE, number=3, oneof="algorithm", message="BasicAutoscalingAlgorithm", ) - worker_config = proto.Field( proto.MESSAGE, number=4, message="InstanceGroupAutoscalingPolicyConfig", ) - secondary_worker_config = proto.Field( proto.MESSAGE, number=5, message="InstanceGroupAutoscalingPolicyConfig", ) @@ -91,7 +84,6 @@ class AutoscalingPolicy(proto.Message): class BasicAutoscalingAlgorithm(proto.Message): r"""Basic algorithm for autoscaling. - Attributes: yarn_config (google.cloud.dataproc_v1beta2.types.BasicYarnAutoscalingConfig): Required. YARN autoscaling configuration. @@ -106,13 +98,13 @@ class BasicAutoscalingAlgorithm(proto.Message): yarn_config = proto.Field( proto.MESSAGE, number=1, message="BasicYarnAutoscalingConfig", ) - - cooldown_period = proto.Field(proto.MESSAGE, number=2, message=duration.Duration,) + cooldown_period = proto.Field( + proto.MESSAGE, number=2, message=duration_pb2.Duration, + ) class BasicYarnAutoscalingConfig(proto.Message): r"""Basic autoscaling configurations for YARN. - Attributes: graceful_decommission_timeout (google.protobuf.duration_pb2.Duration): Required. Timeout for YARN graceful decommissioning of Node @@ -167,16 +159,12 @@ class BasicYarnAutoscalingConfig(proto.Message): """ graceful_decommission_timeout = proto.Field( - proto.MESSAGE, number=5, message=duration.Duration, + proto.MESSAGE, number=5, message=duration_pb2.Duration, ) - - scale_up_factor = proto.Field(proto.DOUBLE, number=1) - - scale_down_factor = proto.Field(proto.DOUBLE, number=2) - - scale_up_min_worker_fraction = proto.Field(proto.DOUBLE, number=3) - - scale_down_min_worker_fraction = proto.Field(proto.DOUBLE, number=4) + scale_up_factor = proto.Field(proto.DOUBLE, number=1,) + scale_down_factor = proto.Field(proto.DOUBLE, number=2,) + scale_up_min_worker_fraction = proto.Field(proto.DOUBLE, number=3,) + scale_down_min_worker_fraction = proto.Field(proto.DOUBLE, number=4,) class InstanceGroupAutoscalingPolicyConfig(proto.Message): @@ -221,16 +209,13 @@ class InstanceGroupAutoscalingPolicyConfig(proto.Message): only and no secondary workers. 
""" - min_instances = proto.Field(proto.INT32, number=1) - - max_instances = proto.Field(proto.INT32, number=2) - - weight = proto.Field(proto.INT32, number=3) + min_instances = proto.Field(proto.INT32, number=1,) + max_instances = proto.Field(proto.INT32, number=2,) + weight = proto.Field(proto.INT32, number=3,) class CreateAutoscalingPolicyRequest(proto.Message): r"""A request to create an autoscaling policy. - Attributes: parent (str): Required. The "resource name" of the region or location, as @@ -248,14 +233,12 @@ class CreateAutoscalingPolicyRequest(proto.Message): Required. The autoscaling policy to create. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) policy = proto.Field(proto.MESSAGE, number=2, message="AutoscalingPolicy",) class GetAutoscalingPolicyRequest(proto.Message): r"""A request to fetch an autoscaling policy. - Attributes: name (str): Required. The "resource name" of the autoscaling policy, as @@ -271,12 +254,11 @@ class GetAutoscalingPolicyRequest(proto.Message): ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class UpdateAutoscalingPolicyRequest(proto.Message): r"""A request to update an autoscaling policy. - Attributes: policy (google.cloud.dataproc_v1beta2.types.AutoscalingPolicy): Required. The updated autoscaling policy. @@ -305,12 +287,11 @@ class DeleteAutoscalingPolicyRequest(proto.Message): ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class ListAutoscalingPoliciesRequest(proto.Message): r"""A request to list autoscaling policies in a project. - Attributes: parent (str): Required. The "resource name" of the region or location, as @@ -334,11 +315,9 @@ class ListAutoscalingPoliciesRequest(proto.Message): results. """ - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) class ListAutoscalingPoliciesResponse(proto.Message): @@ -360,8 +339,7 @@ def raw_page(self): policies = proto.RepeatedField( proto.MESSAGE, number=1, message="AutoscalingPolicy", ) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/dataproc_v1beta2/types/clusters.py b/google/cloud/dataproc_v1beta2/types/clusters.py index 71047f01..6873bedd 100644 --- a/google/cloud/dataproc_v1beta2/types/clusters.py +++ b/google/cloud/dataproc_v1beta2/types/clusters.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,14 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import proto # type: ignore - from google.cloud.dataproc_v1beta2.types import shared -from google.protobuf import duration_pb2 as duration # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -98,28 +95,20 @@ class Cluster(proto.Message): purposes only. It may be changed before final release. """ - project_id = proto.Field(proto.STRING, number=1) - - cluster_name = proto.Field(proto.STRING, number=2) - + project_id = proto.Field(proto.STRING, number=1,) + cluster_name = proto.Field(proto.STRING, number=2,) config = proto.Field(proto.MESSAGE, number=3, message="ClusterConfig",) - - labels = proto.MapField(proto.STRING, proto.STRING, number=8) - + labels = proto.MapField(proto.STRING, proto.STRING, number=8,) status = proto.Field(proto.MESSAGE, number=4, message="ClusterStatus",) - status_history = proto.RepeatedField( proto.MESSAGE, number=7, message="ClusterStatus", ) - - cluster_uuid = proto.Field(proto.STRING, number=6) - + cluster_uuid = proto.Field(proto.STRING, number=6,) metrics = proto.Field(proto.MESSAGE, number=9, message="ClusterMetrics",) class ClusterConfig(proto.Message): r"""The cluster config. - Attributes: config_bucket (str): Optional. A Cloud Storage bucket used to stage job @@ -198,44 +187,31 @@ class ClusterConfig(proto.Message): ``autoscaling_config``. """ - config_bucket = proto.Field(proto.STRING, number=1) - - temp_bucket = proto.Field(proto.STRING, number=2) - + config_bucket = proto.Field(proto.STRING, number=1,) + temp_bucket = proto.Field(proto.STRING, number=2,) gce_cluster_config = proto.Field( proto.MESSAGE, number=8, message="GceClusterConfig", ) - master_config = proto.Field(proto.MESSAGE, number=9, message="InstanceGroupConfig",) - worker_config = proto.Field( proto.MESSAGE, number=10, message="InstanceGroupConfig", ) - secondary_worker_config = proto.Field( proto.MESSAGE, number=12, message="InstanceGroupConfig", ) - software_config = proto.Field(proto.MESSAGE, number=13, message="SoftwareConfig",) - lifecycle_config = proto.Field(proto.MESSAGE, number=14, message="LifecycleConfig",) - initialization_actions = proto.RepeatedField( proto.MESSAGE, number=11, message="NodeInitializationAction", ) - encryption_config = proto.Field( proto.MESSAGE, number=15, message="EncryptionConfig", ) - autoscaling_config = proto.Field( proto.MESSAGE, number=16, message="AutoscalingConfig", ) - endpoint_config = proto.Field(proto.MESSAGE, number=17, message="EndpointConfig",) - security_config = proto.Field(proto.MESSAGE, number=18, message="SecurityConfig",) - gke_cluster_config = proto.Field( proto.MESSAGE, number=19, message="GkeClusterConfig", ) @@ -243,7 +219,6 @@ class ClusterConfig(proto.Message): class GkeClusterConfig(proto.Message): r"""The GKE config for this cluster. - Attributes: namespaced_gke_deployment_target (google.cloud.dataproc_v1beta2.types.GkeClusterConfig.NamespacedGkeDeploymentTarget): Optional. A target for the deployment. @@ -262,9 +237,8 @@ class NamespacedGkeDeploymentTarget(proto.Message): to deploy into. 
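The Cluster message mirrors the REST resource: identity fields (project_id, cluster_name) plus a ClusterConfig whose sub-messages are all optional. A minimal construction sketch with placeholder names and sizes:

    from google.cloud import dataproc_v1beta2

    cluster = dataproc_v1beta2.Cluster(
        project_id="my-project",        # placeholder
        cluster_name="example-cluster",
        config=dataproc_v1beta2.ClusterConfig(
            master_config=dataproc_v1beta2.InstanceGroupConfig(
                num_instances=1, machine_type_uri="n1-standard-2",
            ),
            worker_config=dataproc_v1beta2.InstanceGroupConfig(
                num_instances=2, machine_type_uri="n1-standard-2",
            ),
        ),
        labels={"env": "dev"},
    )
    # A CreateClusterRequest(project_id=..., region=..., cluster=cluster) then
    # goes to ClusterControllerClient.create_cluster, a long-running operation.
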
""" - target_gke_cluster = proto.Field(proto.STRING, number=1) - - cluster_namespace = proto.Field(proto.STRING, number=2) + target_gke_cluster = proto.Field(proto.STRING, number=1,) + cluster_namespace = proto.Field(proto.STRING, number=2,) namespaced_gke_deployment_target = proto.Field( proto.MESSAGE, number=1, message=NamespacedGkeDeploymentTarget, @@ -273,7 +247,6 @@ class NamespacedGkeDeploymentTarget(proto.Message): class EndpointConfig(proto.Message): r"""Endpoint config for this cluster - Attributes: http_ports (Sequence[google.cloud.dataproc_v1beta2.types.EndpointConfig.HttpPortsEntry]): Output only. The map of port descriptions to URLs. Will only @@ -284,14 +257,12 @@ class EndpointConfig(proto.Message): sources. Defaults to false. """ - http_ports = proto.MapField(proto.STRING, proto.STRING, number=1) - - enable_http_port_access = proto.Field(proto.BOOL, number=2) + http_ports = proto.MapField(proto.STRING, proto.STRING, number=1,) + enable_http_port_access = proto.Field(proto.BOOL, number=2,) class AutoscalingConfig(proto.Message): r"""Autoscaling Policy config associated with the cluster. - Attributes: policy_uri (str): Optional. The autoscaling policy used by the cluster. @@ -306,12 +277,11 @@ class AutoscalingConfig(proto.Message): Dataproc region. """ - policy_uri = proto.Field(proto.STRING, number=1) + policy_uri = proto.Field(proto.STRING, number=1,) class EncryptionConfig(proto.Message): r"""Encryption settings for the cluster. - Attributes: gce_pd_kms_key_name (str): Optional. The Cloud KMS key name to use for @@ -319,7 +289,7 @@ class EncryptionConfig(proto.Message): cluster. """ - gce_pd_kms_key_name = proto.Field(proto.STRING, number=1) + gce_pd_kms_key_name = proto.Field(proto.STRING, number=1,) class GceClusterConfig(proto.Message): @@ -412,22 +382,14 @@ class GceClusterConfig(proto.Message): Zonal reservation. 
""" - zone_uri = proto.Field(proto.STRING, number=1) - - network_uri = proto.Field(proto.STRING, number=2) - - subnetwork_uri = proto.Field(proto.STRING, number=6) - - internal_ip_only = proto.Field(proto.BOOL, number=7) - - service_account = proto.Field(proto.STRING, number=8) - - service_account_scopes = proto.RepeatedField(proto.STRING, number=3) - - tags = proto.RepeatedField(proto.STRING, number=4) - - metadata = proto.MapField(proto.STRING, proto.STRING, number=5) - + zone_uri = proto.Field(proto.STRING, number=1,) + network_uri = proto.Field(proto.STRING, number=2,) + subnetwork_uri = proto.Field(proto.STRING, number=6,) + internal_ip_only = proto.Field(proto.BOOL, number=7,) + service_account = proto.Field(proto.STRING, number=8,) + service_account_scopes = proto.RepeatedField(proto.STRING, number=3,) + tags = proto.RepeatedField(proto.STRING, number=4,) + metadata = proto.MapField(proto.STRING, proto.STRING, number=5,) reservation_affinity = proto.Field( proto.MESSAGE, number=11, message="ReservationAffinity", ) @@ -518,29 +480,20 @@ class Preemptibility(proto.Enum): NON_PREEMPTIBLE = 1 PREEMPTIBLE = 2 - num_instances = proto.Field(proto.INT32, number=1) - - instance_names = proto.RepeatedField(proto.STRING, number=2) - - image_uri = proto.Field(proto.STRING, number=3) - - machine_type_uri = proto.Field(proto.STRING, number=4) - + num_instances = proto.Field(proto.INT32, number=1,) + instance_names = proto.RepeatedField(proto.STRING, number=2,) + image_uri = proto.Field(proto.STRING, number=3,) + machine_type_uri = proto.Field(proto.STRING, number=4,) disk_config = proto.Field(proto.MESSAGE, number=5, message="DiskConfig",) - - is_preemptible = proto.Field(proto.BOOL, number=6) - + is_preemptible = proto.Field(proto.BOOL, number=6,) preemptibility = proto.Field(proto.ENUM, number=10, enum=Preemptibility,) - managed_group_config = proto.Field( proto.MESSAGE, number=7, message="ManagedGroupConfig", ) - accelerators = proto.RepeatedField( proto.MESSAGE, number=8, message="AcceleratorConfig", ) - - min_cpu_platform = proto.Field(proto.STRING, number=9) + min_cpu_platform = proto.Field(proto.STRING, number=9,) class ManagedGroupConfig(proto.Message): @@ -556,9 +509,8 @@ class ManagedGroupConfig(proto.Message): Manager for this group. """ - instance_template_name = proto.Field(proto.STRING, number=1) - - instance_group_manager_name = proto.Field(proto.STRING, number=2) + instance_template_name = proto.Field(proto.STRING, number=1,) + instance_group_manager_name = proto.Field(proto.STRING, number=2,) class AcceleratorConfig(proto.Message): @@ -588,9 +540,8 @@ class AcceleratorConfig(proto.Message): type exposed to this instance. """ - accelerator_type_uri = proto.Field(proto.STRING, number=1) - - accelerator_count = proto.Field(proto.INT32, number=2) + accelerator_type_uri = proto.Field(proto.STRING, number=1,) + accelerator_count = proto.Field(proto.INT32, number=2,) class DiskConfig(proto.Message): @@ -616,16 +567,13 @@ class DiskConfig(proto.Message): basic config and installed binaries. """ - boot_disk_type = proto.Field(proto.STRING, number=3) - - boot_disk_size_gb = proto.Field(proto.INT32, number=1) - - num_local_ssds = proto.Field(proto.INT32, number=2) + boot_disk_type = proto.Field(proto.STRING, number=3,) + boot_disk_size_gb = proto.Field(proto.INT32, number=1,) + num_local_ssds = proto.Field(proto.INT32, number=2,) class LifecycleConfig(proto.Message): r"""Specifies the cluster auto-delete schedule configuration. 
- Attributes: idle_delete_ttl (google.protobuf.duration_pb2.Duration): Optional. The duration to keep the cluster alive while @@ -651,17 +599,18 @@ class LifecycleConfig(proto.Message): `Timestamp `__). """ - idle_delete_ttl = proto.Field(proto.MESSAGE, number=1, message=duration.Duration,) - + idle_delete_ttl = proto.Field( + proto.MESSAGE, number=1, message=duration_pb2.Duration, + ) auto_delete_time = proto.Field( - proto.MESSAGE, number=2, oneof="ttl", message=timestamp.Timestamp, + proto.MESSAGE, number=2, oneof="ttl", message=timestamp_pb2.Timestamp, ) - auto_delete_ttl = proto.Field( - proto.MESSAGE, number=3, oneof="ttl", message=duration.Duration, + proto.MESSAGE, number=3, oneof="ttl", message=duration_pb2.Duration, + ) + idle_start_time = proto.Field( + proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp, ) - - idle_start_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) class SecurityConfig(proto.Message): @@ -678,7 +627,6 @@ class SecurityConfig(proto.Message): class KerberosConfig(proto.Message): r"""Specifies Kerberos related configuration. - Attributes: enable_kerberos (bool): Optional. Flag to indicate whether to @@ -751,35 +699,21 @@ class KerberosConfig(proto.Message): of hostnames will be the realm. """ - enable_kerberos = proto.Field(proto.BOOL, number=1) - - root_principal_password_uri = proto.Field(proto.STRING, number=2) - - kms_key_uri = proto.Field(proto.STRING, number=3) - - keystore_uri = proto.Field(proto.STRING, number=4) - - truststore_uri = proto.Field(proto.STRING, number=5) - - keystore_password_uri = proto.Field(proto.STRING, number=6) - - key_password_uri = proto.Field(proto.STRING, number=7) - - truststore_password_uri = proto.Field(proto.STRING, number=8) - - cross_realm_trust_realm = proto.Field(proto.STRING, number=9) - - cross_realm_trust_kdc = proto.Field(proto.STRING, number=10) - - cross_realm_trust_admin_server = proto.Field(proto.STRING, number=11) - - cross_realm_trust_shared_password_uri = proto.Field(proto.STRING, number=12) - - kdc_db_key_uri = proto.Field(proto.STRING, number=13) - - tgt_lifetime_hours = proto.Field(proto.INT32, number=14) - - realm = proto.Field(proto.STRING, number=15) + enable_kerberos = proto.Field(proto.BOOL, number=1,) + root_principal_password_uri = proto.Field(proto.STRING, number=2,) + kms_key_uri = proto.Field(proto.STRING, number=3,) + keystore_uri = proto.Field(proto.STRING, number=4,) + truststore_uri = proto.Field(proto.STRING, number=5,) + keystore_password_uri = proto.Field(proto.STRING, number=6,) + key_password_uri = proto.Field(proto.STRING, number=7,) + truststore_password_uri = proto.Field(proto.STRING, number=8,) + cross_realm_trust_realm = proto.Field(proto.STRING, number=9,) + cross_realm_trust_kdc = proto.Field(proto.STRING, number=10,) + cross_realm_trust_admin_server = proto.Field(proto.STRING, number=11,) + cross_realm_trust_shared_password_uri = proto.Field(proto.STRING, number=12,) + kdc_db_key_uri = proto.Field(proto.STRING, number=13,) + tgt_lifetime_hours = proto.Field(proto.INT32, number=14,) + realm = proto.Field(proto.STRING, number=15,) class NodeInitializationAction(proto.Message): @@ -801,14 +735,14 @@ class NodeInitializationAction(proto.Message): at end of the timeout period. 
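In LifecycleConfig, auto_delete_time and auto_delete_ttl share the "ttl" oneof, so assigning one clears the other; idle_delete_ttl is independent of both. A sketch of the two spellings (the timestamp value is an arbitrary placeholder):

    from google.cloud import dataproc_v1beta2
    from google.protobuf import duration_pb2, timestamp_pb2

    lifecycle = dataproc_v1beta2.LifecycleConfig(
        idle_delete_ttl=duration_pb2.Duration(seconds=1800),   # delete after 30 min idle
        auto_delete_ttl=duration_pb2.Duration(seconds=14400),  # or 4 h after creation
    )

    # Setting the other member of the "ttl" oneof replaces auto_delete_ttl.
    lifecycle.auto_delete_time = timestamp_pb2.Timestamp(seconds=1924992000)
    pb = dataproc_v1beta2.LifecycleConfig.pb(lifecycle)  # underlying protobuf message
    assert pb.WhichOneof("ttl") == "auto_delete_time"
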
""" - executable_file = proto.Field(proto.STRING, number=1) - - execution_timeout = proto.Field(proto.MESSAGE, number=2, message=duration.Duration,) + executable_file = proto.Field(proto.STRING, number=1,) + execution_timeout = proto.Field( + proto.MESSAGE, number=2, message=duration_pb2.Duration, + ) class ClusterStatus(proto.Message): r"""The status of a cluster and its instances. - Attributes: state (google.cloud.dataproc_v1beta2.types.ClusterStatus.State): Output only. The cluster's state. @@ -843,13 +777,10 @@ class Substate(proto.Enum): STALE_STATUS = 2 state = proto.Field(proto.ENUM, number=1, enum=State,) - - detail = proto.Field(proto.STRING, number=2) - + detail = proto.Field(proto.STRING, number=2,) state_start_time = proto.Field( - proto.MESSAGE, number=3, message=timestamp.Timestamp, + proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp, ) - substate = proto.Field(proto.ENUM, number=4, enum=Substate,) @@ -890,10 +821,8 @@ class SoftwareConfig(proto.Message): the cluster. """ - image_version = proto.Field(proto.STRING, number=1) - - properties = proto.MapField(proto.STRING, proto.STRING, number=2) - + image_version = proto.Field(proto.STRING, number=1,) + properties = proto.MapField(proto.STRING, proto.STRING, number=2,) optional_components = proto.RepeatedField( proto.ENUM, number=3, enum=shared.Component, ) @@ -912,14 +841,12 @@ class ClusterMetrics(proto.Message): The YARN metrics. """ - hdfs_metrics = proto.MapField(proto.STRING, proto.INT64, number=1) - - yarn_metrics = proto.MapField(proto.STRING, proto.INT64, number=2) + hdfs_metrics = proto.MapField(proto.STRING, proto.INT64, number=1,) + yarn_metrics = proto.MapField(proto.STRING, proto.INT64, number=2,) class CreateClusterRequest(proto.Message): r"""A request to create a cluster. - Attributes: project_id (str): Required. The ID of the Google Cloud Platform @@ -946,18 +873,14 @@ class CreateClusterRequest(proto.Message): characters. """ - project_id = proto.Field(proto.STRING, number=1) - - region = proto.Field(proto.STRING, number=3) - + project_id = proto.Field(proto.STRING, number=1,) + region = proto.Field(proto.STRING, number=3,) cluster = proto.Field(proto.MESSAGE, number=2, message="Cluster",) - - request_id = proto.Field(proto.STRING, number=4) + request_id = proto.Field(proto.STRING, number=4,) class UpdateClusterRequest(proto.Message): r"""A request to update a cluster. - Attributes: project_id (str): Required. The ID of the Google Cloud Platform @@ -1067,26 +990,21 @@ class UpdateClusterRequest(proto.Message): characters. """ - project_id = proto.Field(proto.STRING, number=1) - - region = proto.Field(proto.STRING, number=5) - - cluster_name = proto.Field(proto.STRING, number=2) - + project_id = proto.Field(proto.STRING, number=1,) + region = proto.Field(proto.STRING, number=5,) + cluster_name = proto.Field(proto.STRING, number=2,) cluster = proto.Field(proto.MESSAGE, number=3, message="Cluster",) - graceful_decommission_timeout = proto.Field( - proto.MESSAGE, number=6, message=duration.Duration, + proto.MESSAGE, number=6, message=duration_pb2.Duration, ) - - update_mask = proto.Field(proto.MESSAGE, number=4, message=field_mask.FieldMask,) - - request_id = proto.Field(proto.STRING, number=7) + update_mask = proto.Field( + proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask, + ) + request_id = proto.Field(proto.STRING, number=7,) class DeleteClusterRequest(proto.Message): r"""A request to delete a cluster. - Attributes: project_id (str): Required. 
The ID of the Google Cloud Platform @@ -1117,15 +1035,11 @@ class DeleteClusterRequest(proto.Message): characters. """ - project_id = proto.Field(proto.STRING, number=1) - - region = proto.Field(proto.STRING, number=3) - - cluster_name = proto.Field(proto.STRING, number=2) - - cluster_uuid = proto.Field(proto.STRING, number=4) - - request_id = proto.Field(proto.STRING, number=5) + project_id = proto.Field(proto.STRING, number=1,) + region = proto.Field(proto.STRING, number=3,) + cluster_name = proto.Field(proto.STRING, number=2,) + cluster_uuid = proto.Field(proto.STRING, number=4,) + request_id = proto.Field(proto.STRING, number=5,) class GetClusterRequest(proto.Message): @@ -1143,16 +1057,13 @@ class GetClusterRequest(proto.Message): Required. The cluster name. """ - project_id = proto.Field(proto.STRING, number=1) - - region = proto.Field(proto.STRING, number=3) - - cluster_name = proto.Field(proto.STRING, number=2) + project_id = proto.Field(proto.STRING, number=1,) + region = proto.Field(proto.STRING, number=3,) + cluster_name = proto.Field(proto.STRING, number=2,) class ListClustersRequest(proto.Message): r"""A request to list the clusters in a project. - Attributes: project_id (str): Required. The ID of the Google Cloud Platform @@ -1188,20 +1099,15 @@ class ListClustersRequest(proto.Message): Optional. The standard List page token. """ - project_id = proto.Field(proto.STRING, number=1) - - region = proto.Field(proto.STRING, number=4) - - filter = proto.Field(proto.STRING, number=5) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) + project_id = proto.Field(proto.STRING, number=1,) + region = proto.Field(proto.STRING, number=4,) + filter = proto.Field(proto.STRING, number=5,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) class ListClustersResponse(proto.Message): r"""The list of all clusters in a project. - Attributes: clusters (Sequence[google.cloud.dataproc_v1beta2.types.Cluster]): Output only. The clusters in the project. @@ -1217,13 +1123,11 @@ def raw_page(self): return self clusters = proto.RepeatedField(proto.MESSAGE, number=1, message="Cluster",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class DiagnoseClusterRequest(proto.Message): r"""A request to collect cluster diagnostic information. - Attributes: project_id (str): Required. The ID of the Google Cloud Platform @@ -1235,16 +1139,13 @@ class DiagnoseClusterRequest(proto.Message): Required. The cluster name. """ - project_id = proto.Field(proto.STRING, number=1) - - region = proto.Field(proto.STRING, number=3) - - cluster_name = proto.Field(proto.STRING, number=2) + project_id = proto.Field(proto.STRING, number=1,) + region = proto.Field(proto.STRING, number=3,) + cluster_name = proto.Field(proto.STRING, number=2,) class DiagnoseClusterResults(proto.Message): r"""The location of diagnostic output. - Attributes: output_uri (str): Output only. The Cloud Storage URI of the @@ -1253,12 +1154,11 @@ class DiagnoseClusterResults(proto.Message): diagnostics. """ - output_uri = proto.Field(proto.STRING, number=1) + output_uri = proto.Field(proto.STRING, number=1,) class ReservationAffinity(proto.Message): r"""Reservation Affinity for consuming Zonal reservation. - Attributes: consume_reservation_type (google.cloud.dataproc_v1beta2.types.ReservationAffinity.Type): Optional. 
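ListClustersRequest.filter accepts the Dataproc list-filter grammar, field = value clauses joined by AND over fields such as status.state and labels.* (the full grammar lives in the field's docstring, elided in this hunk). A sketch with placeholder values:

    from google.cloud import dataproc_v1beta2

    request = dataproc_v1beta2.ListClustersRequest(
        project_id="my-project",  # placeholder
        region="us-central1",
        filter="status.state = ACTIVE AND labels.env = staging",
        page_size=50,
    )
    # ClusterControllerClient.list_clusters(request=request) returns a pager
    # over the matching clusters.
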
Type of reservation to consume @@ -1280,10 +1180,8 @@ class Type(proto.Enum): SPECIFIC_RESERVATION = 3 consume_reservation_type = proto.Field(proto.ENUM, number=1, enum=Type,) - - key = proto.Field(proto.STRING, number=2) - - values = proto.RepeatedField(proto.STRING, number=3) + key = proto.Field(proto.STRING, number=2,) + values = proto.RepeatedField(proto.STRING, number=3,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/dataproc_v1beta2/types/jobs.py b/google/cloud/dataproc_v1beta2/types/jobs.py index 3b1f50b5..50343ef9 100644 --- a/google/cloud/dataproc_v1beta2/types/jobs.py +++ b/google/cloud/dataproc_v1beta2/types/jobs.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,12 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -55,7 +52,6 @@ class LoggingConfig(proto.Message): r"""The runtime logging config of the job. - Attributes: driver_log_levels (Sequence[google.cloud.dataproc_v1beta2.types.LoggingConfig.DriverLogLevelsEntry]): The per-package log levels for the driver. @@ -131,20 +127,13 @@ class HadoopJob(proto.Message): execution. """ - main_jar_file_uri = proto.Field(proto.STRING, number=1, oneof="driver") - - main_class = proto.Field(proto.STRING, number=2, oneof="driver") - - args = proto.RepeatedField(proto.STRING, number=3) - - jar_file_uris = proto.RepeatedField(proto.STRING, number=4) - - file_uris = proto.RepeatedField(proto.STRING, number=5) - - archive_uris = proto.RepeatedField(proto.STRING, number=6) - - properties = proto.MapField(proto.STRING, proto.STRING, number=7) - + main_jar_file_uri = proto.Field(proto.STRING, number=1, oneof="driver",) + main_class = proto.Field(proto.STRING, number=2, oneof="driver",) + args = proto.RepeatedField(proto.STRING, number=3,) + jar_file_uris = proto.RepeatedField(proto.STRING, number=4,) + file_uris = proto.RepeatedField(proto.STRING, number=5,) + archive_uris = proto.RepeatedField(proto.STRING, number=6,) + properties = proto.MapField(proto.STRING, proto.STRING, number=7,) logging_config = proto.Field(proto.MESSAGE, number=8, message="LoggingConfig",) @@ -194,20 +183,13 @@ class SparkJob(proto.Message): execution. 
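main_jar_file_uri and main_class share HadoopJob's "driver" oneof, so a job names its entry point exactly one way. A sketch with placeholder URIs:

    from google.cloud import dataproc_v1beta2

    hadoop_job = dataproc_v1beta2.HadoopJob(
        main_class="org.apache.hadoop.examples.WordCount",   # driver oneof, variant 1
        jar_file_uris=[
            "file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar",
        ],
        args=["gs://my-bucket/input/", "gs://my-bucket/output/"],  # placeholder bucket
    )

    # Assigning the other oneof member would clear main_class:
    # hadoop_job.main_jar_file_uri = "gs://my-bucket/wordcount.jar"
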
""" - main_jar_file_uri = proto.Field(proto.STRING, number=1, oneof="driver") - - main_class = proto.Field(proto.STRING, number=2, oneof="driver") - - args = proto.RepeatedField(proto.STRING, number=3) - - jar_file_uris = proto.RepeatedField(proto.STRING, number=4) - - file_uris = proto.RepeatedField(proto.STRING, number=5) - - archive_uris = proto.RepeatedField(proto.STRING, number=6) - - properties = proto.MapField(proto.STRING, proto.STRING, number=7) - + main_jar_file_uri = proto.Field(proto.STRING, number=1, oneof="driver",) + main_class = proto.Field(proto.STRING, number=2, oneof="driver",) + args = proto.RepeatedField(proto.STRING, number=3,) + jar_file_uris = proto.RepeatedField(proto.STRING, number=4,) + file_uris = proto.RepeatedField(proto.STRING, number=5,) + archive_uris = proto.RepeatedField(proto.STRING, number=6,) + properties = proto.MapField(proto.STRING, proto.STRING, number=7,) logging_config = proto.Field(proto.MESSAGE, number=8, message="LoggingConfig",) @@ -254,26 +236,18 @@ class PySparkJob(proto.Message): execution. """ - main_python_file_uri = proto.Field(proto.STRING, number=1) - - args = proto.RepeatedField(proto.STRING, number=2) - - python_file_uris = proto.RepeatedField(proto.STRING, number=3) - - jar_file_uris = proto.RepeatedField(proto.STRING, number=4) - - file_uris = proto.RepeatedField(proto.STRING, number=5) - - archive_uris = proto.RepeatedField(proto.STRING, number=6) - - properties = proto.MapField(proto.STRING, proto.STRING, number=7) - + main_python_file_uri = proto.Field(proto.STRING, number=1,) + args = proto.RepeatedField(proto.STRING, number=2,) + python_file_uris = proto.RepeatedField(proto.STRING, number=3,) + jar_file_uris = proto.RepeatedField(proto.STRING, number=4,) + file_uris = proto.RepeatedField(proto.STRING, number=5,) + archive_uris = proto.RepeatedField(proto.STRING, number=6,) + properties = proto.MapField(proto.STRING, proto.STRING, number=7,) logging_config = proto.Field(proto.MESSAGE, number=8, message="LoggingConfig",) class QueryList(proto.Message): r"""A list of queries to run on a cluster. - Attributes: queries (Sequence[str]): Required. The queries to execute. You do not need to @@ -295,7 +269,7 @@ class QueryList(proto.Message): } """ - queries = proto.RepeatedField(proto.STRING, number=1) + queries = proto.RepeatedField(proto.STRING, number=1,) class HiveJob(proto.Message): @@ -328,19 +302,14 @@ class HiveJob(proto.Message): and UDFs. """ - query_file_uri = proto.Field(proto.STRING, number=1, oneof="queries") - + query_file_uri = proto.Field(proto.STRING, number=1, oneof="queries",) query_list = proto.Field( proto.MESSAGE, number=2, oneof="queries", message="QueryList", ) - - continue_on_failure = proto.Field(proto.BOOL, number=3) - - script_variables = proto.MapField(proto.STRING, proto.STRING, number=4) - - properties = proto.MapField(proto.STRING, proto.STRING, number=5) - - jar_file_uris = proto.RepeatedField(proto.STRING, number=6) + continue_on_failure = proto.Field(proto.BOOL, number=3,) + script_variables = proto.MapField(proto.STRING, proto.STRING, number=4,) + properties = proto.MapField(proto.STRING, proto.STRING, number=5,) + jar_file_uris = proto.RepeatedField(proto.STRING, number=6,) class SparkSqlJob(proto.Message): @@ -370,18 +339,13 @@ class SparkSqlJob(proto.Message): execution. 
""" - query_file_uri = proto.Field(proto.STRING, number=1, oneof="queries") - + query_file_uri = proto.Field(proto.STRING, number=1, oneof="queries",) query_list = proto.Field( proto.MESSAGE, number=2, oneof="queries", message="QueryList", ) - - script_variables = proto.MapField(proto.STRING, proto.STRING, number=3) - - properties = proto.MapField(proto.STRING, proto.STRING, number=4) - - jar_file_uris = proto.RepeatedField(proto.STRING, number=56) - + script_variables = proto.MapField(proto.STRING, proto.STRING, number=3,) + properties = proto.MapField(proto.STRING, proto.STRING, number=4,) + jar_file_uris = proto.RepeatedField(proto.STRING, number=56,) logging_config = proto.Field(proto.MESSAGE, number=6, message="LoggingConfig",) @@ -417,20 +381,14 @@ class PigJob(proto.Message): execution. """ - query_file_uri = proto.Field(proto.STRING, number=1, oneof="queries") - + query_file_uri = proto.Field(proto.STRING, number=1, oneof="queries",) query_list = proto.Field( proto.MESSAGE, number=2, oneof="queries", message="QueryList", ) - - continue_on_failure = proto.Field(proto.BOOL, number=3) - - script_variables = proto.MapField(proto.STRING, proto.STRING, number=4) - - properties = proto.MapField(proto.STRING, proto.STRING, number=5) - - jar_file_uris = proto.RepeatedField(proto.STRING, number=6) - + continue_on_failure = proto.Field(proto.BOOL, number=3,) + script_variables = proto.MapField(proto.STRING, proto.STRING, number=4,) + properties = proto.MapField(proto.STRING, proto.STRING, number=5,) + jar_file_uris = proto.RepeatedField(proto.STRING, number=6,) logging_config = proto.Field(proto.MESSAGE, number=7, message="LoggingConfig",) @@ -470,16 +428,11 @@ class SparkRJob(proto.Message): execution. """ - main_r_file_uri = proto.Field(proto.STRING, number=1) - - args = proto.RepeatedField(proto.STRING, number=2) - - file_uris = proto.RepeatedField(proto.STRING, number=3) - - archive_uris = proto.RepeatedField(proto.STRING, number=4) - - properties = proto.MapField(proto.STRING, proto.STRING, number=5) - + main_r_file_uri = proto.Field(proto.STRING, number=1,) + args = proto.RepeatedField(proto.STRING, number=2,) + file_uris = proto.RepeatedField(proto.STRING, number=3,) + archive_uris = proto.RepeatedField(proto.STRING, number=4,) + properties = proto.MapField(proto.STRING, proto.STRING, number=5,) logging_config = proto.Field(proto.MESSAGE, number=6, message="LoggingConfig",) @@ -517,26 +470,19 @@ class PrestoJob(proto.Message): execution. """ - query_file_uri = proto.Field(proto.STRING, number=1, oneof="queries") - + query_file_uri = proto.Field(proto.STRING, number=1, oneof="queries",) query_list = proto.Field( proto.MESSAGE, number=2, oneof="queries", message="QueryList", ) - - continue_on_failure = proto.Field(proto.BOOL, number=3) - - output_format = proto.Field(proto.STRING, number=4) - - client_tags = proto.RepeatedField(proto.STRING, number=5) - - properties = proto.MapField(proto.STRING, proto.STRING, number=6) - + continue_on_failure = proto.Field(proto.BOOL, number=3,) + output_format = proto.Field(proto.STRING, number=4,) + client_tags = proto.RepeatedField(proto.STRING, number=5,) + properties = proto.MapField(proto.STRING, proto.STRING, number=6,) logging_config = proto.Field(proto.MESSAGE, number=7, message="LoggingConfig",) class JobPlacement(proto.Message): r"""Dataproc job config. - Attributes: cluster_name (str): Required. The name of the cluster where the @@ -546,14 +492,12 @@ class JobPlacement(proto.Message): Dataproc service when the job is submitted. 
""" - cluster_name = proto.Field(proto.STRING, number=1) - - cluster_uuid = proto.Field(proto.STRING, number=2) + cluster_name = proto.Field(proto.STRING, number=1,) + cluster_uuid = proto.Field(proto.STRING, number=2,) class JobStatus(proto.Message): r"""Dataproc job status. - Attributes: state (google.cloud.dataproc_v1beta2.types.JobStatus.State): Output only. A state message specifying the @@ -591,19 +535,15 @@ class Substate(proto.Enum): STALE_STATUS = 3 state = proto.Field(proto.ENUM, number=1, enum=State,) - - details = proto.Field(proto.STRING, number=2) - + details = proto.Field(proto.STRING, number=2,) state_start_time = proto.Field( - proto.MESSAGE, number=6, message=timestamp.Timestamp, + proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp, ) - substate = proto.Field(proto.ENUM, number=7, enum=Substate,) class JobReference(proto.Message): r"""Encapsulates the full scoping used to reference a job. - Attributes: project_id (str): Optional. The ID of the Google Cloud Platform @@ -619,9 +559,8 @@ class JobReference(proto.Message): by the server. """ - project_id = proto.Field(proto.STRING, number=1) - - job_id = proto.Field(proto.STRING, number=2) + project_id = proto.Field(proto.STRING, number=1,) + job_id = proto.Field(proto.STRING, number=2,) class YarnApplication(proto.Message): @@ -663,18 +602,14 @@ class State(proto.Enum): FAILED = 7 KILLED = 8 - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) state = proto.Field(proto.ENUM, number=2, enum=State,) - - progress = proto.Field(proto.FLOAT, number=3) - - tracking_url = proto.Field(proto.STRING, number=4) + progress = proto.Field(proto.FLOAT, number=3,) + tracking_url = proto.Field(proto.STRING, number=4,) class Job(proto.Message): r"""A Dataproc job resource. - Attributes: reference (google.cloud.dataproc_v1beta2.types.JobReference): Optional. 
The fully qualified reference to the job, which @@ -747,65 +682,45 @@ class Job(proto.Message): """ reference = proto.Field(proto.MESSAGE, number=1, message="JobReference",) - placement = proto.Field(proto.MESSAGE, number=2, message="JobPlacement",) - hadoop_job = proto.Field( proto.MESSAGE, number=3, oneof="type_job", message="HadoopJob", ) - spark_job = proto.Field( proto.MESSAGE, number=4, oneof="type_job", message="SparkJob", ) - pyspark_job = proto.Field( proto.MESSAGE, number=5, oneof="type_job", message="PySparkJob", ) - hive_job = proto.Field( proto.MESSAGE, number=6, oneof="type_job", message="HiveJob", ) - pig_job = proto.Field(proto.MESSAGE, number=7, oneof="type_job", message="PigJob",) - spark_r_job = proto.Field( proto.MESSAGE, number=21, oneof="type_job", message="SparkRJob", ) - spark_sql_job = proto.Field( proto.MESSAGE, number=12, oneof="type_job", message="SparkSqlJob", ) - presto_job = proto.Field( proto.MESSAGE, number=23, oneof="type_job", message="PrestoJob", ) - status = proto.Field(proto.MESSAGE, number=8, message="JobStatus",) - status_history = proto.RepeatedField(proto.MESSAGE, number=13, message="JobStatus",) - yarn_applications = proto.RepeatedField( proto.MESSAGE, number=9, message="YarnApplication", ) - - submitted_by = proto.Field(proto.STRING, number=10) - - driver_output_resource_uri = proto.Field(proto.STRING, number=17) - - driver_control_files_uri = proto.Field(proto.STRING, number=15) - - labels = proto.MapField(proto.STRING, proto.STRING, number=18) - + submitted_by = proto.Field(proto.STRING, number=10,) + driver_output_resource_uri = proto.Field(proto.STRING, number=17,) + driver_control_files_uri = proto.Field(proto.STRING, number=15,) + labels = proto.MapField(proto.STRING, proto.STRING, number=18,) scheduling = proto.Field(proto.MESSAGE, number=20, message="JobScheduling",) - - job_uuid = proto.Field(proto.STRING, number=22) - - done = proto.Field(proto.BOOL, number=24) + job_uuid = proto.Field(proto.STRING, number=22,) + done = proto.Field(proto.BOOL, number=24,) class JobScheduling(proto.Message): r"""Job scheduling options. - Attributes: max_failures_per_hour (int): Optional. Maximum number of times per hour a @@ -820,12 +735,11 @@ class JobScheduling(proto.Message): Maximum value is 10. """ - max_failures_per_hour = proto.Field(proto.INT32, number=1) + max_failures_per_hour = proto.Field(proto.INT32, number=1,) class JobMetadata(proto.Message): r"""Job Operation metadata. - Attributes: job_id (str): Output only. The job id. @@ -837,18 +751,14 @@ class JobMetadata(proto.Message): Output only. Job submission time. """ - job_id = proto.Field(proto.STRING, number=1) - + job_id = proto.Field(proto.STRING, number=1,) status = proto.Field(proto.MESSAGE, number=2, message="JobStatus",) - - operation_type = proto.Field(proto.STRING, number=3) - - start_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) + operation_type = proto.Field(proto.STRING, number=3,) + start_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) class SubmitJobRequest(proto.Message): r"""A request to submit a job. - Attributes: project_id (str): Required. The ID of the Google Cloud Platform @@ -875,13 +785,10 @@ class SubmitJobRequest(proto.Message): characters. 
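To ground the Job and SubmitJobRequest messages above, a hedged sketch of submitting a job through the flattened client signature (the flattened parameters match the METHOD_TO_PARAMS table later in this patch; project, region, bucket, and cluster values are placeholders, and the regional endpoint is an assumption):

from google.cloud import dataproc_v1beta2

client = dataproc_v1beta2.JobControllerClient(
    client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
)
job = dataproc_v1beta2.Job(
    placement=dataproc_v1beta2.JobPlacement(cluster_name="my-cluster"),
    pyspark_job=dataproc_v1beta2.PySparkJob(
        main_python_file_uri="gs://my-bucket/word_count.py"
    ),
)
response = client.submit_job(
    project_id="my-project", region="us-central1", job=job
)
print(response.status.state)  # a JobStatus.State enum value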
""" - project_id = proto.Field(proto.STRING, number=1) - - region = proto.Field(proto.STRING, number=3) - + project_id = proto.Field(proto.STRING, number=1,) + region = proto.Field(proto.STRING, number=3,) job = proto.Field(proto.MESSAGE, number=2, message="Job",) - - request_id = proto.Field(proto.STRING, number=4) + request_id = proto.Field(proto.STRING, number=4,) class GetJobRequest(proto.Message): @@ -899,16 +806,13 @@ class GetJobRequest(proto.Message): Required. The job ID. """ - project_id = proto.Field(proto.STRING, number=1) - - region = proto.Field(proto.STRING, number=3) - - job_id = proto.Field(proto.STRING, number=2) + project_id = proto.Field(proto.STRING, number=1,) + region = proto.Field(proto.STRING, number=3,) + job_id = proto.Field(proto.STRING, number=2,) class ListJobsRequest(proto.Message): r"""A request to list jobs in a project. - Attributes: project_id (str): Required. The ID of the Google Cloud Platform @@ -958,24 +862,17 @@ class JobStateMatcher(proto.Enum): ACTIVE = 1 NON_ACTIVE = 2 - project_id = proto.Field(proto.STRING, number=1) - - region = proto.Field(proto.STRING, number=6) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) - - cluster_name = proto.Field(proto.STRING, number=4) - + project_id = proto.Field(proto.STRING, number=1,) + region = proto.Field(proto.STRING, number=6,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + cluster_name = proto.Field(proto.STRING, number=4,) job_state_matcher = proto.Field(proto.ENUM, number=5, enum=JobStateMatcher,) - - filter = proto.Field(proto.STRING, number=7) + filter = proto.Field(proto.STRING, number=7,) class UpdateJobRequest(proto.Message): r"""A request to update a job. - Attributes: project_id (str): Required. The ID of the Google Cloud Platform @@ -995,20 +892,17 @@ class UpdateJobRequest(proto.Message): Currently, labels is the only field that can be updated. """ - project_id = proto.Field(proto.STRING, number=1) - - region = proto.Field(proto.STRING, number=2) - - job_id = proto.Field(proto.STRING, number=3) - + project_id = proto.Field(proto.STRING, number=1,) + region = proto.Field(proto.STRING, number=2,) + job_id = proto.Field(proto.STRING, number=3,) job = proto.Field(proto.MESSAGE, number=4, message="Job",) - - update_mask = proto.Field(proto.MESSAGE, number=5, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=5, message=field_mask_pb2.FieldMask, + ) class ListJobsResponse(proto.Message): r"""A list of jobs in a project. - Attributes: jobs (Sequence[google.cloud.dataproc_v1beta2.types.Job]): Output only. Jobs list. @@ -1024,13 +918,11 @@ def raw_page(self): return self jobs = proto.RepeatedField(proto.MESSAGE, number=1, message="Job",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class CancelJobRequest(proto.Message): r"""A request to cancel a job. - Attributes: project_id (str): Required. The ID of the Google Cloud Platform @@ -1042,16 +934,13 @@ class CancelJobRequest(proto.Message): Required. The job ID. """ - project_id = proto.Field(proto.STRING, number=1) - - region = proto.Field(proto.STRING, number=3) - - job_id = proto.Field(proto.STRING, number=2) + project_id = proto.Field(proto.STRING, number=1,) + region = proto.Field(proto.STRING, number=3,) + job_id = proto.Field(proto.STRING, number=2,) class DeleteJobRequest(proto.Message): r"""A request to delete a job. 
- Attributes: project_id (str): Required. The ID of the Google Cloud Platform @@ -1063,11 +952,9 @@ class DeleteJobRequest(proto.Message): Required. The job ID. """ - project_id = proto.Field(proto.STRING, number=1) - - region = proto.Field(proto.STRING, number=3) - - job_id = proto.Field(proto.STRING, number=2) + project_id = proto.Field(proto.STRING, number=1,) + region = proto.Field(proto.STRING, number=3,) + job_id = proto.Field(proto.STRING, number=2,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/dataproc_v1beta2/types/operations.py b/google/cloud/dataproc_v1beta2/types/operations.py index 469cc92f..86c3f65c 100644 --- a/google/cloud/dataproc_v1beta2/types/operations.py +++ b/google/cloud/dataproc_v1beta2/types/operations.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,11 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -29,7 +26,6 @@ class ClusterOperationStatus(proto.Message): r"""The status of the operation. - Attributes: state (google.cloud.dataproc_v1beta2.types.ClusterOperationStatus.State): Output only. A message containing the @@ -52,19 +48,15 @@ class State(proto.Enum): DONE = 3 state = proto.Field(proto.ENUM, number=1, enum=State,) - - inner_state = proto.Field(proto.STRING, number=2) - - details = proto.Field(proto.STRING, number=3) - + inner_state = proto.Field(proto.STRING, number=2,) + details = proto.Field(proto.STRING, number=3,) state_start_time = proto.Field( - proto.MESSAGE, number=4, message=timestamp.Timestamp, + proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp, ) class ClusterOperationMetadata(proto.Message): r"""Metadata describing the operation. - Attributes: cluster_name (str): Output only. Name of the cluster for the @@ -87,23 +79,16 @@ class ClusterOperationMetadata(proto.Message): operation execution. 
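ClusterOperationStatus and ClusterOperationMetadata surface as the metadata of the long-running operations returned by the cluster controller; a hedged sketch of reading them (identifiers are placeholders, and the cluster spec is deliberately skeletal):

from google.cloud import dataproc_v1beta2

client = dataproc_v1beta2.ClusterControllerClient()
# create_cluster returns a google.api_core.operation.Operation.
operation = client.create_cluster(
    project_id="my-project",
    region="us-central1",
    cluster=dataproc_v1beta2.Cluster(cluster_name="my-cluster"),
)
metadata = operation.metadata  # a ClusterOperationMetadata message
print(metadata.cluster_name, metadata.status.state)
result = operation.result()   # blocks until the operation reaches DONE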
""" - cluster_name = proto.Field(proto.STRING, number=7) - - cluster_uuid = proto.Field(proto.STRING, number=8) - + cluster_name = proto.Field(proto.STRING, number=7,) + cluster_uuid = proto.Field(proto.STRING, number=8,) status = proto.Field(proto.MESSAGE, number=9, message="ClusterOperationStatus",) - status_history = proto.RepeatedField( proto.MESSAGE, number=10, message="ClusterOperationStatus", ) - - operation_type = proto.Field(proto.STRING, number=11) - - description = proto.Field(proto.STRING, number=12) - - labels = proto.MapField(proto.STRING, proto.STRING, number=13) - - warnings = proto.RepeatedField(proto.STRING, number=14) + operation_type = proto.Field(proto.STRING, number=11,) + description = proto.Field(proto.STRING, number=12,) + labels = proto.MapField(proto.STRING, proto.STRING, number=13,) + warnings = proto.RepeatedField(proto.STRING, number=14,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/dataproc_v1beta2/types/shared.py b/google/cloud/dataproc_v1beta2/types/shared.py index 524f0916..6ba530f6 100644 --- a/google/cloud/dataproc_v1beta2/types/shared.py +++ b/google/cloud/dataproc_v1beta2/types/shared.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore diff --git a/google/cloud/dataproc_v1beta2/types/workflow_templates.py b/google/cloud/dataproc_v1beta2/types/workflow_templates.py index 6704df9e..21bb4bfe 100644 --- a/google/cloud/dataproc_v1beta2/types/workflow_templates.py +++ b/google/cloud/dataproc_v1beta2/types/workflow_templates.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,14 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - from google.cloud.dataproc_v1beta2.types import clusters from google.cloud.dataproc_v1beta2.types import jobs as gcd_jobs -from google.protobuf import duration_pb2 as duration # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -54,7 +51,6 @@ class WorkflowTemplate(proto.Message): r"""A Dataproc workflow template resource. - Attributes: id (str): Required. The template id. @@ -132,29 +128,20 @@ class WorkflowTemplate(proto.Message): the cluster is deleted. 
""" - id = proto.Field(proto.STRING, number=2) - - name = proto.Field(proto.STRING, number=1) - - version = proto.Field(proto.INT32, number=3) - - create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) - - labels = proto.MapField(proto.STRING, proto.STRING, number=6) - + id = proto.Field(proto.STRING, number=2,) + name = proto.Field(proto.STRING, number=1,) + version = proto.Field(proto.INT32, number=3,) + create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,) + labels = proto.MapField(proto.STRING, proto.STRING, number=6,) placement = proto.Field( proto.MESSAGE, number=7, message="WorkflowTemplatePlacement", ) - jobs = proto.RepeatedField(proto.MESSAGE, number=8, message="OrderedJob",) - parameters = proto.RepeatedField( proto.MESSAGE, number=9, message="TemplateParameter", ) - - dag_timeout = proto.Field(proto.MESSAGE, number=10, message=duration.Duration,) + dag_timeout = proto.Field(proto.MESSAGE, number=10, message=duration_pb2.Duration,) class WorkflowTemplatePlacement(proto.Message): @@ -177,7 +164,6 @@ class WorkflowTemplatePlacement(proto.Message): managed_cluster = proto.Field( proto.MESSAGE, number=1, oneof="placement", message="ManagedCluster", ) - cluster_selector = proto.Field( proto.MESSAGE, number=2, oneof="placement", message="ClusterSelector", ) @@ -185,7 +171,6 @@ class WorkflowTemplatePlacement(proto.Message): class ManagedCluster(proto.Message): r"""Cluster that is managed by the workflow. - Attributes: cluster_name (str): Required. The cluster name prefix. A unique @@ -213,11 +198,9 @@ class ManagedCluster(proto.Message): cluster. """ - cluster_name = proto.Field(proto.STRING, number=2) - + cluster_name = proto.Field(proto.STRING, number=2,) config = proto.Field(proto.MESSAGE, number=3, message=clusters.ClusterConfig,) - - labels = proto.MapField(proto.STRING, proto.STRING, number=4) + labels = proto.MapField(proto.STRING, proto.STRING, number=4,) class ClusterSelector(proto.Message): @@ -236,14 +219,12 @@ class ClusterSelector(proto.Message): have all labels to match. """ - zone = proto.Field(proto.STRING, number=1) - - cluster_labels = proto.MapField(proto.STRING, proto.STRING, number=2) + zone = proto.Field(proto.STRING, number=1,) + cluster_labels = proto.MapField(proto.STRING, proto.STRING, number=2,) class OrderedJob(proto.Message): r"""A job executed by the workflow. - Attributes: step_id (str): Required. The step id. The id must be unique among all jobs @@ -294,45 +275,34 @@ class OrderedJob(proto.Message): workflow. 
""" - step_id = proto.Field(proto.STRING, number=1) - + step_id = proto.Field(proto.STRING, number=1,) hadoop_job = proto.Field( proto.MESSAGE, number=2, oneof="job_type", message=gcd_jobs.HadoopJob, ) - spark_job = proto.Field( proto.MESSAGE, number=3, oneof="job_type", message=gcd_jobs.SparkJob, ) - pyspark_job = proto.Field( proto.MESSAGE, number=4, oneof="job_type", message=gcd_jobs.PySparkJob, ) - hive_job = proto.Field( proto.MESSAGE, number=5, oneof="job_type", message=gcd_jobs.HiveJob, ) - pig_job = proto.Field( proto.MESSAGE, number=6, oneof="job_type", message=gcd_jobs.PigJob, ) - spark_r_job = proto.Field( proto.MESSAGE, number=11, oneof="job_type", message=gcd_jobs.SparkRJob, ) - spark_sql_job = proto.Field( proto.MESSAGE, number=7, oneof="job_type", message=gcd_jobs.SparkSqlJob, ) - presto_job = proto.Field( proto.MESSAGE, number=12, oneof="job_type", message=gcd_jobs.PrestoJob, ) - - labels = proto.MapField(proto.STRING, proto.STRING, number=8) - + labels = proto.MapField(proto.STRING, proto.STRING, number=8,) scheduling = proto.Field(proto.MESSAGE, number=9, message=gcd_jobs.JobScheduling,) - - prerequisite_step_ids = proto.RepeatedField(proto.STRING, number=10) + prerequisite_step_ids = proto.RepeatedField(proto.STRING, number=10,) class TemplateParameter(proto.Message): @@ -414,18 +384,14 @@ class TemplateParameter(proto.Message): this parameter's value. """ - name = proto.Field(proto.STRING, number=1) - - fields = proto.RepeatedField(proto.STRING, number=2) - - description = proto.Field(proto.STRING, number=3) - + name = proto.Field(proto.STRING, number=1,) + fields = proto.RepeatedField(proto.STRING, number=2,) + description = proto.Field(proto.STRING, number=3,) validation = proto.Field(proto.MESSAGE, number=4, message="ParameterValidation",) class ParameterValidation(proto.Message): r"""Configuration for parameter validation. - Attributes: regex (google.cloud.dataproc_v1beta2.types.RegexValidation): Validation based on regular expressions. @@ -436,7 +402,6 @@ class ParameterValidation(proto.Message): regex = proto.Field( proto.MESSAGE, number=1, oneof="validation_type", message="RegexValidation", ) - values = proto.Field( proto.MESSAGE, number=2, oneof="validation_type", message="ValueValidation", ) @@ -444,7 +409,6 @@ class ParameterValidation(proto.Message): class RegexValidation(proto.Message): r"""Validation based on regular expressions. - Attributes: regexes (Sequence[str]): Required. RE2 regular expressions used to @@ -453,24 +417,22 @@ class RegexValidation(proto.Message): matches are not sufficient). """ - regexes = proto.RepeatedField(proto.STRING, number=1) + regexes = proto.RepeatedField(proto.STRING, number=1,) class ValueValidation(proto.Message): r"""Validation based on a list of allowed values. - Attributes: values (Sequence[str]): Required. List of allowed values for the parameter. """ - values = proto.RepeatedField(proto.STRING, number=1) + values = proto.RepeatedField(proto.STRING, number=1,) class WorkflowMetadata(proto.Message): r"""A Dataproc workflow template resource. - Attributes: template (str): Output only. 
The resource name of the workflow template as @@ -533,38 +495,28 @@ class State(proto.Enum): RUNNING = 2 DONE = 3 - template = proto.Field(proto.STRING, number=1) - - version = proto.Field(proto.INT32, number=2) - + template = proto.Field(proto.STRING, number=1,) + version = proto.Field(proto.INT32, number=2,) create_cluster = proto.Field(proto.MESSAGE, number=3, message="ClusterOperation",) - graph = proto.Field(proto.MESSAGE, number=4, message="WorkflowGraph",) - delete_cluster = proto.Field(proto.MESSAGE, number=5, message="ClusterOperation",) - state = proto.Field(proto.ENUM, number=6, enum=State,) - - cluster_name = proto.Field(proto.STRING, number=7) - - parameters = proto.MapField(proto.STRING, proto.STRING, number=8) - - start_time = proto.Field(proto.MESSAGE, number=9, message=timestamp.Timestamp,) - - end_time = proto.Field(proto.MESSAGE, number=10, message=timestamp.Timestamp,) - - cluster_uuid = proto.Field(proto.STRING, number=11) - - dag_timeout = proto.Field(proto.MESSAGE, number=12, message=duration.Duration,) - - dag_start_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,) - - dag_end_time = proto.Field(proto.MESSAGE, number=14, message=timestamp.Timestamp,) + cluster_name = proto.Field(proto.STRING, number=7,) + parameters = proto.MapField(proto.STRING, proto.STRING, number=8,) + start_time = proto.Field(proto.MESSAGE, number=9, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=10, message=timestamp_pb2.Timestamp,) + cluster_uuid = proto.Field(proto.STRING, number=11,) + dag_timeout = proto.Field(proto.MESSAGE, number=12, message=duration_pb2.Duration,) + dag_start_time = proto.Field( + proto.MESSAGE, number=13, message=timestamp_pb2.Timestamp, + ) + dag_end_time = proto.Field( + proto.MESSAGE, number=14, message=timestamp_pb2.Timestamp, + ) class ClusterOperation(proto.Message): r"""The cluster operation triggered by a workflow. - Attributes: operation_id (str): Output only. The id of the cluster operation. @@ -574,16 +526,13 @@ class ClusterOperation(proto.Message): Output only. Indicates the operation is done. """ - operation_id = proto.Field(proto.STRING, number=1) - - error = proto.Field(proto.STRING, number=2) - - done = proto.Field(proto.BOOL, number=3) + operation_id = proto.Field(proto.STRING, number=1,) + error = proto.Field(proto.STRING, number=2,) + done = proto.Field(proto.BOOL, number=3,) class WorkflowGraph(proto.Message): r"""The workflow graph. - Attributes: nodes (Sequence[google.cloud.dataproc_v1beta2.types.WorkflowNode]): Output only. The workflow nodes. @@ -594,7 +543,6 @@ class WorkflowGraph(proto.Message): class WorkflowNode(proto.Message): r"""The workflow node. - Attributes: step_id (str): Output only. The name of the node. @@ -618,20 +566,15 @@ class NodeState(proto.Enum): COMPLETED = 4 FAILED = 5 - step_id = proto.Field(proto.STRING, number=1) - - prerequisite_step_ids = proto.RepeatedField(proto.STRING, number=2) - - job_id = proto.Field(proto.STRING, number=3) - + step_id = proto.Field(proto.STRING, number=1,) + prerequisite_step_ids = proto.RepeatedField(proto.STRING, number=2,) + job_id = proto.Field(proto.STRING, number=3,) state = proto.Field(proto.ENUM, number=5, enum=NodeState,) - - error = proto.Field(proto.STRING, number=6) + error = proto.Field(proto.STRING, number=6,) class CreateWorkflowTemplateRequest(proto.Message): r"""A request to create a workflow template. - Attributes: parent (str): Required. 
The resource name of the region or location, as @@ -650,14 +593,12 @@ class CreateWorkflowTemplateRequest(proto.Message): create. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) template = proto.Field(proto.MESSAGE, number=2, message="WorkflowTemplate",) class GetWorkflowTemplateRequest(proto.Message): r"""A request to fetch a workflow template. - Attributes: name (str): Required. The resource name of the workflow template, as @@ -678,14 +619,12 @@ class GetWorkflowTemplateRequest(proto.Message): If unspecified, retrieves the current version. """ - name = proto.Field(proto.STRING, number=1) - - version = proto.Field(proto.INT32, number=2) + name = proto.Field(proto.STRING, number=1,) + version = proto.Field(proto.INT32, number=2,) class InstantiateWorkflowTemplateRequest(proto.Message): r"""A request to instantiate a workflow template. - Attributes: name (str): Required. The resource name of the workflow template, as @@ -727,20 +666,15 @@ class InstantiateWorkflowTemplateRequest(proto.Message): may not exceed 100 characters. """ - name = proto.Field(proto.STRING, number=1) - - version = proto.Field(proto.INT32, number=2) - - instance_id = proto.Field(proto.STRING, number=3) - - request_id = proto.Field(proto.STRING, number=5) - - parameters = proto.MapField(proto.STRING, proto.STRING, number=4) + name = proto.Field(proto.STRING, number=1,) + version = proto.Field(proto.INT32, number=2,) + instance_id = proto.Field(proto.STRING, number=3,) + request_id = proto.Field(proto.STRING, number=5,) + parameters = proto.MapField(proto.STRING, proto.STRING, number=4,) class InstantiateInlineWorkflowTemplateRequest(proto.Message): r"""A request to instantiate an inline workflow template. - Attributes: parent (str): Required. The resource name of the region or location, as @@ -774,18 +708,14 @@ class InstantiateInlineWorkflowTemplateRequest(proto.Message): characters. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) template = proto.Field(proto.MESSAGE, number=2, message="WorkflowTemplate",) - - instance_id = proto.Field(proto.STRING, number=3) - - request_id = proto.Field(proto.STRING, number=4) + instance_id = proto.Field(proto.STRING, number=3,) + request_id = proto.Field(proto.STRING, number=4,) class UpdateWorkflowTemplateRequest(proto.Message): r"""A request to update a workflow template. - Attributes: template (google.cloud.dataproc_v1beta2.types.WorkflowTemplate): Required. The updated workflow template. @@ -799,7 +729,6 @@ class UpdateWorkflowTemplateRequest(proto.Message): class ListWorkflowTemplatesRequest(proto.Message): r"""A request to list workflow templates in a project. - Attributes: parent (str): Required. The resource name of the region or location, as @@ -822,11 +751,9 @@ class ListWorkflowTemplatesRequest(proto.Message): results. 
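A sketch of instantiating a stored template with parameter overrides, assuming name and parameters are exposed as flattened keyword arguments on the generated client (resource names and parameter values are placeholders):

from google.cloud import dataproc_v1beta2

wt_client = dataproc_v1beta2.WorkflowTemplateServiceClient()
operation = wt_client.instantiate_workflow_template(
    name="projects/my-project/regions/us-central1/workflowTemplates/nightly-etl",
    parameters={"INPUT_URI": "gs://my-bucket/2021-05-20/"},
)
operation.result()               # waits for the workflow DAG to finish
print(operation.metadata.state)  # WorkflowMetadata.State, e.g. DONE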
""" - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) class ListWorkflowTemplatesResponse(proto.Message): @@ -850,8 +777,7 @@ def raw_page(self): templates = proto.RepeatedField( proto.MESSAGE, number=1, message="WorkflowTemplate", ) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class DeleteWorkflowTemplateRequest(proto.Message): @@ -879,9 +805,8 @@ class DeleteWorkflowTemplateRequest(proto.Message): specified version. """ - name = proto.Field(proto.STRING, number=1) - - version = proto.Field(proto.INT32, number=2) + name = proto.Field(proto.STRING, number=1,) + version = proto.Field(proto.INT32, number=2,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/noxfile.py b/noxfile.py index 79ae4058..88684a9a 100644 --- a/noxfile.py +++ b/noxfile.py @@ -169,7 +169,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") + session.run("coverage", "report", "--show-missing", "--fail-under=99") session.run("coverage", "erase") diff --git a/owlbot.py b/owlbot.py index ec816c41..0b4f6cb2 100644 --- a/owlbot.py +++ b/owlbot.py @@ -39,6 +39,7 @@ templated_files = common.py_library( samples=True, # set to True only if there are samples microgenerator=True, + cov_level=99 ) s.move(templated_files, excludes=[".coveragerc"]) # microgenerator has a good .coveragerc file diff --git a/scripts/fixup_dataproc_v1_keywords.py b/scripts/fixup_dataproc_v1_keywords.py index 92228e53..a976d109 100644 --- a/scripts/fixup_dataproc_v1_keywords.py +++ b/scripts/fixup_dataproc_v1_keywords.py @@ -1,6 +1,5 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import argparse import os import libcst as cst @@ -41,32 +39,33 @@ def partition( class dataprocCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'cancel_job': ('project_id', 'region', 'job_id', ), - 'create_autoscaling_policy': ('parent', 'policy', ), - 'create_cluster': ('project_id', 'region', 'cluster', 'request_id', ), - 'create_workflow_template': ('parent', 'template', ), - 'delete_autoscaling_policy': ('name', ), - 'delete_cluster': ('project_id', 'region', 'cluster_name', 'cluster_uuid', 'request_id', ), - 'delete_job': ('project_id', 'region', 'job_id', ), - 'delete_workflow_template': ('name', 'version', ), - 'diagnose_cluster': ('project_id', 'region', 'cluster_name', ), - 'get_autoscaling_policy': ('name', ), - 'get_cluster': ('project_id', 'region', 'cluster_name', ), - 'get_job': ('project_id', 'region', 'job_id', ), - 'get_workflow_template': ('name', 'version', ), - 'instantiate_inline_workflow_template': ('parent', 'template', 'request_id', ), - 'instantiate_workflow_template': ('name', 'version', 'request_id', 'parameters', ), - 'list_autoscaling_policies': ('parent', 'page_size', 'page_token', ), - 'list_clusters': ('project_id', 'region', 'filter', 'page_size', 'page_token', ), - 'list_jobs': ('project_id', 'region', 'page_size', 'page_token', 'cluster_name', 'job_state_matcher', 'filter', ), - 'list_workflow_templates': ('parent', 'page_size', 'page_token', ), - 'submit_job': ('project_id', 'region', 'job', 'request_id', ), - 'submit_job_as_operation': ('project_id', 'region', 'job', 'request_id', ), - 'update_autoscaling_policy': ('policy', ), - 'update_cluster': ('project_id', 'region', 'cluster_name', 'cluster', 'update_mask', 'graceful_decommission_timeout', 'request_id', ), - 'update_job': ('project_id', 'region', 'job_id', 'job', 'update_mask', ), - 'update_workflow_template': ('template', ), - + 'cancel_job': ('project_id', 'region', 'job_id', ), + 'create_autoscaling_policy': ('parent', 'policy', ), + 'create_cluster': ('project_id', 'region', 'cluster', 'request_id', ), + 'create_workflow_template': ('parent', 'template', ), + 'delete_autoscaling_policy': ('name', ), + 'delete_cluster': ('project_id', 'region', 'cluster_name', 'cluster_uuid', 'request_id', ), + 'delete_job': ('project_id', 'region', 'job_id', ), + 'delete_workflow_template': ('name', 'version', ), + 'diagnose_cluster': ('project_id', 'region', 'cluster_name', ), + 'get_autoscaling_policy': ('name', ), + 'get_cluster': ('project_id', 'region', 'cluster_name', ), + 'get_job': ('project_id', 'region', 'job_id', ), + 'get_workflow_template': ('name', 'version', ), + 'instantiate_inline_workflow_template': ('parent', 'template', 'request_id', ), + 'instantiate_workflow_template': ('name', 'version', 'request_id', 'parameters', ), + 'list_autoscaling_policies': ('parent', 'page_size', 'page_token', ), + 'list_clusters': ('project_id', 'region', 'filter', 'page_size', 'page_token', ), + 'list_jobs': ('project_id', 'region', 'page_size', 'page_token', 'cluster_name', 'job_state_matcher', 'filter', ), + 'list_workflow_templates': ('parent', 'page_size', 'page_token', ), + 'start_cluster': ('project_id', 'region', 'cluster_name', 'cluster_uuid', 'request_id', ), + 'stop_cluster': ('project_id', 'region', 'cluster_name', 'cluster_uuid', 'request_id', ), + 'submit_job': ('project_id', 'region', 'job', 'request_id', ), + 'submit_job_as_operation': ('project_id', 'region', 'job', 'request_id', ), + 
'update_autoscaling_policy': ('policy', ), + 'update_cluster': ('project_id', 'region', 'cluster_name', 'cluster', 'update_mask', 'graceful_decommission_timeout', 'request_id', ), + 'update_job': ('project_id', 'region', 'job_id', 'job', 'update_mask', ), + 'update_workflow_template': ('template', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -97,7 +96,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: value=cst.Dict([ cst.DictElement( cst.SimpleString("'{}'".format(name)), - cst.Element(value=arg.value) +cst.Element(value=arg.value) ) # Note: the args + kwargs looks silly, but keep in mind that # the control parameters had to be stripped out, and that diff --git a/scripts/fixup_dataproc_v1beta2_keywords.py b/scripts/fixup_dataproc_v1beta2_keywords.py index 11f2e445..b9e52549 100644 --- a/scripts/fixup_dataproc_v1beta2_keywords.py +++ b/scripts/fixup_dataproc_v1beta2_keywords.py @@ -1,6 +1,5 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import argparse import os import libcst as cst @@ -41,32 +39,31 @@ def partition( class dataprocCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'cancel_job': ('project_id', 'region', 'job_id', ), - 'create_autoscaling_policy': ('parent', 'policy', ), - 'create_cluster': ('project_id', 'region', 'cluster', 'request_id', ), - 'create_workflow_template': ('parent', 'template', ), - 'delete_autoscaling_policy': ('name', ), - 'delete_cluster': ('project_id', 'region', 'cluster_name', 'cluster_uuid', 'request_id', ), - 'delete_job': ('project_id', 'region', 'job_id', ), - 'delete_workflow_template': ('name', 'version', ), - 'diagnose_cluster': ('project_id', 'region', 'cluster_name', ), - 'get_autoscaling_policy': ('name', ), - 'get_cluster': ('project_id', 'region', 'cluster_name', ), - 'get_job': ('project_id', 'region', 'job_id', ), - 'get_workflow_template': ('name', 'version', ), - 'instantiate_inline_workflow_template': ('parent', 'template', 'instance_id', 'request_id', ), - 'instantiate_workflow_template': ('name', 'version', 'instance_id', 'request_id', 'parameters', ), - 'list_autoscaling_policies': ('parent', 'page_size', 'page_token', ), - 'list_clusters': ('project_id', 'region', 'filter', 'page_size', 'page_token', ), - 'list_jobs': ('project_id', 'region', 'page_size', 'page_token', 'cluster_name', 'job_state_matcher', 'filter', ), - 'list_workflow_templates': ('parent', 'page_size', 'page_token', ), - 'submit_job': ('project_id', 'region', 'job', 'request_id', ), - 'submit_job_as_operation': ('project_id', 'region', 'job', 'request_id', ), - 'update_autoscaling_policy': ('policy', ), - 'update_cluster': ('project_id', 'region', 'cluster_name', 'cluster', 'update_mask', 'graceful_decommission_timeout', 'request_id', ), - 'update_job': ('project_id', 'region', 'job_id', 'job', 'update_mask', ), - 'update_workflow_template': ('template', ), - + 'cancel_job': ('project_id', 'region', 'job_id', ), + 'create_autoscaling_policy': ('parent', 'policy', ), + 'create_cluster': ('project_id', 'region', 'cluster', 'request_id', ), + 'create_workflow_template': ('parent', 'template', ), + 'delete_autoscaling_policy': ('name', ), + 'delete_cluster': ('project_id', 'region', 
'cluster_name', 'cluster_uuid', 'request_id', ), + 'delete_job': ('project_id', 'region', 'job_id', ), + 'delete_workflow_template': ('name', 'version', ), + 'diagnose_cluster': ('project_id', 'region', 'cluster_name', ), + 'get_autoscaling_policy': ('name', ), + 'get_cluster': ('project_id', 'region', 'cluster_name', ), + 'get_job': ('project_id', 'region', 'job_id', ), + 'get_workflow_template': ('name', 'version', ), + 'instantiate_inline_workflow_template': ('parent', 'template', 'instance_id', 'request_id', ), + 'instantiate_workflow_template': ('name', 'version', 'instance_id', 'request_id', 'parameters', ), + 'list_autoscaling_policies': ('parent', 'page_size', 'page_token', ), + 'list_clusters': ('project_id', 'region', 'filter', 'page_size', 'page_token', ), + 'list_jobs': ('project_id', 'region', 'page_size', 'page_token', 'cluster_name', 'job_state_matcher', 'filter', ), + 'list_workflow_templates': ('parent', 'page_size', 'page_token', ), + 'submit_job': ('project_id', 'region', 'job', 'request_id', ), + 'submit_job_as_operation': ('project_id', 'region', 'job', 'request_id', ), + 'update_autoscaling_policy': ('policy', ), + 'update_cluster': ('project_id', 'region', 'cluster_name', 'cluster', 'update_mask', 'graceful_decommission_timeout', 'request_id', ), + 'update_job': ('project_id', 'region', 'job_id', 'job', 'update_mask', ), + 'update_workflow_template': ('template', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -97,7 +94,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: value=cst.Dict([ cst.DictElement( cst.SimpleString("'{}'".format(name)), - cst.Element(value=arg.value) +cst.Element(value=arg.value) ) # Note: the args + kwargs looks silly, but keep in mind that # the control parameters had to be stripped out, and that diff --git a/setup.py b/setup.py index a7e7088f..029b61a9 100644 --- a/setup.py +++ b/setup.py @@ -31,6 +31,7 @@ dependencies = [ "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", "proto-plus >= 1.4.0", + "packaging >= 14.3", ] extras = {"libcst": "libcst >= 0.2.5"} diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index 16e003fe..77770bc1 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -8,3 +8,5 @@ google-api-core==1.22.2 libcst==0.2.5 proto-plus==1.4.0 +packaging==14.3 +google-auth==1.24.0 # TODO: remove after google-auth>=1.25.0 is transitively required through google-api-core diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..4de65971 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
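For orientation, the effect these keyword-fixup scripts have on user code, sketched against a hypothetical call site:

# Before (old positional style):
#     client.get_job("my-project", "us-central1", "my-job-id")
#
# After running fixup_dataproc_v1beta2_keywords.py: per METHOD_TO_PARAMS,
# positional arguments are folded into a single request dict, while the
# CTRL_PARAMS ('retry', 'timeout', 'metadata') stay keyword arguments:
#     client.get_job(request={
#         'project_id': "my-project",
#         'region': "us-central1",
#         'job_id': "my-job-id",
#     })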
+# diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 00000000..4de65971 --- /dev/null +++ b/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py new file mode 100644 index 00000000..4de65971 --- /dev/null +++ b/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/dataproc_v1/__init__.py b/tests/unit/gapic/dataproc_v1/__init__.py index 42ffdf2b..4de65971 100644 --- a/tests/unit/gapic/dataproc_v1/__init__.py +++ b/tests/unit/gapic/dataproc_v1/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py b/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py index 7516619f..7ed79126 100644 --- a/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py +++ b/tests/unit/gapic/dataproc_v1/test_autoscaling_policy_service.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,13 +23,13 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.dataproc_v1.services.autoscaling_policy_service import ( AutoscalingPolicyServiceAsyncClient, @@ -40,9 +39,39 @@ ) from google.cloud.dataproc_v1.services.autoscaling_policy_service import pagers from google.cloud.dataproc_v1.services.autoscaling_policy_service import transports +from google.cloud.dataproc_v1.services.autoscaling_policy_service.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.dataproc_v1.services.autoscaling_policy_service.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.dataproc_v1.types import autoscaling_policies from google.oauth2 import service_account -from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import duration_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -95,7 +124,7 @@ def test__get_default_mtls_endpoint(): [AutoscalingPolicyServiceClient, AutoscalingPolicyServiceAsyncClient,], ) def test_autoscaling_policy_service_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -113,7 +142,7 @@ def test_autoscaling_policy_service_client_from_service_account_info(client_clas [AutoscalingPolicyServiceClient, AutoscalingPolicyServiceAsyncClient,], ) def test_autoscaling_policy_service_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -172,7 +201,7 @@ def test_autoscaling_policy_service_client_client_options( with mock.patch.object( AutoscalingPolicyServiceClient, "get_transport_class" ) as gtc: - transport = 
transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -481,7 +510,7 @@ def test_create_autoscaling_policy( request_type=autoscaling_policies.CreateAutoscalingPolicyRequest, ): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -498,25 +527,20 @@ def test_create_autoscaling_policy( name="name_value", basic_algorithm=autoscaling_policies.BasicAutoscalingAlgorithm( yarn_config=autoscaling_policies.BasicYarnAutoscalingConfig( - graceful_decommission_timeout=duration.Duration(seconds=751) + graceful_decommission_timeout=duration_pb2.Duration(seconds=751) ) ), ) - response = client.create_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.CreateAutoscalingPolicyRequest() # Establish that the response is the type that we expect. - assert isinstance(response, autoscaling_policies.AutoscalingPolicy) - assert response.id == "id_value" - assert response.name == "name_value" @@ -528,7 +552,7 @@ def test_create_autoscaling_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -538,7 +562,6 @@ def test_create_autoscaling_policy_empty_call(): client.create_autoscaling_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.CreateAutoscalingPolicyRequest() @@ -548,7 +571,7 @@ async def test_create_autoscaling_policy_async( request_type=autoscaling_policies.CreateAutoscalingPolicyRequest, ): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -563,20 +586,16 @@ async def test_create_autoscaling_policy_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( autoscaling_policies.AutoscalingPolicy(id="id_value", name="name_value",) ) - response = await client.create_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.CreateAutoscalingPolicyRequest() # Establish that the response is the type that we expect. assert isinstance(response, autoscaling_policies.AutoscalingPolicy) - assert response.id == "id_value" - assert response.name == "name_value" @@ -587,12 +606,13 @@ async def test_create_autoscaling_policy_async_from_dict(): def test_create_autoscaling_policy_field_headers(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = autoscaling_policies.CreateAutoscalingPolicyRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -600,7 +620,6 @@ def test_create_autoscaling_policy_field_headers(): type(client.transport.create_autoscaling_policy), "__call__" ) as call: call.return_value = autoscaling_policies.AutoscalingPolicy() - client.create_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. @@ -616,12 +635,13 @@ def test_create_autoscaling_policy_field_headers(): @pytest.mark.asyncio async def test_create_autoscaling_policy_field_headers_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = autoscaling_policies.CreateAutoscalingPolicyRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -631,7 +651,6 @@ async def test_create_autoscaling_policy_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( autoscaling_policies.AutoscalingPolicy() ) - await client.create_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. @@ -646,7 +665,7 @@ async def test_create_autoscaling_policy_field_headers_async(): def test_create_autoscaling_policy_flattened(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -655,7 +674,6 @@ def test_create_autoscaling_policy_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = autoscaling_policies.AutoscalingPolicy() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_autoscaling_policy( @@ -667,15 +685,13 @@ def test_create_autoscaling_policy_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].policy == autoscaling_policies.AutoscalingPolicy(id="id_value") def test_create_autoscaling_policy_flattened_error(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -691,7 +707,7 @@ def test_create_autoscaling_policy_flattened_error(): @pytest.mark.asyncio async def test_create_autoscaling_policy_flattened_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -715,16 +731,14 @@ async def test_create_autoscaling_policy_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].policy == autoscaling_policies.AutoscalingPolicy(id="id_value") @pytest.mark.asyncio async def test_create_autoscaling_policy_flattened_error_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -742,7 +756,7 @@ def test_update_autoscaling_policy( request_type=autoscaling_policies.UpdateAutoscalingPolicyRequest, ): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -759,25 +773,20 @@ def test_update_autoscaling_policy( name="name_value", basic_algorithm=autoscaling_policies.BasicAutoscalingAlgorithm( yarn_config=autoscaling_policies.BasicYarnAutoscalingConfig( - graceful_decommission_timeout=duration.Duration(seconds=751) + graceful_decommission_timeout=duration_pb2.Duration(seconds=751) ) ), ) - response = client.update_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.UpdateAutoscalingPolicyRequest() # Establish that the response is the type that we expect. - assert isinstance(response, autoscaling_policies.AutoscalingPolicy) - assert response.id == "id_value" - assert response.name == "name_value" @@ -789,7 +798,7 @@ def test_update_autoscaling_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -799,7 +808,6 @@ def test_update_autoscaling_policy_empty_call(): client.update_autoscaling_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.UpdateAutoscalingPolicyRequest() @@ -809,7 +817,7 @@ async def test_update_autoscaling_policy_async( request_type=autoscaling_policies.UpdateAutoscalingPolicyRequest, ): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -824,20 +832,16 @@ async def test_update_autoscaling_policy_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( autoscaling_policies.AutoscalingPolicy(id="id_value", name="name_value",) ) - response = await client.update_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.UpdateAutoscalingPolicyRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, autoscaling_policies.AutoscalingPolicy) - assert response.id == "id_value" - assert response.name == "name_value" @@ -848,12 +852,13 @@ async def test_update_autoscaling_policy_async_from_dict(): def test_update_autoscaling_policy_field_headers(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = autoscaling_policies.UpdateAutoscalingPolicyRequest() + request.policy.name = "policy.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -861,7 +866,6 @@ def test_update_autoscaling_policy_field_headers(): type(client.transport.update_autoscaling_policy), "__call__" ) as call: call.return_value = autoscaling_policies.AutoscalingPolicy() - client.update_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. @@ -877,12 +881,13 @@ def test_update_autoscaling_policy_field_headers(): @pytest.mark.asyncio async def test_update_autoscaling_policy_field_headers_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = autoscaling_policies.UpdateAutoscalingPolicyRequest() + request.policy.name = "policy.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -892,7 +897,6 @@ async def test_update_autoscaling_policy_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( autoscaling_policies.AutoscalingPolicy() ) - await client.update_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. @@ -907,7 +911,7 @@ async def test_update_autoscaling_policy_field_headers_async(): def test_update_autoscaling_policy_flattened(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -916,7 +920,6 @@ def test_update_autoscaling_policy_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = autoscaling_policies.AutoscalingPolicy() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_autoscaling_policy( @@ -927,13 +930,12 @@ def test_update_autoscaling_policy_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].policy == autoscaling_policies.AutoscalingPolicy(id="id_value") def test_update_autoscaling_policy_flattened_error(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -948,7 +950,7 @@ def test_update_autoscaling_policy_flattened_error(): @pytest.mark.asyncio async def test_update_autoscaling_policy_flattened_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -971,14 +973,13 @@ async def test_update_autoscaling_policy_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].policy == autoscaling_policies.AutoscalingPolicy(id="id_value") @pytest.mark.asyncio async def test_update_autoscaling_policy_flattened_error_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -995,7 +996,7 @@ def test_get_autoscaling_policy( request_type=autoscaling_policies.GetAutoscalingPolicyRequest, ): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1012,25 +1013,20 @@ def test_get_autoscaling_policy( name="name_value", basic_algorithm=autoscaling_policies.BasicAutoscalingAlgorithm( yarn_config=autoscaling_policies.BasicYarnAutoscalingConfig( - graceful_decommission_timeout=duration.Duration(seconds=751) + graceful_decommission_timeout=duration_pb2.Duration(seconds=751) ) ), ) - response = client.get_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.GetAutoscalingPolicyRequest() # Establish that the response is the type that we expect. - assert isinstance(response, autoscaling_policies.AutoscalingPolicy) - assert response.id == "id_value" - assert response.name == "name_value" @@ -1042,7 +1038,7 @@ def test_get_autoscaling_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1052,7 +1048,6 @@ def test_get_autoscaling_policy_empty_call(): client.get_autoscaling_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.GetAutoscalingPolicyRequest() @@ -1062,7 +1057,7 @@ async def test_get_autoscaling_policy_async( request_type=autoscaling_policies.GetAutoscalingPolicyRequest, ): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1077,20 +1072,16 @@ async def test_get_autoscaling_policy_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( autoscaling_policies.AutoscalingPolicy(id="id_value", name="name_value",) ) - response = await client.get_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.GetAutoscalingPolicyRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, autoscaling_policies.AutoscalingPolicy) - assert response.id == "id_value" - assert response.name == "name_value" @@ -1101,12 +1092,13 @@ async def test_get_autoscaling_policy_async_from_dict(): def test_get_autoscaling_policy_field_headers(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = autoscaling_policies.GetAutoscalingPolicyRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1114,7 +1106,6 @@ def test_get_autoscaling_policy_field_headers(): type(client.transport.get_autoscaling_policy), "__call__" ) as call: call.return_value = autoscaling_policies.AutoscalingPolicy() - client.get_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. @@ -1130,12 +1121,13 @@ def test_get_autoscaling_policy_field_headers(): @pytest.mark.asyncio async def test_get_autoscaling_policy_field_headers_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = autoscaling_policies.GetAutoscalingPolicyRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1145,7 +1137,6 @@ async def test_get_autoscaling_policy_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( autoscaling_policies.AutoscalingPolicy() ) - await client.get_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. @@ -1160,7 +1151,7 @@ async def test_get_autoscaling_policy_field_headers_async(): def test_get_autoscaling_policy_flattened(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1169,7 +1160,6 @@ def test_get_autoscaling_policy_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = autoscaling_policies.AutoscalingPolicy() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_autoscaling_policy(name="name_value",) @@ -1178,13 +1168,12 @@ def test_get_autoscaling_policy_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_autoscaling_policy_flattened_error(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1198,7 +1187,7 @@ def test_get_autoscaling_policy_flattened_error(): @pytest.mark.asyncio async def test_get_autoscaling_policy_flattened_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1219,14 +1208,13 @@ async def test_get_autoscaling_policy_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_autoscaling_policy_flattened_error_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1242,7 +1230,7 @@ def test_list_autoscaling_policies( request_type=autoscaling_policies.ListAutoscalingPoliciesRequest, ): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1257,19 +1245,15 @@ def test_list_autoscaling_policies( call.return_value = autoscaling_policies.ListAutoscalingPoliciesResponse( next_page_token="next_page_token_value", ) - response = client.list_autoscaling_policies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.ListAutoscalingPoliciesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAutoscalingPoliciesPager) - assert response.next_page_token == "next_page_token_value" @@ -1281,7 +1265,7 @@ def test_list_autoscaling_policies_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1291,7 +1275,6 @@ def test_list_autoscaling_policies_empty_call(): client.list_autoscaling_policies() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.ListAutoscalingPoliciesRequest() @@ -1301,7 +1284,7 @@ async def test_list_autoscaling_policies_async( request_type=autoscaling_policies.ListAutoscalingPoliciesRequest, ): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1318,18 +1301,15 @@ async def test_list_autoscaling_policies_async( next_page_token="next_page_token_value", ) ) - response = await client.list_autoscaling_policies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.ListAutoscalingPoliciesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListAutoscalingPoliciesAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -1340,12 +1320,13 @@ async def test_list_autoscaling_policies_async_from_dict(): def test_list_autoscaling_policies_field_headers(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = autoscaling_policies.ListAutoscalingPoliciesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1353,7 +1334,6 @@ def test_list_autoscaling_policies_field_headers(): type(client.transport.list_autoscaling_policies), "__call__" ) as call: call.return_value = autoscaling_policies.ListAutoscalingPoliciesResponse() - client.list_autoscaling_policies(request) # Establish that the underlying gRPC stub method was called. @@ -1369,12 +1349,13 @@ def test_list_autoscaling_policies_field_headers(): @pytest.mark.asyncio async def test_list_autoscaling_policies_field_headers_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = autoscaling_policies.ListAutoscalingPoliciesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1384,7 +1365,6 @@ async def test_list_autoscaling_policies_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( autoscaling_policies.ListAutoscalingPoliciesResponse() ) - await client.list_autoscaling_policies(request) # Establish that the underlying gRPC stub method was called. @@ -1399,7 +1379,7 @@ async def test_list_autoscaling_policies_field_headers_async(): def test_list_autoscaling_policies_flattened(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1408,7 +1388,6 @@ def test_list_autoscaling_policies_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = autoscaling_policies.ListAutoscalingPoliciesResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_autoscaling_policies(parent="parent_value",) @@ -1417,13 +1396,12 @@ def test_list_autoscaling_policies_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_autoscaling_policies_flattened_error(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1438,7 +1416,7 @@ def test_list_autoscaling_policies_flattened_error(): @pytest.mark.asyncio async def test_list_autoscaling_policies_flattened_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1459,14 +1437,13 @@ async def test_list_autoscaling_policies_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_autoscaling_policies_flattened_error_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1480,7 +1457,7 @@ async def test_list_autoscaling_policies_flattened_error_async(): def test_list_autoscaling_policies_pager(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1530,7 +1507,7 @@ def test_list_autoscaling_policies_pager(): def test_list_autoscaling_policies_pages(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1570,7 +1547,7 @@ def test_list_autoscaling_policies_pages(): @pytest.mark.asyncio async def test_list_autoscaling_policies_async_pager(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1619,7 +1596,7 @@ async def test_list_autoscaling_policies_async_pager(): @pytest.mark.asyncio async def test_list_autoscaling_policies_async_pages(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1665,7 +1642,7 @@ def test_delete_autoscaling_policy( request_type=autoscaling_policies.DeleteAutoscalingPolicyRequest, ): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1678,13 +1655,11 @@ def test_delete_autoscaling_policy( ) as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.DeleteAutoscalingPolicyRequest() # Establish that the response is the type that we expect. @@ -1699,7 +1674,7 @@ def test_delete_autoscaling_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
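# Aside: the pager tests above drive `call.side_effect` with a sequence of
# responses and iterate the pager the client returns. A condensed sketch of
# that flow (hypothetical test name; two single-item pages assumed):

import mock

from google.auth import credentials as ga_credentials
from google.cloud.dataproc_v1 import AutoscalingPolicyServiceClient
from google.cloud.dataproc_v1.types import autoscaling_policies


def test_pager_sketch():
    client = AutoscalingPolicyServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(
        type(client.transport.list_autoscaling_policies), "__call__"
    ) as call:
        # First page carries a next_page_token; the second, final page does not.
        call.side_effect = (
            autoscaling_policies.ListAutoscalingPoliciesResponse(
                policies=[autoscaling_policies.AutoscalingPolicy()],
                next_page_token="abc",
            ),
            autoscaling_policies.ListAutoscalingPoliciesResponse(
                policies=[autoscaling_policies.AutoscalingPolicy()],
            ),
        )
        # Iterating the pager transparently issues the second list call.
        results = list(client.list_autoscaling_policies(request={}))
    assert len(results) == 2
    assert call.call_count == 2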
@@ -1709,7 +1684,6 @@ def test_delete_autoscaling_policy_empty_call(): client.delete_autoscaling_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.DeleteAutoscalingPolicyRequest() @@ -1719,7 +1693,7 @@ async def test_delete_autoscaling_policy_async( request_type=autoscaling_policies.DeleteAutoscalingPolicyRequest, ): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1732,13 +1706,11 @@ async def test_delete_autoscaling_policy_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.DeleteAutoscalingPolicyRequest() # Establish that the response is the type that we expect. @@ -1752,12 +1724,13 @@ async def test_delete_autoscaling_policy_async_from_dict(): def test_delete_autoscaling_policy_field_headers(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = autoscaling_policies.DeleteAutoscalingPolicyRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1765,7 +1738,6 @@ def test_delete_autoscaling_policy_field_headers(): type(client.transport.delete_autoscaling_policy), "__call__" ) as call: call.return_value = None - client.delete_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. @@ -1781,12 +1753,13 @@ def test_delete_autoscaling_policy_field_headers(): @pytest.mark.asyncio async def test_delete_autoscaling_policy_field_headers_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = autoscaling_policies.DeleteAutoscalingPolicyRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1794,7 +1767,6 @@ async def test_delete_autoscaling_policy_field_headers_async(): type(client.transport.delete_autoscaling_policy), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. @@ -1809,7 +1781,7 @@ async def test_delete_autoscaling_policy_field_headers_async(): def test_delete_autoscaling_policy_flattened(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1818,7 +1790,6 @@ def test_delete_autoscaling_policy_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.delete_autoscaling_policy(name="name_value",) @@ -1827,13 +1798,12 @@ def test_delete_autoscaling_policy_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_autoscaling_policy_flattened_error(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1847,7 +1817,7 @@ def test_delete_autoscaling_policy_flattened_error(): @pytest.mark.asyncio async def test_delete_autoscaling_policy_flattened_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1866,14 +1836,13 @@ async def test_delete_autoscaling_policy_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_autoscaling_policy_flattened_error_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1887,16 +1856,16 @@ async def test_delete_autoscaling_policy_flattened_error_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.AutoscalingPolicyServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.AutoscalingPolicyServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = AutoscalingPolicyServiceClient( @@ -1906,7 +1875,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.AutoscalingPolicyServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = AutoscalingPolicyServiceClient( @@ -1917,7 +1886,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.AutoscalingPolicyServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = AutoscalingPolicyServiceClient(transport=transport) assert client.transport is transport @@ -1926,13 +1895,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
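    # (Both the sync gRPC transport and its AsyncIO counterpart expose a
    # `grpc_channel` property; the test checks each in turn below.)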
transport = transports.AutoscalingPolicyServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.AutoscalingPolicyServiceGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -1947,8 +1916,8 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() @@ -1956,7 +1925,7 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) assert isinstance( client.transport, transports.AutoscalingPolicyServiceGrpcTransport, @@ -1965,9 +1934,9 @@ def test_transport_grpc_default(): def test_autoscaling_policy_service_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.AutoscalingPolicyServiceTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -1979,7 +1948,7 @@ def test_autoscaling_policy_service_base_transport(): ) as Transport: Transport.return_value = None transport = transports.AutoscalingPolicyServiceTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -1996,15 +1965,37 @@ def test_autoscaling_policy_service_base_transport(): getattr(transport, method)(request=object()) +@requires_google_auth_gte_1_25_0 def test_autoscaling_policy_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.dataproc_v1.services.autoscaling_policy_service.transports.AutoscalingPolicyServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AutoscalingPolicyServiceTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_autoscaling_policy_service_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.dataproc_v1.services.autoscaling_policy_service.transports.AutoscalingPolicyServiceTransport._prep_wrapped_messages" ) as 
Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.AutoscalingPolicyServiceTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -2017,19 +2008,33 @@ def test_autoscaling_policy_service_base_transport_with_credentials_file(): def test_autoscaling_policy_service_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.dataproc_v1.services.autoscaling_policy_service.transports.AutoscalingPolicyServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.AutoscalingPolicyServiceTransport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_autoscaling_policy_service_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AutoscalingPolicyServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_autoscaling_policy_service_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) AutoscalingPolicyServiceClient() adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), @@ -2037,20 +2042,158 @@ def test_autoscaling_policy_service_auth_adc(): ) -def test_autoscaling_policy_service_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.AutoscalingPolicyServiceGrpcTransport, + transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_autoscaling_policy_service_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
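    # (This test exists in two variants because google-auth >= 1.25.0 takes a
    # separate `default_scopes` argument, while older releases expect a single
    # merged `scopes` tuple.)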
- with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.AutoscalingPolicyServiceGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AutoscalingPolicyServiceGrpcTransport, + transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_autoscaling_policy_service_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AutoscalingPolicyServiceGrpcTransport, grpc_helpers), + (transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_autoscaling_policy_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="dataproc.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AutoscalingPolicyServiceGrpcTransport, grpc_helpers), + (transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_autoscaling_policy_service_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AutoscalingPolicyServiceGrpcTransport, grpc_helpers), + (transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_autoscaling_policy_service_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [ @@ -2061,7 +2204,7 @@ def test_autoscaling_policy_service_transport_auth_adc(): def test_autoscaling_policy_service_grpc_transport_client_cert_source_for_mtls( transport_class, ): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. 
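    # (Ready-made SSL channel credentials must be forwarded verbatim; the
    # client-cert-source path below is only taken when none are supplied.)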
with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -2100,7 +2243,7 @@ def test_autoscaling_policy_service_grpc_transport_client_cert_source_for_mtls( def test_autoscaling_policy_service_host_no_port(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com" ), @@ -2110,7 +2253,7 @@ def test_autoscaling_policy_service_host_no_port(): def test_autoscaling_policy_service_host_with_port(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com:8000" ), @@ -2166,9 +2309,9 @@ def test_autoscaling_policy_service_transport_channel_mtls_with_client_cert_sour mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -2246,7 +2389,6 @@ def test_autoscaling_policy_path(): project = "squid" location = "clam" autoscaling_policy = "whelk" - expected = "projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}".format( project=project, location=location, autoscaling_policy=autoscaling_policy, ) @@ -2271,7 +2413,6 @@ def test_parse_autoscaling_policy_path(): def test_common_billing_account_path(): billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2292,7 +2433,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "winkle" - expected = "folders/{folder}".format(folder=folder,) actual = AutoscalingPolicyServiceClient.common_folder_path(folder) assert expected == actual @@ -2311,7 +2451,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "scallop" - expected = "organizations/{organization}".format(organization=organization,) actual = AutoscalingPolicyServiceClient.common_organization_path(organization) assert expected == actual @@ -2330,7 +2469,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "squid" - expected = "projects/{project}".format(project=project,) actual = AutoscalingPolicyServiceClient.common_project_path(project) assert expected == actual @@ -2350,7 +2488,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "whelk" location = "octopus" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -2377,7 +2514,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.AutoscalingPolicyServiceTransport, "_prep_wrapped_messages" ) as prep: client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2386,6 +2523,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = AutoscalingPolicyServiceClient.get_transport_class() transport = transport_class( - 
credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/dataproc_v1/test_cluster_controller.py b/tests/unit/gapic/dataproc_v1/test_cluster_controller.py index e12e0109..e3f3690e 100644 --- a/tests/unit/gapic/dataproc_v1/test_cluster_controller.py +++ b/tests/unit/gapic/dataproc_v1/test_cluster_controller.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,16 +23,16 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import future from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.dataproc_v1.services.cluster_controller import ( ClusterControllerAsyncClient, @@ -41,14 +40,44 @@ from google.cloud.dataproc_v1.services.cluster_controller import ClusterControllerClient from google.cloud.dataproc_v1.services.cluster_controller import pagers from google.cloud.dataproc_v1.services.cluster_controller import transports +from google.cloud.dataproc_v1.services.cluster_controller.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.dataproc_v1.services.cluster_controller.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.dataproc_v1.types import clusters from google.cloud.dataproc_v1.types import operations from google.cloud.dataproc_v1.types import shared from google.longrunning import operations_pb2 from google.oauth2 import service_account -from google.protobuf import duration_pb2 as duration # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
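# (_API_CORE_VERSION and _GOOGLE_AUTH_VERSION are imported from
# transports.base above, so these markers and the transport code they
# exercise share a single source of truth for version detection.)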
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -100,7 +129,7 @@ def test__get_default_mtls_endpoint(): "client_class", [ClusterControllerClient, ClusterControllerAsyncClient,] ) def test_cluster_controller_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -117,7 +146,7 @@ def test_cluster_controller_client_from_service_account_info(client_class): "client_class", [ClusterControllerClient, ClusterControllerAsyncClient,] ) def test_cluster_controller_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -170,7 +199,7 @@ def test_cluster_controller_client_client_options( ): # Check that if channel is provided we won't create a new one. with mock.patch.object(ClusterControllerClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -468,7 +497,7 @@ def test_create_cluster( transport: str = "grpc", request_type=clusters.CreateClusterRequest ): client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -479,13 +508,11 @@ def test_create_cluster( with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == clusters.CreateClusterRequest() # Establish that the response is the type that we expect. @@ -500,7 +527,7 @@ def test_create_cluster_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
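# Aside: a standalone illustration of how such version-gated markers are
# built and applied (re-derives the google-auth version the same way
# transports.base does; marker and test names here are hypothetical):

import packaging.version
import pkg_resources
import pytest

_GOOGLE_AUTH_VERSION_SKETCH = pkg_resources.get_distribution("google-auth").version

requires_new_google_auth = pytest.mark.skipif(
    packaging.version.parse(_GOOGLE_AUTH_VERSION_SKETCH)
    < packaging.version.parse("1.25.0"),
    reason="This test requires google-auth >= 1.25.0",
)


@requires_new_google_auth
def test_runs_only_on_new_google_auth():
    # Only executes where google.auth.default accepts `default_scopes`.
    assert packaging.version.parse(
        _GOOGLE_AUTH_VERSION_SKETCH
    ) >= packaging.version.parse("1.25.0")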
@@ -508,7 +535,6 @@ def test_create_cluster_empty_call(): client.create_cluster() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == clusters.CreateClusterRequest() @@ -517,7 +543,7 @@ async def test_create_cluster_async( transport: str = "grpc_asyncio", request_type=clusters.CreateClusterRequest ): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -530,13 +556,11 @@ async def test_create_cluster_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.create_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == clusters.CreateClusterRequest() # Establish that the response is the type that we expect. @@ -549,13 +573,12 @@ async def test_create_cluster_async_from_dict(): def test_create_cluster_flattened(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials(),) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_cluster( @@ -568,16 +591,13 @@ def test_create_cluster_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster == clusters.Cluster(project_id="project_id_value") def test_create_cluster_flattened_error(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials(),) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -593,7 +613,7 @@ def test_create_cluster_flattened_error(): @pytest.mark.asyncio async def test_create_cluster_flattened_async(): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -616,18 +636,15 @@ async def test_create_cluster_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster == clusters.Cluster(project_id="project_id_value") @pytest.mark.asyncio async def test_create_cluster_flattened_error_async(): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -645,7 +662,7 @@ def test_update_cluster( transport: str = "grpc", request_type=clusters.UpdateClusterRequest ): client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -656,13 +673,11 @@ def test_update_cluster( with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == clusters.UpdateClusterRequest() # Establish that the response is the type that we expect. @@ -677,7 +692,7 @@ def test_update_cluster_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -685,7 +700,6 @@ def test_update_cluster_empty_call(): client.update_cluster() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == clusters.UpdateClusterRequest() @@ -694,7 +708,7 @@ async def test_update_cluster_async( transport: str = "grpc_asyncio", request_type=clusters.UpdateClusterRequest ): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -707,13 +721,11 @@ async def test_update_cluster_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == clusters.UpdateClusterRequest() # Establish that the response is the type that we expect. @@ -726,13 +738,12 @@ async def test_update_cluster_async_from_dict(): def test_update_cluster_flattened(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials(),) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
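        # (`update_mask` below is a protobuf FieldMask naming the cluster
        # fields to patch; the flattened keyword is copied into the request
        # unchanged, as the assertions that follow verify.)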
client.update_cluster( @@ -740,27 +751,22 @@ def test_update_cluster_flattened(): region="region_value", cluster_name="cluster_name_value", cluster=clusters.Cluster(project_id="project_id_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster_name == "cluster_name_value" - assert args[0].cluster == clusters.Cluster(project_id="project_id_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_cluster_flattened_error(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials(),) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -771,14 +777,14 @@ def test_update_cluster_flattened_error(): region="region_value", cluster_name="cluster_name_value", cluster=clusters.Cluster(project_id="project_id_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_cluster_flattened_async(): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -796,29 +802,24 @@ async def test_update_cluster_flattened_async(): region="region_value", cluster_name="cluster_name_value", cluster=clusters.Cluster(project_id="project_id_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster_name == "cluster_name_value" - assert args[0].cluster == clusters.Cluster(project_id="project_id_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_cluster_flattened_error_async(): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -830,15 +831,173 @@ async def test_update_cluster_flattened_error_async(): region="region_value", cluster_name="cluster_name_value", cluster=clusters.Cluster(project_id="project_id_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_stop_cluster( + transport: str = "grpc", request_type=clusters.StopClusterRequest +): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.stop_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.stop_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clusters.StopClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_stop_cluster_from_dict(): + test_stop_cluster(request_type=dict) + + +def test_stop_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.stop_cluster), "__call__") as call: + client.stop_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clusters.StopClusterRequest() + + +@pytest.mark.asyncio +async def test_stop_cluster_async( + transport: str = "grpc_asyncio", request_type=clusters.StopClusterRequest +): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.stop_cluster), "__call__") as call: + # Designate an appropriate return value for the call. 
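        # (FakeUnaryUnaryCall wraps the message in an awaitable stand-in for
        # a real async unary-unary RPC, which lets the test `await` the
        # mocked method.)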
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.stop_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clusters.StopClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_stop_cluster_async_from_dict(): + await test_stop_cluster_async(request_type=dict) + + +def test_start_cluster( + transport: str = "grpc", request_type=clusters.StartClusterRequest +): + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.start_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.start_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clusters.StartClusterRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_start_cluster_from_dict(): + test_start_cluster(request_type=dict) + + +def test_start_cluster_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ClusterControllerClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.start_cluster), "__call__") as call: + client.start_cluster() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clusters.StartClusterRequest() + + +@pytest.mark.asyncio +async def test_start_cluster_async( + transport: str = "grpc_asyncio", request_type=clusters.StartClusterRequest +): + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.start_cluster), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") ) + response = await client.start_cluster(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clusters.StartClusterRequest() + + # Establish that the response is the type that we expect. 
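    # (Operation-returning RPCs surface as google.api_core futures, so the
    # check targets the Future interface rather than a concrete subclass.)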
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_start_cluster_async_from_dict(): + await test_start_cluster_async(request_type=dict) def test_delete_cluster( transport: str = "grpc", request_type=clusters.DeleteClusterRequest ): client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -849,13 +1008,11 @@ def test_delete_cluster( with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == clusters.DeleteClusterRequest() # Establish that the response is the type that we expect. @@ -870,7 +1027,7 @@ def test_delete_cluster_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -878,7 +1035,6 @@ def test_delete_cluster_empty_call(): client.delete_cluster() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == clusters.DeleteClusterRequest() @@ -887,7 +1043,7 @@ async def test_delete_cluster_async( transport: str = "grpc_asyncio", request_type=clusters.DeleteClusterRequest ): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -900,13 +1056,11 @@ async def test_delete_cluster_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == clusters.DeleteClusterRequest() # Establish that the response is the type that we expect. @@ -919,13 +1073,12 @@ async def test_delete_cluster_async_from_dict(): def test_delete_cluster_flattened(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials(),) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_cluster( @@ -938,16 +1091,13 @@ def test_delete_cluster_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster_name == "cluster_name_value" def test_delete_cluster_flattened_error(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials(),) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -963,7 +1113,7 @@ def test_delete_cluster_flattened_error(): @pytest.mark.asyncio async def test_delete_cluster_flattened_async(): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -986,18 +1136,15 @@ async def test_delete_cluster_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster_name == "cluster_name_value" @pytest.mark.asyncio async def test_delete_cluster_flattened_error_async(): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1013,7 +1160,7 @@ async def test_delete_cluster_flattened_error_async(): def test_get_cluster(transport: str = "grpc", request_type=clusters.GetClusterRequest): client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1028,23 +1175,17 @@ def test_get_cluster(transport: str = "grpc", request_type=clusters.GetClusterRe cluster_name="cluster_name_value", cluster_uuid="cluster_uuid_value", ) - response = client.get_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == clusters.GetClusterRequest() # Establish that the response is the type that we expect. - assert isinstance(response, clusters.Cluster) - assert response.project_id == "project_id_value" - assert response.cluster_name == "cluster_name_value" - assert response.cluster_uuid == "cluster_uuid_value" @@ -1056,7 +1197,7 @@ def test_get_cluster_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
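    # (For reference, the two calling styles exercised by the flattened tests
    # above, sketched outside the diff; the values mirror the test fixtures:
    #
    #     client.delete_cluster(request=clusters.DeleteClusterRequest(
    #         project_id="project_id_value", region="region_value",
    #         cluster_name="cluster_name_value"))
    #
    #     client.delete_cluster(project_id="project_id_value",
    #         region="region_value", cluster_name="cluster_name_value")
    #
    # Passing a request object together with flattened fields raises
    # ValueError, which is what the flattened_error tests assert.)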
@@ -1064,7 +1205,6 @@ def test_get_cluster_empty_call(): client.get_cluster() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == clusters.GetClusterRequest() @@ -1073,7 +1213,7 @@ async def test_get_cluster_async( transport: str = "grpc_asyncio", request_type=clusters.GetClusterRequest ): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1090,22 +1230,17 @@ async def test_get_cluster_async( cluster_uuid="cluster_uuid_value", ) ) - response = await client.get_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == clusters.GetClusterRequest() # Establish that the response is the type that we expect. assert isinstance(response, clusters.Cluster) - assert response.project_id == "project_id_value" - assert response.cluster_name == "cluster_name_value" - assert response.cluster_uuid == "cluster_uuid_value" @@ -1115,13 +1250,12 @@ async def test_get_cluster_async_from_dict(): def test_get_cluster_flattened(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials(),) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = clusters.Cluster() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_cluster( @@ -1134,16 +1268,13 @@ def test_get_cluster_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster_name == "cluster_name_value" def test_get_cluster_flattened_error(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials(),) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1159,7 +1290,7 @@ def test_get_cluster_flattened_error(): @pytest.mark.asyncio async def test_get_cluster_flattened_async(): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1180,18 +1311,15 @@ async def test_get_cluster_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster_name == "cluster_name_value" @pytest.mark.asyncio async def test_get_cluster_flattened_error_async(): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1209,7 +1337,7 @@ def test_list_clusters( transport: str = "grpc", request_type=clusters.ListClustersRequest ): client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1222,19 +1350,15 @@ def test_list_clusters( call.return_value = clusters.ListClustersResponse( next_page_token="next_page_token_value", ) - response = client.list_clusters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == clusters.ListClustersRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListClustersPager) - assert response.next_page_token == "next_page_token_value" @@ -1246,7 +1370,7 @@ def test_list_clusters_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1254,7 +1378,6 @@ def test_list_clusters_empty_call(): client.list_clusters() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == clusters.ListClustersRequest() @@ -1263,7 +1386,7 @@ async def test_list_clusters_async( transport: str = "grpc_asyncio", request_type=clusters.ListClustersRequest ): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1276,18 +1399,15 @@ async def test_list_clusters_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( clusters.ListClustersResponse(next_page_token="next_page_token_value",) ) - response = await client.list_clusters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == clusters.ListClustersRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListClustersAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -1297,13 +1417,12 @@ async def test_list_clusters_async_from_dict(): def test_list_clusters_flattened(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials(),) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: # Designate an appropriate return value for the call. 
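        # (An empty ListClustersResponse suffices here: the flattened tests
        # assert only that request fields are forwarded, not response contents.)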
call.return_value = clusters.ListClustersResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_clusters( @@ -1314,16 +1433,13 @@ def test_list_clusters_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].filter == "filter_value" def test_list_clusters_flattened_error(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials(),) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1339,7 +1455,7 @@ def test_list_clusters_flattened_error(): @pytest.mark.asyncio async def test_list_clusters_flattened_async(): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1360,18 +1476,15 @@ async def test_list_clusters_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].filter == "filter_value" @pytest.mark.asyncio async def test_list_clusters_flattened_error_async(): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1386,7 +1499,7 @@ async def test_list_clusters_flattened_error_async(): def test_list_clusters_pager(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials,) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: @@ -1417,7 +1530,7 @@ def test_list_clusters_pager(): def test_list_clusters_pages(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials,) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: @@ -1443,7 +1556,9 @@ def test_list_clusters_pages(): @pytest.mark.asyncio async def test_list_clusters_async_pager(): - client = ClusterControllerAsyncClient(credentials=credentials.AnonymousCredentials,) + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1476,7 +1591,9 @@ async def test_list_clusters_async_pager(): @pytest.mark.asyncio async def test_list_clusters_async_pages(): - client = ClusterControllerAsyncClient(credentials=credentials.AnonymousCredentials,) + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
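    # (Each page arrives as its own ListClustersResponse; the async pager
    # keeps fetching while next_page_token is non-empty.)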
with mock.patch.object( @@ -1508,7 +1625,7 @@ def test_diagnose_cluster( transport: str = "grpc", request_type=clusters.DiagnoseClusterRequest ): client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1519,13 +1636,11 @@ def test_diagnose_cluster( with mock.patch.object(type(client.transport.diagnose_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.diagnose_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == clusters.DiagnoseClusterRequest() # Establish that the response is the type that we expect. @@ -1540,7 +1655,7 @@ def test_diagnose_cluster_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1548,7 +1663,6 @@ def test_diagnose_cluster_empty_call(): client.diagnose_cluster() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == clusters.DiagnoseClusterRequest() @@ -1557,7 +1671,7 @@ async def test_diagnose_cluster_async( transport: str = "grpc_asyncio", request_type=clusters.DiagnoseClusterRequest ): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1570,13 +1684,11 @@ async def test_diagnose_cluster_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.diagnose_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == clusters.DiagnoseClusterRequest() # Establish that the response is the type that we expect. @@ -1589,13 +1701,12 @@ async def test_diagnose_cluster_async_from_dict(): def test_diagnose_cluster_flattened(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials(),) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.diagnose_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.diagnose_cluster( @@ -1608,16 +1719,13 @@ def test_diagnose_cluster_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster_name == "cluster_name_value" def test_diagnose_cluster_flattened_error(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials(),) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1633,7 +1741,7 @@ def test_diagnose_cluster_flattened_error(): @pytest.mark.asyncio async def test_diagnose_cluster_flattened_async(): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1656,18 +1764,15 @@ async def test_diagnose_cluster_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster_name == "cluster_name_value" @pytest.mark.asyncio async def test_diagnose_cluster_flattened_error_async(): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1684,16 +1789,16 @@ async def test_diagnose_cluster_flattened_error_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.ClusterControllerGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.ClusterControllerGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = ClusterControllerClient( @@ -1703,7 +1808,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.ClusterControllerGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = ClusterControllerClient( @@ -1714,7 +1819,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.ClusterControllerGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = ClusterControllerClient(transport=transport) assert client.transport is transport @@ -1723,13 +1828,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.ClusterControllerGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.ClusterControllerGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -1744,23 +1849,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials(),) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.ClusterControllerGrpcTransport,) def test_cluster_controller_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.ClusterControllerTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -1772,7 +1877,7 @@ def test_cluster_controller_base_transport(): ) as Transport: Transport.return_value = None transport = transports.ClusterControllerTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -1780,6 +1885,8 @@ def test_cluster_controller_base_transport(): methods = ( "create_cluster", "update_cluster", + "stop_cluster", + "start_cluster", "delete_cluster", "get_cluster", "list_clusters", @@ -1795,15 +1902,37 @@ def test_cluster_controller_base_transport(): transport.operations_client +@requires_google_auth_gte_1_25_0 def test_cluster_controller_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.dataproc_v1.services.cluster_controller.transports.ClusterControllerTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ClusterControllerTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_cluster_controller_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.dataproc_v1.services.cluster_controller.transports.ClusterControllerTransport._prep_wrapped_messages" ) as Transport: 
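        # _prep_wrapped_messages is stubbed out so the transport can be
        # constructed without building real gRPC method wrappers.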
Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.ClusterControllerTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -1816,19 +1945,33 @@ def test_cluster_controller_base_transport_with_credentials_file(): def test_cluster_controller_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.dataproc_v1.services.cluster_controller.transports.ClusterControllerTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.ClusterControllerTransport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_cluster_controller_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ClusterControllerClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_cluster_controller_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) ClusterControllerClient() adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), @@ -1836,20 +1979,156 @@ def test_cluster_controller_auth_adc(): ) -def test_cluster_controller_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterControllerGrpcTransport, + transports.ClusterControllerGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_cluster_controller_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.ClusterControllerGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterControllerGrpcTransport, + transports.ClusterControllerGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_cluster_controller_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
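+    # (google-auth < 1.25.0 has no default_scopes parameter, so the
+    # cloud-platform scope is passed directly as `scopes` in this branch.)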
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ClusterControllerGrpcTransport, grpc_helpers), + (transports.ClusterControllerGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_cluster_controller_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="dataproc.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ClusterControllerGrpcTransport, grpc_helpers), + (transports.ClusterControllerGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_cluster_controller_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ClusterControllerGrpcTransport, grpc_helpers), + (transports.ClusterControllerGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_cluster_controller_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [ @@ -1858,7 +2137,7 @@ def test_cluster_controller_transport_auth_adc(): ], ) def test_cluster_controller_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -1897,7 +2176,7 @@ def test_cluster_controller_grpc_transport_client_cert_source_for_mtls(transport def test_cluster_controller_host_no_port(): client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com" ), @@ -1907,7 +2186,7 @@ def test_cluster_controller_host_no_port(): def test_cluster_controller_host_with_port(): client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com:8000" ), @@ -1963,9 +2242,9 @@ def test_cluster_controller_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -2041,7 +2320,7 @@ def test_cluster_controller_transport_channel_mtls_with_adc(transport_class): def test_cluster_controller_grpc_lro_client(): client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport @@ -2054,7 +2333,7 @@ def test_cluster_controller_grpc_lro_client(): def test_cluster_controller_grpc_lro_async_client(): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport @@ -2065,9 +2344,56 @@ def test_cluster_controller_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client +def test_cluster_path(): + project = "squid" + location = "clam" + cluster = "whelk" + expected = "projects/{project}/locations/{location}/clusters/{cluster}".format( + project=project, location=location, cluster=cluster, + ) + actual = ClusterControllerClient.cluster_path(project, location, cluster) + assert expected == actual + + +def test_parse_cluster_path(): + expected = { + "project": "octopus", + "location": "oyster", + 
"cluster": "nudibranch", + } + path = ClusterControllerClient.cluster_path(**expected) + + # Check that the path construction is reversible. + actual = ClusterControllerClient.parse_cluster_path(path) + assert expected == actual + + +def test_service_path(): + project = "cuttlefish" + location = "mussel" + service = "winkle" + expected = "projects/{project}/locations/{location}/services/{service}".format( + project=project, location=location, service=service, + ) + actual = ClusterControllerClient.service_path(project, location, service) + assert expected == actual + + +def test_parse_service_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "service": "abalone", + } + path = ClusterControllerClient.service_path(**expected) + + # Check that the path construction is reversible. + actual = ClusterControllerClient.parse_service_path(path) + assert expected == actual + + def test_common_billing_account_path(): billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2088,7 +2414,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) actual = ClusterControllerClient.common_folder_path(folder) assert expected == actual @@ -2107,7 +2432,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) actual = ClusterControllerClient.common_organization_path(organization) assert expected == actual @@ -2126,7 +2450,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project,) actual = ClusterControllerClient.common_project_path(project) assert expected == actual @@ -2146,7 +2469,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "winkle" location = "nautilus" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -2173,7 +2495,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.ClusterControllerTransport, "_prep_wrapped_messages" ) as prep: client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2182,6 +2504,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = ClusterControllerClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/dataproc_v1/test_job_controller.py b/tests/unit/gapic/dataproc_v1/test_job_controller.py index 8ba7c041..65754833 100644 --- a/tests/unit/gapic/dataproc_v1/test_job_controller.py +++ b/tests/unit/gapic/dataproc_v1/test_job_controller.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,26 +23,56 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import future from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.dataproc_v1.services.job_controller import JobControllerAsyncClient from google.cloud.dataproc_v1.services.job_controller import JobControllerClient from google.cloud.dataproc_v1.services.job_controller import pagers from google.cloud.dataproc_v1.services.job_controller import transports +from google.cloud.dataproc_v1.services.job_controller.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.dataproc_v1.services.job_controller.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.dataproc_v1.types import jobs from google.longrunning import operations_pb2 from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
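+# For example (illustrative only), a test that depends on the newer
+# google-auth scoping behavior would be gated with the marker defined below:
+#
+#     @requires_google_auth_gte_1_25_0
+#     def test_uses_default_scopes():
+#         ...
+#
+# packaging.version.parse supplies comparable Version objects for the checks.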
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -94,7 +123,7 @@ def test__get_default_mtls_endpoint(): "client_class", [JobControllerClient, JobControllerAsyncClient,] ) def test_job_controller_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -111,7 +140,7 @@ def test_job_controller_client_from_service_account_info(client_class): "client_class", [JobControllerClient, JobControllerAsyncClient,] ) def test_job_controller_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -164,7 +193,7 @@ def test_job_controller_client_client_options( ): # Check that if channel is provided we won't create a new one. with mock.patch.object(JobControllerClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -450,7 +479,7 @@ def test_job_controller_client_client_options_from_dict(): def test_submit_job(transport: str = "grpc", request_type=jobs.SubmitJobRequest): client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -467,25 +496,18 @@ def test_submit_job(transport: str = "grpc", request_type=jobs.SubmitJobRequest) done=True, hadoop_job=jobs.HadoopJob(main_jar_file_uri="main_jar_file_uri_value"), ) - response = client.submit_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == jobs.SubmitJobRequest() # Establish that the response is the type that we expect. - assert isinstance(response, jobs.Job) - assert response.driver_output_resource_uri == "driver_output_resource_uri_value" - assert response.driver_control_files_uri == "driver_control_files_uri_value" - assert response.job_uuid == "job_uuid_value" - assert response.done is True @@ -497,7 +519,7 @@ def test_submit_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
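    # (With no arguments, the client constructs a default jobs.SubmitJobRequest(),
    # which the assertion on args[0] later in this test verifies.)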
client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -505,7 +527,6 @@ def test_submit_job_empty_call(): client.submit_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == jobs.SubmitJobRequest() @@ -514,7 +535,7 @@ async def test_submit_job_async( transport: str = "grpc_asyncio", request_type=jobs.SubmitJobRequest ): client = JobControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -532,24 +553,18 @@ async def test_submit_job_async( done=True, ) ) - response = await client.submit_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == jobs.SubmitJobRequest() # Establish that the response is the type that we expect. assert isinstance(response, jobs.Job) - assert response.driver_output_resource_uri == "driver_output_resource_uri_value" - assert response.driver_control_files_uri == "driver_control_files_uri_value" - assert response.job_uuid == "job_uuid_value" - assert response.done is True @@ -559,13 +574,12 @@ async def test_submit_job_async_from_dict(): def test_submit_job_flattened(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.submit_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = jobs.Job() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.submit_job( @@ -578,18 +592,15 @@ def test_submit_job_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job == jobs.Job( reference=jobs.JobReference(project_id="project_id_value") ) def test_submit_job_flattened_error(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -604,7 +615,9 @@ def test_submit_job_flattened_error(): @pytest.mark.asyncio async def test_submit_job_flattened_async(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.submit_job), "__call__") as call: @@ -624,11 +637,8 @@ async def test_submit_job_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job == jobs.Job( reference=jobs.JobReference(project_id="project_id_value") ) @@ -636,7 +646,9 @@ async def test_submit_job_flattened_async(): @pytest.mark.asyncio async def test_submit_job_flattened_error_async(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -653,7 +665,7 @@ def test_submit_job_as_operation( transport: str = "grpc", request_type=jobs.SubmitJobRequest ): client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -666,13 +678,11 @@ def test_submit_job_as_operation( ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.submit_job_as_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == jobs.SubmitJobRequest() # Establish that the response is the type that we expect. @@ -687,7 +697,7 @@ def test_submit_job_as_operation_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -697,7 +707,6 @@ def test_submit_job_as_operation_empty_call(): client.submit_job_as_operation() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == jobs.SubmitJobRequest() @@ -706,7 +715,7 @@ async def test_submit_job_as_operation_async( transport: str = "grpc_asyncio", request_type=jobs.SubmitJobRequest ): client = JobControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -721,13 +730,11 @@ async def test_submit_job_as_operation_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.submit_job_as_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == jobs.SubmitJobRequest() # Establish that the response is the type that we expect. @@ -740,7 +747,7 @@ async def test_submit_job_as_operation_async_from_dict(): def test_submit_job_as_operation_flattened(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -748,7 +755,6 @@ def test_submit_job_as_operation_flattened(): ) as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.submit_job_as_operation( @@ -761,18 +767,15 @@ def test_submit_job_as_operation_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job == jobs.Job( reference=jobs.JobReference(project_id="project_id_value") ) def test_submit_job_as_operation_flattened_error(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -787,7 +790,9 @@ def test_submit_job_as_operation_flattened_error(): @pytest.mark.asyncio async def test_submit_job_as_operation_flattened_async(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -811,11 +816,8 @@ async def test_submit_job_as_operation_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job == jobs.Job( reference=jobs.JobReference(project_id="project_id_value") ) @@ -823,7 +825,9 @@ async def test_submit_job_as_operation_flattened_async(): @pytest.mark.asyncio async def test_submit_job_as_operation_flattened_error_async(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -838,7 +842,7 @@ async def test_submit_job_as_operation_flattened_error_async(): def test_get_job(transport: str = "grpc", request_type=jobs.GetJobRequest): client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -855,25 +859,18 @@ def test_get_job(transport: str = "grpc", request_type=jobs.GetJobRequest): done=True, hadoop_job=jobs.HadoopJob(main_jar_file_uri="main_jar_file_uri_value"), ) - response = client.get_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == jobs.GetJobRequest() # Establish that the response is the type that we expect. - assert isinstance(response, jobs.Job) - assert response.driver_output_resource_uri == "driver_output_resource_uri_value" - assert response.driver_control_files_uri == "driver_control_files_uri_value" - assert response.job_uuid == "job_uuid_value" - assert response.done is True @@ -885,7 +882,7 @@ def test_get_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -893,7 +890,6 @@ def test_get_job_empty_call(): client.get_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == jobs.GetJobRequest() @@ -902,7 +898,7 @@ async def test_get_job_async( transport: str = "grpc_asyncio", request_type=jobs.GetJobRequest ): client = JobControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -920,24 +916,18 @@ async def test_get_job_async( done=True, ) ) - response = await client.get_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == jobs.GetJobRequest() # Establish that the response is the type that we expect. assert isinstance(response, jobs.Job) - assert response.driver_output_resource_uri == "driver_output_resource_uri_value" - assert response.driver_control_files_uri == "driver_control_files_uri_value" - assert response.job_uuid == "job_uuid_value" - assert response.done is True @@ -947,13 +937,12 @@ async def test_get_job_async_from_dict(): def test_get_job_flattened(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = jobs.Job() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_job( @@ -964,16 +953,13 @@ def test_get_job_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job_id == "job_id_value" def test_get_job_flattened_error(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -988,7 +974,9 @@ def test_get_job_flattened_error(): @pytest.mark.asyncio async def test_get_job_flattened_async(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_job), "__call__") as call: @@ -1006,17 +994,16 @@ async def test_get_job_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job_id == "job_id_value" @pytest.mark.asyncio async def test_get_job_flattened_error_async(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1031,7 +1018,7 @@ async def test_get_job_flattened_error_async(): def test_list_jobs(transport: str = "grpc", request_type=jobs.ListJobsRequest): client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1044,19 +1031,15 @@ def test_list_jobs(transport: str = "grpc", request_type=jobs.ListJobsRequest): call.return_value = jobs.ListJobsResponse( next_page_token="next_page_token_value", ) - response = client.list_jobs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == jobs.ListJobsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobsPager) - assert response.next_page_token == "next_page_token_value" @@ -1068,7 +1051,7 @@ def test_list_jobs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1076,7 +1059,6 @@ def test_list_jobs_empty_call(): client.list_jobs() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == jobs.ListJobsRequest() @@ -1085,7 +1067,7 @@ async def test_list_jobs_async( transport: str = "grpc_asyncio", request_type=jobs.ListJobsRequest ): client = JobControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1098,18 +1080,15 @@ async def test_list_jobs_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( jobs.ListJobsResponse(next_page_token="next_page_token_value",) ) - response = await client.list_jobs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == jobs.ListJobsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListJobsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -1119,13 +1098,12 @@ async def test_list_jobs_async_from_dict(): def test_list_jobs_flattened(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = jobs.ListJobsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_jobs( @@ -1136,16 +1114,13 @@ def test_list_jobs_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].filter == "filter_value" def test_list_jobs_flattened_error(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1160,7 +1135,9 @@ def test_list_jobs_flattened_error(): @pytest.mark.asyncio async def test_list_jobs_flattened_async(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: @@ -1180,17 +1157,16 @@ async def test_list_jobs_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].filter == "filter_value" @pytest.mark.asyncio async def test_list_jobs_flattened_error_async(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1204,7 +1180,7 @@ async def test_list_jobs_flattened_error_async(): def test_list_jobs_pager(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials,) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: @@ -1230,7 +1206,7 @@ def test_list_jobs_pager(): def test_list_jobs_pages(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials,) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: @@ -1251,7 +1227,7 @@ def test_list_jobs_pages(): @pytest.mark.asyncio async def test_list_jobs_async_pager(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials,) + client = JobControllerAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1279,7 +1255,7 @@ async def test_list_jobs_async_pager(): @pytest.mark.asyncio async def test_list_jobs_async_pages(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials,) + client = JobControllerAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1304,7 +1280,7 @@ async def test_list_jobs_async_pages(): def test_update_job(transport: str = "grpc", request_type=jobs.UpdateJobRequest): client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1321,25 +1297,18 @@ def test_update_job(transport: str = "grpc", request_type=jobs.UpdateJobRequest) done=True, hadoop_job=jobs.HadoopJob(main_jar_file_uri="main_jar_file_uri_value"), ) - response = client.update_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == jobs.UpdateJobRequest() # Establish that the response is the type that we expect. - assert isinstance(response, jobs.Job) - assert response.driver_output_resource_uri == "driver_output_resource_uri_value" - assert response.driver_control_files_uri == "driver_control_files_uri_value" - assert response.job_uuid == "job_uuid_value" - assert response.done is True @@ -1351,7 +1320,7 @@ def test_update_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1359,7 +1328,6 @@ def test_update_job_empty_call(): client.update_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == jobs.UpdateJobRequest() @@ -1368,7 +1336,7 @@ async def test_update_job_async( transport: str = "grpc_asyncio", request_type=jobs.UpdateJobRequest ): client = JobControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1386,24 +1354,18 @@ async def test_update_job_async( done=True, ) ) - response = await client.update_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == jobs.UpdateJobRequest() # Establish that the response is the type that we expect. assert isinstance(response, jobs.Job) - assert response.driver_output_resource_uri == "driver_output_resource_uri_value" - assert response.driver_control_files_uri == "driver_control_files_uri_value" - assert response.job_uuid == "job_uuid_value" - assert response.done is True @@ -1414,7 +1376,7 @@ async def test_update_job_async_from_dict(): def test_cancel_job(transport: str = "grpc", request_type=jobs.CancelJobRequest): client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1431,25 +1393,18 @@ def test_cancel_job(transport: str = "grpc", request_type=jobs.CancelJobRequest) done=True, hadoop_job=jobs.HadoopJob(main_jar_file_uri="main_jar_file_uri_value"), ) - response = client.cancel_job(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == jobs.CancelJobRequest() # Establish that the response is the type that we expect. - assert isinstance(response, jobs.Job) - assert response.driver_output_resource_uri == "driver_output_resource_uri_value" - assert response.driver_control_files_uri == "driver_control_files_uri_value" - assert response.job_uuid == "job_uuid_value" - assert response.done is True @@ -1461,7 +1416,7 @@ def test_cancel_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1469,7 +1424,6 @@ def test_cancel_job_empty_call(): client.cancel_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == jobs.CancelJobRequest() @@ -1478,7 +1432,7 @@ async def test_cancel_job_async( transport: str = "grpc_asyncio", request_type=jobs.CancelJobRequest ): client = JobControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1496,24 +1450,18 @@ async def test_cancel_job_async( done=True, ) ) - response = await client.cancel_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == jobs.CancelJobRequest() # Establish that the response is the type that we expect. assert isinstance(response, jobs.Job) - assert response.driver_output_resource_uri == "driver_output_resource_uri_value" - assert response.driver_control_files_uri == "driver_control_files_uri_value" - assert response.job_uuid == "job_uuid_value" - assert response.done is True @@ -1523,13 +1471,12 @@ async def test_cancel_job_async_from_dict(): def test_cancel_job_flattened(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = jobs.Job() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.cancel_job( @@ -1540,16 +1487,13 @@ def test_cancel_job_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job_id == "job_id_value" def test_cancel_job_flattened_error(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
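Every flattened-argument test in this file follows the same shape: patch the transport's callable, invoke the client method with keyword arguments instead of a request object, and assert that those values were packed onto the request proto. A minimal standalone sketch of that pattern (the function name and exact body are illustrative, not part of the generated file):

import mock
from google.auth import credentials as ga_credentials
from google.cloud.dataproc_v1 import JobControllerClient
from google.cloud.dataproc_v1.types import jobs

def sketch_cancel_job_flattened():
    client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials())
    with mock.patch.object(type(client.transport.cancel_job), "__call__") as call:
        call.return_value = jobs.Job()
        client.cancel_job(
            project_id="project_id_value", region="region_value", job_id="job_id_value",
        )
        # The flattened kwargs must have been packed into one CancelJobRequest.
        _, args, _ = call.mock_calls[0]
        assert args[0].project_id == "project_id_value"
        assert args[0].region == "region_value"
        assert args[0].job_id == "job_id_value"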
@@ -1564,7 +1508,9 @@ def test_cancel_job_flattened_error(): @pytest.mark.asyncio async def test_cancel_job_flattened_async(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: @@ -1582,17 +1528,16 @@ async def test_cancel_job_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job_id == "job_id_value" @pytest.mark.asyncio async def test_cancel_job_flattened_error_async(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1607,7 +1552,7 @@ async def test_cancel_job_flattened_error_async(): def test_delete_job(transport: str = "grpc", request_type=jobs.DeleteJobRequest): client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1618,13 +1563,11 @@ def test_delete_job(transport: str = "grpc", request_type=jobs.DeleteJobRequest) with mock.patch.object(type(client.transport.delete_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == jobs.DeleteJobRequest() # Establish that the response is the type that we expect. @@ -1639,7 +1582,7 @@ def test_delete_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1647,7 +1590,6 @@ def test_delete_job_empty_call(): client.delete_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == jobs.DeleteJobRequest() @@ -1656,7 +1598,7 @@ async def test_delete_job_async( transport: str = "grpc_asyncio", request_type=jobs.DeleteJobRequest ): client = JobControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1667,13 +1609,11 @@ async def test_delete_job_async( with mock.patch.object(type(client.transport.delete_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == jobs.DeleteJobRequest() # Establish that the response is the type that we expect. 
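The async variants differ from their sync counterparts only in how the fake response is wrapped: the stubbed call must be awaitable, so the suite wraps return values in grpc_helpers_async.FakeUnaryUnaryCall. A hedged sketch of the delete_job case (illustrative name; DeleteJob's response is empty, so None is wrapped):

import mock
import pytest
from google.api_core import grpc_helpers_async
from google.auth import credentials as ga_credentials
from google.cloud.dataproc_v1 import JobControllerAsyncClient

@pytest.mark.asyncio
async def sketch_delete_job_flattened_async():
    client = JobControllerAsyncClient(credentials=ga_credentials.AnonymousCredentials())
    with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
        # DeleteJob returns Empty, so an awaitable wrapping None stands in.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.delete_job(
            project_id="project_id_value", region="region_value", job_id="job_id_value",
        )
        _, args, _ = call.mock_calls[0]
        assert args[0].job_id == "job_id_value"
        assert response is None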
@@ -1686,13 +1626,12 @@ async def test_delete_job_async_from_dict(): def test_delete_job_flattened(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_job( @@ -1703,16 +1642,13 @@ def test_delete_job_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job_id == "job_id_value" def test_delete_job_flattened_error(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1727,7 +1663,9 @@ def test_delete_job_flattened_error(): @pytest.mark.asyncio async def test_delete_job_flattened_async(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_job), "__call__") as call: @@ -1745,17 +1683,16 @@ async def test_delete_job_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job_id == "job_id_value" @pytest.mark.asyncio async def test_delete_job_flattened_error_async(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1771,16 +1708,16 @@ async def test_delete_job_flattened_error_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.JobControllerGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.JobControllerGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = JobControllerClient( @@ -1790,7 +1727,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. 
transport = transports.JobControllerGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = JobControllerClient( @@ -1801,7 +1738,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.JobControllerGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = JobControllerClient(transport=transport) assert client.transport is transport @@ -1810,13 +1747,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.JobControllerGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.JobControllerGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -1831,23 +1768,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = JobControllerClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.JobControllerGrpcTransport,) def test_job_controller_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.JobControllerTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -1859,7 +1796,7 @@ def test_job_controller_base_transport(): ) as Transport: Transport.return_value = None transport = transports.JobControllerTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -1883,15 +1820,37 @@ def test_job_controller_base_transport(): transport.operations_client +@requires_google_auth_gte_1_25_0 def test_job_controller_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.dataproc_v1.services.job_controller.transports.JobControllerTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.JobControllerTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + 
default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_job_controller_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.dataproc_v1.services.job_controller.transports.JobControllerTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.JobControllerTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -1904,19 +1863,33 @@ def test_job_controller_base_transport_with_credentials_file(): def test_job_controller_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.dataproc_v1.services.job_controller.transports.JobControllerTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.JobControllerTransport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_job_controller_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + JobControllerClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_job_controller_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) JobControllerClient() adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), @@ -1924,20 +1897,156 @@ def test_job_controller_auth_adc(): ) -def test_job_controller_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.JobControllerGrpcTransport, + transports.JobControllerGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_job_controller_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.JobControllerGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.JobControllerGrpcTransport, + transports.JobControllerGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_job_controller_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.JobControllerGrpcTransport, grpc_helpers), + (transports.JobControllerGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_job_controller_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="dataproc.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.JobControllerGrpcTransport, grpc_helpers), + (transports.JobControllerGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_job_controller_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.JobControllerGrpcTransport, grpc_helpers), + (transports.JobControllerGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_job_controller_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [ @@ -1946,7 +2055,7 @@ def test_job_controller_transport_auth_adc(): ], ) def test_job_controller_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. 
with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -1985,7 +2094,7 @@ def test_job_controller_grpc_transport_client_cert_source_for_mtls(transport_cla def test_job_controller_host_no_port(): client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com" ), @@ -1995,7 +2104,7 @@ def test_job_controller_host_no_port(): def test_job_controller_host_with_port(): client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com:8000" ), @@ -2049,9 +2158,9 @@ def test_job_controller_transport_channel_mtls_with_client_cert_source(transport mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -2127,7 +2236,7 @@ def test_job_controller_transport_channel_mtls_with_adc(transport_class): def test_job_controller_grpc_lro_client(): client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport @@ -2140,7 +2249,7 @@ def test_job_controller_grpc_lro_client(): def test_job_controller_grpc_lro_async_client(): client = JobControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport @@ -2153,7 +2262,6 @@ def test_job_controller_grpc_lro_async_client(): def test_common_billing_account_path(): billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2174,7 +2282,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) actual = JobControllerClient.common_folder_path(folder) assert expected == actual @@ -2193,7 +2300,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) actual = JobControllerClient.common_organization_path(organization) assert expected == actual @@ -2212,7 +2318,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project,) actual = JobControllerClient.common_project_path(project) assert expected == actual @@ -2232,7 +2337,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "winkle" location = "nautilus" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -2259,7 +2363,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.JobControllerTransport, "_prep_wrapped_messages" ) as prep: client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) 
prep.assert_called_once_with(client_info) @@ -2268,6 +2372,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = JobControllerClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py b/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py index 379887a9..caa1aaf5 100644 --- a/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py +++ b/tests/unit/gapic/dataproc_v1/test_workflow_template_service.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,16 +23,16 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import future from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.dataproc_v1.services.workflow_template_service import ( WorkflowTemplateServiceAsyncClient, @@ -43,14 +42,44 @@ ) from google.cloud.dataproc_v1.services.workflow_template_service import pagers from google.cloud.dataproc_v1.services.workflow_template_service import transports +from google.cloud.dataproc_v1.services.workflow_template_service.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.dataproc_v1.services.workflow_template_service.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.dataproc_v1.types import clusters from google.cloud.dataproc_v1.types import jobs from google.cloud.dataproc_v1.types import shared from google.cloud.dataproc_v1.types import workflow_templates from google.longrunning import operations_pb2 from google.oauth2 import service_account -from google.protobuf import duration_pb2 as duration # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
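The markers defined just below gate each test on the installed google-auth and google-api-core versions; they compare via packaging.version because raw version strings order lexically rather than numerically. A small illustration of why that matters (not part of the patch):

from packaging import version

# Parsed versions compare numerically, so 1.9.0 sorts before 1.25.0 ...
assert version.parse("1.9.0") < version.parse("1.25.0")
# ... while plain strings compare character by character and get it backwards.
assert "1.9.0" > "1.25.0"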
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -102,7 +131,7 @@ def test__get_default_mtls_endpoint(): "client_class", [WorkflowTemplateServiceClient, WorkflowTemplateServiceAsyncClient,] ) def test_workflow_template_service_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -119,7 +148,7 @@ def test_workflow_template_service_client_from_service_account_info(client_class "client_class", [WorkflowTemplateServiceClient, WorkflowTemplateServiceAsyncClient,] ) def test_workflow_template_service_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -176,7 +205,7 @@ def test_workflow_template_service_client_client_options( ): # Check that if channel is provided we won't create a new one. with mock.patch.object(WorkflowTemplateServiceClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -483,7 +512,7 @@ def test_create_workflow_template( request_type=workflow_templates.CreateWorkflowTemplateRequest, ): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -498,23 +527,17 @@ def test_create_workflow_template( call.return_value = workflow_templates.WorkflowTemplate( id="id_value", name="name_value", version=774, ) - response = client.create_workflow_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.CreateWorkflowTemplateRequest() # Establish that the response is the type that we expect. - assert isinstance(response, workflow_templates.WorkflowTemplate) - assert response.id == "id_value" - assert response.name == "name_value" - assert response.version == 774 @@ -526,7 +549,7 @@ def test_create_workflow_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -536,7 +559,6 @@ def test_create_workflow_template_empty_call(): client.create_workflow_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.CreateWorkflowTemplateRequest() @@ -546,7 +568,7 @@ async def test_create_workflow_template_async( request_type=workflow_templates.CreateWorkflowTemplateRequest, ): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -563,22 +585,17 @@ async def test_create_workflow_template_async( id="id_value", name="name_value", version=774, ) ) - response = await client.create_workflow_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.CreateWorkflowTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, workflow_templates.WorkflowTemplate) - assert response.id == "id_value" - assert response.name == "name_value" - assert response.version == 774 @@ -589,12 +606,13 @@ async def test_create_workflow_template_async_from_dict(): def test_create_workflow_template_field_headers(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.CreateWorkflowTemplateRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -602,7 +620,6 @@ def test_create_workflow_template_field_headers(): type(client.transport.create_workflow_template), "__call__" ) as call: call.return_value = workflow_templates.WorkflowTemplate() - client.create_workflow_template(request) # Establish that the underlying gRPC stub method was called. @@ -618,12 +635,13 @@ def test_create_workflow_template_field_headers(): @pytest.mark.asyncio async def test_create_workflow_template_field_headers_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.CreateWorkflowTemplateRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -633,7 +651,6 @@ async def test_create_workflow_template_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( workflow_templates.WorkflowTemplate() ) - await client.create_workflow_template(request) # Establish that the underlying gRPC stub method was called. @@ -648,7 +665,7 @@ async def test_create_workflow_template_field_headers_async(): def test_create_workflow_template_flattened(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
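The service-account tests near the top of this file patch the credentials factory instead of reading a real key file. A sketch of that approach (the payload dict and final assertions here are illustrative assumptions, not lifted from the generated tests):

import mock
from google.auth import credentials as ga_credentials
from google.oauth2 import service_account
from google.cloud.dataproc_v1 import WorkflowTemplateServiceClient

def sketch_from_service_account_info():
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_info"
    ) as factory:
        factory.return_value = creds
        info = {"type": "service_account"}  # hypothetical minimal payload
        client = WorkflowTemplateServiceClient.from_service_account_info(info)
        # The classmethod should delegate to the patched factory.
        factory.assert_called_once_with(info)
        assert isinstance(client, WorkflowTemplateServiceClient)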
@@ -657,7 +674,6 @@ def test_create_workflow_template_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = workflow_templates.WorkflowTemplate() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_workflow_template( @@ -669,15 +685,13 @@ def test_create_workflow_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].template == workflow_templates.WorkflowTemplate(id="id_value") def test_create_workflow_template_flattened_error(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -693,7 +707,7 @@ def test_create_workflow_template_flattened_error(): @pytest.mark.asyncio async def test_create_workflow_template_flattened_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -717,16 +731,14 @@ async def test_create_workflow_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].template == workflow_templates.WorkflowTemplate(id="id_value") @pytest.mark.asyncio async def test_create_workflow_template_flattened_error_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -743,7 +755,7 @@ def test_get_workflow_template( transport: str = "grpc", request_type=workflow_templates.GetWorkflowTemplateRequest ): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -758,23 +770,17 @@ def test_get_workflow_template( call.return_value = workflow_templates.WorkflowTemplate( id="id_value", name="name_value", version=774, ) - response = client.get_workflow_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.GetWorkflowTemplateRequest() # Establish that the response is the type that we expect. - assert isinstance(response, workflow_templates.WorkflowTemplate) - assert response.id == "id_value" - assert response.name == "name_value" - assert response.version == 774 @@ -786,7 +792,7 @@ def test_get_workflow_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
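The field-headers tests that follow verify routing: any request field bound into the HTTP/1.1 URI must also be echoed into gRPC metadata as x-goog-request-params. Consolidated into one runnable sketch (the function name is illustrative):

import mock
from google.auth import credentials as ga_credentials
from google.cloud.dataproc_v1 import WorkflowTemplateServiceClient
from google.cloud.dataproc_v1.types import workflow_templates

def sketch_field_headers():
    client = WorkflowTemplateServiceClient(
        credentials=ga_credentials.AnonymousCredentials()
    )
    request = workflow_templates.GetWorkflowTemplateRequest()
    request.name = "name/value"
    with mock.patch.object(
        type(client.transport.get_workflow_template), "__call__"
    ) as call:
        call.return_value = workflow_templates.WorkflowTemplate()
        client.get_workflow_template(request)
        # The resource name must be echoed into gRPC metadata for routing.
        _, _, kw = call.mock_calls[0]
        assert ("x-goog-request-params", "name=name/value") in kw["metadata"]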
@@ -796,7 +802,6 @@ def test_get_workflow_template_empty_call(): client.get_workflow_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.GetWorkflowTemplateRequest() @@ -806,7 +811,7 @@ async def test_get_workflow_template_async( request_type=workflow_templates.GetWorkflowTemplateRequest, ): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -823,22 +828,17 @@ async def test_get_workflow_template_async( id="id_value", name="name_value", version=774, ) ) - response = await client.get_workflow_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.GetWorkflowTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, workflow_templates.WorkflowTemplate) - assert response.id == "id_value" - assert response.name == "name_value" - assert response.version == 774 @@ -849,12 +849,13 @@ async def test_get_workflow_template_async_from_dict(): def test_get_workflow_template_field_headers(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.GetWorkflowTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -862,7 +863,6 @@ def test_get_workflow_template_field_headers(): type(client.transport.get_workflow_template), "__call__" ) as call: call.return_value = workflow_templates.WorkflowTemplate() - client.get_workflow_template(request) # Establish that the underlying gRPC stub method was called. @@ -878,12 +878,13 @@ def test_get_workflow_template_field_headers(): @pytest.mark.asyncio async def test_get_workflow_template_field_headers_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.GetWorkflowTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -893,7 +894,6 @@ async def test_get_workflow_template_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( workflow_templates.WorkflowTemplate() ) - await client.get_workflow_template(request) # Establish that the underlying gRPC stub method was called. @@ -908,7 +908,7 @@ async def test_get_workflow_template_field_headers_async(): def test_get_workflow_template_flattened(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -917,7 +917,6 @@ def test_get_workflow_template_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = workflow_templates.WorkflowTemplate() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.get_workflow_template(name="name_value",) @@ -926,13 +925,12 @@ def test_get_workflow_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_workflow_template_flattened_error(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -946,7 +944,7 @@ def test_get_workflow_template_flattened_error(): @pytest.mark.asyncio async def test_get_workflow_template_flattened_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -967,14 +965,13 @@ async def test_get_workflow_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_workflow_template_flattened_error_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -990,7 +987,7 @@ def test_instantiate_workflow_template( request_type=workflow_templates.InstantiateWorkflowTemplateRequest, ): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1003,13 +1000,11 @@ def test_instantiate_workflow_template( ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.instantiate_workflow_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.InstantiateWorkflowTemplateRequest() # Establish that the response is the type that we expect. @@ -1024,7 +1019,7 @@ def test_instantiate_workflow_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
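The instantiate tests stub the transport with a raw operations_pb2.Operation; the GAPIC layer is expected to wrap it in an operation future rather than return the proto directly. A sketch of that expectation (assuming, as the generated tests do for other LRO methods, that the check is isinstance(response, future.Future)):

import mock
from google.api_core import future
from google.auth import credentials as ga_credentials
from google.longrunning import operations_pb2
from google.cloud.dataproc_v1 import WorkflowTemplateServiceClient
from google.cloud.dataproc_v1.types import workflow_templates

def sketch_instantiate_returns_future():
    client = WorkflowTemplateServiceClient(
        credentials=ga_credentials.AnonymousCredentials()
    )
    with mock.patch.object(
        type(client.transport.instantiate_workflow_template), "__call__"
    ) as call:
        call.return_value = operations_pb2.Operation(name="operations/spam")
        response = client.instantiate_workflow_template(
            workflow_templates.InstantiateWorkflowTemplateRequest()
        )
        # The gapic layer hands back an operation future, not the raw proto.
        assert isinstance(response, future.Future)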
@@ -1034,7 +1029,6 @@ def test_instantiate_workflow_template_empty_call(): client.instantiate_workflow_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.InstantiateWorkflowTemplateRequest() @@ -1044,7 +1038,7 @@ async def test_instantiate_workflow_template_async( request_type=workflow_templates.InstantiateWorkflowTemplateRequest, ): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1059,13 +1053,11 @@ async def test_instantiate_workflow_template_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.instantiate_workflow_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.InstantiateWorkflowTemplateRequest() # Establish that the response is the type that we expect. @@ -1079,12 +1071,13 @@ async def test_instantiate_workflow_template_async_from_dict(): def test_instantiate_workflow_template_field_headers(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.InstantiateWorkflowTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1092,7 +1085,6 @@ def test_instantiate_workflow_template_field_headers(): type(client.transport.instantiate_workflow_template), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.instantiate_workflow_template(request) # Establish that the underlying gRPC stub method was called. @@ -1108,12 +1100,13 @@ def test_instantiate_workflow_template_field_headers(): @pytest.mark.asyncio async def test_instantiate_workflow_template_field_headers_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.InstantiateWorkflowTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1123,7 +1116,6 @@ async def test_instantiate_workflow_template_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.instantiate_workflow_template(request) # Establish that the underlying gRPC stub method was called. @@ -1138,7 +1130,7 @@ async def test_instantiate_workflow_template_field_headers_async(): def test_instantiate_workflow_template_flattened(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1147,7 +1139,6 @@ def test_instantiate_workflow_template_flattened(): ) as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.instantiate_workflow_template( @@ -1158,15 +1149,13 @@ def test_instantiate_workflow_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].parameters == {"key_value": "value_value"} def test_instantiate_workflow_template_flattened_error(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1182,7 +1171,7 @@ def test_instantiate_workflow_template_flattened_error(): @pytest.mark.asyncio async def test_instantiate_workflow_template_flattened_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1205,16 +1194,14 @@ async def test_instantiate_workflow_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].parameters == {"key_value": "value_value"} @pytest.mark.asyncio async def test_instantiate_workflow_template_flattened_error_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1232,7 +1219,7 @@ def test_instantiate_inline_workflow_template( request_type=workflow_templates.InstantiateInlineWorkflowTemplateRequest, ): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1245,13 +1232,11 @@ def test_instantiate_inline_workflow_template( ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.instantiate_inline_workflow_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.InstantiateInlineWorkflowTemplateRequest() # Establish that the response is the type that we expect. @@ -1266,7 +1251,7 @@ def test_instantiate_inline_workflow_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
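The `*_field_headers` tests in the hunks above and below all follow one shape: populate the routed field on the request (`request.name = "name/value"`, `request.parent = "parent/value"`), invoke the method against a mocked stub, then inspect the metadata the stub received. The hunks truncate the final assertion, so the sketch below reconstructs it from the usual generated-test pattern; treat the exact metadata check as an assumption:

    from unittest import mock

    from google.auth import credentials as ga_credentials
    from google.cloud.dataproc_v1.services.workflow_template_service import (
        WorkflowTemplateServiceClient,
    )
    from google.cloud.dataproc_v1.types import workflow_templates

    client = WorkflowTemplateServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    request = workflow_templates.GetWorkflowTemplateRequest()
    request.name = "name/value"

    with mock.patch.object(
        type(client.transport.get_workflow_template), "__call__"
    ) as call:
        call.return_value = workflow_templates.WorkflowTemplate()
        client.get_workflow_template(request)

    # The routed field travels as an x-goog-request-params metadata entry.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value") in kw["metadata"]
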
@@ -1276,7 +1261,6 @@ def test_instantiate_inline_workflow_template_empty_call(): client.instantiate_inline_workflow_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.InstantiateInlineWorkflowTemplateRequest() @@ -1286,7 +1270,7 @@ async def test_instantiate_inline_workflow_template_async( request_type=workflow_templates.InstantiateInlineWorkflowTemplateRequest, ): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1301,13 +1285,11 @@ async def test_instantiate_inline_workflow_template_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.instantiate_inline_workflow_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.InstantiateInlineWorkflowTemplateRequest() # Establish that the response is the type that we expect. @@ -1321,12 +1303,13 @@ async def test_instantiate_inline_workflow_template_async_from_dict(): def test_instantiate_inline_workflow_template_field_headers(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.InstantiateInlineWorkflowTemplateRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1334,7 +1317,6 @@ def test_instantiate_inline_workflow_template_field_headers(): type(client.transport.instantiate_inline_workflow_template), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.instantiate_inline_workflow_template(request) # Establish that the underlying gRPC stub method was called. @@ -1350,12 +1332,13 @@ def test_instantiate_inline_workflow_template_field_headers(): @pytest.mark.asyncio async def test_instantiate_inline_workflow_template_field_headers_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.InstantiateInlineWorkflowTemplateRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1365,7 +1348,6 @@ async def test_instantiate_inline_workflow_template_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.instantiate_inline_workflow_template(request) # Establish that the underlying gRPC stub method was called. @@ -1380,7 +1362,7 @@ async def test_instantiate_inline_workflow_template_field_headers_async(): def test_instantiate_inline_workflow_template_flattened(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1389,7 +1371,6 @@ def test_instantiate_inline_workflow_template_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.instantiate_inline_workflow_template( @@ -1401,15 +1382,13 @@ def test_instantiate_inline_workflow_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].template == workflow_templates.WorkflowTemplate(id="id_value") def test_instantiate_inline_workflow_template_flattened_error(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1425,7 +1404,7 @@ def test_instantiate_inline_workflow_template_flattened_error(): @pytest.mark.asyncio async def test_instantiate_inline_workflow_template_flattened_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1449,16 +1428,14 @@ async def test_instantiate_inline_workflow_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].template == workflow_templates.WorkflowTemplate(id="id_value") @pytest.mark.asyncio async def test_instantiate_inline_workflow_template_flattened_error_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1476,7 +1453,7 @@ def test_update_workflow_template( request_type=workflow_templates.UpdateWorkflowTemplateRequest, ): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1491,23 +1468,17 @@ def test_update_workflow_template( call.return_value = workflow_templates.WorkflowTemplate( id="id_value", name="name_value", version=774, ) - response = client.update_workflow_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.UpdateWorkflowTemplateRequest() # Establish that the response is the type that we expect. - assert isinstance(response, workflow_templates.WorkflowTemplate) - assert response.id == "id_value" - assert response.name == "name_value" - assert response.version == 774 @@ -1519,7 +1490,7 @@ def test_update_workflow_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
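The `*_flattened_error` tests above all pin down the same client contract: a populated request object and flattened keyword arguments are mutually exclusive, and mixing them fails before any RPC is attempted. A minimal sketch of that contract:

    import pytest

    from google.auth import credentials as ga_credentials
    from google.cloud.dataproc_v1.services.workflow_template_service import (
        WorkflowTemplateServiceClient,
    )
    from google.cloud.dataproc_v1.types import workflow_templates

    client = WorkflowTemplateServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Passing both a request object and a flattened field is ambiguous,
    # so the client raises ValueError without calling the transport.
    with pytest.raises(ValueError):
        client.update_workflow_template(
            workflow_templates.UpdateWorkflowTemplateRequest(),
            template=workflow_templates.WorkflowTemplate(id="id_value"),
        )
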
@@ -1529,7 +1500,6 @@ def test_update_workflow_template_empty_call(): client.update_workflow_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.UpdateWorkflowTemplateRequest() @@ -1539,7 +1509,7 @@ async def test_update_workflow_template_async( request_type=workflow_templates.UpdateWorkflowTemplateRequest, ): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1556,22 +1526,17 @@ async def test_update_workflow_template_async( id="id_value", name="name_value", version=774, ) ) - response = await client.update_workflow_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.UpdateWorkflowTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, workflow_templates.WorkflowTemplate) - assert response.id == "id_value" - assert response.name == "name_value" - assert response.version == 774 @@ -1582,12 +1547,13 @@ async def test_update_workflow_template_async_from_dict(): def test_update_workflow_template_field_headers(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.UpdateWorkflowTemplateRequest() + request.template.name = "template.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1595,7 +1561,6 @@ def test_update_workflow_template_field_headers(): type(client.transport.update_workflow_template), "__call__" ) as call: call.return_value = workflow_templates.WorkflowTemplate() - client.update_workflow_template(request) # Establish that the underlying gRPC stub method was called. @@ -1613,12 +1578,13 @@ def test_update_workflow_template_field_headers(): @pytest.mark.asyncio async def test_update_workflow_template_field_headers_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.UpdateWorkflowTemplateRequest() + request.template.name = "template.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1628,7 +1594,6 @@ async def test_update_workflow_template_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( workflow_templates.WorkflowTemplate() ) - await client.update_workflow_template(request) # Establish that the underlying gRPC stub method was called. @@ -1645,7 +1610,7 @@ async def test_update_workflow_template_field_headers_async(): def test_update_workflow_template_flattened(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1654,7 +1619,6 @@ def test_update_workflow_template_flattened(): ) as call: # Designate an appropriate return value for the call. 
call.return_value = workflow_templates.WorkflowTemplate() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_workflow_template( @@ -1665,13 +1629,12 @@ def test_update_workflow_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].template == workflow_templates.WorkflowTemplate(id="id_value") def test_update_workflow_template_flattened_error(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1686,7 +1649,7 @@ def test_update_workflow_template_flattened_error(): @pytest.mark.asyncio async def test_update_workflow_template_flattened_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1709,14 +1672,13 @@ async def test_update_workflow_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].template == workflow_templates.WorkflowTemplate(id="id_value") @pytest.mark.asyncio async def test_update_workflow_template_flattened_error_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1733,7 +1695,7 @@ def test_list_workflow_templates( request_type=workflow_templates.ListWorkflowTemplatesRequest, ): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1748,19 +1710,15 @@ def test_list_workflow_templates( call.return_value = workflow_templates.ListWorkflowTemplatesResponse( next_page_token="next_page_token_value", ) - response = client.list_workflow_templates(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.ListWorkflowTemplatesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListWorkflowTemplatesPager) - assert response.next_page_token == "next_page_token_value" @@ -1772,7 +1730,7 @@ def test_list_workflow_templates_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
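The pager tests a few hunks below (`test_list_workflow_templates_pager` and friends) stitch several `ListWorkflowTemplatesResponse` pages together via `next_page_token`; what they verify is that callers never see the page boundaries. A sketch of that behavior under the same mocked-stub setup those tests use:

    from unittest import mock

    from google.auth import credentials as ga_credentials
    from google.cloud.dataproc_v1.services.workflow_template_service import (
        WorkflowTemplateServiceClient,
    )
    from google.cloud.dataproc_v1.types import workflow_templates

    client = WorkflowTemplateServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(
        type(client.transport.list_workflow_templates), "__call__"
    ) as call:
        # Two fake pages: the first advertises a continuation token,
        # the second ends the sequence.
        call.side_effect = (
            workflow_templates.ListWorkflowTemplatesResponse(
                templates=[workflow_templates.WorkflowTemplate()],
                next_page_token="abc",
            ),
            workflow_templates.ListWorkflowTemplatesResponse(
                templates=[workflow_templates.WorkflowTemplate()],
            ),
        )
        pager = client.list_workflow_templates(parent="parent_value")
        results = list(pager)  # iterating fetches the second page transparently

    assert len(results) == 2
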
@@ -1782,7 +1740,6 @@ def test_list_workflow_templates_empty_call(): client.list_workflow_templates() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.ListWorkflowTemplatesRequest() @@ -1792,7 +1749,7 @@ async def test_list_workflow_templates_async( request_type=workflow_templates.ListWorkflowTemplatesRequest, ): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1809,18 +1766,15 @@ async def test_list_workflow_templates_async( next_page_token="next_page_token_value", ) ) - response = await client.list_workflow_templates(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.ListWorkflowTemplatesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListWorkflowTemplatesAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -1831,12 +1785,13 @@ async def test_list_workflow_templates_async_from_dict(): def test_list_workflow_templates_field_headers(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.ListWorkflowTemplatesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1844,7 +1799,6 @@ def test_list_workflow_templates_field_headers(): type(client.transport.list_workflow_templates), "__call__" ) as call: call.return_value = workflow_templates.ListWorkflowTemplatesResponse() - client.list_workflow_templates(request) # Establish that the underlying gRPC stub method was called. @@ -1860,12 +1814,13 @@ def test_list_workflow_templates_field_headers(): @pytest.mark.asyncio async def test_list_workflow_templates_field_headers_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.ListWorkflowTemplatesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1875,7 +1830,6 @@ async def test_list_workflow_templates_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( workflow_templates.ListWorkflowTemplatesResponse() ) - await client.list_workflow_templates(request) # Establish that the underlying gRPC stub method was called. @@ -1890,7 +1844,7 @@ async def test_list_workflow_templates_field_headers_async(): def test_list_workflow_templates_flattened(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1899,7 +1853,6 @@ def test_list_workflow_templates_flattened(): ) as call: # Designate an appropriate return value for the call. 
call.return_value = workflow_templates.ListWorkflowTemplatesResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_workflow_templates(parent="parent_value",) @@ -1908,13 +1861,12 @@ def test_list_workflow_templates_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_workflow_templates_flattened_error(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1928,7 +1880,7 @@ def test_list_workflow_templates_flattened_error(): @pytest.mark.asyncio async def test_list_workflow_templates_flattened_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1949,14 +1901,13 @@ async def test_list_workflow_templates_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_workflow_templates_flattened_error_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1969,7 +1920,7 @@ async def test_list_workflow_templates_flattened_error_async(): def test_list_workflow_templates_pager(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2017,7 +1968,7 @@ def test_list_workflow_templates_pager(): def test_list_workflow_templates_pages(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2057,7 +2008,7 @@ def test_list_workflow_templates_pages(): @pytest.mark.asyncio async def test_list_workflow_templates_async_pager(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2106,7 +2057,7 @@ async def test_list_workflow_templates_async_pager(): @pytest.mark.asyncio async def test_list_workflow_templates_async_pages(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2152,7 +2103,7 @@ def test_delete_workflow_template( request_type=workflow_templates.DeleteWorkflowTemplateRequest, ): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2165,13 +2116,11 @@ def test_delete_workflow_template( ) as call: # Designate an appropriate return value for the call. 
call.return_value = None - response = client.delete_workflow_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.DeleteWorkflowTemplateRequest() # Establish that the response is the type that we expect. @@ -2186,7 +2135,7 @@ def test_delete_workflow_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2196,7 +2145,6 @@ def test_delete_workflow_template_empty_call(): client.delete_workflow_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.DeleteWorkflowTemplateRequest() @@ -2206,7 +2154,7 @@ async def test_delete_workflow_template_async( request_type=workflow_templates.DeleteWorkflowTemplateRequest, ): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2219,13 +2167,11 @@ async def test_delete_workflow_template_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_workflow_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.DeleteWorkflowTemplateRequest() # Establish that the response is the type that we expect. @@ -2239,12 +2185,13 @@ async def test_delete_workflow_template_async_from_dict(): def test_delete_workflow_template_field_headers(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.DeleteWorkflowTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2252,7 +2199,6 @@ def test_delete_workflow_template_field_headers(): type(client.transport.delete_workflow_template), "__call__" ) as call: call.return_value = None - client.delete_workflow_template(request) # Establish that the underlying gRPC stub method was called. @@ -2268,12 +2214,13 @@ def test_delete_workflow_template_field_headers(): @pytest.mark.asyncio async def test_delete_workflow_template_field_headers_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.DeleteWorkflowTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2281,7 +2228,6 @@ async def test_delete_workflow_template_field_headers_async(): type(client.transport.delete_workflow_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_workflow_template(request) # Establish that the underlying gRPC stub method was called. @@ -2296,7 +2242,7 @@ async def test_delete_workflow_template_field_headers_async(): def test_delete_workflow_template_flattened(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2305,7 +2251,6 @@ def test_delete_workflow_template_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_workflow_template(name="name_value",) @@ -2314,13 +2259,12 @@ def test_delete_workflow_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_workflow_template_flattened_error(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2334,7 +2278,7 @@ def test_delete_workflow_template_flattened_error(): @pytest.mark.asyncio async def test_delete_workflow_template_flattened_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2353,14 +2297,13 @@ async def test_delete_workflow_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_workflow_template_flattened_error_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2374,16 +2317,16 @@ async def test_delete_workflow_template_flattened_error_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.WorkflowTemplateServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.WorkflowTemplateServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = WorkflowTemplateServiceClient( @@ -2393,7 +2336,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. 
transport = transports.WorkflowTemplateServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = WorkflowTemplateServiceClient( @@ -2404,7 +2347,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.WorkflowTemplateServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = WorkflowTemplateServiceClient(transport=transport) assert client.transport is transport @@ -2413,13 +2356,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.WorkflowTemplateServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.WorkflowTemplateServiceGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -2434,8 +2377,8 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() @@ -2443,7 +2386,7 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) assert isinstance( client.transport, transports.WorkflowTemplateServiceGrpcTransport, @@ -2452,9 +2395,9 @@ def test_transport_grpc_default(): def test_workflow_template_service_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.WorkflowTemplateServiceTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -2466,7 +2409,7 @@ def test_workflow_template_service_base_transport(): ) as Transport: Transport.return_value = None transport = transports.WorkflowTemplateServiceTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -2490,15 +2433,37 @@ def test_workflow_template_service_base_transport(): transport.operations_client +@requires_google_auth_gte_1_25_0 def test_workflow_template_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.dataproc_v1.services.workflow_template_service.transports.WorkflowTemplateServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.WorkflowTemplateServiceTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_workflow_template_service_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.dataproc_v1.services.workflow_template_service.transports.WorkflowTemplateServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.WorkflowTemplateServiceTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -2511,19 +2476,33 @@ def test_workflow_template_service_base_transport_with_credentials_file(): def test_workflow_template_service_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.dataproc_v1.services.workflow_template_service.transports.WorkflowTemplateServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.WorkflowTemplateServiceTransport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_workflow_template_service_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + WorkflowTemplateServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_workflow_template_service_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) WorkflowTemplateServiceClient() adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), @@ -2531,20 +2510,158 @@ def test_workflow_template_service_auth_adc(): ) -def test_workflow_template_service_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.WorkflowTemplateServiceGrpcTransport, + transports.WorkflowTemplateServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_workflow_template_service_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.WorkflowTemplateServiceGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.WorkflowTemplateServiceGrpcTransport, + transports.WorkflowTemplateServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_workflow_template_service_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.WorkflowTemplateServiceGrpcTransport, grpc_helpers), + (transports.WorkflowTemplateServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_workflow_template_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="dataproc.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.WorkflowTemplateServiceGrpcTransport, grpc_helpers), + (transports.WorkflowTemplateServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_workflow_template_service_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.WorkflowTemplateServiceGrpcTransport, grpc_helpers), + (transports.WorkflowTemplateServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_workflow_template_service_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [ @@ -2555,7 +2672,7 @@ def test_workflow_template_service_transport_auth_adc(): def test_workflow_template_service_grpc_transport_client_cert_source_for_mtls( transport_class, ): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -2594,7 +2711,7 @@ def test_workflow_template_service_grpc_transport_client_cert_source_for_mtls( def test_workflow_template_service_host_no_port(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com" ), @@ -2604,7 +2721,7 @@ def test_workflow_template_service_host_no_port(): def test_workflow_template_service_host_with_port(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com:8000" ), @@ -2660,9 +2777,9 @@ def test_workflow_template_service_transport_channel_mtls_with_client_cert_sourc mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -2738,7 +2855,7 @@ def test_workflow_template_service_transport_channel_mtls_with_adc(transport_cla def test_workflow_template_service_grpc_lro_client(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport @@ -2751,7 +2868,7 @@ def test_workflow_template_service_grpc_lro_client(): def test_workflow_template_service_grpc_lro_async_client(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport @@ -2762,11 +2879,58 @@ def test_workflow_template_service_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client +def test_cluster_path(): + project = "squid" + location = "clam" + cluster = "whelk" + expected = "projects/{project}/locations/{location}/clusters/{cluster}".format( + project=project, location=location, cluster=cluster, + ) + actual = WorkflowTemplateServiceClient.cluster_path(project, location, cluster) + assert expected == actual + + +def 
test_parse_cluster_path(): + expected = { + "project": "octopus", + "location": "oyster", + "cluster": "nudibranch", + } + path = WorkflowTemplateServiceClient.cluster_path(**expected) + + # Check that the path construction is reversible. + actual = WorkflowTemplateServiceClient.parse_cluster_path(path) + assert expected == actual + + +def test_service_path(): + project = "cuttlefish" + location = "mussel" + service = "winkle" + expected = "projects/{project}/locations/{location}/services/{service}".format( + project=project, location=location, service=service, + ) + actual = WorkflowTemplateServiceClient.service_path(project, location, service) + assert expected == actual + + +def test_parse_service_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "service": "abalone", + } + path = WorkflowTemplateServiceClient.service_path(**expected) + + # Check that the path construction is reversible. + actual = WorkflowTemplateServiceClient.parse_service_path(path) + assert expected == actual + + def test_workflow_template_path(): project = "squid" region = "clam" workflow_template = "whelk" - expected = "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}".format( project=project, region=region, workflow_template=workflow_template, ) @@ -2791,7 +2955,6 @@ def test_parse_workflow_template_path(): def test_common_billing_account_path(): billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2812,7 +2975,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "winkle" - expected = "folders/{folder}".format(folder=folder,) actual = WorkflowTemplateServiceClient.common_folder_path(folder) assert expected == actual @@ -2831,7 +2993,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "scallop" - expected = "organizations/{organization}".format(organization=organization,) actual = WorkflowTemplateServiceClient.common_organization_path(organization) assert expected == actual @@ -2850,7 +3011,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "squid" - expected = "projects/{project}".format(project=project,) actual = WorkflowTemplateServiceClient.common_project_path(project) assert expected == actual @@ -2870,7 +3030,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "whelk" location = "octopus" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -2897,7 +3056,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.WorkflowTemplateServiceTransport, "_prep_wrapped_messages" ) as prep: client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2906,6 +3065,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = WorkflowTemplateServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/dataproc_v1beta2/__init__.py b/tests/unit/gapic/dataproc_v1beta2/__init__.py index 42ffdf2b..4de65971 100644 --- a/tests/unit/gapic/dataproc_v1beta2/__init__.py +++ 
b/tests/unit/gapic/dataproc_v1beta2/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/unit/gapic/dataproc_v1beta2/test_autoscaling_policy_service.py b/tests/unit/gapic/dataproc_v1beta2/test_autoscaling_policy_service.py index 9af5109f..62e8ab4a 100644 --- a/tests/unit/gapic/dataproc_v1beta2/test_autoscaling_policy_service.py +++ b/tests/unit/gapic/dataproc_v1beta2/test_autoscaling_policy_service.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,13 +23,13 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.dataproc_v1beta2.services.autoscaling_policy_service import ( AutoscalingPolicyServiceAsyncClient, @@ -40,9 +39,39 @@ ) from google.cloud.dataproc_v1beta2.services.autoscaling_policy_service import pagers from google.cloud.dataproc_v1beta2.services.autoscaling_policy_service import transports +from google.cloud.dataproc_v1beta2.services.autoscaling_policy_service.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.dataproc_v1beta2.services.autoscaling_policy_service.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.dataproc_v1beta2.types import autoscaling_policies from google.oauth2 import service_account -from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import duration_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -95,7 +124,7 @@ def test__get_default_mtls_endpoint(): [AutoscalingPolicyServiceClient, AutoscalingPolicyServiceAsyncClient,], ) def test_autoscaling_policy_service_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -113,7 +142,7 @@ def test_autoscaling_policy_service_client_from_service_account_info(client_clas [AutoscalingPolicyServiceClient, AutoscalingPolicyServiceAsyncClient,], ) def test_autoscaling_policy_service_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -172,7 +201,7 @@ def test_autoscaling_policy_service_client_client_options( with mock.patch.object( AutoscalingPolicyServiceClient, "get_transport_class" ) as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -481,7 +510,7 @@ def test_create_autoscaling_policy( request_type=autoscaling_policies.CreateAutoscalingPolicyRequest, ): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -498,25 +527,20 @@ def test_create_autoscaling_policy( name="name_value", basic_algorithm=autoscaling_policies.BasicAutoscalingAlgorithm( yarn_config=autoscaling_policies.BasicYarnAutoscalingConfig( - graceful_decommission_timeout=duration.Duration(seconds=751) + graceful_decommission_timeout=duration_pb2.Duration(seconds=751) ) ), ) - response = client.create_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.CreateAutoscalingPolicyRequest() # Establish that the response is the type that we expect. - assert isinstance(response, autoscaling_policies.AutoscalingPolicy) - assert response.id == "id_value" - assert response.name == "name_value" @@ -528,7 +552,7 @@ def test_create_autoscaling_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -538,7 +562,6 @@ def test_create_autoscaling_policy_empty_call(): client.create_autoscaling_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.CreateAutoscalingPolicyRequest() @@ -548,7 +571,7 @@ async def test_create_autoscaling_policy_async( request_type=autoscaling_policies.CreateAutoscalingPolicyRequest, ): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -563,20 +586,16 @@ async def test_create_autoscaling_policy_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( autoscaling_policies.AutoscalingPolicy(id="id_value", name="name_value",) ) - response = await client.create_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.CreateAutoscalingPolicyRequest() # Establish that the response is the type that we expect. assert isinstance(response, autoscaling_policies.AutoscalingPolicy) - assert response.id == "id_value" - assert response.name == "name_value" @@ -587,12 +606,13 @@ async def test_create_autoscaling_policy_async_from_dict(): def test_create_autoscaling_policy_field_headers(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = autoscaling_policies.CreateAutoscalingPolicyRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -600,7 +620,6 @@ def test_create_autoscaling_policy_field_headers(): type(client.transport.create_autoscaling_policy), "__call__" ) as call: call.return_value = autoscaling_policies.AutoscalingPolicy() - client.create_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. @@ -616,12 +635,13 @@ def test_create_autoscaling_policy_field_headers(): @pytest.mark.asyncio async def test_create_autoscaling_policy_field_headers_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = autoscaling_policies.CreateAutoscalingPolicyRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -631,7 +651,6 @@ async def test_create_autoscaling_policy_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( autoscaling_policies.AutoscalingPolicy() ) - await client.create_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. 
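Besides the credentials alias, these v1beta2 hunks apply the companion rename for protobuf well-known types: the aliased import (`duration_pb2 as duration`) gives way to the module's real name, as the import hunk earlier in this file shows. A sketch of the new spelling:

    from google.protobuf import duration_pb2  # formerly aliased as "duration"

    # The value mirrors the one used by the autoscaling-policy tests above.
    timeout = duration_pb2.Duration(seconds=751)
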
@@ -646,7 +665,7 @@ async def test_create_autoscaling_policy_field_headers_async(): def test_create_autoscaling_policy_flattened(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -655,7 +674,6 @@ def test_create_autoscaling_policy_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = autoscaling_policies.AutoscalingPolicy() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_autoscaling_policy( @@ -667,15 +685,13 @@ def test_create_autoscaling_policy_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].policy == autoscaling_policies.AutoscalingPolicy(id="id_value") def test_create_autoscaling_policy_flattened_error(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -691,7 +707,7 @@ def test_create_autoscaling_policy_flattened_error(): @pytest.mark.asyncio async def test_create_autoscaling_policy_flattened_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -715,16 +731,14 @@ async def test_create_autoscaling_policy_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].policy == autoscaling_policies.AutoscalingPolicy(id="id_value") @pytest.mark.asyncio async def test_create_autoscaling_policy_flattened_error_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -742,7 +756,7 @@ def test_update_autoscaling_policy( request_type=autoscaling_policies.UpdateAutoscalingPolicyRequest, ): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -759,25 +773,20 @@ def test_update_autoscaling_policy( name="name_value", basic_algorithm=autoscaling_policies.BasicAutoscalingAlgorithm( yarn_config=autoscaling_policies.BasicYarnAutoscalingConfig( - graceful_decommission_timeout=duration.Duration(seconds=751) + graceful_decommission_timeout=duration_pb2.Duration(seconds=751) ) ), ) - response = client.update_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.UpdateAutoscalingPolicyRequest() # Establish that the response is the type that we expect. - assert isinstance(response, autoscaling_policies.AutoscalingPolicy) - assert response.id == "id_value" - assert response.name == "name_value" @@ -789,7 +798,7 @@ def test_update_autoscaling_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -799,7 +808,6 @@ def test_update_autoscaling_policy_empty_call(): client.update_autoscaling_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.UpdateAutoscalingPolicyRequest() @@ -809,7 +817,7 @@ async def test_update_autoscaling_policy_async( request_type=autoscaling_policies.UpdateAutoscalingPolicyRequest, ): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -824,20 +832,16 @@ async def test_update_autoscaling_policy_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( autoscaling_policies.AutoscalingPolicy(id="id_value", name="name_value",) ) - response = await client.update_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.UpdateAutoscalingPolicyRequest() # Establish that the response is the type that we expect. assert isinstance(response, autoscaling_policies.AutoscalingPolicy) - assert response.id == "id_value" - assert response.name == "name_value" @@ -848,12 +852,13 @@ async def test_update_autoscaling_policy_async_from_dict(): def test_update_autoscaling_policy_field_headers(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = autoscaling_policies.UpdateAutoscalingPolicyRequest() + request.policy.name = "policy.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -861,7 +866,6 @@ def test_update_autoscaling_policy_field_headers(): type(client.transport.update_autoscaling_policy), "__call__" ) as call: call.return_value = autoscaling_policies.AutoscalingPolicy() - client.update_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. @@ -877,12 +881,13 @@ def test_update_autoscaling_policy_field_headers(): @pytest.mark.asyncio async def test_update_autoscaling_policy_field_headers_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = autoscaling_policies.UpdateAutoscalingPolicyRequest() + request.policy.name = "policy.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -892,7 +897,6 @@ async def test_update_autoscaling_policy_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( autoscaling_policies.AutoscalingPolicy() ) - await client.update_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. 
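The field-headers tests above and below verify request routing: whichever resource name is set on the request must be mirrored into the x-goog-request-params metadata entry so the backend can route the call. A condensed sketch of that assertion, assuming the dataproc_v1beta2 client and the mock package are installed:

    import mock
    from google.auth import credentials as ga_credentials
    from google.cloud.dataproc_v1beta2.services.autoscaling_policy_service import (
        AutoscalingPolicyServiceClient,
    )
    from google.cloud.dataproc_v1beta2.types import autoscaling_policies

    client = AutoscalingPolicyServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    request = autoscaling_policies.UpdateAutoscalingPolicyRequest()
    request.policy.name = "policy.name/value"

    with mock.patch.object(
        type(client.transport.update_autoscaling_policy), "__call__"
    ) as call:
        call.return_value = autoscaling_policies.AutoscalingPolicy()
        client.update_autoscaling_policy(request)

        # The resource name set on the request is mirrored into the
        # routing header sent alongside the call.
        _, _, kw = call.mock_calls[0]
        assert (
            "x-goog-request-params",
            "policy.name=policy.name/value",
        ) in kw["metadata"]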
@@ -907,7 +911,7 @@ async def test_update_autoscaling_policy_field_headers_async(): def test_update_autoscaling_policy_flattened(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -916,7 +920,6 @@ def test_update_autoscaling_policy_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = autoscaling_policies.AutoscalingPolicy() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_autoscaling_policy( @@ -927,13 +930,12 @@ def test_update_autoscaling_policy_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].policy == autoscaling_policies.AutoscalingPolicy(id="id_value") def test_update_autoscaling_policy_flattened_error(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -948,7 +950,7 @@ def test_update_autoscaling_policy_flattened_error(): @pytest.mark.asyncio async def test_update_autoscaling_policy_flattened_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -971,14 +973,13 @@ async def test_update_autoscaling_policy_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].policy == autoscaling_policies.AutoscalingPolicy(id="id_value") @pytest.mark.asyncio async def test_update_autoscaling_policy_flattened_error_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -995,7 +996,7 @@ def test_get_autoscaling_policy( request_type=autoscaling_policies.GetAutoscalingPolicyRequest, ): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1012,25 +1013,20 @@ def test_get_autoscaling_policy( name="name_value", basic_algorithm=autoscaling_policies.BasicAutoscalingAlgorithm( yarn_config=autoscaling_policies.BasicYarnAutoscalingConfig( - graceful_decommission_timeout=duration.Duration(seconds=751) + graceful_decommission_timeout=duration_pb2.Duration(seconds=751) ) ), ) - response = client.get_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.GetAutoscalingPolicyRequest() # Establish that the response is the type that we expect. - assert isinstance(response, autoscaling_policies.AutoscalingPolicy) - assert response.id == "id_value" - assert response.name == "name_value" @@ -1042,7 +1038,7 @@ def test_get_autoscaling_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1052,7 +1048,6 @@ def test_get_autoscaling_policy_empty_call(): client.get_autoscaling_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.GetAutoscalingPolicyRequest() @@ -1062,7 +1057,7 @@ async def test_get_autoscaling_policy_async( request_type=autoscaling_policies.GetAutoscalingPolicyRequest, ): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1077,20 +1072,16 @@ async def test_get_autoscaling_policy_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( autoscaling_policies.AutoscalingPolicy(id="id_value", name="name_value",) ) - response = await client.get_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.GetAutoscalingPolicyRequest() # Establish that the response is the type that we expect. assert isinstance(response, autoscaling_policies.AutoscalingPolicy) - assert response.id == "id_value" - assert response.name == "name_value" @@ -1101,12 +1092,13 @@ async def test_get_autoscaling_policy_async_from_dict(): def test_get_autoscaling_policy_field_headers(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = autoscaling_policies.GetAutoscalingPolicyRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1114,7 +1106,6 @@ def test_get_autoscaling_policy_field_headers(): type(client.transport.get_autoscaling_policy), "__call__" ) as call: call.return_value = autoscaling_policies.AutoscalingPolicy() - client.get_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. @@ -1130,12 +1121,13 @@ def test_get_autoscaling_policy_field_headers(): @pytest.mark.asyncio async def test_get_autoscaling_policy_field_headers_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = autoscaling_policies.GetAutoscalingPolicyRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1145,7 +1137,6 @@ async def test_get_autoscaling_policy_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( autoscaling_policies.AutoscalingPolicy() ) - await client.get_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. 
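On the async side, the same stubbing trick needs one extra ingredient: the mocked stub must return something awaitable, which is what grpc_helpers_async.FakeUnaryUnaryCall provides. A self-contained sketch, driven by a plain asyncio.run here instead of the pytest.mark.asyncio harness the suite uses:

    import asyncio
    import mock
    from google.api_core import grpc_helpers_async
    from google.auth import credentials as ga_credentials
    from google.cloud.dataproc_v1beta2.services.autoscaling_policy_service import (
        AutoscalingPolicyServiceAsyncClient,
    )
    from google.cloud.dataproc_v1beta2.types import autoscaling_policies

    async def main():
        client = AutoscalingPolicyServiceAsyncClient(
            credentials=ga_credentials.AnonymousCredentials(),
        )
        with mock.patch.object(
            type(client.transport.get_autoscaling_policy), "__call__"
        ) as call:
            # FakeUnaryUnaryCall makes the mocked stub awaitable, like a
            # real gRPC aio call, so the async client can await it.
            call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
                autoscaling_policies.AutoscalingPolicy(name="name_value")
            )
            response = await client.get_autoscaling_policy(name="name_value")
            assert response.name == "name_value"

    asyncio.run(main())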
@@ -1160,7 +1151,7 @@ async def test_get_autoscaling_policy_field_headers_async(): def test_get_autoscaling_policy_flattened(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1169,7 +1160,6 @@ def test_get_autoscaling_policy_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = autoscaling_policies.AutoscalingPolicy() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_autoscaling_policy(name="name_value",) @@ -1178,13 +1168,12 @@ def test_get_autoscaling_policy_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_autoscaling_policy_flattened_error(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1198,7 +1187,7 @@ def test_get_autoscaling_policy_flattened_error(): @pytest.mark.asyncio async def test_get_autoscaling_policy_flattened_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1219,14 +1208,13 @@ async def test_get_autoscaling_policy_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_autoscaling_policy_flattened_error_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1242,7 +1230,7 @@ def test_list_autoscaling_policies( request_type=autoscaling_policies.ListAutoscalingPoliciesRequest, ): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1257,19 +1245,15 @@ def test_list_autoscaling_policies( call.return_value = autoscaling_policies.ListAutoscalingPoliciesResponse( next_page_token="next_page_token_value", ) - response = client.list_autoscaling_policies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.ListAutoscalingPoliciesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAutoscalingPoliciesPager) - assert response.next_page_token == "next_page_token_value" @@ -1281,7 +1265,7 @@ def test_list_autoscaling_policies_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
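The flattened-call tests in the surrounding hunks rely on a client-side contract: convenience keyword arguments (parent, name, and so on) are shorthand for building the request, so supplying both a request object and flattened fields is rejected before any RPC is attempted. A short sketch of that guard:

    import pytest
    from google.auth import credentials as ga_credentials
    from google.cloud.dataproc_v1beta2.services.autoscaling_policy_service import (
        AutoscalingPolicyServiceClient,
    )
    from google.cloud.dataproc_v1beta2.types import autoscaling_policies

    client = AutoscalingPolicyServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # A request object and flattened arguments are mutually exclusive;
    # the client raises before any network traffic happens.
    with pytest.raises(ValueError):
        client.get_autoscaling_policy(
            autoscaling_policies.GetAutoscalingPolicyRequest(), name="name_value",
        )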
@@ -1291,7 +1275,6 @@ def test_list_autoscaling_policies_empty_call(): client.list_autoscaling_policies() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.ListAutoscalingPoliciesRequest() @@ -1301,7 +1284,7 @@ async def test_list_autoscaling_policies_async( request_type=autoscaling_policies.ListAutoscalingPoliciesRequest, ): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1318,18 +1301,15 @@ async def test_list_autoscaling_policies_async( next_page_token="next_page_token_value", ) ) - response = await client.list_autoscaling_policies(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.ListAutoscalingPoliciesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListAutoscalingPoliciesAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -1340,12 +1320,13 @@ async def test_list_autoscaling_policies_async_from_dict(): def test_list_autoscaling_policies_field_headers(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = autoscaling_policies.ListAutoscalingPoliciesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1353,7 +1334,6 @@ def test_list_autoscaling_policies_field_headers(): type(client.transport.list_autoscaling_policies), "__call__" ) as call: call.return_value = autoscaling_policies.ListAutoscalingPoliciesResponse() - client.list_autoscaling_policies(request) # Establish that the underlying gRPC stub method was called. @@ -1369,12 +1349,13 @@ def test_list_autoscaling_policies_field_headers(): @pytest.mark.asyncio async def test_list_autoscaling_policies_field_headers_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = autoscaling_policies.ListAutoscalingPoliciesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1384,7 +1365,6 @@ async def test_list_autoscaling_policies_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( autoscaling_policies.ListAutoscalingPoliciesResponse() ) - await client.list_autoscaling_policies(request) # Establish that the underlying gRPC stub method was called. @@ -1399,7 +1379,7 @@ async def test_list_autoscaling_policies_field_headers_async(): def test_list_autoscaling_policies_flattened(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1408,7 +1388,6 @@ def test_list_autoscaling_policies_flattened(): ) as call: # Designate an appropriate return value for the call. 
call.return_value = autoscaling_policies.ListAutoscalingPoliciesResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_autoscaling_policies(parent="parent_value",) @@ -1417,13 +1396,12 @@ def test_list_autoscaling_policies_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_autoscaling_policies_flattened_error(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1438,7 +1416,7 @@ def test_list_autoscaling_policies_flattened_error(): @pytest.mark.asyncio async def test_list_autoscaling_policies_flattened_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1459,14 +1437,13 @@ async def test_list_autoscaling_policies_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_autoscaling_policies_flattened_error_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1480,7 +1457,7 @@ async def test_list_autoscaling_policies_flattened_error_async(): def test_list_autoscaling_policies_pager(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1530,7 +1507,7 @@ def test_list_autoscaling_policies_pager(): def test_list_autoscaling_policies_pages(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1570,7 +1547,7 @@ def test_list_autoscaling_policies_pages(): @pytest.mark.asyncio async def test_list_autoscaling_policies_async_pager(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1619,7 +1596,7 @@ async def test_list_autoscaling_policies_async_pager(): @pytest.mark.asyncio async def test_list_autoscaling_policies_async_pages(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1665,7 +1642,7 @@ def test_delete_autoscaling_policy( request_type=autoscaling_policies.DeleteAutoscalingPolicyRequest, ): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1678,13 +1655,11 @@ def test_delete_autoscaling_policy( ) as call: # Designate an appropriate return value for the call. 
call.return_value = None - response = client.delete_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.DeleteAutoscalingPolicyRequest() # Establish that the response is the type that we expect. @@ -1699,7 +1674,7 @@ def test_delete_autoscaling_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1709,7 +1684,6 @@ def test_delete_autoscaling_policy_empty_call(): client.delete_autoscaling_policy() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.DeleteAutoscalingPolicyRequest() @@ -1719,7 +1693,7 @@ async def test_delete_autoscaling_policy_async( request_type=autoscaling_policies.DeleteAutoscalingPolicyRequest, ): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1732,13 +1706,11 @@ async def test_delete_autoscaling_policy_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == autoscaling_policies.DeleteAutoscalingPolicyRequest() # Establish that the response is the type that we expect. @@ -1752,12 +1724,13 @@ async def test_delete_autoscaling_policy_async_from_dict(): def test_delete_autoscaling_policy_field_headers(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = autoscaling_policies.DeleteAutoscalingPolicyRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1765,7 +1738,6 @@ def test_delete_autoscaling_policy_field_headers(): type(client.transport.delete_autoscaling_policy), "__call__" ) as call: call.return_value = None - client.delete_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. @@ -1781,12 +1753,13 @@ def test_delete_autoscaling_policy_field_headers(): @pytest.mark.asyncio async def test_delete_autoscaling_policy_field_headers_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = autoscaling_policies.DeleteAutoscalingPolicyRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
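The pager tests above feed the mocked stub a sequence of responses to prove that the returned pager keeps issuing RPCs while next_page_token is non-empty. A compact sketch of the same flow, again assuming the dataproc_v1beta2 client and mock are installed:

    import mock
    from google.auth import credentials as ga_credentials
    from google.cloud.dataproc_v1beta2.services.autoscaling_policy_service import (
        AutoscalingPolicyServiceClient,
    )
    from google.cloud.dataproc_v1beta2.types import autoscaling_policies

    client = AutoscalingPolicyServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(
        type(client.transport.list_autoscaling_policies), "__call__"
    ) as call:
        # Two fake pages; the non-empty token on page one drives a second RPC.
        call.side_effect = (
            autoscaling_policies.ListAutoscalingPoliciesResponse(
                policies=[autoscaling_policies.AutoscalingPolicy()],
                next_page_token="abc",
            ),
            autoscaling_policies.ListAutoscalingPoliciesResponse(
                policies=[autoscaling_policies.AutoscalingPolicy()],
                next_page_token="",
            ),
        )
        results = list(client.list_autoscaling_policies(parent="parent_value"))
        # The pager transparently fetched both pages.
        assert len(results) == 2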
@@ -1794,7 +1767,6 @@ async def test_delete_autoscaling_policy_field_headers_async(): type(client.transport.delete_autoscaling_policy), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_autoscaling_policy(request) # Establish that the underlying gRPC stub method was called. @@ -1809,7 +1781,7 @@ async def test_delete_autoscaling_policy_field_headers_async(): def test_delete_autoscaling_policy_flattened(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1818,7 +1790,6 @@ def test_delete_autoscaling_policy_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_autoscaling_policy(name="name_value",) @@ -1827,13 +1798,12 @@ def test_delete_autoscaling_policy_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_autoscaling_policy_flattened_error(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1847,7 +1817,7 @@ def test_delete_autoscaling_policy_flattened_error(): @pytest.mark.asyncio async def test_delete_autoscaling_policy_flattened_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1866,14 +1836,13 @@ async def test_delete_autoscaling_policy_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_autoscaling_policy_flattened_error_async(): client = AutoscalingPolicyServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1887,16 +1856,16 @@ async def test_delete_autoscaling_policy_flattened_error_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.AutoscalingPolicyServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.AutoscalingPolicyServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = AutoscalingPolicyServiceClient( @@ -1906,7 +1875,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. 
transport = transports.AutoscalingPolicyServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = AutoscalingPolicyServiceClient( @@ -1917,7 +1886,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.AutoscalingPolicyServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = AutoscalingPolicyServiceClient(transport=transport) assert client.transport is transport @@ -1926,13 +1895,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.AutoscalingPolicyServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.AutoscalingPolicyServiceGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -1947,8 +1916,8 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() @@ -1956,7 +1925,7 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) assert isinstance( client.transport, transports.AutoscalingPolicyServiceGrpcTransport, @@ -1965,9 +1934,9 @@ def test_transport_grpc_default(): def test_autoscaling_policy_service_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.AutoscalingPolicyServiceTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -1979,7 +1948,7 @@ def test_autoscaling_policy_service_base_transport(): ) as Transport: Transport.return_value = None transport = transports.AutoscalingPolicyServiceTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -1996,15 +1965,37 @@ def test_autoscaling_policy_service_base_transport(): getattr(transport, method)(request=object()) +@requires_google_auth_gte_1_25_0 def test_autoscaling_policy_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.dataproc_v1beta2.services.autoscaling_policy_service.transports.AutoscalingPolicyServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AutoscalingPolicyServiceTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_autoscaling_policy_service_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.dataproc_v1beta2.services.autoscaling_policy_service.transports.AutoscalingPolicyServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.AutoscalingPolicyServiceTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -2017,19 +2008,33 @@ def test_autoscaling_policy_service_base_transport_with_credentials_file(): def test_autoscaling_policy_service_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.dataproc_v1beta2.services.autoscaling_policy_service.transports.AutoscalingPolicyServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.AutoscalingPolicyServiceTransport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_autoscaling_policy_service_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AutoscalingPolicyServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_autoscaling_policy_service_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) AutoscalingPolicyServiceClient() adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), @@ -2037,20 +2042,158 @@ def test_autoscaling_policy_service_auth_adc(): ) -def test_autoscaling_policy_service_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.AutoscalingPolicyServiceGrpcTransport, + transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_autoscaling_policy_service_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.AutoscalingPolicyServiceGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AutoscalingPolicyServiceGrpcTransport, + transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_autoscaling_policy_service_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AutoscalingPolicyServiceGrpcTransport, grpc_helpers), + (transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_autoscaling_policy_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="dataproc.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AutoscalingPolicyServiceGrpcTransport, grpc_helpers), + (transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_autoscaling_policy_service_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AutoscalingPolicyServiceGrpcTransport, grpc_helpers), + (transports.AutoscalingPolicyServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_autoscaling_policy_service_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [ @@ -2061,7 +2204,7 @@ def test_autoscaling_policy_service_transport_auth_adc(): def test_autoscaling_policy_service_grpc_transport_client_cert_source_for_mtls( transport_class, ): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -2100,7 +2243,7 @@ def test_autoscaling_policy_service_grpc_transport_client_cert_source_for_mtls( def test_autoscaling_policy_service_host_no_port(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com" ), @@ -2110,7 +2253,7 @@ def test_autoscaling_policy_service_host_no_port(): def test_autoscaling_policy_service_host_with_port(): client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com:8000" ), @@ -2166,9 +2309,9 @@ def test_autoscaling_policy_service_transport_channel_mtls_with_client_cert_sour mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -2246,7 +2389,6 @@ def test_autoscaling_policy_path(): project = "squid" location = "clam" autoscaling_policy = "whelk" - expected = "projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}".format( project=project, location=location, autoscaling_policy=autoscaling_policy, ) @@ -2271,7 +2413,6 @@ def test_parse_autoscaling_policy_path(): def test_common_billing_account_path(): billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2292,7 +2433,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "winkle" - expected = "folders/{folder}".format(folder=folder,) actual = AutoscalingPolicyServiceClient.common_folder_path(folder) assert expected == actual @@ -2311,7 +2451,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "scallop" - expected = "organizations/{organization}".format(organization=organization,) actual = AutoscalingPolicyServiceClient.common_organization_path(organization) assert expected == actual @@ -2330,7 +2469,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "squid" - 
expected = "projects/{project}".format(project=project,) actual = AutoscalingPolicyServiceClient.common_project_path(project) assert expected == actual @@ -2350,7 +2488,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "whelk" location = "octopus" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -2377,7 +2514,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.AutoscalingPolicyServiceTransport, "_prep_wrapped_messages" ) as prep: client = AutoscalingPolicyServiceClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2386,6 +2523,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = AutoscalingPolicyServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/dataproc_v1beta2/test_cluster_controller.py b/tests/unit/gapic/dataproc_v1beta2/test_cluster_controller.py index 91f7f5a7..2fb51151 100644 --- a/tests/unit/gapic/dataproc_v1beta2/test_cluster_controller.py +++ b/tests/unit/gapic/dataproc_v1beta2/test_cluster_controller.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,16 +23,16 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import future from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.dataproc_v1beta2.services.cluster_controller import ( ClusterControllerAsyncClient, @@ -43,14 +42,44 @@ ) from google.cloud.dataproc_v1beta2.services.cluster_controller import pagers from google.cloud.dataproc_v1beta2.services.cluster_controller import transports +from google.cloud.dataproc_v1beta2.services.cluster_controller.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.dataproc_v1beta2.services.cluster_controller.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.dataproc_v1beta2.types import clusters from google.cloud.dataproc_v1beta2.types import operations from google.cloud.dataproc_v1beta2.types import shared from google.longrunning import operations_pb2 from google.oauth2 import service_account -from google.protobuf import duration_pb2 as duration # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import 
field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -102,7 +131,7 @@ def test__get_default_mtls_endpoint(): "client_class", [ClusterControllerClient, ClusterControllerAsyncClient,] ) def test_cluster_controller_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -119,7 +148,7 @@ def test_cluster_controller_client_from_service_account_info(client_class): "client_class", [ClusterControllerClient, ClusterControllerAsyncClient,] ) def test_cluster_controller_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -172,7 +201,7 @@ def test_cluster_controller_client_client_options( ): # Check that if channel is provided we won't create a new one. with mock.patch.object(ClusterControllerClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -470,7 +499,7 @@ def test_create_cluster( transport: str = "grpc", request_type=clusters.CreateClusterRequest ): client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -481,13 +510,11 @@ def test_create_cluster( with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == clusters.CreateClusterRequest() # Establish that the response is the type that we expect. @@ -502,7 +529,7 @@ def test_create_cluster_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -510,7 +537,6 @@ def test_create_cluster_empty_call(): client.create_cluster() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == clusters.CreateClusterRequest() @@ -519,7 +545,7 @@ async def test_create_cluster_async( transport: str = "grpc_asyncio", request_type=clusters.CreateClusterRequest ): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -532,13 +558,11 @@ async def test_create_cluster_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.create_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == clusters.CreateClusterRequest() # Establish that the response is the type that we expect. @@ -551,13 +575,12 @@ async def test_create_cluster_async_from_dict(): def test_create_cluster_flattened(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials(),) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_cluster( @@ -570,16 +593,13 @@ def test_create_cluster_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster == clusters.Cluster(project_id="project_id_value") def test_create_cluster_flattened_error(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials(),) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -595,7 +615,7 @@ def test_create_cluster_flattened_error(): @pytest.mark.asyncio async def test_create_cluster_flattened_async(): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -618,18 +638,15 @@ async def test_create_cluster_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster == clusters.Cluster(project_id="project_id_value") @pytest.mark.asyncio async def test_create_cluster_flattened_error_async(): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -647,7 +664,7 @@ def test_update_cluster( transport: str = "grpc", request_type=clusters.UpdateClusterRequest ): client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -658,13 +675,11 @@ def test_update_cluster( with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == clusters.UpdateClusterRequest() # Establish that the response is the type that we expect. @@ -679,7 +694,7 @@ def test_update_cluster_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -687,7 +702,6 @@ def test_update_cluster_empty_call(): client.update_cluster() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == clusters.UpdateClusterRequest() @@ -696,7 +710,7 @@ async def test_update_cluster_async( transport: str = "grpc_asyncio", request_type=clusters.UpdateClusterRequest ): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -709,13 +723,11 @@ async def test_update_cluster_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == clusters.UpdateClusterRequest() # Establish that the response is the type that we expect. @@ -728,13 +740,12 @@ async def test_update_cluster_async_from_dict(): def test_update_cluster_flattened(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials(),) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_cluster( @@ -742,27 +753,22 @@ def test_update_cluster_flattened(): region="region_value", cluster_name="cluster_name_value", cluster=clusters.Cluster(project_id="project_id_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster_name == "cluster_name_value" - assert args[0].cluster == clusters.Cluster(project_id="project_id_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_cluster_flattened_error(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials(),) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -773,14 +779,14 @@ def test_update_cluster_flattened_error(): region="region_value", cluster_name="cluster_name_value", cluster=clusters.Cluster(project_id="project_id_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_cluster_flattened_async(): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -798,29 +804,24 @@ async def test_update_cluster_flattened_async(): region="region_value", cluster_name="cluster_name_value", cluster=clusters.Cluster(project_id="project_id_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster_name == "cluster_name_value" - assert args[0].cluster == clusters.Cluster(project_id="project_id_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_cluster_flattened_error_async(): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -832,7 +833,7 @@ async def test_update_cluster_flattened_error_async(): region="region_value", cluster_name="cluster_name_value", cluster=clusters.Cluster(project_id="project_id_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -840,7 +841,7 @@ def test_delete_cluster( transport: str = "grpc", request_type=clusters.DeleteClusterRequest ): client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -851,13 +852,11 @@ def test_delete_cluster( with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == clusters.DeleteClusterRequest() # Establish that the response is the type that we expect. @@ -872,7 +871,7 @@ def test_delete_cluster_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -880,7 +879,6 @@ def test_delete_cluster_empty_call(): client.delete_cluster() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == clusters.DeleteClusterRequest() @@ -889,7 +887,7 @@ async def test_delete_cluster_async( transport: str = "grpc_asyncio", request_type=clusters.DeleteClusterRequest ): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -902,13 +900,11 @@ async def test_delete_cluster_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == clusters.DeleteClusterRequest() # Establish that the response is the type that we expect. 
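The update_cluster hunks above also swap the aliased well-known-type import for the plain `*_pb2` module name: `field_mask.FieldMask` becomes `field_mask_pb2.FieldMask`. A minimal sketch of that convention, using an illustrative mask path rather than anything from this patch:

    from google.protobuf import field_mask_pb2

    # Build a FieldMask directly from the protobuf module; with no local alias,
    # the name can never shadow a `field_mask` variable inside a test body.
    update_mask = field_mask_pb2.FieldMask(paths=["config.worker_config.num_instances"])
    assert update_mask.paths == ["config.worker_config.num_instances"]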
@@ -921,13 +917,12 @@ async def test_delete_cluster_async_from_dict(): def test_delete_cluster_flattened(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials(),) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_cluster( @@ -940,16 +935,13 @@ def test_delete_cluster_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster_name == "cluster_name_value" def test_delete_cluster_flattened_error(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials(),) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -965,7 +957,7 @@ def test_delete_cluster_flattened_error(): @pytest.mark.asyncio async def test_delete_cluster_flattened_async(): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -988,18 +980,15 @@ async def test_delete_cluster_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster_name == "cluster_name_value" @pytest.mark.asyncio async def test_delete_cluster_flattened_error_async(): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1015,7 +1004,7 @@ async def test_delete_cluster_flattened_error_async(): def test_get_cluster(transport: str = "grpc", request_type=clusters.GetClusterRequest): client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1030,23 +1019,17 @@ def test_get_cluster(transport: str = "grpc", request_type=clusters.GetClusterRe cluster_name="cluster_name_value", cluster_uuid="cluster_uuid_value", ) - response = client.get_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == clusters.GetClusterRequest() # Establish that the response is the type that we expect. - assert isinstance(response, clusters.Cluster) - assert response.project_id == "project_id_value" - assert response.cluster_name == "cluster_name_value" - assert response.cluster_uuid == "cluster_uuid_value" @@ -1058,7 +1041,7 @@ def test_get_cluster_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1066,7 +1049,6 @@ def test_get_cluster_empty_call(): client.get_cluster() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == clusters.GetClusterRequest() @@ -1075,7 +1057,7 @@ async def test_get_cluster_async( transport: str = "grpc_asyncio", request_type=clusters.GetClusterRequest ): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1092,22 +1074,17 @@ async def test_get_cluster_async( cluster_uuid="cluster_uuid_value", ) ) - response = await client.get_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == clusters.GetClusterRequest() # Establish that the response is the type that we expect. assert isinstance(response, clusters.Cluster) - assert response.project_id == "project_id_value" - assert response.cluster_name == "cluster_name_value" - assert response.cluster_uuid == "cluster_uuid_value" @@ -1117,13 +1094,12 @@ async def test_get_cluster_async_from_dict(): def test_get_cluster_flattened(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials(),) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = clusters.Cluster() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_cluster( @@ -1136,16 +1112,13 @@ def test_get_cluster_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster_name == "cluster_name_value" def test_get_cluster_flattened_error(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials(),) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1161,7 +1134,7 @@ def test_get_cluster_flattened_error(): @pytest.mark.asyncio async def test_get_cluster_flattened_async(): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1182,18 +1155,15 @@ async def test_get_cluster_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster_name == "cluster_name_value" @pytest.mark.asyncio async def test_get_cluster_flattened_error_async(): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1211,7 +1181,7 @@ def test_list_clusters( transport: str = "grpc", request_type=clusters.ListClustersRequest ): client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1224,19 +1194,15 @@ def test_list_clusters( call.return_value = clusters.ListClustersResponse( next_page_token="next_page_token_value", ) - response = client.list_clusters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == clusters.ListClustersRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListClustersPager) - assert response.next_page_token == "next_page_token_value" @@ -1248,7 +1214,7 @@ def test_list_clusters_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1256,7 +1222,6 @@ def test_list_clusters_empty_call(): client.list_clusters() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == clusters.ListClustersRequest() @@ -1265,7 +1230,7 @@ async def test_list_clusters_async( transport: str = "grpc_asyncio", request_type=clusters.ListClustersRequest ): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1278,18 +1243,15 @@ async def test_list_clusters_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( clusters.ListClustersResponse(next_page_token="next_page_token_value",) ) - response = await client.list_clusters(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == clusters.ListClustersRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListClustersAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -1299,13 +1261,12 @@ async def test_list_clusters_async_from_dict(): def test_list_clusters_flattened(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials(),) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = clusters.ListClustersResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_clusters( @@ -1316,16 +1277,13 @@ def test_list_clusters_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].filter == "filter_value" def test_list_clusters_flattened_error(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials(),) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1341,7 +1299,7 @@ def test_list_clusters_flattened_error(): @pytest.mark.asyncio async def test_list_clusters_flattened_async(): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1362,18 +1320,15 @@ async def test_list_clusters_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].filter == "filter_value" @pytest.mark.asyncio async def test_list_clusters_flattened_error_async(): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1388,7 +1343,7 @@ async def test_list_clusters_flattened_error_async(): def test_list_clusters_pager(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials,) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: @@ -1419,7 +1374,7 @@ def test_list_clusters_pager(): def test_list_clusters_pages(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials,) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_clusters), "__call__") as call: @@ -1445,7 +1400,9 @@ def test_list_clusters_pages(): @pytest.mark.asyncio async def test_list_clusters_async_pager(): - client = ClusterControllerAsyncClient(credentials=credentials.AnonymousCredentials,) + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1478,7 +1435,9 @@ async def test_list_clusters_async_pager(): @pytest.mark.asyncio async def test_list_clusters_async_pages(): - client = ClusterControllerAsyncClient(credentials=credentials.AnonymousCredentials,) + client = ClusterControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1510,7 +1469,7 @@ def test_diagnose_cluster( transport: str = "grpc", request_type=clusters.DiagnoseClusterRequest ): client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1521,13 +1480,11 @@ def test_diagnose_cluster( with mock.patch.object(type(client.transport.diagnose_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.diagnose_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == clusters.DiagnoseClusterRequest() # Establish that the response is the type that we expect. @@ -1542,7 +1499,7 @@ def test_diagnose_cluster_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1550,7 +1507,6 @@ def test_diagnose_cluster_empty_call(): client.diagnose_cluster() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == clusters.DiagnoseClusterRequest() @@ -1559,7 +1515,7 @@ async def test_diagnose_cluster_async( transport: str = "grpc_asyncio", request_type=clusters.DiagnoseClusterRequest ): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1572,13 +1528,11 @@ async def test_diagnose_cluster_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.diagnose_cluster(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == clusters.DiagnoseClusterRequest() # Establish that the response is the type that we expect. @@ -1591,13 +1545,12 @@ async def test_diagnose_cluster_async_from_dict(): def test_diagnose_cluster_flattened(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials(),) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.diagnose_cluster), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.diagnose_cluster( @@ -1610,16 +1563,13 @@ def test_diagnose_cluster_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster_name == "cluster_name_value" def test_diagnose_cluster_flattened_error(): - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials(),) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1635,7 +1585,7 @@ def test_diagnose_cluster_flattened_error(): @pytest.mark.asyncio async def test_diagnose_cluster_flattened_async(): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1658,18 +1608,15 @@ async def test_diagnose_cluster_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].cluster_name == "cluster_name_value" @pytest.mark.asyncio async def test_diagnose_cluster_flattened_error_async(): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1686,16 +1633,16 @@ async def test_diagnose_cluster_flattened_error_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.ClusterControllerGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.ClusterControllerGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = ClusterControllerClient( @@ -1705,7 +1652,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.ClusterControllerGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = ClusterControllerClient( @@ -1716,7 +1663,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.ClusterControllerGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = ClusterControllerClient(transport=transport) assert client.transport is transport @@ -1725,13 +1672,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.ClusterControllerGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.ClusterControllerGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -1746,23 +1693,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = ClusterControllerClient(credentials=credentials.AnonymousCredentials(),) + client = ClusterControllerClient(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.ClusterControllerGrpcTransport,) def test_cluster_controller_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.ClusterControllerTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -1774,7 +1721,7 @@ def test_cluster_controller_base_transport(): ) as Transport: Transport.return_value = None transport = transports.ClusterControllerTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -1797,15 +1744,37 @@ def test_cluster_controller_base_transport(): transport.operations_client +@requires_google_auth_gte_1_25_0 def test_cluster_controller_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.dataproc_v1beta2.services.cluster_controller.transports.ClusterControllerTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ClusterControllerTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_cluster_controller_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.dataproc_v1beta2.services.cluster_controller.transports.ClusterControllerTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = 
transports.ClusterControllerTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -1818,19 +1787,33 @@ def test_cluster_controller_base_transport_with_credentials_file(): def test_cluster_controller_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.dataproc_v1beta2.services.cluster_controller.transports.ClusterControllerTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.ClusterControllerTransport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_cluster_controller_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ClusterControllerClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_cluster_controller_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) ClusterControllerClient() adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), @@ -1838,20 +1821,156 @@ def test_cluster_controller_auth_adc(): ) -def test_cluster_controller_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterControllerGrpcTransport, + transports.ClusterControllerGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_cluster_controller_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.ClusterControllerGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ClusterControllerGrpcTransport, + transports.ClusterControllerGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_cluster_controller_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ClusterControllerGrpcTransport, grpc_helpers), + (transports.ClusterControllerGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_cluster_controller_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="dataproc.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ClusterControllerGrpcTransport, grpc_helpers), + (transports.ClusterControllerGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_cluster_controller_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ClusterControllerGrpcTransport, grpc_helpers), + (transports.ClusterControllerGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_cluster_controller_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [ @@ -1860,7 +1979,7 @@ def test_cluster_controller_transport_auth_adc(): ], ) def test_cluster_controller_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -1899,7 +2018,7 @@ def test_cluster_controller_grpc_transport_client_cert_source_for_mtls(transport def test_cluster_controller_host_no_port(): client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com" ), @@ -1909,7 +2028,7 @@ def test_cluster_controller_host_no_port(): def test_cluster_controller_host_with_port(): client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com:8000" ), @@ -1965,9 +2084,9 @@ def test_cluster_controller_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -2043,7 +2162,7 @@ def test_cluster_controller_transport_channel_mtls_with_adc(transport_class): def test_cluster_controller_grpc_lro_client(): client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport @@ -2056,7 +2175,7 @@ def test_cluster_controller_grpc_lro_client(): def test_cluster_controller_grpc_lro_async_client(): client = ClusterControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport @@ -2071,7 +2190,6 @@ def test_cluster_path(): project = "squid" location = "clam" cluster = "whelk" - expected = "projects/{project}/locations/{location}/clusters/{cluster}".format( project=project, location=location, cluster=cluster, ) @@ -2094,7 +2212,6 @@ def test_parse_cluster_path(): def test_common_billing_account_path(): billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2115,7 +2232,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "winkle" - 
expected = "folders/{folder}".format(folder=folder,) actual = ClusterControllerClient.common_folder_path(folder) assert expected == actual @@ -2134,7 +2250,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "scallop" - expected = "organizations/{organization}".format(organization=organization,) actual = ClusterControllerClient.common_organization_path(organization) assert expected == actual @@ -2153,7 +2268,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "squid" - expected = "projects/{project}".format(project=project,) actual = ClusterControllerClient.common_project_path(project) assert expected == actual @@ -2173,7 +2287,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "whelk" location = "octopus" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -2200,7 +2313,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.ClusterControllerTransport, "_prep_wrapped_messages" ) as prep: client = ClusterControllerClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2209,6 +2322,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = ClusterControllerClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/dataproc_v1beta2/test_job_controller.py b/tests/unit/gapic/dataproc_v1beta2/test_job_controller.py index fa894d54..99bf6334 100644 --- a/tests/unit/gapic/dataproc_v1beta2/test_job_controller.py +++ b/tests/unit/gapic/dataproc_v1beta2/test_job_controller.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,16 +23,16 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import future from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.dataproc_v1beta2.services.job_controller import ( JobControllerAsyncClient, @@ -41,11 +40,41 @@ from google.cloud.dataproc_v1beta2.services.job_controller import JobControllerClient from google.cloud.dataproc_v1beta2.services.job_controller import pagers from google.cloud.dataproc_v1beta2.services.job_controller import transports +from google.cloud.dataproc_v1beta2.services.job_controller.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.dataproc_v1beta2.services.job_controller.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.dataproc_v1beta2.types import jobs from google.longrunning import operations_pb2 from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -96,7 +125,7 @@ def test__get_default_mtls_endpoint(): "client_class", [JobControllerClient, JobControllerAsyncClient,] ) def test_job_controller_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -113,7 +142,7 @@ def test_job_controller_client_from_service_account_info(client_class): "client_class", [JobControllerClient, JobControllerAsyncClient,] ) def test_job_controller_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -166,7 +195,7 @@ def test_job_controller_client_client_options( ): # Check that if channel is provided we won't create a new one. with mock.patch.object(JobControllerClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -452,7 +481,7 @@ def test_job_controller_client_client_options_from_dict(): def test_submit_job(transport: str = "grpc", request_type=jobs.SubmitJobRequest): client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -470,27 +499,19 @@ def test_submit_job(transport: str = "grpc", request_type=jobs.SubmitJobRequest) done=True, hadoop_job=jobs.HadoopJob(main_jar_file_uri="main_jar_file_uri_value"), ) - response = client.submit_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == jobs.SubmitJobRequest() # Establish that the response is the type that we expect. - assert isinstance(response, jobs.Job) - assert response.submitted_by == "submitted_by_value" - assert response.driver_output_resource_uri == "driver_output_resource_uri_value" - assert response.driver_control_files_uri == "driver_control_files_uri_value" - assert response.job_uuid == "job_uuid_value" - assert response.done is True @@ -502,7 +523,7 @@ def test_submit_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -510,7 +531,6 @@ def test_submit_job_empty_call(): client.submit_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == jobs.SubmitJobRequest() @@ -519,7 +539,7 @@ async def test_submit_job_async( transport: str = "grpc_asyncio", request_type=jobs.SubmitJobRequest ): client = JobControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -538,26 +558,19 @@ async def test_submit_job_async( done=True, ) ) - response = await client.submit_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == jobs.SubmitJobRequest() # Establish that the response is the type that we expect. assert isinstance(response, jobs.Job) - assert response.submitted_by == "submitted_by_value" - assert response.driver_output_resource_uri == "driver_output_resource_uri_value" - assert response.driver_control_files_uri == "driver_control_files_uri_value" - assert response.job_uuid == "job_uuid_value" - assert response.done is True @@ -567,13 +580,12 @@ async def test_submit_job_async_from_dict(): def test_submit_job_flattened(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.submit_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = jobs.Job() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.submit_job( @@ -586,18 +598,15 @@ def test_submit_job_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job == jobs.Job( reference=jobs.JobReference(project_id="project_id_value") ) def test_submit_job_flattened_error(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -612,7 +621,9 @@ def test_submit_job_flattened_error(): @pytest.mark.asyncio async def test_submit_job_flattened_async(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.submit_job), "__call__") as call: @@ -632,11 +643,8 @@ async def test_submit_job_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job == jobs.Job( reference=jobs.JobReference(project_id="project_id_value") ) @@ -644,7 +652,9 @@ async def test_submit_job_flattened_async(): @pytest.mark.asyncio async def test_submit_job_flattened_error_async(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -661,7 +671,7 @@ def test_submit_job_as_operation( transport: str = "grpc", request_type=jobs.SubmitJobRequest ): client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -674,13 +684,11 @@ def test_submit_job_as_operation( ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.submit_job_as_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == jobs.SubmitJobRequest() # Establish that the response is the type that we expect. @@ -695,7 +703,7 @@ def test_submit_job_as_operation_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -705,7 +713,6 @@ def test_submit_job_as_operation_empty_call(): client.submit_job_as_operation() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == jobs.SubmitJobRequest() @@ -714,7 +721,7 @@ async def test_submit_job_as_operation_async( transport: str = "grpc_asyncio", request_type=jobs.SubmitJobRequest ): client = JobControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -729,13 +736,11 @@ async def test_submit_job_as_operation_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.submit_job_as_operation(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == jobs.SubmitJobRequest() # Establish that the response is the type that we expect. @@ -748,7 +753,7 @@ async def test_submit_job_as_operation_async_from_dict(): def test_submit_job_as_operation_flattened(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -756,7 +761,6 @@ def test_submit_job_as_operation_flattened(): ) as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.submit_job_as_operation( @@ -769,18 +773,15 @@ def test_submit_job_as_operation_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job == jobs.Job( reference=jobs.JobReference(project_id="project_id_value") ) def test_submit_job_as_operation_flattened_error(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -795,7 +796,9 @@ def test_submit_job_as_operation_flattened_error(): @pytest.mark.asyncio async def test_submit_job_as_operation_flattened_async(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -819,11 +822,8 @@ async def test_submit_job_as_operation_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job == jobs.Job( reference=jobs.JobReference(project_id="project_id_value") ) @@ -831,7 +831,9 @@ async def test_submit_job_as_operation_flattened_async(): @pytest.mark.asyncio async def test_submit_job_as_operation_flattened_error_async(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -846,7 +848,7 @@ async def test_submit_job_as_operation_flattened_error_async(): def test_get_job(transport: str = "grpc", request_type=jobs.GetJobRequest): client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -864,27 +866,19 @@ def test_get_job(transport: str = "grpc", request_type=jobs.GetJobRequest): done=True, hadoop_job=jobs.HadoopJob(main_jar_file_uri="main_jar_file_uri_value"), ) - response = client.get_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == jobs.GetJobRequest() # Establish that the response is the type that we expect. - assert isinstance(response, jobs.Job) - assert response.submitted_by == "submitted_by_value" - assert response.driver_output_resource_uri == "driver_output_resource_uri_value" - assert response.driver_control_files_uri == "driver_control_files_uri_value" - assert response.job_uuid == "job_uuid_value" - assert response.done is True @@ -896,7 +890,7 @@ def test_get_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -904,7 +898,6 @@ def test_get_job_empty_call(): client.get_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == jobs.GetJobRequest() @@ -913,7 +906,7 @@ async def test_get_job_async( transport: str = "grpc_asyncio", request_type=jobs.GetJobRequest ): client = JobControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -932,26 +925,19 @@ async def test_get_job_async( done=True, ) ) - response = await client.get_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == jobs.GetJobRequest() # Establish that the response is the type that we expect. assert isinstance(response, jobs.Job) - assert response.submitted_by == "submitted_by_value" - assert response.driver_output_resource_uri == "driver_output_resource_uri_value" - assert response.driver_control_files_uri == "driver_control_files_uri_value" - assert response.job_uuid == "job_uuid_value" - assert response.done is True @@ -961,13 +947,12 @@ async def test_get_job_async_from_dict(): def test_get_job_flattened(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = jobs.Job() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_job( @@ -978,16 +963,13 @@ def test_get_job_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job_id == "job_id_value" def test_get_job_flattened_error(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1002,7 +984,9 @@ def test_get_job_flattened_error(): @pytest.mark.asyncio async def test_get_job_flattened_async(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_job), "__call__") as call: @@ -1020,17 +1004,16 @@ async def test_get_job_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job_id == "job_id_value" @pytest.mark.asyncio async def test_get_job_flattened_error_async(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1045,7 +1028,7 @@ async def test_get_job_flattened_error_async(): def test_list_jobs(transport: str = "grpc", request_type=jobs.ListJobsRequest): client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1058,19 +1041,15 @@ def test_list_jobs(transport: str = "grpc", request_type=jobs.ListJobsRequest): call.return_value = jobs.ListJobsResponse( next_page_token="next_page_token_value", ) - response = client.list_jobs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == jobs.ListJobsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobsPager) - assert response.next_page_token == "next_page_token_value" @@ -1082,7 +1061,7 @@ def test_list_jobs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1090,7 +1069,6 @@ def test_list_jobs_empty_call(): client.list_jobs() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == jobs.ListJobsRequest() @@ -1099,7 +1077,7 @@ async def test_list_jobs_async( transport: str = "grpc_asyncio", request_type=jobs.ListJobsRequest ): client = JobControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1112,18 +1090,15 @@ async def test_list_jobs_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( jobs.ListJobsResponse(next_page_token="next_page_token_value",) ) - response = await client.list_jobs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == jobs.ListJobsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListJobsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -1133,13 +1108,12 @@ async def test_list_jobs_async_from_dict(): def test_list_jobs_flattened(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = jobs.ListJobsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_jobs( @@ -1150,16 +1124,13 @@ def test_list_jobs_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].filter == "filter_value" def test_list_jobs_flattened_error(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1174,7 +1145,9 @@ def test_list_jobs_flattened_error(): @pytest.mark.asyncio async def test_list_jobs_flattened_async(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: @@ -1194,17 +1167,16 @@ async def test_list_jobs_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].filter == "filter_value" @pytest.mark.asyncio async def test_list_jobs_flattened_error_async(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1218,7 +1190,7 @@ async def test_list_jobs_flattened_error_async(): def test_list_jobs_pager(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials,) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: @@ -1244,7 +1216,7 @@ def test_list_jobs_pager(): def test_list_jobs_pages(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials,) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: @@ -1265,7 +1237,7 @@ def test_list_jobs_pages(): @pytest.mark.asyncio async def test_list_jobs_async_pager(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials,) + client = JobControllerAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1293,7 +1265,7 @@ async def test_list_jobs_async_pager(): @pytest.mark.asyncio async def test_list_jobs_async_pages(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials,) + client = JobControllerAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
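# --- Illustrative aside (not part of the generated patch) ---
# A hedged sketch of the pager pattern the four tests above exercise: the
# stub is primed with successive ListJobsResponse pages, and the pager
# returned by list_jobs() is expected to iterate across page boundaries
# transparently, stopping when next_page_token is empty. Page contents
# below are invented; only the types come from this diff.
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.dataproc_v1beta2.services.job_controller import (
    JobControllerClient,
)
from google.cloud.dataproc_v1beta2.types import jobs


def sketch_list_jobs_pager():
    client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials())
    with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
        # Each stub invocation returns the next fake page.
        call.side_effect = (
            jobs.ListJobsResponse(jobs=[jobs.Job(), jobs.Job()], next_page_token="abc"),
            jobs.ListJobsResponse(jobs=[jobs.Job()], next_page_token=""),
        )
        results = list(client.list_jobs(request={}))
        assert len(results) == 3  # the pager walked both pages
# --- end aside ---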
with mock.patch.object( @@ -1318,7 +1290,7 @@ async def test_list_jobs_async_pages(): def test_update_job(transport: str = "grpc", request_type=jobs.UpdateJobRequest): client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1336,27 +1308,19 @@ def test_update_job(transport: str = "grpc", request_type=jobs.UpdateJobRequest) done=True, hadoop_job=jobs.HadoopJob(main_jar_file_uri="main_jar_file_uri_value"), ) - response = client.update_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == jobs.UpdateJobRequest() # Establish that the response is the type that we expect. - assert isinstance(response, jobs.Job) - assert response.submitted_by == "submitted_by_value" - assert response.driver_output_resource_uri == "driver_output_resource_uri_value" - assert response.driver_control_files_uri == "driver_control_files_uri_value" - assert response.job_uuid == "job_uuid_value" - assert response.done is True @@ -1368,7 +1332,7 @@ def test_update_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1376,7 +1340,6 @@ def test_update_job_empty_call(): client.update_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == jobs.UpdateJobRequest() @@ -1385,7 +1348,7 @@ async def test_update_job_async( transport: str = "grpc_asyncio", request_type=jobs.UpdateJobRequest ): client = JobControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1404,26 +1367,19 @@ async def test_update_job_async( done=True, ) ) - response = await client.update_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == jobs.UpdateJobRequest() # Establish that the response is the type that we expect. assert isinstance(response, jobs.Job) - assert response.submitted_by == "submitted_by_value" - assert response.driver_output_resource_uri == "driver_output_resource_uri_value" - assert response.driver_control_files_uri == "driver_control_files_uri_value" - assert response.job_uuid == "job_uuid_value" - assert response.done is True @@ -1434,7 +1390,7 @@ async def test_update_job_async_from_dict(): def test_cancel_job(transport: str = "grpc", request_type=jobs.CancelJobRequest): client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1452,27 +1408,19 @@ def test_cancel_job(transport: str = "grpc", request_type=jobs.CancelJobRequest) done=True, hadoop_job=jobs.HadoopJob(main_jar_file_uri="main_jar_file_uri_value"), ) - response = client.cancel_job(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == jobs.CancelJobRequest() # Establish that the response is the type that we expect. - assert isinstance(response, jobs.Job) - assert response.submitted_by == "submitted_by_value" - assert response.driver_output_resource_uri == "driver_output_resource_uri_value" - assert response.driver_control_files_uri == "driver_control_files_uri_value" - assert response.job_uuid == "job_uuid_value" - assert response.done is True @@ -1484,7 +1432,7 @@ def test_cancel_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1492,7 +1440,6 @@ def test_cancel_job_empty_call(): client.cancel_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == jobs.CancelJobRequest() @@ -1501,7 +1448,7 @@ async def test_cancel_job_async( transport: str = "grpc_asyncio", request_type=jobs.CancelJobRequest ): client = JobControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1520,26 +1467,19 @@ async def test_cancel_job_async( done=True, ) ) - response = await client.cancel_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == jobs.CancelJobRequest() # Establish that the response is the type that we expect. assert isinstance(response, jobs.Job) - assert response.submitted_by == "submitted_by_value" - assert response.driver_output_resource_uri == "driver_output_resource_uri_value" - assert response.driver_control_files_uri == "driver_control_files_uri_value" - assert response.job_uuid == "job_uuid_value" - assert response.done is True @@ -1549,13 +1489,12 @@ async def test_cancel_job_async_from_dict(): def test_cancel_job_flattened(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = jobs.Job() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.cancel_job( @@ -1566,16 +1505,13 @@ def test_cancel_job_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job_id == "job_id_value" def test_cancel_job_flattened_error(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
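# --- Illustrative aside (not part of the generated patch) ---
# A minimal sketch of the "flattened arguments" contract checked above:
# keyword arguments passed to the method must be copied onto the request
# object that reaches the transport stub. The field values mirror the ones
# asserted throughout this diff.
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.dataproc_v1beta2.services.job_controller import (
    JobControllerClient,
)
from google.cloud.dataproc_v1beta2.types import jobs


def sketch_flattened_call():
    client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials())
    with mock.patch.object(type(client.transport.cancel_job), "__call__") as call:
        call.return_value = jobs.Job()
        client.cancel_job(
            project_id="project_id_value",
            region="region_value",
            job_id="job_id_value",
        )
        _, args, _ = call.mock_calls[0]
        # The flattened kwargs land on the CancelJobRequest the stub received.
        assert args[0].project_id == "project_id_value"
        assert args[0].region == "region_value"
        assert args[0].job_id == "job_id_value"
# --- end aside ---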
@@ -1590,7 +1526,9 @@ def test_cancel_job_flattened_error(): @pytest.mark.asyncio async def test_cancel_job_flattened_async(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_job), "__call__") as call: @@ -1608,17 +1546,16 @@ async def test_cancel_job_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job_id == "job_id_value" @pytest.mark.asyncio async def test_cancel_job_flattened_error_async(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1633,7 +1570,7 @@ async def test_cancel_job_flattened_error_async(): def test_delete_job(transport: str = "grpc", request_type=jobs.DeleteJobRequest): client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1644,13 +1581,11 @@ def test_delete_job(transport: str = "grpc", request_type=jobs.DeleteJobRequest) with mock.patch.object(type(client.transport.delete_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == jobs.DeleteJobRequest() # Establish that the response is the type that we expect. @@ -1665,7 +1600,7 @@ def test_delete_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1673,7 +1608,6 @@ def test_delete_job_empty_call(): client.delete_job() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == jobs.DeleteJobRequest() @@ -1682,7 +1616,7 @@ async def test_delete_job_async( transport: str = "grpc_asyncio", request_type=jobs.DeleteJobRequest ): client = JobControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1693,13 +1627,11 @@ async def test_delete_job_async( with mock.patch.object(type(client.transport.delete_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_job(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == jobs.DeleteJobRequest() # Establish that the response is the type that we expect. 
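# --- Illustrative aside (not part of the generated patch) ---
# A hedged sketch of how the async variants above fake a unary-unary RPC:
# google.api_core's grpc_helpers_async.FakeUnaryUnaryCall wraps a plain
# value so that awaiting the stub's return value yields it. The job_uuid
# value matches the one used elsewhere in this diff.
from google.api_core import grpc_helpers_async
from google.cloud.dataproc_v1beta2.types import jobs


async def sketch_fake_unary_call():
    fake = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job(job_uuid="job_uuid_value"))
    response = await fake  # resolves to the wrapped Job
    assert response.job_uuid == "job_uuid_value"
# --- end aside ---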
@@ -1712,13 +1644,12 @@ async def test_delete_job_async_from_dict(): def test_delete_job_flattened(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_job( @@ -1729,16 +1660,13 @@ def test_delete_job_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job_id == "job_id_value" def test_delete_job_flattened_error(): - client = JobControllerClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1753,7 +1681,9 @@ def test_delete_job_flattened_error(): @pytest.mark.asyncio async def test_delete_job_flattened_async(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_job), "__call__") as call: @@ -1771,17 +1701,16 @@ async def test_delete_job_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].region == "region_value" - assert args[0].job_id == "job_id_value" @pytest.mark.asyncio async def test_delete_job_flattened_error_async(): - client = JobControllerAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1797,16 +1726,16 @@ async def test_delete_job_flattened_error_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.JobControllerGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.JobControllerGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = JobControllerClient( @@ -1816,7 +1745,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. 
transport = transports.JobControllerGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = JobControllerClient( @@ -1827,7 +1756,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.JobControllerGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = JobControllerClient(transport=transport) assert client.transport is transport @@ -1836,13 +1765,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.JobControllerGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.JobControllerGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -1857,23 +1786,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = JobControllerClient(credentials=credentials.AnonymousCredentials(),) + client = JobControllerClient(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.JobControllerGrpcTransport,) def test_job_controller_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.JobControllerTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -1885,7 +1814,7 @@ def test_job_controller_base_transport(): ) as Transport: Transport.return_value = None transport = transports.JobControllerTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -1909,15 +1838,37 @@ def test_job_controller_base_transport(): transport.operations_client +@requires_google_auth_gte_1_25_0 def test_job_controller_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.dataproc_v1beta2.services.job_controller.transports.JobControllerTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.JobControllerTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + 
default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_job_controller_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.dataproc_v1beta2.services.job_controller.transports.JobControllerTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.JobControllerTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -1930,19 +1881,33 @@ def test_job_controller_base_transport_with_credentials_file(): def test_job_controller_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.dataproc_v1beta2.services.job_controller.transports.JobControllerTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.JobControllerTransport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_job_controller_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + JobControllerClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_job_controller_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) JobControllerClient() adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), @@ -1950,20 +1915,156 @@ def test_job_controller_auth_adc(): ) -def test_job_controller_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.JobControllerGrpcTransport, + transports.JobControllerGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_job_controller_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.JobControllerGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.JobControllerGrpcTransport, + transports.JobControllerGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_job_controller_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.JobControllerGrpcTransport, grpc_helpers), + (transports.JobControllerGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_job_controller_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="dataproc.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.JobControllerGrpcTransport, grpc_helpers), + (transports.JobControllerGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_job_controller_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
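# --- Illustrative aside (not part of the generated patch) ---
# A minimal sketch of the ADC pattern shared by the two version-gated
# variants above: google.auth.default is patched (autospec=True keeps the
# mocked signature honest, which is part of what this diff adds), and the
# call is then asserted with either `default_scopes=` (google-auth >= 1.25.0)
# or plain `scopes=` (older releases).
from unittest import mock

import google.auth
from google.auth import credentials as ga_credentials


def sketch_adc_assertion(transport_class):
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class(quota_project_id="octopus")
        adc.assert_called_once()
# --- end aside ---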
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.JobControllerGrpcTransport, grpc_helpers), + (transports.JobControllerGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_job_controller_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [ @@ -1972,7 +2073,7 @@ def test_job_controller_transport_auth_adc(): ], ) def test_job_controller_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. 
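# --- Illustrative aside (not part of the generated patch) ---
# A hedged summary of the create_channel contract the three parametrized
# tests above pin down (host, ssl_credentials, and grpc options omitted
# here for brevity): on google-api-core >= 1.26.0 the transport forwards
# the user scopes plus the service's default scopes and default host, while
# older api-core only receives the already-resolved `scopes=`.
expected_new_api_core_kwargs = dict(
    credentials_file=None,
    quota_project_id="octopus",
    default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
    scopes=["1", "2"],
    default_host="dataproc.googleapis.com",
)
expected_old_api_core_kwargs = dict(
    credentials_file=None,
    quota_project_id="octopus",
    scopes=("https://www.googleapis.com/auth/cloud-platform",),
)
# Either dict would be compared against the kwargs recorded by
# create_channel.assert_called_with(...) in the matching test branch.
# --- end aside ---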
with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -2011,7 +2112,7 @@ def test_job_controller_grpc_transport_client_cert_source_for_mtls(transport_cla def test_job_controller_host_no_port(): client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com" ), @@ -2021,7 +2122,7 @@ def test_job_controller_host_no_port(): def test_job_controller_host_with_port(): client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com:8000" ), @@ -2075,9 +2176,9 @@ def test_job_controller_transport_channel_mtls_with_client_cert_source(transport mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -2153,7 +2254,7 @@ def test_job_controller_transport_channel_mtls_with_adc(transport_class): def test_job_controller_grpc_lro_client(): client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport @@ -2166,7 +2267,7 @@ def test_job_controller_grpc_lro_client(): def test_job_controller_grpc_lro_async_client(): client = JobControllerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport @@ -2179,7 +2280,6 @@ def test_job_controller_grpc_lro_async_client(): def test_common_billing_account_path(): billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2200,7 +2300,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) actual = JobControllerClient.common_folder_path(folder) assert expected == actual @@ -2219,7 +2318,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) actual = JobControllerClient.common_organization_path(organization) assert expected == actual @@ -2238,7 +2336,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project,) actual = JobControllerClient.common_project_path(project) assert expected == actual @@ -2258,7 +2355,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "winkle" location = "nautilus" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -2285,7 +2381,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.JobControllerTransport, "_prep_wrapped_messages" ) as prep: client = JobControllerClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) 
prep.assert_called_once_with(client_info) @@ -2294,6 +2390,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = JobControllerClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/dataproc_v1beta2/test_workflow_template_service.py b/tests/unit/gapic/dataproc_v1beta2/test_workflow_template_service.py index 4d466aa5..b274cb28 100644 --- a/tests/unit/gapic/dataproc_v1beta2/test_workflow_template_service.py +++ b/tests/unit/gapic/dataproc_v1beta2/test_workflow_template_service.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,16 +23,16 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import future from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.dataproc_v1beta2.services.workflow_template_service import ( WorkflowTemplateServiceAsyncClient, @@ -43,14 +42,44 @@ ) from google.cloud.dataproc_v1beta2.services.workflow_template_service import pagers from google.cloud.dataproc_v1beta2.services.workflow_template_service import transports +from google.cloud.dataproc_v1beta2.services.workflow_template_service.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.dataproc_v1beta2.services.workflow_template_service.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.dataproc_v1beta2.types import clusters from google.cloud.dataproc_v1beta2.types import jobs from google.cloud.dataproc_v1beta2.types import shared from google.cloud.dataproc_v1beta2.types import workflow_templates from google.longrunning import operations_pb2 from google.oauth2 import service_account -from google.protobuf import duration_pb2 as duration # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
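# --- Illustrative aside (not part of the generated patch) ---
# The markers defined below gate tests on the installed library versions.
# A tiny, self-contained illustration of the comparison they rely on
# (version strings invented for the example):
import packaging.version

assert packaging.version.parse("1.24.0") < packaging.version.parse("1.25.0")
# Pre-releases sort before the corresponding final release:
assert packaging.version.parse("1.25.0rc1") < packaging.version.parse("1.25.0")
# --- end aside ---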
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -102,7 +131,7 @@ def test__get_default_mtls_endpoint(): "client_class", [WorkflowTemplateServiceClient, WorkflowTemplateServiceAsyncClient,] ) def test_workflow_template_service_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -119,7 +148,7 @@ def test_workflow_template_service_client_from_service_account_info(client_class "client_class", [WorkflowTemplateServiceClient, WorkflowTemplateServiceAsyncClient,] ) def test_workflow_template_service_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -176,7 +205,7 @@ def test_workflow_template_service_client_client_options( ): # Check that if channel is provided we won't create a new one. with mock.patch.object(WorkflowTemplateServiceClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -483,7 +512,7 @@ def test_create_workflow_template( request_type=workflow_templates.CreateWorkflowTemplateRequest, ): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -498,23 +527,17 @@ def test_create_workflow_template( call.return_value = workflow_templates.WorkflowTemplate( id="id_value", name="name_value", version=774, ) - response = client.create_workflow_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.CreateWorkflowTemplateRequest() # Establish that the response is the type that we expect. - assert isinstance(response, workflow_templates.WorkflowTemplate) - assert response.id == "id_value" - assert response.name == "name_value" - assert response.version == 774 @@ -526,7 +549,7 @@ def test_create_workflow_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
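# --- Illustrative aside (not part of the generated patch) ---
# A minimal sketch of the service-account factory tests above: the
# credentials factory is patched so no real key material is needed, and
# the classmethod is expected to return a client wired with those
# credentials. The info dict below is a dummy placeholder.
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.dataproc_v1beta2.services.workflow_template_service import (
    WorkflowTemplateServiceClient,
)
from google.oauth2 import service_account


def sketch_from_service_account_info():
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_info"
    ) as factory:
        factory.return_value = creds
        client = WorkflowTemplateServiceClient.from_service_account_info(
            {"dummy": "info"}
        )
        assert client.transport._credentials == creds
# --- end aside ---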
client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -536,7 +559,6 @@ def test_create_workflow_template_empty_call(): client.create_workflow_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.CreateWorkflowTemplateRequest() @@ -546,7 +568,7 @@ async def test_create_workflow_template_async( request_type=workflow_templates.CreateWorkflowTemplateRequest, ): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -563,22 +585,17 @@ async def test_create_workflow_template_async( id="id_value", name="name_value", version=774, ) ) - response = await client.create_workflow_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.CreateWorkflowTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, workflow_templates.WorkflowTemplate) - assert response.id == "id_value" - assert response.name == "name_value" - assert response.version == 774 @@ -589,12 +606,13 @@ async def test_create_workflow_template_async_from_dict(): def test_create_workflow_template_field_headers(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.CreateWorkflowTemplateRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -602,7 +620,6 @@ def test_create_workflow_template_field_headers(): type(client.transport.create_workflow_template), "__call__" ) as call: call.return_value = workflow_templates.WorkflowTemplate() - client.create_workflow_template(request) # Establish that the underlying gRPC stub method was called. @@ -618,12 +635,13 @@ def test_create_workflow_template_field_headers(): @pytest.mark.asyncio async def test_create_workflow_template_field_headers_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.CreateWorkflowTemplateRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -633,7 +651,6 @@ async def test_create_workflow_template_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( workflow_templates.WorkflowTemplate() ) - await client.create_workflow_template(request) # Establish that the underlying gRPC stub method was called. @@ -648,7 +665,7 @@ async def test_create_workflow_template_field_headers_async(): def test_create_workflow_template_flattened(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
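# --- Illustrative aside (not part of the generated patch) ---
# A minimal sketch of what the field-header tests above verify: request
# fields that form part of the resource name must be echoed into the
# `x-goog-request-params` gRPC metadata so the backend can route the call.
# The metadata inspection mirrors the assertions in this diff.
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.dataproc_v1beta2.services.workflow_template_service import (
    WorkflowTemplateServiceClient,
)
from google.cloud.dataproc_v1beta2.types import workflow_templates


def sketch_field_headers():
    client = WorkflowTemplateServiceClient(
        credentials=ga_credentials.AnonymousCredentials()
    )
    request = workflow_templates.CreateWorkflowTemplateRequest(parent="parent/value")
    with mock.patch.object(
        type(client.transport.create_workflow_template), "__call__"
    ) as call:
        call.return_value = workflow_templates.WorkflowTemplate()
        client.create_workflow_template(request)
        _, _, kw = call.mock_calls[0]
        # The routing header carries the parent field from the request.
        assert ("x-goog-request-params", "parent=parent/value") in kw["metadata"]
# --- end aside ---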
@@ -657,7 +674,6 @@ def test_create_workflow_template_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = workflow_templates.WorkflowTemplate() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_workflow_template( @@ -669,15 +685,13 @@ def test_create_workflow_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].template == workflow_templates.WorkflowTemplate(id="id_value") def test_create_workflow_template_flattened_error(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -693,7 +707,7 @@ def test_create_workflow_template_flattened_error(): @pytest.mark.asyncio async def test_create_workflow_template_flattened_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -717,16 +731,14 @@ async def test_create_workflow_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].template == workflow_templates.WorkflowTemplate(id="id_value") @pytest.mark.asyncio async def test_create_workflow_template_flattened_error_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -743,7 +755,7 @@ def test_get_workflow_template( transport: str = "grpc", request_type=workflow_templates.GetWorkflowTemplateRequest ): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -758,23 +770,17 @@ def test_get_workflow_template( call.return_value = workflow_templates.WorkflowTemplate( id="id_value", name="name_value", version=774, ) - response = client.get_workflow_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.GetWorkflowTemplateRequest() # Establish that the response is the type that we expect. - assert isinstance(response, workflow_templates.WorkflowTemplate) - assert response.id == "id_value" - assert response.name == "name_value" - assert response.version == 774 @@ -786,7 +792,7 @@ def test_get_workflow_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -796,7 +802,6 @@ def test_get_workflow_template_empty_call(): client.get_workflow_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.GetWorkflowTemplateRequest() @@ -806,7 +811,7 @@ async def test_get_workflow_template_async( request_type=workflow_templates.GetWorkflowTemplateRequest, ): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -823,22 +828,17 @@ async def test_get_workflow_template_async( id="id_value", name="name_value", version=774, ) ) - response = await client.get_workflow_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.GetWorkflowTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, workflow_templates.WorkflowTemplate) - assert response.id == "id_value" - assert response.name == "name_value" - assert response.version == 774 @@ -849,12 +849,13 @@ async def test_get_workflow_template_async_from_dict(): def test_get_workflow_template_field_headers(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.GetWorkflowTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -862,7 +863,6 @@ def test_get_workflow_template_field_headers(): type(client.transport.get_workflow_template), "__call__" ) as call: call.return_value = workflow_templates.WorkflowTemplate() - client.get_workflow_template(request) # Establish that the underlying gRPC stub method was called. @@ -878,12 +878,13 @@ def test_get_workflow_template_field_headers(): @pytest.mark.asyncio async def test_get_workflow_template_field_headers_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.GetWorkflowTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -893,7 +894,6 @@ async def test_get_workflow_template_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( workflow_templates.WorkflowTemplate() ) - await client.get_workflow_template(request) # Establish that the underlying gRPC stub method was called. @@ -908,7 +908,7 @@ async def test_get_workflow_template_field_headers_async(): def test_get_workflow_template_flattened(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -917,7 +917,6 @@ def test_get_workflow_template_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = workflow_templates.WorkflowTemplate() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.get_workflow_template(name="name_value",) @@ -926,13 +925,12 @@ def test_get_workflow_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_workflow_template_flattened_error(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -946,7 +944,7 @@ def test_get_workflow_template_flattened_error(): @pytest.mark.asyncio async def test_get_workflow_template_flattened_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -967,14 +965,13 @@ async def test_get_workflow_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_workflow_template_flattened_error_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -990,7 +987,7 @@ def test_instantiate_workflow_template( request_type=workflow_templates.InstantiateWorkflowTemplateRequest, ): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1003,13 +1000,11 @@ def test_instantiate_workflow_template( ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.instantiate_workflow_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.InstantiateWorkflowTemplateRequest() # Establish that the response is the type that we expect. @@ -1024,7 +1019,7 @@ def test_instantiate_workflow_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
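# --- Illustrative aside (not part of the generated patch) ---
# A hedged sketch of the long-running-operation pattern the
# instantiate_workflow_template tests below start from: the transport stub
# is primed with a raw operations_pb2.Operation, and the client is expected
# to wrap it in a google.api_core future rather than return the raw proto.
# The operation name matches the value used in this diff.
from unittest import mock

from google.api_core import future
from google.auth import credentials as ga_credentials
from google.cloud.dataproc_v1beta2.services.workflow_template_service import (
    WorkflowTemplateServiceClient,
)
from google.longrunning import operations_pb2


def sketch_lro_response():
    client = WorkflowTemplateServiceClient(
        credentials=ga_credentials.AnonymousCredentials()
    )
    with mock.patch.object(
        type(client.transport.instantiate_workflow_template), "__call__"
    ) as call:
        call.return_value = operations_pb2.Operation(name="operations/spam")
        response = client.instantiate_workflow_template(request={})
        assert isinstance(response, future.Future)  # an LRO future, not a raw proto
# --- end aside ---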
@@ -1034,7 +1029,6 @@ def test_instantiate_workflow_template_empty_call(): client.instantiate_workflow_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.InstantiateWorkflowTemplateRequest() @@ -1044,7 +1038,7 @@ async def test_instantiate_workflow_template_async( request_type=workflow_templates.InstantiateWorkflowTemplateRequest, ): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1059,13 +1053,11 @@ async def test_instantiate_workflow_template_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.instantiate_workflow_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.InstantiateWorkflowTemplateRequest() # Establish that the response is the type that we expect. @@ -1079,12 +1071,13 @@ async def test_instantiate_workflow_template_async_from_dict(): def test_instantiate_workflow_template_field_headers(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.InstantiateWorkflowTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1092,7 +1085,6 @@ def test_instantiate_workflow_template_field_headers(): type(client.transport.instantiate_workflow_template), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.instantiate_workflow_template(request) # Establish that the underlying gRPC stub method was called. @@ -1108,12 +1100,13 @@ def test_instantiate_workflow_template_field_headers(): @pytest.mark.asyncio async def test_instantiate_workflow_template_field_headers_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.InstantiateWorkflowTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1123,7 +1116,6 @@ async def test_instantiate_workflow_template_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.instantiate_workflow_template(request) # Establish that the underlying gRPC stub method was called. @@ -1138,7 +1130,7 @@ async def test_instantiate_workflow_template_field_headers_async(): def test_instantiate_workflow_template_flattened(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1147,7 +1139,6 @@ def test_instantiate_workflow_template_flattened(): ) as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.instantiate_workflow_template( @@ -1158,15 +1149,13 @@ def test_instantiate_workflow_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].parameters == {"key_value": "value_value"} def test_instantiate_workflow_template_flattened_error(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1182,7 +1171,7 @@ def test_instantiate_workflow_template_flattened_error(): @pytest.mark.asyncio async def test_instantiate_workflow_template_flattened_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1205,16 +1194,14 @@ async def test_instantiate_workflow_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].parameters == {"key_value": "value_value"} @pytest.mark.asyncio async def test_instantiate_workflow_template_flattened_error_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1232,7 +1219,7 @@ def test_instantiate_inline_workflow_template( request_type=workflow_templates.InstantiateInlineWorkflowTemplateRequest, ): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1245,13 +1232,11 @@ def test_instantiate_inline_workflow_template( ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.instantiate_inline_workflow_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.InstantiateInlineWorkflowTemplateRequest() # Establish that the response is the type that we expect. @@ -1266,7 +1251,7 @@ def test_instantiate_inline_workflow_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1276,7 +1261,6 @@ def test_instantiate_inline_workflow_template_empty_call(): client.instantiate_inline_workflow_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.InstantiateInlineWorkflowTemplateRequest() @@ -1286,7 +1270,7 @@ async def test_instantiate_inline_workflow_template_async( request_type=workflow_templates.InstantiateInlineWorkflowTemplateRequest, ): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1301,13 +1285,11 @@ async def test_instantiate_inline_workflow_template_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.instantiate_inline_workflow_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.InstantiateInlineWorkflowTemplateRequest() # Establish that the response is the type that we expect. @@ -1321,12 +1303,13 @@ async def test_instantiate_inline_workflow_template_async_from_dict(): def test_instantiate_inline_workflow_template_field_headers(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.InstantiateInlineWorkflowTemplateRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1334,7 +1317,6 @@ def test_instantiate_inline_workflow_template_field_headers(): type(client.transport.instantiate_inline_workflow_template), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.instantiate_inline_workflow_template(request) # Establish that the underlying gRPC stub method was called. @@ -1350,12 +1332,13 @@ def test_instantiate_inline_workflow_template_field_headers(): @pytest.mark.asyncio async def test_instantiate_inline_workflow_template_field_headers_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.InstantiateInlineWorkflowTemplateRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1365,7 +1348,6 @@ async def test_instantiate_inline_workflow_template_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.instantiate_inline_workflow_template(request) # Establish that the underlying gRPC stub method was called. @@ -1380,7 +1362,7 @@ async def test_instantiate_inline_workflow_template_field_headers_async(): def test_instantiate_inline_workflow_template_flattened(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
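The *_field_headers tests above and below all check the same contract: a resource field set on the request must be echoed into the x-goog-request-params gRPC metadata so the backend can route the call. A condensed sketch of that pattern (synchronous variant, values illustrative):

import mock

from google.auth import credentials as ga_credentials
from google.cloud.dataproc_v1beta2.services.workflow_template_service import (
    WorkflowTemplateServiceClient,
)
from google.cloud.dataproc_v1beta2.types import workflow_templates

client = WorkflowTemplateServiceClient(
    credentials=ga_credentials.AnonymousCredentials(),
)
request = workflow_templates.GetWorkflowTemplateRequest(name="name/value")

with mock.patch.object(
    type(client.transport.get_workflow_template), "__call__"
) as call:
    call.return_value = workflow_templates.WorkflowTemplate()
    client.get_workflow_template(request=request)
    # mock_calls entries are (name, args, kwargs) triples; the routing
    # header rides along in the metadata kwarg.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value") in kw["metadata"]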
@@ -1389,7 +1371,6 @@ def test_instantiate_inline_workflow_template_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.instantiate_inline_workflow_template( @@ -1401,15 +1382,13 @@ def test_instantiate_inline_workflow_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].template == workflow_templates.WorkflowTemplate(id="id_value") def test_instantiate_inline_workflow_template_flattened_error(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1425,7 +1404,7 @@ def test_instantiate_inline_workflow_template_flattened_error(): @pytest.mark.asyncio async def test_instantiate_inline_workflow_template_flattened_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1449,16 +1428,14 @@ async def test_instantiate_inline_workflow_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].template == workflow_templates.WorkflowTemplate(id="id_value") @pytest.mark.asyncio async def test_instantiate_inline_workflow_template_flattened_error_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1476,7 +1453,7 @@ def test_update_workflow_template( request_type=workflow_templates.UpdateWorkflowTemplateRequest, ): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1491,23 +1468,17 @@ def test_update_workflow_template( call.return_value = workflow_templates.WorkflowTemplate( id="id_value", name="name_value", version=774, ) - response = client.update_workflow_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.UpdateWorkflowTemplateRequest() # Establish that the response is the type that we expect. - assert isinstance(response, workflow_templates.WorkflowTemplate) - assert response.id == "id_value" - assert response.name == "name_value" - assert response.version == 774 @@ -1519,7 +1490,7 @@ def test_update_workflow_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1529,7 +1500,6 @@ def test_update_workflow_template_empty_call(): client.update_workflow_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.UpdateWorkflowTemplateRequest() @@ -1539,7 +1509,7 @@ async def test_update_workflow_template_async( request_type=workflow_templates.UpdateWorkflowTemplateRequest, ): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1556,22 +1526,17 @@ async def test_update_workflow_template_async( id="id_value", name="name_value", version=774, ) ) - response = await client.update_workflow_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.UpdateWorkflowTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, workflow_templates.WorkflowTemplate) - assert response.id == "id_value" - assert response.name == "name_value" - assert response.version == 774 @@ -1582,12 +1547,13 @@ async def test_update_workflow_template_async_from_dict(): def test_update_workflow_template_field_headers(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.UpdateWorkflowTemplateRequest() + request.template.name = "template.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1595,7 +1561,6 @@ def test_update_workflow_template_field_headers(): type(client.transport.update_workflow_template), "__call__" ) as call: call.return_value = workflow_templates.WorkflowTemplate() - client.update_workflow_template(request) # Establish that the underlying gRPC stub method was called. @@ -1613,12 +1578,13 @@ def test_update_workflow_template_field_headers(): @pytest.mark.asyncio async def test_update_workflow_template_field_headers_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.UpdateWorkflowTemplateRequest() + request.template.name = "template.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1628,7 +1594,6 @@ async def test_update_workflow_template_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( workflow_templates.WorkflowTemplate() ) - await client.update_workflow_template(request) # Establish that the underlying gRPC stub method was called. @@ -1645,7 +1610,7 @@ async def test_update_workflow_template_field_headers_async(): def test_update_workflow_template_flattened(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1654,7 +1619,6 @@ def test_update_workflow_template_flattened(): ) as call: # Designate an appropriate return value for the call. 
call.return_value = workflow_templates.WorkflowTemplate() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_workflow_template( @@ -1665,13 +1629,12 @@ def test_update_workflow_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].template == workflow_templates.WorkflowTemplate(id="id_value") def test_update_workflow_template_flattened_error(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1686,7 +1649,7 @@ def test_update_workflow_template_flattened_error(): @pytest.mark.asyncio async def test_update_workflow_template_flattened_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1709,14 +1672,13 @@ async def test_update_workflow_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].template == workflow_templates.WorkflowTemplate(id="id_value") @pytest.mark.asyncio async def test_update_workflow_template_flattened_error_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1733,7 +1695,7 @@ def test_list_workflow_templates( request_type=workflow_templates.ListWorkflowTemplatesRequest, ): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1748,19 +1710,15 @@ def test_list_workflow_templates( call.return_value = workflow_templates.ListWorkflowTemplatesResponse( next_page_token="next_page_token_value", ) - response = client.list_workflow_templates(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.ListWorkflowTemplatesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListWorkflowTemplatesPager) - assert response.next_page_token == "next_page_token_value" @@ -1772,7 +1730,7 @@ def test_list_workflow_templates_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1782,7 +1740,6 @@ def test_list_workflow_templates_empty_call(): client.list_workflow_templates() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.ListWorkflowTemplatesRequest() @@ -1792,7 +1749,7 @@ async def test_list_workflow_templates_async( request_type=workflow_templates.ListWorkflowTemplatesRequest, ): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1809,18 +1766,15 @@ async def test_list_workflow_templates_async( next_page_token="next_page_token_value", ) ) - response = await client.list_workflow_templates(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.ListWorkflowTemplatesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListWorkflowTemplatesAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -1831,12 +1785,13 @@ async def test_list_workflow_templates_async_from_dict(): def test_list_workflow_templates_field_headers(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.ListWorkflowTemplatesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1844,7 +1799,6 @@ def test_list_workflow_templates_field_headers(): type(client.transport.list_workflow_templates), "__call__" ) as call: call.return_value = workflow_templates.ListWorkflowTemplatesResponse() - client.list_workflow_templates(request) # Establish that the underlying gRPC stub method was called. @@ -1860,12 +1814,13 @@ def test_list_workflow_templates_field_headers(): @pytest.mark.asyncio async def test_list_workflow_templates_field_headers_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.ListWorkflowTemplatesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1875,7 +1830,6 @@ async def test_list_workflow_templates_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( workflow_templates.ListWorkflowTemplatesResponse() ) - await client.list_workflow_templates(request) # Establish that the underlying gRPC stub method was called. @@ -1890,7 +1844,7 @@ async def test_list_workflow_templates_field_headers_async(): def test_list_workflow_templates_flattened(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1899,7 +1853,6 @@ def test_list_workflow_templates_flattened(): ) as call: # Designate an appropriate return value for the call. 
call.return_value = workflow_templates.ListWorkflowTemplatesResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_workflow_templates(parent="parent_value",) @@ -1908,13 +1861,12 @@ def test_list_workflow_templates_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_workflow_templates_flattened_error(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1928,7 +1880,7 @@ def test_list_workflow_templates_flattened_error(): @pytest.mark.asyncio async def test_list_workflow_templates_flattened_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1949,14 +1901,13 @@ async def test_list_workflow_templates_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_workflow_templates_flattened_error_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1969,7 +1920,7 @@ async def test_list_workflow_templates_flattened_error_async(): def test_list_workflow_templates_pager(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2017,7 +1968,7 @@ def test_list_workflow_templates_pager(): def test_list_workflow_templates_pages(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2057,7 +2008,7 @@ def test_list_workflow_templates_pages(): @pytest.mark.asyncio async def test_list_workflow_templates_async_pager(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2106,7 +2057,7 @@ async def test_list_workflow_templates_async_pager(): @pytest.mark.asyncio async def test_list_workflow_templates_async_pages(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2152,7 +2103,7 @@ def test_delete_workflow_template( request_type=workflow_templates.DeleteWorkflowTemplateRequest, ): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2165,13 +2116,11 @@ def test_delete_workflow_template( ) as call: # Designate an appropriate return value for the call. 
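The pager tests above feed the mocked stub a sequence of responses and verify that iterating the returned pager transparently fetches every page. A condensed synchronous sketch:

import mock

from google.auth import credentials as ga_credentials
from google.cloud.dataproc_v1beta2.services.workflow_template_service import (
    WorkflowTemplateServiceClient,
)
from google.cloud.dataproc_v1beta2.types import workflow_templates

client = WorkflowTemplateServiceClient(
    credentials=ga_credentials.AnonymousCredentials(),
)
with mock.patch.object(
    type(client.transport.list_workflow_templates), "__call__"
) as call:
    # Two pages: the first carries a continuation token, the second
    # ends the listing.
    call.side_effect = (
        workflow_templates.ListWorkflowTemplatesResponse(
            templates=[
                workflow_templates.WorkflowTemplate(),
                workflow_templates.WorkflowTemplate(),
            ],
            next_page_token="abc",
        ),
        workflow_templates.ListWorkflowTemplatesResponse(
            templates=[workflow_templates.WorkflowTemplate()],
        ),
    )
    pager = client.list_workflow_templates(parent="parent_value")
    # The follow-up RPC happens lazily, during iteration.
    assert len(list(pager)) == 3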
call.return_value = None - response = client.delete_workflow_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.DeleteWorkflowTemplateRequest() # Establish that the response is the type that we expect. @@ -2186,7 +2135,7 @@ def test_delete_workflow_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2196,7 +2145,6 @@ def test_delete_workflow_template_empty_call(): client.delete_workflow_template() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.DeleteWorkflowTemplateRequest() @@ -2206,7 +2154,7 @@ async def test_delete_workflow_template_async( request_type=workflow_templates.DeleteWorkflowTemplateRequest, ): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2219,13 +2167,11 @@ async def test_delete_workflow_template_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_workflow_template(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == workflow_templates.DeleteWorkflowTemplateRequest() # Establish that the response is the type that we expect. @@ -2239,12 +2185,13 @@ async def test_delete_workflow_template_async_from_dict(): def test_delete_workflow_template_field_headers(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.DeleteWorkflowTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2252,7 +2199,6 @@ def test_delete_workflow_template_field_headers(): type(client.transport.delete_workflow_template), "__call__" ) as call: call.return_value = None - client.delete_workflow_template(request) # Establish that the underlying gRPC stub method was called. @@ -2268,12 +2214,13 @@ def test_delete_workflow_template_field_headers(): @pytest.mark.asyncio async def test_delete_workflow_template_field_headers_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = workflow_templates.DeleteWorkflowTemplateRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2281,7 +2228,6 @@ async def test_delete_workflow_template_field_headers_async(): type(client.transport.delete_workflow_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_workflow_template(request) # Establish that the underlying gRPC stub method was called. @@ -2296,7 +2242,7 @@ async def test_delete_workflow_template_field_headers_async(): def test_delete_workflow_template_flattened(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2305,7 +2251,6 @@ def test_delete_workflow_template_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_workflow_template(name="name_value",) @@ -2314,13 +2259,12 @@ def test_delete_workflow_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_workflow_template_flattened_error(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2334,7 +2278,7 @@ def test_delete_workflow_template_flattened_error(): @pytest.mark.asyncio async def test_delete_workflow_template_flattened_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2353,14 +2297,13 @@ async def test_delete_workflow_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_workflow_template_flattened_error_async(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2374,16 +2317,16 @@ async def test_delete_workflow_template_flattened_error_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.WorkflowTemplateServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.WorkflowTemplateServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = WorkflowTemplateServiceClient( @@ -2393,7 +2336,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. 
transport = transports.WorkflowTemplateServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = WorkflowTemplateServiceClient( @@ -2404,7 +2347,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.WorkflowTemplateServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = WorkflowTemplateServiceClient(transport=transport) assert client.transport is transport @@ -2413,13 +2356,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.WorkflowTemplateServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.WorkflowTemplateServiceGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -2434,8 +2377,8 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() @@ -2443,7 +2386,7 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
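test_credentials_transport_error above captures the design rule behind these constructors: a fully-built transport already embodies credentials, a credentials file, and scopes, so supplying any of those alongside transport= raises instead of silently merging. Condensed:

import pytest

from google.auth import credentials as ga_credentials
from google.cloud.dataproc_v1beta2.services.workflow_template_service import (
    WorkflowTemplateServiceClient,
    transports,
)

transport = transports.WorkflowTemplateServiceGrpcTransport(
    credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
    # Conflicting inputs are rejected up front rather than letting the
    # transport's settings quietly win.
    WorkflowTemplateServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )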
client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) assert isinstance( client.transport, transports.WorkflowTemplateServiceGrpcTransport, @@ -2452,9 +2395,9 @@ def test_transport_grpc_default(): def test_workflow_template_service_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.WorkflowTemplateServiceTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -2466,7 +2409,7 @@ def test_workflow_template_service_base_transport(): ) as Transport: Transport.return_value = None transport = transports.WorkflowTemplateServiceTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -2490,15 +2433,37 @@ def test_workflow_template_service_base_transport(): transport.operations_client +@requires_google_auth_gte_1_25_0 def test_workflow_template_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.dataproc_v1beta2.services.workflow_template_service.transports.WorkflowTemplateServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.WorkflowTemplateServiceTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_workflow_template_service_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.dataproc_v1beta2.services.workflow_template_service.transports.WorkflowTemplateServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.WorkflowTemplateServiceTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -2511,19 +2476,33 @@ def test_workflow_template_service_base_transport_with_credentials_file(): def test_workflow_template_service_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.dataproc_v1beta2.services.workflow_template_service.transports.WorkflowTemplateServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.WorkflowTemplateServiceTransport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_workflow_template_service_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + WorkflowTemplateServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_workflow_template_service_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) WorkflowTemplateServiceClient() adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), @@ -2531,20 +2510,158 @@ def test_workflow_template_service_auth_adc(): ) -def test_workflow_template_service_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.WorkflowTemplateServiceGrpcTransport, + transports.WorkflowTemplateServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_workflow_template_service_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.WorkflowTemplateServiceGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.WorkflowTemplateServiceGrpcTransport, + transports.WorkflowTemplateServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_workflow_template_service_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
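The requires_google_auth_gte_1_25_0 / requires_google_auth_lt_1_25_0 markers applied above are defined near the top of this test module, outside this excerpt; google-auth 1.25.0 added the default_scopes keyword, so the suite keeps one assertion per era. Roughly (an approximation; the exact spelling in the file may differ):

import google.auth
import packaging.version
import pytest

_GOOGLE_AUTH_VERSION = packaging.version.parse(
    getattr(google.auth, "__version__", "1.0.0")
)

requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
    _GOOGLE_AUTH_VERSION < packaging.version.parse("1.25.0"),
    reason="This test requires google-auth >= 1.25.0",
)
requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
    _GOOGLE_AUTH_VERSION >= packaging.version.parse("1.25.0"),
    reason="This test requires google-auth < 1.25.0",
)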
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.WorkflowTemplateServiceGrpcTransport, grpc_helpers), + (transports.WorkflowTemplateServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_workflow_template_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="dataproc.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.WorkflowTemplateServiceGrpcTransport, grpc_helpers), + (transports.WorkflowTemplateServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_workflow_template_service_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.WorkflowTemplateServiceGrpcTransport, grpc_helpers), + (transports.WorkflowTemplateServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_workflow_template_service_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
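The paired create_channel tests pin down the scope precedence that arrived with api-core 1.26.0: caller-supplied scopes always win, and the service's cloud-platform scope is only a fallback; newer api-core forwards both values (scopes= plus default_scopes=), while older api-core collapses them into a single scopes= argument with the same precedence. A toy model of that rule (resolve_scopes is an illustrative helper, not a real API):

CLOUD_PLATFORM = "https://www.googleapis.com/auth/cloud-platform"

def resolve_scopes(user_scopes, default_scopes):
    # Illustrative only: user scopes take precedence over defaults.
    return user_scopes if user_scopes is not None else default_scopes

assert resolve_scopes(["1", "2"], (CLOUD_PLATFORM,)) == ["1", "2"]
assert resolve_scopes(None, (CLOUD_PLATFORM,)) == (CLOUD_PLATFORM,)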
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataproc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [ @@ -2555,7 +2672,7 @@ def test_workflow_template_service_transport_auth_adc(): def test_workflow_template_service_grpc_transport_client_cert_source_for_mtls( transport_class, ): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -2594,7 +2711,7 @@ def test_workflow_template_service_grpc_transport_client_cert_source_for_mtls( def test_workflow_template_service_host_no_port(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com" ), @@ -2604,7 +2721,7 @@ def test_workflow_template_service_host_no_port(): def test_workflow_template_service_host_with_port(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataproc.googleapis.com:8000" ), @@ -2660,9 +2777,9 @@ def test_workflow_template_service_transport_channel_mtls_with_client_cert_sourc mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -2738,7 +2855,7 @@ def test_workflow_template_service_transport_channel_mtls_with_adc(transport_cla def test_workflow_template_service_grpc_lro_client(): client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport @@ -2751,7 +2868,7 @@ def test_workflow_template_service_grpc_lro_client(): def test_workflow_template_service_grpc_lro_async_client(): client = WorkflowTemplateServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport @@ -2766,7 +2883,6 @@ def test_cluster_path(): project = "squid" location = "clam" cluster = "whelk" - expected = "projects/{project}/locations/{location}/clusters/{cluster}".format( project=project, location=location, cluster=cluster, ) @@ -2791,7 +2907,6 @@ def test_workflow_template_path(): project = "cuttlefish" region = "mussel" workflow_template = "winkle" - expected = "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}".format( project=project, region=region, 
workflow_template=workflow_template, ) @@ -2816,7 +2931,6 @@ def test_parse_workflow_template_path(): def test_common_billing_account_path(): billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2837,7 +2951,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) actual = WorkflowTemplateServiceClient.common_folder_path(folder) assert expected == actual @@ -2856,7 +2969,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) actual = WorkflowTemplateServiceClient.common_organization_path(organization) assert expected == actual @@ -2875,7 +2987,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project,) actual = WorkflowTemplateServiceClient.common_project_path(project) assert expected == actual @@ -2895,7 +3006,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "winkle" location = "nautilus" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -2922,7 +3032,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.WorkflowTemplateServiceTransport, "_prep_wrapped_messages" ) as prep: client = WorkflowTemplateServiceClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2931,6 +3041,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = WorkflowTemplateServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info)
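The path-helper tests at the end exercise pure string templating on classmethods, so they round-trip without any credentials; for example (values illustrative):

from google.cloud.dataproc_v1beta2.services.workflow_template_service import (
    WorkflowTemplateServiceClient,
)

path = WorkflowTemplateServiceClient.workflow_template_path(
    "my-project", "us-central1", "my-template"
)
assert path == (
    "projects/my-project/regions/us-central1/workflowTemplates/my-template"
)
# parse_* inverts the template via a regex and returns the captured parts.
assert WorkflowTemplateServiceClient.parse_workflow_template_path(path) == {
    "project": "my-project",
    "region": "us-central1",
    "workflow_template": "my-template",
}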